branch_name     stringclasses   149 values
text            stringlengths   23 to 89.3M
directory_id    stringlengths   40 to 40
languages       listlengths     1 to 19
num_files       int64           1 to 11.8k
repo_language   stringclasses   38 values
repo_name       stringlengths   6 to 114
revision_id     stringlengths   40 to 40
snapshot_id     stringlengths   40 to 40
refs/heads/master
<file_sep>package com.example; import org.hibernate.Session; import org.hibernate.cfg.Configuration; import org.hibernate.SessionFactory; import java.time.LocalTime; import java.util.stream.Stream; public class TestJdbc { public static void main(String[] args) { System.out.println(LocalTime.now()); // System.out.println(Stream.of("green", "yellow", "blue").max(String::compareTo).filter(s -> s.endsWith("n")).orElse("yellow")); // String jdbcUrl = "jdbc:mysql://localhost:3306/hb_student_tracker?useSSL=FALSE "; // String user = "hbstudent"; // String pass = "<PASSWORD>"; // create session factory (note: Maven will break this unless the cfg file is in src/main/resources) SessionFactory factory = new Configuration() .configure("hibernate.cfg.xml") .addAnnotatedClass(Student.class) .buildSessionFactory(); // create session Session session = factory.getCurrentSession(); try { // create a student object System.out.println("Creating a new student object"); Student tempStudent = new Student("will", "jones", "<EMAIL>"); // start a transaction session.beginTransaction(); // save the student object System.out.println("Saving the student..."); session.save(tempStudent); // commit transaction session.getTransaction().commit(); System.out.println("Done"); } catch (Exception e) { e.printStackTrace(); } finally { factory.close(); } } }
b60ac33ba3a1ca621ca22feb3f4d7c67cffba55e
[ "Java" ]
1
Java
wilj0nes/HibernatePractice
6ca73d1abef2a2ff293206e75a9fcd755851c3d8
002bf8e1f6faa9efa93f27b224b1a042da8504c1
refs/heads/master
<file_sep>#!/bin/bash # Get options tname="cloud.json" tfile="$(cd "$(dirname "${tname}")"; pwd)/$(basename "${tname}")" kname="minecraft" # Determine if running on Windows (affects template file argument to aws cli) platform=`uname` if [[ ${platform} == *"MINGW"* ]]; then echo "Using Windows file path" tfile=`cygpath -w ${tfile} | sed -e 's/[\/]/\/\//g'` else echo "Using Linux file path" fi echo $tfile # Delete old keypair aws ec2 delete-key-pair --key-name ${kname} --region us-east-1 # Create and save EC2 key pair aws ec2 create-key-pair --key-name ${kname} --output text --region us-east-1 | sed 's/.*BEGIN.*-$/-----BEGIN RSA PRIVATE KEY-----/' | sed "s/.*${kname}$/-----END RSA PRIVATE KEY-----/" > ${kname}.pem chmod 600 ${kname}.pem # Load file content cdata0=`./encode.sh install/common0.sh` cdata1=`./encode.sh install/common1.sh` cdata2=`./encode.sh install/common2.sh` idata=`./encode.sh install/install.sh` # Build command cmd="aws cloudformation create-stack --stack-name minecraft --template-body \"file://${tfile}\" --capabilities CAPABILITY_IAM --region us-east-1 --parameters ParameterKey=KeyName,ParameterValue=${kname}" if [[ -n "$cdata0" ]]; then cmd="${cmd} ParameterKey=CommonData0,ParameterValue=\"${cdata0}\"" fi if [[ -n "$cdata1" ]]; then cmd="${cmd} ParameterKey=CommonData1,ParameterValue=\"${cdata1}\"" fi if [[ -n "$cdata2" ]]; then cmd="${cmd} ParameterKey=CommonData2,ParameterValue=\"${cdata2}\"" fi if [[ -n "$idata" ]]; then cmd="${cmd} ParameterKey=InstallData,ParameterValue=\"${idata}\"" fi # Execute cmd eval $cmd <file_sep>#!/bin/bash # Take in file as argument f=$1 # Check for actual content if [ -z ${f} ]; then exit 0 fi # Convert line endings to unix dos2unix $f # Convert file to base 64 base64 $f <file_sep># Installs init script into /etc/init.d function install_init() { install_log "Installing MSM init file" install -b "$dl_dir/msm.init" /etc/init.d/msm || install_error "Couldn't install init file" install_log "Making MSM accessible as the command 'msm'" ln -s /etc/init.d/msm /usr/local/bin/msm } # Enables init script in default runlevels function enable_init() { # OVERLOAD THIS install_error "No function defined for enable_init" } # Updates rest of MSM using init script updater function update_msm() { install_log "Asking MSM to update itself" /etc/init.d/msm update --noinput } # Updates rest of MSM using init script updater function setup_jargroup() { install_log "Setup default jar groups" /etc/init.d/msm jargroup create minecraft minecraft } function install_complete() { install_log "Done. Type 'msm help' to get started. Have fun!" } function install_msm() { config_installation add_minecraft_user update_system_packages install_dependencies create_msm_directories download_latest_files patch_latest_files install_config install_cron install_init enable_init update_msm setup_jargroup install_complete } <file_sep># aws-minecraft Let's do this. <file_sep># Fetches latest msm.conf, cron job, and init script function download_latest_files() { if [ ! 
-d "$dl_dir" ]; then install_error "Temporary download directory was not created properly" fi install_log "Downloading latest MSM configuration file" wget ${UPDATE_URL}/msm.conf \ -O "$dl_dir/msm.conf.orig" || install_error "Couldn't download configuration file" install_log "Downloading latest MSM cron file" wget ${UPDATE_URL}/cron/msm \ -O "$dl_dir/msm.cron.orig" || install_error "Couldn't download cron file" install_log "Downloading latest MSM version" wget ${UPDATE_URL}/init/msm \ -O "$dl_dir/msm.init.orig" || install_error "Couldn't download init file" } # Patches msm.conf and cron job to use specified username and directory function patch_latest_files() { # patch config file install_log "Patching MSM configuration file" sed 's#USERNAME="minecraft"#USERNAME="'$msm_user'"#g' "$dl_dir/msm.conf.orig" | \ sed "s#/opt/msm#$msm_dir#g" | \ sed "s#UPDATE_URL=.*\$#UPDATE_URL=\"$UPDATE_URL\"#" >"$dl_dir/msm.conf" # patch cron file install_log "Patching MSM cron file" awk '{ if ($0 !~ /^#/) sub(/minecraft/, "'$msm_user'"); print }' \ "$dl_dir/msm.cron.orig" >"$dl_dir/msm.cron" # patch init file install_log "Patching MSM init file" cp "$dl_dir/msm.init.orig" "$dl_dir/msm.init" } # Installs msm.conf into /etc function install_config() { install_log "Installing MSM configuration file" install -b -m0644 "$dl_dir/msm.conf" /etc/msm.conf if [ ! -e /etc/msm.conf ]; then install_error "Couldn't install configuration file" fi } # Installs msm.cron into /etc/cron.d function install_cron() { install_log "Installing MSM cron file" install -m0644 "$dl_dir/msm.cron" /etc/cron.d/msm || install_error "Couldn't install cron file" /etc/init.d/cron reload } <file_sep>msm_dir="/opt/msm" msm_user="minecraft" msm_user_system=false dl_dir="$(mktemp -d -t msm-XXX)" # Outputs an MSM INSTALL log line function install_log() { echo -e "\n\033[1;32mMSM INSTALL: $*\033[m" } # Outputs an MSM INSTALL ERROR log line and exits with status code 1 function install_error() { echo -e "\n\033[1;37;41mMSM INSTALL ERROR: $*\033[m" exit 1 } ### NOTE: all the below functions are overloadable for system-specific installs ### NOTE: some of the below functions MUST be overloaded due to system-specific installs function config_installation() { install_log "Configure installation" echo -n "Install directory [${msm_dir}]: " echo -n "New server user to be created [${msm_user}]: " echo -n "Add new user as system account? [${msm_user_system}]: " } # Runs a system software update to make sure we're using all fresh packages function update_system_packages() { # OVERLOAD THIS install_error "No function definition for update_system_packages" } # Installs additional dependencies (screen, rsync, zip, wget) using system package manager function install_dependencies() { # OVERLOAD THIS install_error "No function definition for install_dependencies" } # Verifies existence of or adds user for Minecraft server (default "minecraft") function add_minecraft_user() { install_log "Creating default user '${msm_user}'" if $msm_user_system; then useradd ${msm_user} --home "$msm_dir" else useradd ${msm_user} --system --home "$msm_dir" fi } # Verifies existence and permissions of msm server directory (default /opt/msm) function create_msm_directories() { install_log "Creating MSM directories" if [ ! -d "$msm_dir" ]; then mkdir -p "$msm_dir" || install_error "Couldn't create directory '$msm_dir'" fi chown -R $msm_user:$msm_user "$msm_dir" || install_error "Couldn't change file ownership for '$msm_dir'" }
08f7d99dc5989f5dd6052f021a5e18ace335ae83
[ "Markdown", "Shell" ]
6
Shell
bryantrobbins/aws-minecraft
2d00e3ace22fb9eef5418a01d76b9c8725fb4c05
25563d016d49ad2c4f5100f49b8906cfc74b8f1e
refs/heads/master
<file_sep>/** * Created by: MetaMagic * Date: 11/06/2018 * Organization: MetaMagic */ import {Component, OnInit} from '@angular/core'; import { HttpClient } from '@angular/common/http'; @Component({ selector: 'tempui', templateUrl: 'tempui.component.html' }) export class TempuiComponent implements OnInit{ tempuiModel:TempuiModel; constructor(private http: HttpClient) { this.tempuiModel=new TempuiModel(); } onBlur_TextInput1(eventData:any){} input_TextInput1(eventData:any){} focus_TextInput1(eventData:any){} onBlur_EmailId(eventData:any){} input_EmailId(eventData:any){} focus_EmailId(eventData:any){} onSelection_CheckBox(eventData:any){} onBlur_textInput2(eventData:any){} input_textInput2(eventData:any){} focus_textInput2(eventData:any){} onBlur_Password(eventData:any){} input_Password(eventData:any){} focus_Password(eventData:any){} onClick_SubmitBtn(eventData:any){} ngOnInit(){ } } export class TempuiModel{ TextInput1: string; EmailId: string; CheckBox: string; textInput2: string; Password: string; }
6b0b8c50e01286c43437fb1193cce589b188be9f
[ "TypeScript" ]
1
TypeScript
ashwiniagre1/aimtestone
70cfc8b98bc5488d7bead61abbf8ac78ade061b8
c147544e62d6870952620d1d0c38f93b4a4d6305
refs/heads/main
<file_sep>numpy==1.14.5 opencv-python==3.4.1.15 mysql-connector-python <file_sep>import cv2 import numpy as np from os import listdir from os.path import isfile,join import os data_path = "C:/faces2/" mypath = os.listdir(data_path) # build the training set: one label per person, i.e. per subdirectory of faces Traning_Data, Labels = [],[] for label, i in enumerate(mypath): sub_directory = data_path + i onlyfiles = [f for f in listdir(sub_directory) if isfile(join(sub_directory,f))] for f in onlyfiles: image_path = sub_directory + "/" + f images = cv2.imread(image_path,cv2.IMREAD_GRAYSCALE) Traning_Data.append(np.asarray(images,dtype=np.uint8)) Labels.append(label) Labels = np.asarray(Labels, dtype=np.int32) model = cv2.face.LBPHFaceRecognizer_create() model.train(np.asarray(Traning_Data),np.asarray(Labels)) model.write("C:/Users/'Dell/AppData/Local/Programs/Python/Python37-32/Lib/site-packages/cv2/data/trainer/trainer1.yml") print("Model Training Complete!!!")<file_sep># Face-Recognition-Technique-on-bank-locker-System This is a project on maintaining security in bank lockers (or any similar system) using face recognition, developed using OpenCV. ------------------------------------------------------------------------------------------------------------------------------------------ Download these files and create the virtual environment for the project. Then, in the activated virtual environment, install all the requirements as mentioned in the requirement.txt file. ------------------------------------------------------------------------------------------------------------------------------------------ First we create a dataset which contains a number of human faces, then we train on the dataset using the LBPHFaceRecognizer_create algorithm. It is two-factor authentication: in the first phase, when the face-match accuracy is above 85%, a one-time password is sent, and on verification the bank locker opens. 
------------------------------------------------------------------------------------------------------------------------------------------ <file_sep>import cv2 import os import mysql.connector import datetime face_classifier = cv2.CascadeClassifier("C:/Users/'Dell/AppData/Local/Programs/Python/Python37-32/Lib/site-packages/cv2/data/haarcascade_frontalface_default.xml") eye_classifier = cv2.CascadeClassifier("C:/Users/'Dell/AppData/Local/Programs/Python/Python37-32/Lib/site-packages/cv2/data/haarcascade_eye.xml") def face_extractor(img): gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) faces = face_classifier.detectMultiScale(gray,1.3,5) if len(faces) == 0: return None for(x,y,w,h) in faces: cropped_face =img[y:y+h, x:x+w] return cropped_face mydb = mysql.connector.connect( host="localhost", user="Avinash", passwd="<PASSWORD>", database ="banklocker" ) name1 = input("Enter The Customer Name") nameofnominess1 = input("Enter The Name of Nominee") address1 = input("Enter The Address") MOBILE12 = input("Enter The Mobile No.") NOMINESSMOBILE21 = input("Enter The Nominee's Mobile No.") account_no1 = input("Enter The Account No.") emailid1 = input("Enter The Email Id") aadharcard1 = input("Enter The Aadhar Card No.") yob = int(input("Enter The Year of Birth")) mob = int(input("Enter The Month of Birth")) dob = int(input("Enter The Date of Birth")) dateofbirth1 = datetime.date(yob,mob,dob) gender1 = input("Enter The Gender") mycursor = mydb.cursor() add_customer ="INSERT INTO customers (name, nameofnominess, address,account_no, MOBILE1, NOMINESSMOBILE2,gender ,dateofbirth,emailid,aadharcard) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)" data_customer=(name1,nameofnominess1,address1,account_no1,MOBILE12,NOMINESSMOBILE21,gender1,dateofbirth1,emailid1,aadharcard1) mycursor.execute(add_customer,data_customer) mydb.commit() face_id = input("Enter User Id") x = "C:/faces2/picture"+str(face_id) os.makedirs(x) count = 0 cap = cv2.VideoCapture(0) while True: ret, frame = cap.read() if face_extractor(frame) is not None: count=count+1 face =cv2.resize(face_extractor(frame),(200,200)) face =cv2.cvtColor(face,cv2.COLOR_BGR2GRAY) file_name_path = x+"/"+"user"+'.'+str(count)+'.jpg' cv2.imwrite(file_name_path,face) cv2.putText(face,str(count),(50,50),cv2.FONT_HERSHEY_COMPLEX,1,(0,255,0),2) cv2.imshow('Face Cropper',face) else: print("Face is not Found") pass if(cv2.waitKey(1)==13 or count==100): break cap.release() cv2.destroyAllWindows() def eye_extractor(img): gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) eyes = eye_classifier.detectMultiScale(gray,1.1,3) if len(eyes) == 0: return None for(x,y,w,h) in eyes: cropped_eye =img[y:y+h, x:x+w] return cropped_eye cap = cv2.VideoCapture(0) count =100 while True: ret,frame = cap.read() if eye_extractor(frame) is not None: count = count + 1 eye = cv2.resize(eye_extractor(frame), (200, 200)) eye = cv2.cvtColor(eye, cv2.COLOR_BGR2GRAY) file_name_path = x+"/"+"user."+ str(count) + '.jpg' cv2.imwrite(file_name_path, eye) cv2.putText(eye, str(count), (50, 50), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 255, 0), 2) cv2.imshow("Eye Cropper",eye) else: print("Eye is not Found") pass if (cv2.waitKey(1) == 13 or count == 200): break cap.release() cv2.destroyAllWindows() print("Collecting samples complete!!!") <file_sep>import cv2 from datetime import datetime import math,random import smtplib from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText import mysql.connector recognizer = cv2.face.LBPHFaceRecognizer_create() 
recognizer.read("C:/Users/'Dell/AppData/Local/Programs/Python/Python37-32/Lib/site-packages/cv2/data/trainer/trainer1.yml") face_classifier = cv2.CascadeClassifier("C:/Users/'Dell/AppData/Local/Programs/Python/Python37-32/Lib/site-packages/cv2/data/haarcascade_frontalface_default.xml") eye_classifier = cv2.CascadeClassifier("C:/Users/'Dell/AppData/Local/Programs/Python/Python37-32/Lib/site-packages/cv2/data/haarcascade_eye.xml") def face_detector(img, size = .5): gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) faces = face_classifier.detectMultiScale(gray,1.3,5) if len(faces) == 0: return img,[] for(x,y,w,h) in faces: cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,255),2) roi = img[y:y+h,x:x+w] roi = cv2.resize(roi, (200,200)) return img,roi def eye_detector(img, size = .5): gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) eyes = eye_classifier.detectMultiScale(gray,1.3,5) if len(eyes) == 0: return img,[] for(x,y,w,h) in eyes: cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,255),2) roi = img[y:y+h,x:x+w] roi = cv2.resize(roi, (200,200)) return img,roi def generateotp(): digits ='0123456789' otp ='' for i in range(4): otp += digits[math.floor(random.random()*10)] return otp i=0 aadharcard1 = int(input("Enter The Aadhar Card")) account = input("Enter The Account no") mydb = mysql.connector.connect( host="localhost", user="Avinash", passwd="<PASSWORD>", database ="banklocker" ) mycursor = mydb.cursor() sql =("SELECT aadharcard,lockerid,emailid FROM customers WHERE account_no=%s") mycursor.execute(sql,(account,)) myresult = mycursor.fetchone() if myresult is None: print("Please Enter Your Valid Account Number") else: i = str(myresult[1]) now = datetime.now() datetime_format =now.strftime('%y-%m-%d %H:%M:%S') login = 'login' if (aadharcard1 != int(myresult[0])): print("Please Enter Your Valid Aadhar Card Number") else: email = '<EMAIL>' password = '<PASSWORD>' senderemail = str(myresult[2]) mess = str(generateotp()) message = mess msg = MIMEMultipart() msg['From'] = email msg['To'] = senderemail msg['Subject'] = 'otp' msg.attach(MIMEText(message, 'plain')) s = smtplib.SMTP('smtp.gmail.com', 587) s.starttls() s.login(email, password) text = msg.as_string() s.sendmail(email, senderemail, text) s.quit() correctotp = input("Enter The Otp") if (correctotp != mess): print("Please Enter The Valid Otp") else: cap = cv2.VideoCapture(0) while True: ret,frame =cap.read() image,face= face_detector(frame) images,eye = eye_detector(frame) try: face = cv2.cvtColor(face,cv2.COLOR_BGR2GRAY) eye = cv2.cvtColor(eye,cv2.COLOR_BGR2GRAY) result1 = recognizer.predict(eye) result = recognizer.predict(face) if result[1] < 500 and result1[1]<500: confidence = int(100 * (1 - (result[1]) / 300)) confidence1 = int(100 * (1 - (result1[1]) / 300)) display_string = str(confidence)+"% Confidence it is User "+str(confidence1)+"%" cv2.putText(image,display_string,(100,120),cv2.FONT_HERSHEY_COMPLEX,1,(255,120,255),2) cv2.imshow("Face and Eye Cropper",image) if (confidence>75 and confidence1>70): cv2.putText(image, "Access Grant", (250, 450), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 255,0), 2) cv2.imshow("Face and Eye Cropper",image) if(i==0): add_customer = "INSERT INTO transcations (locker_id,transcation_datetime,transcationinout) VALUES(%s,%s,%s)" data_customer = (i, datetime_format, login) mycursor.execute(add_customer, data_customer) mydb.commit() i=1 else: cv2.putText(image, "Face and Eye is Not Match", (125, 450), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 0, 255), 2) cv2.putText(images, "Face and Eye is Not Match", (125,450 ), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 0, 255), 2) 
cv2.imshow("Face and Eye Cropper", image) except: cv2.putText(image, "Face and Eye Not Found", (125,450), cv2.FONT_HERSHEY_COMPLEX, 1, (255, 0, 0), 2) cv2.putText(images,"Face and Eye Not Found", (125, 450), cv2.FONT_HERSHEY_COMPLEX, 1, (255, 0, 0), 2) cv2.imshow("Face and Eye Cropper", image) pass if cv2.waitKey(1)==13: cap.release() cv2.destroyAllWindows() break if i==1: now = datetime.now() datetime_format = now.strftime('%y-%m-%d %H:%M:%S') logout = 'logout' add_customer = "INSERT INTO transcations (locker_id,transcation_datetime,transcationinout) VALUES(%s,%s,%s)" data_customer = (i, datetime_format,logout) mycursor.execute(add_customer, data_customer) mydb.commit() email = '<EMAIL>' password = '<PASSWORD>' senderemail = str(myresult[2]) message = 'Locker Access' msg = MIMEMultipart() msg['From'] = email msg['To'] = senderemail msg['Subject'] = 'Access Locker' msg.attach(MIMEText(message, 'plain')) s = smtplib.SMTP('smtp.gmail.com', 587) s.starttls() s.login(email, password) text = msg.as_string() s.sendmail(email, senderemail, text) s.quit() if(i==1): email = '<EMAIL>' password = '<PASSWORD>' senderemail = str(myresult[2]) message = str(myresult[1]) msg = MIMEMultipart() msg['From'] = email msg['To'] = senderemail msg['Subject'] = 'Information Access Locker Customer' msg.attach(MIMEText(message, 'plain')) s = smtplib.SMTP('smtp.gmail.com', 587) s.starttls() s.login(email, password) text = msg.as_string() s.sendmail(email, senderemail, text) s.quit()
cf956f97a9d4a3b9f0c474f28eb8a0dd37708621
[ "Markdown", "Python", "Text" ]
5
Text
avinashbabel/Face-Recognition-Technique-on-bank-locker-System
c40d41c09b157b8afb7e5cb4b18ae0ec4c16d44f
db53197a8b4dc12f9e47a3cf6aeadecf846fd26b
refs/heads/master
<repo_name>mthurlin/typescript-bazel-issue<file_sep>/utils.ts export function compileOutput(data: string): string { return data.toUpperCase() + "\n" }<file_sep>/README.md Combining ts_library / nodejs_binary / npm_package_bin does not work as expected I'm not sure if I'm doing something wrong, or if there is an issue in either `nodejs_binary` or `npm_package_bin`. I'm trying to run a TypeScript script as part of my build. So, what I want to do is to transpile (and typecheck) a couple of TypeScript files to JavaScript. Then, I want to run these files as a build script (providing some input files and accessing the resulting output files). In this repo, I have a minimal repro of the issue. The two typescript files form my build script (reading the text in the input file and outputting it in upper case into the output file). Running it through `nodejs_binary` works as expected: ```bazel run //:compile -- `pwd`/data.txt `pwd`/output.txt && cat output.txt``` But, running it programmatically via Bazel and `npm_package_bin` fails: `bazel build //:output` The files are being run as raw TypeScript, causing syntax errors in node. I have tried various combinations of entry files and filegroups. I can get it to work using a `filegroup` with `output_group="es5_sources"`, providing the filegroup label as `entry_point` to `nodejs_binary`, but that only works as long as there is only one file. (That's why I have `utils.ts` in this minimal repro) <file_sep>/mycompiler.ts import * as fs from "fs"; import {compileOutput} from "./utils"; const inputFilename: string = process.argv[2]; const outputFilename: string = process.argv[3]; if (outputFilename && inputFilename) { const data: string = fs.readFileSync(inputFilename, {encoding: "utf8"}); fs.writeFileSync(outputFilename, compileOutput(data), {encoding: "utf8"}); } else { console.log("Args:", process.argv); }
f6410753f5fdf6fb1b42b550adb3727973c01280
[ "Markdown", "TypeScript" ]
3
TypeScript
mthurlin/typescript-bazel-issue
2cc50efdffe02230da8c6891fbd36c595d634abb
00e1e82b793aa864496423daaf8a138544a2135d
refs/heads/master
<file_sep>asdfsdafsadf dasfsadf sdafasf sdafasdfasdf sdafasdf <file_sep>#include <stdio.h> /* returns 1 if (a, b, c) can form a right triangle (a Pythagorean triple) */ int check(int a, int b, int c) { /* integer arithmetic avoids floating-point equality pitfalls with pow() */ int d = a * a; int e = b * b; int f = c * c; if ((d + e == f) || (d + f == e) || (e + f == d)) { return 1; } return 0; } int print(int a, int b, int c) { printf("%d %d %d\n", a, b, c); return 0; } int main() { for (int a = 1; a < 101; a++) { for (int b = 1; b < 101; b++) { for (int c = 1; c < 101; c++) { if (check(a, b, c)) print(a, b, c); } } } return 0; }
56aa5a2a418fc9421d87e7f590d99ff37e9847f3
[ "Markdown", "C++" ]
2
Markdown
named1717/triangle
33420a7658b4b31d594695e6fc5102071402d4d8
803c42d997242f8aaf3572ff71caaf58e18aaa2f
refs/heads/master
<repo_name>orva/dotfiles<file_sep>/xdg_config/swaybar-data/kisubox.toml [[outputs]] type = "timestamp" format = "%a %Y-%m-%d - %H:%M:%S" accuracy = "seconds" <file_sep>/zsh_setup/exports.zsh bindkey -e export PATH=$PATH:$HOME/bin export PATH=$PATH:$HOME/.local/bin export PATH=$PATH:$HOME/.cargo/bin export PATH=$PATH:$HOME/.dotdata export PATH=$PATH:$HOME/.dotfiles/bin export BROWSER=firefox export TIMEWARRIORDB=$HOME/.dotdata/timewarrior if [[ -n "$DESKTOP_SESSION" && -z "$SSH_AUTH_SOCK" ]]; then if hash gnome-keyring-daemon 2> /dev/null; then eval "$(gnome-keyring-daemon --start)" export SSH_AUTH_SOCK fi fi if [[ -n $SSH_CONNECTION ]]; then export EDITOR='vim' else if hash nvim 2> /dev/null; then export EDITOR='nvim' else export EDITOR='vim' fi fi # Forcefully set DEBUGINFOD_URLS, for some reason these are not loaded # to zsh even though everything looks like they should.. if [[ -n $DEBUGINFOD_URLS ]]; then export DEBUGINFOD_URLS="https://debuginfod.archlinux.org" fi # Add dash of colors and syntax highlighting to man pages if hash bat 2> /dev/null; then export MANPAGER="sh -c 'col -bx | bat -l man -p'" fi <file_sep>/bin/toggle-keymap #!/bin/bash set -o nounset set -o errexit set -o pipefail current=$(setxkbmap -query | grep layout | awk '{print $2}') notify() { msg=$1 notify-send "$msg" \ --icon=preferences-desktop-keyboard \ --expire-time=100 \ --hint=string:x-canonical-private-synchronous:sleep } case "$current" in fi) setxkbmap -layout us -option ctrl:nocaps notify "us" ;; *) setxkbmap -layout fi -option ctrl:nocaps notify "fi" esac <file_sep>/zsh_setup/fzf.zsh if hash fzf 2> /dev/null; then if [[ -d "/usr/share/fzf/shell" ]]; then source /usr/share/fzf/shell/key-bindings.zsh elif [[ -d "/usr/share/fzf" ]]; then source /usr/share/fzf/key-bindings.zsh source /usr/share/fzf/completion.zsh elif [[ -d "$HOME/.fzf" ]]; then # shellcheck source=/dev/null source "$HOME/.fzf/shell/key-bindings.zsh" # shellcheck source=/dev/null source "$HOME/.fzf/shell/completion.zsh" fi fi <file_sep>/zsh_setup/nvm.zsh export NVM_DIR="$HOME/.nvm" if [[ ! -d "$NVM_DIR" ]] then echo "Installing nvm (git clone) to $NVM_DIR" git clone https://github.com/nvm-sh/nvm.git "$NVM_DIR" 2> /dev/null pushd "$NVM_DIR" > /dev/null || exit LATEST_RELEASE=$(git tag --sort=v:refname | tail -n 1) echo "Checking out $LATEST_RELEASE which SHOULD be latest release" git checkout "$LATEST_RELEASE" 2> /dev/null popd > /dev/null || exit echo "Done" fi [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm # [ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion <file_sep>/xdg_config/swaybar-data/papaya.toml [[outputs]] type = "battery" [[outputs]] type = "timestamp" format = "%a %Y-%m-%d - %H:%M" accuracy = "minutes" <file_sep>/bin/microphone-toggle.sh #!/bin/bash set -o errexit set -o nounset set -o pipefail SOURCES=$(pactl list short sources | cut -f 1) while read -r SOURCE do pactl set-source-mute "$SOURCE" toggle done <<< "$SOURCES" <file_sep>/zsh_setup/powerlevel10k.zsh P10K_DIR="$HOME/.p10k.git" if [[ ! -d "$P10K_DIR" ]]; then git clone --depth=1 https://github.com/romkatv/powerlevel10k.git "$P10K_DIR" fi # shellcheck source=/dev/null source "$P10K_DIR/powerlevel10k.zsh-theme" unset P10K_DIR # Enable Powerlevel10k instant prompt. Should stay close to the top of ~/.zshrc. # Initialization code that may require console input (password prompts, [y/n] # confirmations, etc.) must go above this block; everything else may go below. 
# if [[ -r "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh" ]]; then # source "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh" # fi # To customize prompt, run `p10k configure` or edit ~/.p10k.zsh. P10K_USR_CONF="$HOME/.p10k.zsh" [[ ! -f "$P10K_USR_CONF" ]] || source "$P10K_USR_CONF" unset P10K_USR_CONF ## History file configuration [ -z "$HISTFILE" ] && HISTFILE="$HOME/.zsh_history" HISTSIZE=50000 SAVEHIST=10000 ## History command configuration setopt extended_history # record timestamp of command in HISTFILE setopt hist_expire_dups_first # delete duplicates first when HISTFILE size exceeds HISTSIZE setopt hist_ignore_dups # ignore duplicated commands history list setopt hist_ignore_space # ignore commands that start with space setopt hist_verify # show command with history expansion to user before running it setopt inc_append_history # add commands to HISTFILE in order of execution setopt share_history # share command history data autoload -U compinit && compinit alias ls='ls --color=auto' <file_sep>/bin/volume.sh #!/bin/bash set -o errexit set -o nounset set -o pipefail CMD="${1-unset}" if [[ "$CMD" != "up" ]] && [[ "$CMD" != "down" ]] && [[ "$CMD" != "mute-toggle" ]]; then echo "usage: $0 [up|down|mute-toggle]" exit 1 fi SINKS=$(pactl list short sinks | egrep -vi "(hdmi|nuforce)" | cut -f 1) while read -r SINK do if [[ "$CMD" == "up" ]]; then pactl set-sink-mute "$SINK" 0 pactl set-sink-volume "$SINK" +5% elif [[ "$CMD" == "down" ]]; then pactl set-sink-mute "$SINK" 0 pactl set-sink-volume "$SINK" -5% elif [[ "$CMD" == "mute-toggle" ]]; then pactl set-sink-mute "$SINK" toggle fi done <<< "$SINKS" <file_sep>/bin/start-guile-server #!/usr/bin/env bash set -xe SOCKET=$(git rev-parse --show-toplevel)/.guile-repl.socket if test -f $SOCKET; then rm $SOCKET; fi guile --listen=$SOCKET rm $SOCKET
14c983c285b92af3f957da87ab367d14fea086a3
[ "TOML", "Shell" ]
10
TOML
orva/dotfiles
f9244f9f3a07ee6bf77fadb62f2aa491113eed72
7e7c9be67752502ed63d5ee010ed15fee6ca1b2c
refs/heads/main
<file_sep>package shape; import java.util.Random; import java.awt.Color; import java.awt.Graphics; import java.util.ArrayList; import java.util.List; public class ShapeFactory { private Random random; List<Shape> shapeList; private Random randomWidth; private Random randomCirc; private Random randomColor; private SortingClass sortShape; public ShapeFactory() { random = new Random(); shapeList = new ArrayList <Shape>(); randomWidth = new Random(); randomCirc = new Random(); randomColor = new Random(); sortShape = new SortingClass(); } public void createShapeList() { int shapeNum; int upperX = 23; int upperY = 23; int offSet = 55; for (int i = 0; i < 6; i++) { shapeNum = random.nextInt(3); if(shapeNum == 0) { Color myColor1 = new Color(randomColor.nextInt(200), randomColor.nextInt(200), randomColor.nextInt(200)); shapeList.add(new Rectangle(upperX, upperY, randomWidth.nextInt(30) + 20, randomWidth.nextInt(30) + 20, myColor1)); }else if(shapeNum == 1) { Color myColor2 = new Color(randomColor.nextInt(200), randomColor.nextInt(200), randomColor.nextInt(200)); shapeList.add(new Square(upperX, upperY, randomWidth.nextInt(30) + 20, myColor2)); }else{ Color myColor3 = new Color(randomColor.nextInt(200), randomColor.nextInt(200), randomColor.nextInt(200)); shapeList.add(new Circle(upperX, upperY, randomCirc.nextInt(15) + 15, myColor3)); } upperX += offSet; upperY += offSet; } } public List<Shape> getShapeList() { return shapeList; } public void emptyListHelper() { this.shapeList.clear(); } public void sortHelper() { List<Shape> tmpList = new ArrayList <Shape>(this.shapeList); emptyListHelper(); for (Shape shape: sortShape.sortingMethod(tmpList)) { this.shapeList.add(shape); } } } <file_sep>package shape; import java.awt.Color; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.ArrayList; import java.util.List; import javax.swing.JButton; import javax.swing.JFrame; import javax.swing.JPanel; public class MyPanel extends JPanel implements ActionListener { JButton loadButton; JButton sortButton; ShapeFactory shapes = new ShapeFactory(); public void paintComponent(Graphics g) { super.paintComponent(g); Graphics2D g2d = (Graphics2D) g; for (Shape shape: shapes.getShapeList()) { g2d.setColor(shape.getColor()); shape.drawShape(g2d); } } MyPanel(){ JFrame frame = new JFrame("Shape Sorting"); loadButton = new JButton("Load"); loadButton.setBounds(200, 10, 100, 25); loadButton.addActionListener((ActionListener) this); frame.add(loadButton); sortButton = new JButton("Sort"); sortButton.setBounds(300, 10, 100, 25); sortButton.addActionListener((ActionListener) this); frame.add(sortButton); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.add(this); frame.setSize(600, 600); frame.setLocationRelativeTo(null); frame.setVisible(true); } @Override public void actionPerformed(ActionEvent o) { if(o.getSource() == loadButton){ shapes.emptyListHelper(); shapes.createShapeList(); repaint(); } if(o.getSource() == sortButton){ shapes.sortHelper(); repaint(); } } /** * This is the main method of the class. 
* @param args */ public static void main(String[] args) { new MyPanel(); } } <file_sep>package shape; import java.awt.Color; import java.awt.Graphics; public class Square extends Rectangle{ public Square (int upperX, int upperY, int width, Color shapeColor) { super (upperX, upperY, width, width, shapeColor); } } <file_sep>package shape; import java.awt.Color; import java.awt.Graphics; public class Rectangle implements Shape, Comparable <Shape> { private int upperX; private int upperY; private int width; private int height; private Color shapeColor; public Rectangle (int upperX, int upperY, int width, int height, Color shapeColor) { this.upperX = upperX; this.upperY = upperY; this.width = width; this.height = height; this.shapeColor = shapeColor; } @Override public int getX(){ return this.upperX; } @Override public int getY(){ return this.upperY; } public int getWidth() { return width; } public int getHeight() { return height; } @Override public void setXandY(int x, int y){ this.upperX = x; this.upperY = y; } @Override public Color getColor() { return shapeColor; } @Override public int area() { return width*height; } @Override public void drawShape(Graphics form) { form.fillRect(upperX, upperY, width, height); } @Override public int compareTo(Shape o) { if(this.area() > o.area()){ return 1; } else if(this.area() < o.area()){ return -1; } return 0; } }
3fe9256d5cd9b8480ac0197f44d3c09a87119cf6
[ "Java" ]
4
Java
eJustIS-1/Visual-Sorting
8d0fc1881fd380c9e0618b6b5eb159814b9b0632
1586902de29d2f16273acede84c2c57ab573d4cc
refs/heads/master
<repo_name>mvlchain/mvltoken<file_sep>/test/mvltoken.js const chai = require('chai'); const chaiAsPromised = require('chai-as-promised'); const assert = chai.assert; const {proceedTime, snapshot, revert} = require('./util'); const moment = require('moment'); chai.use(chaiAsPromised); chai.should(); const MVLToken = artifacts.require("MVLToken"); const SafeMathMock = artifacts.require('SafeMathMock'); contract('MVLToken', (accounts) => { let token, safeMath; before(async () => { token = await MVLToken.deployed(); safeMath = await SafeMathMock.new(); await token.enableTransfer(true); }); describe('basic info', () => { it("should put 3e28 MVL to the first account", async () => { const balance = await token.balanceOf.call(accounts[0]); balance.equals(web3.toBigNumber(3e28)).should.be.true; }); it("check name and symbol", async () => { const name = await token.name(); name.should.equal('Mass Vehicle Ledger Token'); const sym = await token.symbol(); sym.should.equal('MVL'); }); it("should have total supply of 3e28", async () => { const sppl = await token.totalSupply(); sppl.equals(web3.toBigNumber(3e28)).should.be.true; }); }); describe("transfer", () => { it("should transfer token", async () => { const acc1 = accounts[0]; const acc2 = accounts[1]; const amount = 1e18; let acc1balance, acc2balance; let acc1balanceAfter, acc2balanceAfter; const token = await MVLToken.deployed(); acc1balance = await token.balanceOf.call(acc1); acc2balance = await token.balanceOf.call(acc2); await token.transfer(acc2, amount, {from: acc1}); acc1balanceAfter = await token.balanceOf.call(acc1); acc2balanceAfter = await token.balanceOf.call(acc2); acc2balanceAfter.equals(acc2balance.add(amount)).should.be.true; acc1balanceAfter.equals(acc1balance.minus(amount)).should.be.true; }); it("transfer is possible only for valid destination", async () => { await token.transfer(0x0, 10).should.be.rejectedWith(Error); await token.transfer(token.address, 10).should.be.rejectedWith(Error); const owner = await token.owner.call(); console.log('owner=', owner); await token.transfer(owner, 10).should.be.rejectedWith(Error); }); it("cannot transfer 0 amount", async () => { await token.transfer(accounts[1], 0, {from: accounts[0]}).should.be.rejectedWith(Error); }); it("cannot transfer from 0x0", async () => { await token.transfer(accounts[1], 1, {from: 0}).should.be.rejectedWith(Error); }); it("shouldn't transfer token if not enough balance", async () => { let token; const acc1 = accounts[2]; const acc2 = accounts[3]; const amount = 1e18; token = await MVLToken.deployed(); await token.transfer(acc2, amount, {from: acc1}).should.be.rejectedWith(Error); }); it("can't transfer before official release date if not owner", async () => { const DISTRIBUTE_DATE = 1527768000; // 2018-05-31T21:00:00+09:00 const aaa = await token.DISTRIBUTE_DATE(); console.log('distribute', aaa.toString()); const b = await web3.eth.getBlock(web3.eth.blockNumber); console.log('b', b.timestamp); const from = accounts[1]; const to = accounts[2]; const amount = web3.toWei(1, 'ether'); // 1 MVL (1e18) const aa = await token.balanceOf(from); console.log(aa.minus(amount).toString()); await token.transfer(to, amount, {from}).should.be.rejectedWith(Error); }); it("can transfer before official release date if owner", async () => { const from = accounts[0]; const to = accounts[3]; const amount = web3.toWei(200, 'ether'); // 200 MVL await token.transfer(to, amount, {from}); const balance = await token.balanceOf.call(to); balance.equals(web3.toBigNumber(amount)).should.be.true; 
}); }); describe("approval", () => { let allowance; it("should approve certain amount", async () => { // proceed time after distribute date const DISTRIBUTE_DATE = 1527768000; await proceedTime(moment.unix(DISTRIBUTE_DATE + 1)); // setup token amount await token.transfer(accounts[1], web3.toBigNumber(web3.toWei(1000000, 'ether')), {from: accounts[0]}); const from = accounts[1]; const spender = accounts[2]; const amount = web3.toBigNumber(web3.toWei(500000, 'ether')); // 0.5mil MVL await token.approve(spender, amount, {from}); allowance = await token.allowance(from, spender); console.log(allowance, allowance.toString()); allowance.equals(amount).should.be.true; }); it("can set allowance to zero", async () => { const from = accounts[1]; const spender = accounts[2]; await token.approve(spender, 0, {from}); let allowance; allowance = await token.allowance(from, spender); allowance.equals(web3.toBigNumber(0)).should.be.true; // restore const amount = web3.toBigNumber(web3.toWei(500000, 'ether')); // 0.5mil MVL await token.approve(spender, amount, {from}); allowance = await token.allowance(from, spender); allowance.equals(amount).should.be.true; }); it("shouldn't accept re-call approve function if it is already set", async () => { const from = accounts[1]; const spender = accounts[2]; const amount = web3.toBigNumber(web3.toWei(500000, 'ether')); // 0.5mil MVL await token.approve(spender, amount, {from}).should.be.rejectedWith(Error); }); it("should increase allowance", async () => { const from = accounts[1]; const spender = accounts[2]; const increase = web3.toBigNumber(web3.toWei(500000, 'ether')); // 0.5mil MVL await token.increaseApproval(spender, increase, {from}); allowance = await token.allowance(from, spender); allowance.equals(web3.toBigNumber(1e24)).should.be.true; // allowance should be 1mil }); it("should decrease allowance", async () => { const from = accounts[1]; const spender = accounts[2]; const increase = web3.toBigNumber(web3.toWei(300000, 'ether')); // 0.3mil MVL await token.decreaseApproval(spender, increase, {from}); allowance = await token.allowance(from, spender); allowance.equals(web3.toBigNumber(7e23)).should.be.true; // allowance should be 0.7mil }); it("cannot transfer from 0x0", async () => { const spender = accounts[2]; const to = accounts[7]; await token.transferFrom(0x0, to, 1, {from: spender}).should.be.rejectedWith(Error); }); it("should transfer token by allowed spender", async () => { const from = accounts[1]; const spender = accounts[2]; const to = accounts[7]; // get original balances const oldBalances = []; await Promise.all([from, spender, to].map(async (acc, i) => { const balance = await token.balanceOf.call(acc); oldBalances[i] = balance; })); // delegate transfer const amount = web3.toBigNumber(web3.toWei(500000, 'ether')); // 0.5mil MVL await token.transferFrom(from, to, amount, {from: spender}); // check balances again const newBalances = []; await Promise.all([from, spender, to].map(async (acc, i) => { const balance = await token.balanceOf.call(acc); newBalances[i] = balance; })); (oldBalances[0].sub(newBalances[0])).equals(amount).should.be.true; (newBalances[2].sub(oldBalances[2])).equals(amount).should.be.true; oldBalances[1].equals(newBalances[1]).should.be.true; }); it("shouldn't transfer token more than allowed by spender", async () => { // delegate transfer const from = accounts[1]; const spender = accounts[2]; const to = accounts[7]; const amount = web3.toBigNumber(web3.toWei(700000, 'ether')); // 0.7mil MVL await token.transferFrom(from, to, 
amount, {from: spender}).should.be.rejectedWith(Error); }); it("should transfer more under allowance", async () => { const from = accounts[1]; const spender = accounts[2]; const to = accounts[7]; // get original balances const oldBalances = []; await Promise.all([from, spender, to].map(async (acc, i) => { const balance = await token.balanceOf.call(acc); oldBalances[i] = balance; })); // delegate transfer const amount = web3.toBigNumber(web3.toWei(200000, 'ether')); // 0.2mil MVL await token.transferFrom(from, to, amount, {from: spender}); // check balances again const newBalances = []; await Promise.all([from, spender, to].map(async (acc, i) => { const balance = await token.balanceOf.call(acc); newBalances[i] = balance; })); (oldBalances[0].sub(newBalances[0])).equals(amount).should.be.true; (newBalances[2].sub(oldBalances[2])).equals(amount).should.be.true; oldBalances[1].equals(newBalances[1]).should.be.true; }); it("should decrease value more than allowance for setting it to zero", async () => { const from = accounts[1]; const spender = accounts[2]; const decrease = web3.toBigNumber(web3.toWei(300000000, 'ether')); // 300mil MVL await token.decreaseApproval(spender, decrease, {from}); allowance = await token.allowance(from, spender); allowance.equals(web3.toBigNumber(0)).should.be.true; // allowance should be 0 }); it("should not be able to transfer token by spender anymore", async () => { // delegate transfer const from = accounts[1]; const spender = accounts[2]; const to = accounts[7]; await token.transferFrom(from, to, 1, {from: spender}).should.be.rejectedWith(Error); }); it("should be able to set another spender", async () => { const from = accounts[1]; const spender = accounts[4]; const amount = web3.toBigNumber(web3.toWei(10000, 'ether')); // 10000 MVL await token.approve(spender, amount, {from}); allowance = await token.allowance(from, spender); console.log(allowance, allowance.toString()); allowance.equals(amount).should.be.true; }); it("should transfer token by another spender", async () => { const from = accounts[1]; const spender = accounts[4]; const to = accounts[7]; // get original balances const oldBalances = []; await Promise.all([from, spender, to].map(async (acc, i) => { const balance = await token.balanceOf.call(acc); oldBalances[i] = balance; })); // delegate transfer const amount = web3.toBigNumber(web3.toWei(10000, 'ether')); // 10000 MVL await token.transferFrom(from, to, amount, {from: spender}); // check balances again const newBalances = []; await Promise.all([from, spender, to].map(async (acc, i) => { const balance = await token.balanceOf.call(acc); newBalances[i] = balance; })); (oldBalances[0].sub(newBalances[0])).equals(amount).should.be.true; (newBalances[2].sub(oldBalances[2])).equals(amount).should.be.true; oldBalances[1].equals(newBalances[1]).should.be.true; // allowance should be adjusted const allowance2 = await token.allowance(from, spender); allowance2.equals(web3.toBigNumber(0)).should.be.true; }); it("should not transfer token by another spender more than allowed", async () => { const from = accounts[1]; const spender = accounts[4]; const to = accounts[7]; // delegate transfer const amount = web3.toBigNumber(web3.toWei(10000, 'ether')); // 10000 MVL await token.transferFrom(from, to, amount, {from: spender}).should.be.rejectedWith(Error); }); }); describe("bonus lock", () => { /*********************/ /* Bonus lock test 1 */ /*********************/ it("should setup the lock policy", async () => { // for each month, 25% vesting const beneficiary = 
accounts[3]; // const lockAmount = web3.toWei(100, 'ether'); // 100 MVL (1e20) // const startTime = moment.parseZone('2018-07-01T00:00:00+00:00').unix(); // const stepTime = moment.duration(1, 'month')/1000; // in sec // const unlockStep = 4; // await token.setTokenLockPolicy(beneficiary, lockAmount, startTime, stepTime, unlockStep, {from: accounts[0]}); await token.addTokenLock(beneficiary, web3.toWei(25, 'ether'), moment.parseZone('2018-07-01T00:00:00+00:00').unix()); await token.addTokenLock(beneficiary, web3.toWei(25, 'ether'), moment.parseZone('2018-07-31T00:00:00+00:00').unix()); // add Sep's token lock ahead of Aug. For testing latestReleaseTime update await token.addTokenLock(beneficiary, web3.toWei(25, 'ether'), moment.parseZone('2018-09-29T00:00:00+00:00').unix()); await token.addTokenLock(beneficiary, web3.toWei(25, 'ether'), moment.parseZone('2018-08-30T00:00:00+00:00').unix()); const locked = await token.getMinLockedAmount(beneficiary); locked.equals(web3.toBigNumber(100e18)).should.be.true; // time warp after release date await proceedTime(moment.parseZone('2018-06-01T01:00:00+00:00')); }); it("unlocked account's lock should be 0 ", async () => { const unlockedAccount = accounts[4]; const lockedAmount = await token.getMinLockedAmount(unlockedAccount); lockedAmount.equals(web3.toBigNumber(0)).should.be.true; // amount should be 0 }); it("cannot set the lock for 0 addr", async () => { await token.addTokenLock(0x0, 25, moment.parseZone('2018-07-01T00:00:00+00:00').unix(), {from: accounts[0]}).should.be.rejectedWith(Error); }); it("cannot set the lock 0", async () => { const account = accounts[4]; await token.addTokenLock(account, 0, moment.parseZone('2018-07-01T00:00:00+00:00').unix(), {from: accounts[0]}).should.be.rejectedWith(Error); }); it("cannot set the past lock", async () => { const account = accounts[4]; await token.addTokenLock(account, 1, moment.parseZone('2018-05-01T00:00:00+00:00').unix(), {from: accounts[0]}).should.be.rejectedWith(Error); }); it("block set token lock policy for unauthorized user", async () => { await token.addTokenLock(accounts[5], 25, moment.parseZone('2018-07-01T00:00:00+00:00').unix(), {from: accounts[3]}).should.be.rejectedWith(Error); }); it("should not be able to transfer token including bonus", async () => { const from = accounts[3]; const to = accounts[4]; // 10 MVL was bonus const amount = web3.toWei(110, 'ether'); await token.transfer(to, amount, {from}).should.be.rejectedWith(Error); }); it("should be able to transfer token under locked values", async () => { const from = accounts[3]; const to = accounts[4]; // 10 mvl was bonus const amount = web3.toWei(90, 'ether'); // 90MVL await token.transfer(to, amount, {from}); const balance1 = await token.balanceOf.call(from); const balance2 = await token.balanceOf.call(to); balance1.equals(web3.toBigNumber(web3.toWei(110, 'ether'))).should.be.true; balance2.equals(web3.toBigNumber(web3.toWei(90, 'ether'))).should.be.true; }); it("should be able to transfer token when part of it released", async () => { // time warp to 1month later await proceedTime(moment.parseZone('2018-07-01T01:00:00+00:00')); const from = accounts[3]; const to = accounts[4]; // 10 mvl was bonus const amount = web3.toWei(20, 'ether'); // 10MVL(no locked) + 10MVL(part of bonus. 
25 MVL was released) await token.transfer(to, amount, {from}); const balance1 = await token.balanceOf.call(from); const balance2 = await token.balanceOf.call(to); balance1.equals(web3.toBigNumber(web3.toWei(90, 'ether'))).should.be.true; balance2.equals(web3.toBigNumber(web3.toWei(110, 'ether'))).should.be.true; }); it("should not be able to transfer more than allowed now", async () => { const from = accounts[3]; const to = accounts[4]; let balance1 = await token.balanceOf.call(from); console.log('balance1=', balance1.div(web3.toBigNumber(1e18)).toString()); const locked = await token.getMinLockedAmount(from); console.log('locked=', locked.div(web3.toBigNumber(1e18)).toString()); locked.equals(web3.toBigNumber(web3.toWei(75, 'ether'))).should.be.true; // just 1wei amount above to allowance const amount = balance1.minus(locked).add(1); console.log('amount=',amount.toString()); token.transfer(to, amount, {from}).should.be.rejectedWith(Error); }); it("should not be able to transfer more than allowed now 2", async () => { // time warp to 1month later again await proceedTime(moment.parseZone('2018-08-01T01:00:00+00:00')); const from = accounts[3]; const to = accounts[4]; let balance1 = await token.balanceOf.call(from); console.log('balance1=', balance1); const locked = await token.getMinLockedAmount(from); console.log('locked=', locked.div(web3.toBigNumber(1e18)).toString()); locked.equals(web3.toBigNumber(web3.toWei(50, 'ether'))).should.be.true; const amount = balance1.minus(locked).add(1); console.log('amount=', amount.toString()); await token.transfer(to, amount, {from}).should.be.rejectedWith(Error); }); it("should transfer under locked amount", async () => { const from = accounts[3]; const to = accounts[4]; const amount = 1e18; // 1 MVL await token.transfer(to, amount, {from}); // check balance const balance1 = await token.balanceOf.call(from); const balance2 = await token.balanceOf.call(to); balance1.equals(web3.toBigNumber(89e18)).should.be.true; balance2.equals(web3.toBigNumber(111e18)).should.be.true; }); it("should not be able to transfer more than allowed now 3", async () => { // time warp to 1month later again await proceedTime(moment.parseZone('2018-09-01T00:00:01+00:00')); const from = accounts[3]; const to = accounts[4]; let balance1 = await token.balanceOf.call(from); console.log('balance1=', balance1); const locked = await token.getMinLockedAmount(from); console.log('locked=', locked.div(web3.toBigNumber(1e18)).toString()); locked.equals(web3.toBigNumber(web3.toWei(25, 'ether'))).should.be.true; const amount = balance1.minus(locked).add(1); console.log('amount=', amount.toString()); await token.transfer(to, amount, {from}).should.be.rejectedWith(Error); }); it("should transfer under locked amount", async () => { const from = accounts[3]; const to = accounts[4]; const amount = 29e18; // 29 MVL await token.transfer(to, amount, {from}); // check balance const balance1 = await token.balanceOf.call(from); const balance2 = await token.balanceOf.call(to); balance1.equals(web3.toBigNumber(60e18)).should.be.true; balance2.equals(web3.toBigNumber(140e18)).should.be.true; }); it("should not be able to transfer more than allowed now 3", async () => { // time warp to right before all lock released await proceedTime(moment.parseZone('2018-09-28T23:59:00+00:00')); const from = accounts[3]; const to = accounts[4]; let balance1 = await token.balanceOf.call(from); console.log('balance1=', balance1); const locked = await token.getMinLockedAmount(from); console.log('locked=', 
locked.div(web3.toBigNumber(1e18)).toString()); locked.equals(web3.toBigNumber(web3.toWei(25, 'ether'))).should.be.true; const amount = balance1.minus(locked).add(1); console.log('amount=', amount.toString()); await token.transfer(to, amount, {from}).should.be.rejectedWith(Error); }); it("should be able to send all tokens", async () => { // time warp to right after all lock released await proceedTime(moment.parseZone('2018-09-29T00:00:01+00:00')); const from = accounts[3]; const to = accounts[4]; const amount = await token.balanceOf.call(from); await token.transfer(to, amount, {from}); // check balance const balance1 = await token.balanceOf.call(from); const balance2 = await token.balanceOf.call(to); balance1.equals(web3.toBigNumber(0)).should.be.true; balance2.equals(web3.toBigNumber(200e18)).should.be.true; }); /*********************/ /* Bonus lock test 2 */ /*********************/ it("should setup the different bonus policy", async () => { const beneficiary = accounts[4]; const lockAmount = web3.toWei(100, 'ether'); // 100 MVL (1e20) // const startTime = moment.parseZone('2018-10-01T00:00:00+00:00').unix(); // const stepTime = moment.duration(3, 'month')/1000; // in sec // const unlockStep = 1; // await token.setTokenLockPolicy(beneficiary, lockAmount, startTime, stepTime, unlockStep, {from: accounts[0]}); await token.addTokenLock(beneficiary, lockAmount, moment.parseZone('2018-10-01T00:00:00+00:00').add(moment.duration(3, 'month')/1000, 'seconds').unix()); }); it("should not be able to transfer locked amount before release date", async () => { await proceedTime(moment.parseZone('2018-10-02T00:00:00+00:00')); const from = accounts[4]; const to = accounts[5]; const amount = 101e18; await token.transfer(to, amount, {from}).should.be.rejectedWith(Error); }); it("should be able to transfer token under locked amount before release time", async () => { const from = accounts[4]; const to = accounts[5]; const amount = 99e18; await token.transfer(to, amount, {from}); // check balance const balance1 = await token.balanceOf.call(from); const balance2 = await token.balanceOf.call(to); balance1.equals(web3.toBigNumber(101e18)).should.be.true; balance2.equals(web3.toBigNumber(99e18)).should.be.true; }); it("should be able to transfer all tokens after release time", async () => { await proceedTime(moment.parseZone('2018-12-30T00:00:01+00:00')); const from = accounts[4]; const to = accounts[5]; const amount = 101e18; await token.transfer(to, amount, {from}); // check balance const balance1 = await token.balanceOf.call(from); const balance2 = await token.balanceOf.call(to); balance1.equals(web3.toBigNumber(0)).should.be.true; balance2.equals(web3.toBigNumber(200e18)).should.be.true; }); it("lock 100 tokens", async () => { const from = await token.owner(); const b = accounts[4]; await token.transfer(b, web3.toWei(100, 'ether'), {from}); // token.setTokenLockPolicy(b, web3.toWei(100, 'ether'), m.unix(), 86400, 3); await token.addTokenLock(b, web3.toWei(33, 'ether'), moment.parseZone('2019-01-01T00:00:00+09:00').unix()); await token.addTokenLock(b, web3.toWei(33, 'ether'), moment.parseZone('2019-01-02T00:00:00+09:00').unix()); await token.addTokenLock(b, web3.toWei(34, 'ether'), moment.parseZone('2019-01-03T00:00:00+09:00').unix()); const a = await token.getMinLockedAmount(accounts[4]); console.log('minlocked', a.toString()); a.equals(web3.toBigNumber(web3.toWei(100, 'ether'))).should.be.true; }); it("should unlock 33 tokens after 1 day", async () => { await 
proceedTime(moment.parseZone('2019-01-01T00:00:01+09:00')); const a = await token.getMinLockedAmount(accounts[4]); console.log('minlocked', a.toString()); a.equals(web3.toBigNumber(web3.toWei(67, 'ether'))).should.be.true; }); it("should unlock 33 tokens after 2 days", async () => { await proceedTime(moment.parseZone('2019-01-02T00:00:01+09:00')); const a = await token.getMinLockedAmount(accounts[4]); console.log('minlocked', a.toString()); a.equals(web3.toBigNumber(web3.toWei(34, 'ether'))).should.be.true; }); it("should unlock all tokens after 3 days", async () => { await proceedTime(moment.parseZone('2019-01-03T00:00:01+09:00')); const a = await token.getMinLockedAmount(accounts[4]); console.log('minlocked', a.toString()); a.equals(web3.toBigNumber(0)).should.be.true; }); }); describe("transfer control", () => { /*************************/ /* transfer control test */ /*************************/ it("shouldn't be blocked by random account", async () => { const owner = await token.owner(); console.log('owner', owner); for (let i=1; i < accounts.length; i++) { const from = accounts[i]; if (owner === from) { continue; } await token.enableTransfer(false, {from}).should.be.rejectedWith(Error); await token.enableTransfer(true, {from}).should.be.rejectedWith(Error); await token.enableTransfer(2**256-1, {from}).should.be.rejectedWith(Error); await token.enableTransfer("true", {from}).should.be.rejectedWith(Error); await token.enableTransfer("false", {from}).should.be.rejectedWith(Error); } }); it("should block transfer when transferEnabled is false", async () => { const owner = await token.owner(); const from = accounts[1]; const to = accounts[9]; await token.enableTransfer(false, {from: owner}); // turn off transfer // try to move token const aa = await token.balanceOf(from); await token.transfer(to, 1, {from}).should.be.rejectedWith(Error); }); it("should be able to move token again after transfer enabled", async () => { const owner = await token.owner(); const from = accounts[1]; const to = accounts[9]; // check balance const balance11 = await token.balanceOf.call(from); const balance12 = await token.balanceOf.call(to); await token.enableTransfer(true, {from: owner}); // turn on transfer // try to move token await token.transfer(to, 100, {from}); const balance21 = await token.balanceOf.call(from); const balance22 = await token.balanceOf.call(to); balance11.minus(100).equals(balance21).should.be.true; balance12.add(100).equals(balance22).should.be.true; }); }); describe("burn", () => { /*************/ /* burn test */ /*************/ it("shouldn't be burnt by random account who isn't the owner", async () => { const owner = await token.owner(); console.log('owner', owner); for (let i=1; i < accounts.length; i++) { const from = accounts[i]; if (owner === from) { continue; } // console.log('1, from=',from, 'i=',i); await token.burn(3e27, {from}).should.be.rejectedWith(Error); // console.log('2, from=',from, 'i=',i); await token.burn(1, {from}).should.be.rejectedWith(Error); // console.log('3, from=',from, 'i=',i); await token.burn(1e18, {from}).should.be.rejectedWith(Error); } }); it("should be able to burn", async () => { const owner = await token.owner.call(); let oldOwnerAmount = await token.balanceOf.call(owner); const burnAmount = web3.toBigNumber(web3.toWei(10000000, 'ether')); // 10000000 MVL // console.log(ownerAmount.toString()); // console.log(burnAmount.toString()); await token.burn(burnAmount, {from: owner}); const totalSupply = await token.totalSupply(); 
    totalSupply.equals(web3.toBigNumber(3e28).minus(burnAmount)).should.be.true;
    const ownerAmount = await token.balanceOf.call(owner);
    ownerAmount.equals(oldOwnerAmount.minus(burnAmount)).should.be.true;
  });

  it("shouldn't burn more than the owner has", async () => {
    const owner = await token.owner.call();
    const ownerAmount = await token.balanceOf.call(owner);
    const burnAmount = ownerAmount.add(web3.toWei(100, 'ether')); // ownerAmount + 100MVL
    await token.burn(burnAmount, {from: owner}).should.be.rejectedWith(Error);
  });
});

describe("big amount", () => {
  /*******************/
  /* big number test */
  /*******************/
  it("should be able to transfer very large amount", async () => {
    const owner = await token.owner.call();
    const to = accounts[8];
    // const ownerAmount = await token.balanceOf.call(owner);
    // console.log(web3.fromWei(ownerAmount, 'ether').toString());
    const balance11 = await token.balanceOf.call(owner);
    const balance12 = await token.balanceOf.call(to);
    const amount = web3.toBigNumber(2e27);
    await token.transfer(to, amount, {from: owner});
    const balance21 = await token.balanceOf.call(owner);
    const balance22 = await token.balanceOf.call(to);
    balance11.minus(balance21).equals(amount).should.be.true;
    balance22.minus(balance12).equals(amount).should.be.true;
  });

  it("should not be able to transfer very large amount", async () => {
    const owner = await token.owner.call();
    const to = accounts[8];
    const amount = web3.toBigNumber(1e77);
    await token.transfer(to, amount, {from: owner}).should.be.rejectedWith(Error);
  });
});

describe("admin setup", () => {
  /***************/
  /* setup admin */
  /***************/
  it("cannot setup admin with 0x0", async () => {
    const admin = 0x0;
    const owner = await token.owner();
    await token.setAdmin(admin, {from: owner}).should.be.rejectedWith(Error);
  });

  it("setup admin", async () => {
    const admin = accounts[9];
    const owner = await token.owner();
    await token.setAdmin(admin, {from: owner});
    const newAdmin = await token.admin();
    newAdmin.should.equal(admin);
    // get approved amount
    const allowance = await token.allowance(owner, admin);
    // all amount should be allowed
    allowance.equals(web3.toBigNumber(3e28)).should.be.true;
  });

  it("change admin", async () => {
    const admin = accounts[8];
    const oldAdmin = await token.admin();
    const owner = await token.owner();
    await token.setAdmin(admin, {from: owner});
    const newAdmin = await token.admin();
    newAdmin.should.equal(admin);
    // get approved amount
    const allowance = await token.allowance(owner, admin);
    // all amount should be allowed
    allowance.equals(web3.toBigNumber(3e28)).should.be.true;
    // old admin is not allowed
    const allowance2 = await token.allowance(owner, oldAdmin);
    allowance2.equals(web3.toBigNumber(0)).should.be.true;
  });

  it("block change admin to the same one", async () => {
    const admin = await token.admin();
    const from = await token.owner();
    await token.setAdmin(admin, {from}).should.be.rejectedWith(Error);
  });

  it("block change admin to owner", async () => {
    const admin = await token.owner();
    const from = await token.owner();
    await token.setAdmin(admin, {from}).should.be.rejectedWith(Error);
  });
});

describe("misc", () => {
  /*******************************/
  /* default payable revert test */
  /*******************************/
  it("should reject send eth directly to token contract", async () => {
    try {
      await web3.eth.sendTransaction({to: token.address, gas: 500000, from: accounts[3]}).should.be.rejectedWith(Error);
      chai.expect.fail(false, true);
    } catch (err) {
      // originally expected err
    }
  });
  /*************************/
  /* unlock all token test */
  /*************************/
  it("should unlock all tokens", async () => {
    const owner = await token.owner();
    await token.unlockAllTokens({from: owner});
    const nolock = await token.noTokenLocked();
    nolock.should.be.true;
    const account = accounts[5];
    const balance = await token.balanceOf(account);
    // lock all balance
    // const d = await token.DISTRIBUTE_DATE();
    // await token.setTokenLockPolicy(account, balance, d.add(web3.toBigNumber(365*86400*1000)), 365*86400*1000, 10); // 10 yrs, 1 year interval
    await token.addTokenLock(account, balance, moment.parseZone('2028-01-01T00:00:00+00:00').unix());
    const locked = await token.getMinLockedAmount(account);
    locked.equals(balance).should.be.true;
    // even if somebody's portion is locked, it should be able to xfer now
    const to = accounts[6];
    const balance11 = await token.balanceOf.call(account);
    const balance12 = await token.balanceOf.call(to);
    await token.transfer(to, web3.toWei(10, 'ether'), {from: account});
    // check balance
    const balance21 = await token.balanceOf.call(account);
    const balance22 = await token.balanceOf.call(to);
    balance11.minus(balance21).equals(web3.toBigNumber(web3.toWei(10, 'ether'))).should.be.true;
    balance22.minus(balance12).equals(web3.toBigNumber(web3.toWei(10, 'ether'))).should.be.true;
  });

  /***********************/
  /* ownership xfer test */
  /***********************/
  it("should transfer ownership", async () => {
    const oldOwner = await token.owner.call();
    const newOwner = accounts[1];
    await token.transferOwnership(newOwner, {from: oldOwner});
    // owner check
    const owner = await token.owner.call();
    owner.should.equal(newOwner);
    // permission test
    await token.enableTransfer(false, {from: oldOwner}).should.be.rejectedWith(Error);
    await token.enableTransfer(true, {from: newOwner});
  });

  it("block change owner to admin", async () => {
    const admin = await token.admin();
    const owner = await token.owner();
    await token.transferOwnership(admin, {from: owner}).should.be.rejectedWith(Error);
  });

  it("should check invalid address when xfer owner", async () => {
    const owner = await token.owner.call();
    await token.transferOwnership(0x0, {from: owner}).should.be.rejectedWith(Error);
    await token.transferOwnership(owner, {from: owner}).should.be.rejectedWith(Error);
    // owner didn't change
  });
});
});
<file_sep>/migrations/2_deploy_token.js
const MVLToken = artifacts.require("MVLToken");

module.exports = function(deployer) {
  return deployer.deploy(MVLToken);
};
<file_sep>/README.md
# MVL Token smart contracts

### MVL Token

MVL Token is an ERC-20 token that conforms to the ERC-20 standard:
https://github.com/ethereum/EIPs/blob/master/EIPS/eip-20.md

##### Constants
- `name`: 'Mass Vehicle Ledger'
- `symbol`: 'MVL'
- `decimals`: (uint8)18
- `totalSupply`: (uint256)3e28

##### Methods
- `balanceOf`: `view (address) => (uint256)`
- `transfer`: `(address, uint256) => (bool)`
- `transferFrom`: `(address _from, address _to, uint256) => (bool)`
- `approve`: `(address, uint256) => (bool)`
- `allowance`: `(address _owner, address _spender) => (uint256)`
- `increaseApproval`: `(address _spender, uint256 _addedValue) => (bool)`
- `decreaseApproval`: `(address _spender, uint256 _subtractedValue) => (bool)`

##### Events
- `Transfer(address indexed _from, address indexed _to, uint256 _value)`
- `Approval(address indexed _owner, address indexed _spender, uint256 _value)`

##### Note on N+M spend attack
There is an attack vector that exploits the `approve` function, allowing a `spender` to spend more tokens than the holder ever intended to grant.
The discussion is [here](https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729).
Since EIP-20 has already fixed its interface, the [suggestion of changing the API](https://docs.google.com/document/d/1YLPtQxZu1UAvO9cZ1O2RPXBbT0mooh4DYKjA_jp-RLM/edit) was not accepted.
Therefore, the MVL token adopts the work-around hinted at by [MiniMeToken](https://github.com/Giveth/minime/blob/master/contracts/MiniMeToken.sol).
The main idea is to force the token holder to call `approve(spender, 0)` before calling it again with a positive integer value.
This is implemented [here](https://github.com/mvlchain/mvltoken/blob/master/contracts/token/MVLToken.sol#L55) and tested [here](https://github.com/mvlchain/mvltoken/blob/master/test/mvltoken.js#L113).

### Build Environment
- Solidity v0.4.19 (solc or solc-js both work)
- Truffle v4.1.3 (core: 4.1.3)
- Node.js v8.11.1 (for dependency mgmt.)
- Docker (18.03.0-ce, build 0520e24, for [oyente](https://github.com/melonproject/oyente))

### Test Environment & status
Build status [![CircleCI](https://circleci.com/gh/mvlchain/mvltoken/tree/master.svg?style=svg)](https://circleci.com/gh/mvlchain/mvltoken/tree/master)

Test & Coverage status [![Coverage Status](https://coveralls.io/repos/github/mvlchain/mvltoken/badge.svg?branch=master)](https://coveralls.io/github/mvlchain/mvltoken?branch=master)

- Truffle on CircleCI
- Every commit on the master branch of this repository triggers a [CircleCI](https://circleci.com/) hook.
- The [Truffle](http://truffleframework.com/) suite runs the tests.
- [CircleCI config file](https://github.com/mvlchain/mvltoken/blob/master/.circleci/config.yml)

##### Commands
- install dependencies
```
$ yarn install --frozen-lockfile --production=false
```
- test command
```
$ truffle test
```
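##### Example: resetting allowance before re-approval
The work-around described above can be made concrete with a short sketch in the style of this repository's test suite. This is only an illustration, not a test from the repo: it assumes a deployed `token` instance, the truffle `accounts` array, and chai-as-promised configured as in `test/mvltoken.js`; the test name and account indices are hypothetical.

```
// Sketch: overwriting a non-zero allowance directly should be rejected;
// resetting it to 0 first is the accepted path.
it("should force resetting allowance to 0 before re-approving", async () => {
  const holder = accounts[1];
  const spender = accounts[2];
  // first approval from a zero allowance is fine
  await token.approve(spender, 5, {from: holder});
  // changing a non-zero allowance in one step is rejected (N+M spend protection)
  await token.approve(spender, 3, {from: holder}).should.be.rejectedWith(Error);
  // reset to 0, then approve the new value
  await token.approve(spender, 0, {from: holder});
  await token.approve(spender, 3, {from: holder});
  const allowance = await token.allowance(holder, spender);
  allowance.equals(web3.toBigNumber(3)).should.be.true;
});
```

If the direct overwrite were allowed, a spender watching the pending transaction could spend both the old and the new allowance, which is exactly the N+M attack described above.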
b8b322ad9e6b6a3d9df0ff1716205ed90cebe002
[ "JavaScript", "Markdown" ]
3
JavaScript
mvlchain/mvltoken
e0b7bf2b1166ee795c06f558caddf72304108579
eff66ca9aa1717f019707131f19aea6786efc936
refs/heads/master
<repo_name>wizzdi/order-service<file_sep>/src/main/java/com/flexicore/order/interfaces/IOrderRepository.java package com.flexicore.order.interfaces; import com.flexicore.interfaces.PluginRepository; import com.flexicore.order.model.Order; import com.flexicore.order.model.Order_; import com.flexicore.order.request.OrderFiltering; import com.flexicore.organization.model.Organization; import com.flexicore.organization.model.Organization_; import com.flexicore.organization.model.Supplier; import com.flexicore.organization.model.Supplier_; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.Join; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import java.util.List; import java.util.Set; import java.util.stream.Collectors; public interface IOrderRepository extends PluginRepository { static <T extends Order> void addOrderPredicates(List<Predicate> preds, CriteriaBuilder cb, Root<T> r, OrderFiltering orderFiltering) { if (orderFiltering.getExternalIds() != null && !orderFiltering.getExternalIds().isEmpty()) { preds.add(r.get(Order_.externalId).in( orderFiltering.getExternalIds())); } if (orderFiltering.getConsumingOrganizations() != null && !orderFiltering.getConsumingOrganizations().isEmpty()) { Set<String> ids = orderFiltering.getConsumingOrganizations() .parallelStream().map(f -> f.getId()) .collect(Collectors.toSet()); Join<T, Organization> join = r.join(Order_.consumingOrganization); preds.add(join.get(Organization_.id).in(ids)); } if (orderFiltering.getSuppliers() != null && !orderFiltering.getSuppliers().isEmpty()) { Set<String> ids = orderFiltering.getSuppliers().parallelStream() .map(f -> f.getId()).collect(Collectors.toSet()); Join<T, Supplier> join = r.join(Order_.supplier); preds.add(join.get(Supplier_.id).in(ids)); } } } <file_sep>/src/main/java/com/flexicore/order/request/SendOrder.java package com.flexicore.order.request; import com.fasterxml.jackson.annotation.JsonIgnore; import com.flexicore.order.interfaces.IOrderApiService; import com.flexicore.order.model.Order; import com.flexicore.order.model.OrderApiConfig; import com.flexicore.order.model.OrderItem; import java.util.List; public class SendOrder { private String orderId; private String orderApiConfigId; @JsonIgnore private Order order; @JsonIgnore private OrderApiConfig orderApiConfig; public String getOrderId() { return orderId; } public <T extends SendOrder> T setOrderId(String orderId) { this.orderId = orderId; return (T) this; } public String getOrderApiConfigId() { return orderApiConfigId; } public <T extends SendOrder> T setOrderApiConfigId(String orderApiConfigId) { this.orderApiConfigId = orderApiConfigId; return (T) this; } @JsonIgnore public Order getOrder() { return order; } public <T extends SendOrder> T setOrder(Order order) { this.order = order; return (T) this; } @JsonIgnore public OrderApiConfig getOrderApiConfig() { return orderApiConfig; } public <T extends SendOrder> T setOrderApiConfig( OrderApiConfig orderApiConfig) { this.orderApiConfig = orderApiConfig; return (T) this; } } <file_sep>/src/main/java/com/flexicore/order/request/CreateOrder.java package com.flexicore.order.request; import com.fasterxml.jackson.annotation.JsonIgnore; import com.flexicore.organization.model.Organization; import com.flexicore.organization.model.Supplier; import java.time.OffsetDateTime; public class CreateOrder { private String name; private String description; private String externalId; private String consumingOrganizationId; @JsonIgnore private 
Organization consumingOrganization; private String supplierId; @JsonIgnore private Supplier supplier; private OffsetDateTime orderDate; private OffsetDateTime orderSentDate; private Integer ordinal; public String getName() { return name; } public <T extends CreateOrder> T setName(String name) { this.name = name; return (T) this; } public String getDescription() { return description; } public <T extends CreateOrder> T setDescription(String description) { this.description = description; return (T) this; } public String getExternalId() { return externalId; } public <T extends CreateOrder> T setExternalId(String externalId) { this.externalId = externalId; return (T) this; } public String getConsumingOrganizationId() { return consumingOrganizationId; } public <T extends CreateOrder> T setConsumingOrganizationId( String consumingOrganizationId) { this.consumingOrganizationId = consumingOrganizationId; return (T) this; } @JsonIgnore public Organization getConsumingOrganization() { return consumingOrganization; } public <T extends CreateOrder> T setConsumingOrganization( Organization consumingOrganization) { this.consumingOrganization = consumingOrganization; return (T) this; } public String getSupplierId() { return supplierId; } public <T extends CreateOrder> T setSupplierId(String supplierId) { this.supplierId = supplierId; return (T) this; } @JsonIgnore public Supplier getSupplier() { return supplier; } public <T extends CreateOrder> T setSupplier(Supplier supplier) { this.supplier = supplier; return (T) this; } public OffsetDateTime getOrderDate() { return orderDate; } public <T extends CreateOrder> T setOrderDate(OffsetDateTime orderDate) { this.orderDate = orderDate; return (T) this; } public OffsetDateTime getOrderSentDate() { return orderSentDate; } public <T extends CreateOrder> T setOrderSentDate( OffsetDateTime orderSentDate) { this.orderSentDate = orderSentDate; return (T) this; } public Integer getOrdinal() { return ordinal; } public <T extends CreateOrder> T setOrdinal(Integer ordinal) { this.ordinal = ordinal; return (T) this; } } <file_sep>/src/main/java/com/flexicore/order/data/OrderRepository.java package com.flexicore.order.data; import com.flexicore.annotations.plugins.PluginInfo; import com.flexicore.interfaces.AbstractRepositoryPlugin; import com.flexicore.model.QueryInformationHolder; import com.flexicore.order.interfaces.IOrderRepository; import com.flexicore.order.model.Order; import com.flexicore.order.model.Order_; import com.flexicore.order.request.OrderFiltering; import com.flexicore.security.SecurityContext; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import java.util.ArrayList; import java.util.List; import org.pf4j.Extension; import org.springframework.stereotype.Component; @PluginInfo(version = 1) @Extension @Component public class OrderRepository extends AbstractRepositoryPlugin implements IOrderRepository { public List<Order> listAllOrders(OrderFiltering orderFiltering, SecurityContext securityContext) { CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery<Order> q = cb.createQuery(Order.class); Root<Order> r = q.from(Order.class); List<Predicate> preds = new ArrayList<>(); IOrderRepository.addOrderPredicates(preds, cb, r, orderFiltering); QueryInformationHolder<Order> queryInformationHolder = new QueryInformationHolder<>( orderFiltering, Order.class, securityContext); return 
getAllFiltered(queryInformationHolder, preds, cb, q, r); } public long countAllOrders(OrderFiltering orderFiltering, SecurityContext securityContext) { CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery<Long> q = cb.createQuery(Long.class); Root<Order> r = q.from(Order.class); List<Predicate> preds = new ArrayList<>(); IOrderRepository.addOrderPredicates(preds, cb, r, orderFiltering); QueryInformationHolder<Order> queryInformationHolder = new QueryInformationHolder<>( orderFiltering, Order.class, securityContext); return countAllFiltered(queryInformationHolder, preds, cb, q, r); } public int getCurrentOrdinal(SecurityContext securityContext) { CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery<Order> q = cb.createQuery(Order.class); Root<Order> r = q.from(Order.class); q.select(r) .where(cb.equal(r.get(Order_.tenant), securityContext.getTenantToCreateIn())) .orderBy(cb.desc(r.get(Order_.ordinal))); TypedQuery<Order> query = em.createQuery(q); query.setFirstResult(0).setMaxResults(1); List<Order> resultList = query.getResultList(); return resultList.isEmpty() ? 0 : resultList.get(0).getOrdinal(); } } <file_sep>/src/main/java/com/flexicore/order/request/SupplyTimeFiltering.java package com.flexicore.order.request; import com.flexicore.model.FilteringInformationHolder; public class SupplyTimeFiltering extends FilteringInformationHolder { } <file_sep>/src/main/java/com/flexicore/order/data/OrderItemRepository.java package com.flexicore.order.data; import com.flexicore.annotations.plugins.PluginInfo; import com.flexicore.interfaces.AbstractRepositoryPlugin; import com.flexicore.model.QueryInformationHolder; import com.flexicore.order.interfaces.IOrderItemRepository; import com.flexicore.order.model.Order; import com.flexicore.order.model.OrderItem; import com.flexicore.order.model.OrderItem_; import com.flexicore.order.model.Order_; import com.flexicore.order.request.OrderItemFiltering; import com.flexicore.product.model.Product; import com.flexicore.product.model.Product_; import com.flexicore.security.SecurityContext; import javax.persistence.criteria.*; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.pf4j.Extension; import org.springframework.stereotype.Component; @PluginInfo(version = 1) @Extension @Component public class OrderItemRepository extends AbstractRepositoryPlugin implements IOrderItemRepository { public List<OrderItem> listAllOrderItems( OrderItemFiltering orderItemFiltering, SecurityContext securityContext) { CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery<OrderItem> q = cb.createQuery(OrderItem.class); Root<OrderItem> r = q.from(OrderItem.class); List<Predicate> preds = new ArrayList<>(); IOrderItemRepository.addOrderItemPredicates(preds, cb, r, orderItemFiltering); QueryInformationHolder<OrderItem> queryInformationHolder = new QueryInformationHolder<>(orderItemFiltering, OrderItem.class, securityContext); return getAllFiltered(queryInformationHolder, preds, cb, q, r); } public long countAllOrderItems(OrderItemFiltering orderItemFiltering, SecurityContext securityContext) { CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery<Long> q = cb.createQuery(Long.class); Root<OrderItem> r = q.from(OrderItem.class); List<Predicate> preds = new ArrayList<>(); IOrderItemRepository.addOrderItemPredicates(preds, cb, r, orderItemFiltering); QueryInformationHolder<OrderItem> queryInformationHolder = new QueryInformationHolder<>(orderItemFiltering, OrderItem.class, securityContext); 
return countAllFiltered(queryInformationHolder, preds, cb, q, r); } } <file_sep>/src/main/java/com/flexicore/order/request/CreateOrderApiConfig.java package com.flexicore.order.request; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.databind.annotation.JsonTypeIdResolver; import com.flexicore.data.jsoncontainers.CrossLoaderResolver; import com.flexicore.organization.model.Supplier; @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, property = "type") @JsonTypeIdResolver(CrossLoaderResolver.class) public class CreateOrderApiConfig { private String name; private String description; private String supplierId; @JsonIgnore private Supplier supplier; private String host; private String username; private String password; public String getName() { return name; } public <T extends CreateOrderApiConfig> T setName(String name) { this.name = name; return (T) this; } public String getDescription() { return description; } public <T extends CreateOrderApiConfig> T setDescription(String description) { this.description = description; return (T) this; } public String getHost() { return host; } public <T extends CreateOrderApiConfig> T setHost(String host) { this.host = host; return (T) this; } public String getUsername() { return username; } public <T extends CreateOrderApiConfig> T setUsername(String username) { this.username = username; return (T) this; } public String getPassword() { return password; } public <T extends CreateOrderApiConfig> T setPassword(String password) { this.password = password; return (T) this; } public String getSupplierId() { return supplierId; } public <T extends CreateOrderApiConfig> T setSupplierId(String supplierId) { this.supplierId = supplierId; return (T) this; } @JsonIgnore public Supplier getSupplier() { return supplier; } public <T extends CreateOrderApiConfig> T setSupplier(Supplier supplier) { this.supplier = supplier; return (T) this; } } <file_sep>/src/main/java/com/flexicore/order/service/OrderService.java package com.flexicore.order.service; import com.flexicore.annotations.plugins.PluginInfo; import com.flexicore.data.jsoncontainers.PaginationResponse; import com.flexicore.model.Baseclass; import com.flexicore.order.data.OrderRepository; import com.flexicore.order.model.Order; import com.flexicore.order.request.CreateOrder; import com.flexicore.order.request.OrderFiltering; import com.flexicore.order.request.UpdateOrder; import com.flexicore.organization.model.Organization; import com.flexicore.organization.model.Supplier; import com.flexicore.security.SecurityContext; import javax.ws.rs.BadRequestException; import java.util.*; import java.util.stream.Collectors; import org.pf4j.Extension; import org.springframework.stereotype.Component; import org.springframework.beans.factory.annotation.Autowired; @PluginInfo(version = 1) @Extension @Component public class OrderService implements com.flexicore.order.interfaces.IOrderService { @PluginInfo(version = 1) @Autowired private OrderRepository orderRepository; @Override public PaginationResponse<Order> getAllOrders( OrderFiltering orderFiltering, SecurityContext securityContext) { List<Order> list = listAllOrders(orderFiltering, securityContext); long count = orderRepository.countAllOrders(orderFiltering, securityContext); return new PaginationResponse<>(list, orderFiltering, count); } @Override public List<Order> listAllOrders(OrderFiltering orderFiltering, SecurityContext securityContext) { return 
orderRepository.listAllOrders(orderFiltering, securityContext); } @Override public void validate(OrderFiltering orderFiltering, SecurityContext securityContext) { Set<String> consumingOrganizationIds = orderFiltering .getConsumingOrganizationIds(); Map<String, Organization> organizationMap = !consumingOrganizationIds .isEmpty() ? listByIds(Organization.class, consumingOrganizationIds, securityContext).parallelStream().collect( Collectors.toMap(f -> f.getId(), f -> f)) : new HashMap<>(); consumingOrganizationIds.removeAll(organizationMap.keySet()); if (!consumingOrganizationIds.isEmpty()) { throw new BadRequestException("No Organizations with ids " + consumingOrganizationIds); } orderFiltering.setConsumingOrganizations(new ArrayList<>( organizationMap.values())); Set<String> supplierIds = orderFiltering.getSupplierIds(); Map<String, Supplier> supplierMap = !supplierIds.isEmpty() ? listByIds(Supplier.class, supplierIds, securityContext) .parallelStream().collect( Collectors.toMap(f -> f.getId(), f -> f)) : new HashMap<>(); supplierIds.removeAll(supplierMap.keySet()); if (!supplierIds.isEmpty()) { throw new BadRequestException("No Suppliers with ids " + supplierIds); } orderFiltering.setSuppliers(new ArrayList<>(supplierMap.values())); } @Override public void validate(CreateOrder createOrder, SecurityContext securityContext) { this.validateUpsertOrder(createOrder, securityContext); int ordinal = orderRepository.getCurrentOrdinal(securityContext) + 1; createOrder.setOrdinal(ordinal); } @Override public void validate(UpdateOrder updateOrder, SecurityContext securityContext) { Order Order = orderRepository.getByIdOrNull(updateOrder.getId(), Order.class, null, securityContext); if (Order == null) { throw new BadRequestException("no Order with id " + updateOrder.getId()); } updateOrder.setOrder(Order); this.validateUpsertOrder(updateOrder, securityContext); } private void validateUpsertOrder(CreateOrder createOrder, SecurityContext securityContext) { String consumingOrganizationId = createOrder .getConsumingOrganizationId(); Organization consumingOrganization = consumingOrganizationId != null ? getByIdOrNull(consumingOrganizationId, Organization.class, null, securityContext) : null; if (consumingOrganization == null && consumingOrganizationId != null) { throw new BadRequestException("No Organization with id " + consumingOrganizationId); } createOrder.setConsumingOrganization(consumingOrganization); String supplierId = createOrder.getSupplierId(); Supplier supplier = supplierId != null ? 
getByIdOrNull(supplierId, Supplier.class, null, securityContext) : null; if (supplier == null && supplierId != null) { throw new BadRequestException("No Supplier with id " + supplierId); } createOrder.setSupplier(supplier); } @Override public Order createOrder(CreateOrder createOrder, SecurityContext securityContext) { Order order = createOrderNoMerge(createOrder, securityContext); orderRepository.merge(order); return order; } @Override public Order updateOrder(UpdateOrder updateOrder, SecurityContext securityContext) { Order order = updateOrder.getOrder(); if (updateOrderNoMerge(order, updateOrder)) { orderRepository.merge(order); } return order; } @Override public Order createOrderNoMerge(CreateOrder createOrder, SecurityContext securityContext) { Order order = new Order(createOrder.getName(), securityContext); updateOrderNoMerge(order, createOrder); return order; } @Override public boolean updateOrderNoMerge(Order order, CreateOrder createOrder) { boolean update = false; if (createOrder.getName() != null && !createOrder.getName().equals(order.getName())) { order.setName(createOrder.getName()); update = true; } if (createOrder.getDescription() != null && !createOrder.getDescription().equals(order.getDescription())) { order.setDescription(createOrder.getDescription()); update = true; } if (createOrder.getExternalId() != null && !createOrder.getExternalId().equals(order.getExternalId())) { order.setExternalId(createOrder.getExternalId()); update = true; } if (createOrder.getConsumingOrganization() != null && (order.getConsumingOrganization() == null || !createOrder .getConsumingOrganization().getId() .equals(order.getConsumingOrganization().getId()))) { order.setConsumingOrganization(createOrder .getConsumingOrganization()); update = true; } if (createOrder.getSupplier() != null && (order.getSupplier() == null || !createOrder.getSupplier() .getId().equals(order.getSupplier().getId()))) { order.setSupplier(createOrder.getSupplier()); update = true; } if (createOrder.getOrderDate() != null && (order.getOrderDate() == null || !createOrder.getOrderDate() .equals(order.getOrderDate()))) { order.setOrderDate(createOrder.getOrderDate()); update = true; } if (createOrder.getOrderSentDate() != null && (order.getOrderSentDate() == null || !createOrder .getOrderSentDate().equals(order.getOrderSentDate()))) { order.setOrderSentDate(createOrder.getOrderSentDate()); update = true; } if (createOrder.getOrdinal() != null && createOrder.getOrdinal() != order.getOrdinal()) { order.setOrdinal(createOrder.getOrdinal()); update = true; } return update; } public <T extends Baseclass> List<T> listByIds(Class<T> c, Set<String> ids, SecurityContext securityContext) { return orderRepository.listByIds(c, ids, securityContext); } public <T extends Baseclass> T getByIdOrNull(String id, Class<T> c, List<String> batchString, SecurityContext securityContext) { return orderRepository.getByIdOrNull(id, c, batchString, securityContext); } } <file_sep>/src/main/java/com/flexicore/order/rest/SupplyTimeRESTService.java package com.flexicore.order.rest; import com.flexicore.annotations.OperationsInside; import com.flexicore.annotations.plugins.PluginInfo; import com.flexicore.data.jsoncontainers.PaginationResponse; import com.flexicore.annotations.ProtectedREST; import com.flexicore.interfaces.RestServicePlugin; import com.flexicore.order.model.SupplyTime; import com.flexicore.order.request.CreateSupplyTime; import com.flexicore.order.request.SupplyTimeFiltering; import com.flexicore.order.request.UpdateSupplyTime; import 
com.flexicore.order.service.SupplyTimeService; import com.flexicore.security.SecurityContext; import io.swagger.v3.oas.annotations.OpenAPIDefinition; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.tags.Tag; import javax.interceptor.Interceptors; import javax.ws.rs.*; import javax.ws.rs.core.Context; import org.pf4j.Extension; import org.springframework.stereotype.Component; import org.springframework.beans.factory.annotation.Autowired; @PluginInfo(version = 1) @OperationsInside @ProtectedREST @Path("plugins/SupplyTime") @OpenAPIDefinition(tags = { @Tag(name = "SupplyTime", description = "SupplyTime Api"), @Tag(name = "SupplyTimeItem", description = "SupplyTimeItem Api") }) @Tag(name = "SupplyTime") @Extension @Component public class SupplyTimeRESTService implements RestServicePlugin { @PluginInfo(version = 1) @Autowired private SupplyTimeService service; @POST @Produces("application/json") @Path("/createSupplyTime") @Operation(summary = "createSupplyTime", description = "Creates SupplyTime") public SupplyTime createSupplyTime( @HeaderParam("authenticationKey") String authenticationKey, CreateSupplyTime creationContainer, @Context SecurityContext securityContext) { service.validate(creationContainer, securityContext); return service.createSupplyTime(creationContainer, securityContext); } @POST @Produces("application/json") @Operation(summary = "getAllSupplyTimes", description = "Lists all SupplyTimes Filtered") @Path("getAllSupplyTimes") public PaginationResponse<SupplyTime> getAllSupplyTimes( @HeaderParam("authenticationKey") String authenticationKey, SupplyTimeFiltering filtering, @Context SecurityContext securityContext) { service.validate(filtering, securityContext); return service.getAllSupplyTimes(filtering, securityContext); } @POST @Produces("application/json") @Path("/updateSupplyTime") @Operation(summary = "updateSupplyTime", description = "Updates SupplyTime") public SupplyTime updateSupplyTime( @HeaderParam("authenticationKey") String authenticationKey, UpdateSupplyTime updateContainer, @Context SecurityContext securityContext) { SupplyTime SupplyTime = service.getByIdOrNull(updateContainer.getId(), SupplyTime.class, null, securityContext); if (SupplyTime == null) { throw new BadRequestException("no SupplyTime with id " + updateContainer.getId()); } updateContainer.setSupplyTime(SupplyTime); service.validate(updateContainer, securityContext); return service.updateSupplyTime(updateContainer, securityContext); } }<file_sep>/src/main/java/com/flexicore/order/rest/OrderItemRESTService.java package com.flexicore.order.rest; import com.flexicore.annotations.OperationsInside; import com.flexicore.annotations.plugins.PluginInfo; import com.flexicore.data.jsoncontainers.PaginationResponse; import com.flexicore.annotations.ProtectedREST; import com.flexicore.interfaces.RestServicePlugin; import com.flexicore.order.model.OrderItem; import com.flexicore.order.request.CreateOrderItem; import com.flexicore.order.request.OrderItemFiltering; import com.flexicore.order.request.UpdateOrderItem; import com.flexicore.order.service.OrderItemService; import com.flexicore.security.SecurityContext; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.tags.Tag; import javax.interceptor.Interceptors; import javax.ws.rs.*; import javax.ws.rs.core.Context; import org.pf4j.Extension; import org.springframework.stereotype.Component; import org.springframework.beans.factory.annotation.Autowired; @PluginInfo(version = 1) @OperationsInside 
@ProtectedREST @Path("plugins/OrderItem") @Tag(name = "OrderItem") @Extension @Component public class OrderItemRESTService implements RestServicePlugin { @PluginInfo(version = 1) @Autowired private OrderItemService service; @POST @Produces("application/json") @Path("/createOrderItem") @Operation(summary = "createOrderItem", description = "Creates OrderItem") public OrderItem createOrderItem( @HeaderParam("authenticationKey") String authenticationKey, CreateOrderItem creationContainer, @Context SecurityContext securityContext) { service.validate(creationContainer, securityContext); return service.createOrderItem(creationContainer, securityContext); } @POST @Produces("application/json") @Operation(summary = "getAllOrderItems", description = "Lists all OrderItems Filtered") @Path("getAllOrderItems") public PaginationResponse<OrderItem> getAllOrderItems( @HeaderParam("authenticationKey") String authenticationKey, OrderItemFiltering filtering, @Context SecurityContext securityContext) { service.validate(filtering, securityContext); return service.getAllOrderItems(filtering, securityContext); } @POST @Produces("application/json") @Path("/updateOrderItem") @Operation(summary = "updateOrderItem", description = "Updates OrderItem") public OrderItem updateOrderItem( @HeaderParam("authenticationKey") String authenticationKey, UpdateOrderItem updateContainer, @Context SecurityContext securityContext) { OrderItem OrderItem = service.getByIdOrNull(updateContainer.getId(), OrderItem.class, null, securityContext); if (OrderItem == null) { throw new BadRequestException("no OrderItem with id " + updateContainer.getId()); } updateContainer.setOrderItem(OrderItem); service.validate(updateContainer, securityContext); return service.updateOrderItem(updateContainer, securityContext); } }<file_sep>/src/main/java/com/flexicore/order/request/OrderItemFiltering.java package com.flexicore.order.request; import com.fasterxml.jackson.annotation.JsonIgnore; import com.flexicore.model.FilteringInformationHolder; import com.flexicore.order.model.Order; import com.flexicore.product.model.Product; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; public class OrderItemFiltering extends FilteringInformationHolder { private Set<String> orderIds = new HashSet<>(); @JsonIgnore private List<Order> orders = new ArrayList<>(); private Set<String> productIds = new HashSet<>(); @JsonIgnore private List<Product> products = new ArrayList<>(); public Set<String> getOrderIds() { return orderIds; } public <T extends OrderItemFiltering> T setOrderIds(Set<String> orderIds) { this.orderIds = orderIds; return (T) this; } @JsonIgnore public List<Order> getOrders() { return orders; } public <T extends OrderItemFiltering> T setOrders(List<Order> orders) { this.orders = orders; return (T) this; } public Set<String> getProductIds() { return productIds; } public <T extends OrderItemFiltering> T setProductIds(Set<String> productIds) { this.productIds = productIds; return (T) this; } @JsonIgnore public List<Product> getProducts() { return products; } public <T extends OrderItemFiltering> T setProducts(List<Product> products) { this.products = products; return (T) this; } } <file_sep>/src/main/java/com/flexicore/order/interfaces/ISupplyTimeRepository.java package com.flexicore.order.interfaces; import com.flexicore.interfaces.PluginRepository; import com.flexicore.order.model.SupplyTime; import com.flexicore.order.request.SupplyTimeFiltering; import javax.persistence.criteria.CriteriaBuilder; import 
javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.List;

public interface ISupplyTimeRepository extends PluginRepository {

	static <T extends SupplyTime> void addSupplyTimePredicates(
			List<Predicate> preds, CriteriaBuilder cb, Root<T> r,
			SupplyTimeFiltering orderFiltering) {
	}
}
<file_sep>/src/main/java/com/flexicore/order/request/UpdateSupplyTime.java
package com.flexicore.order.request;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.flexicore.order.model.SupplyTime;

public class UpdateSupplyTime extends CreateSupplyTime {

	private String id;
	@JsonIgnore
	private SupplyTime supplyTime;

	public String getId() {
		return id;
	}

	public <T extends UpdateSupplyTime> T setId(String id) {
		this.id = id;
		return (T) this;
	}

	@JsonIgnore
	public SupplyTime getSupplyTime() {
		return supplyTime;
	}

	public <T extends UpdateSupplyTime> T setSupplyTime(SupplyTime supplyTime) {
		this.supplyTime = supplyTime;
		return (T) this;
	}
}
<file_sep>/src/main/java/com/flexicore/order/request/UpdateOrderItem.java
package com.flexicore.order.request;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.flexicore.order.model.OrderItem;

public class UpdateOrderItem extends CreateOrderItem {

	private String id;
	@JsonIgnore
	private OrderItem orderItem;

	public String getId() {
		return id;
	}

	public <T extends UpdateOrderItem> T setId(String id) {
		this.id = id;
		return (T) this;
	}

	@JsonIgnore
	public OrderItem getOrderItem() {
		return orderItem;
	}

	public <T extends UpdateOrderItem> T setOrderItem(OrderItem orderItem) {
		this.orderItem = orderItem;
		return (T) this;
	}
}
<file_sep>/src/main/java/com/flexicore/order/interfaces/InitResponse.java
package com.flexicore.order.interfaces;

public class InitResponse {
}
ec11a3099b5d9629c89651968dddc3c011326fa9
[ "Java" ]
15
Java
wizzdi/order-service
83e041f5246ea22b28dbc27e74c8d558ac4cb381
b2adcf1adaa33a277da8c57d514bacd347f105fe
refs/heads/master
<repo_name>iolanta/ShadersIntro<file_sep>/GLproj/GLobject.h #pragma once #include <GL/glew.h> #include <GL/wglew.h> #include <GL/glut.h> #include <SOIL2.h> #include <string> #include "GLShader.h" class GLobject { GLushort * indeces; public: GLuint VBO, VAO, IBO; GLsizei stride, CoordOffset, NormalOffset, TexCoordOffset, ColorOffset; int count_vertex; int count_indexes; GLobject(std::string path); bool BindAttributesToShader(GLShader& shaderobject); void drawObject(); ~GLobject(); }; <file_sep>/GLproj/main.cpp #include <GL/glew.h> #include <GL/wglew.h> #include <GL/glut.h> #include <SOIL2.h> #include <cmath> #include <utility> #include <iostream> #include <vector> #include <glm/mat4x4.hpp> #include <glm/gtc/matrix_transform.hpp> #include "GLShader.h" #include "GLobject.h" int w = 0, h = 0; GLShader * shaderwrap; GLobject * objectwrap; std::vector<int> VertShaders; std::vector<int> FragShaders; glm::mat4 Matrix_projection; float rotateX = 0; float rotateY = 0; float scaleX = 1; float scaleY = 1; int VertShader, FragShader0, FragShader1, FragShader2, FragShader3, FragShader4, FragShader5; GLuint tex1, tex2; int mode = 0; std::vector<std::string> pathsVert = { "shader_lab12.vert" }; std::vector<std::string> pathsFrag = { "shader_lab12.frag", "shader_lab12_horizontal.frag", "shader_lab12_vertical.frag", "shader_lab12_tex.frag", "shader_lab12_texcolor.frag", "shader_lab12_twotex.frag", }; void Init(void) { glClearColor(0, 0, 0, 1.0f); glPolygonMode(GL_FRONT_AND_BACK, GL_FILL); } void load_textures() { tex1 = SOIL_load_OGL_texture("cat_diff.tga", SOIL_LOAD_AUTO, SOIL_CREATE_NEW_ID, SOIL_FLAG_NTSC_SAFE_RGB | SOIL_FLAG_MULTIPLY_ALPHA | SOIL_FLAG_INVERT_Y); tex2 = SOIL_load_OGL_texture("tex2.jpg", SOIL_LOAD_AUTO, SOIL_CREATE_NEW_ID, SOIL_FLAG_NTSC_SAFE_RGB | SOIL_FLAG_MULTIPLY_ALPHA | SOIL_FLAG_INVERT_Y); } void Reshape(int x, int y) { if (y == 0 || x == 0) return; w = x; h = y; glViewport(0, 0, w, h); Matrix_projection = glm::perspective(80.0f, (float)w / h, 0.01f, 200.0f); glm::vec3 eye = {1,0,0}; glm::vec3 center = { 0,0,0 }; glm::vec3 up = { 0,0,1}; Matrix_projection *= glm::lookAt(eye,center,up); } void Update(void) { glMatrixMode(GL_MODELVIEW); glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); glUseProgram(shaderwrap->ShaderProgram); // vert shader part, never changes glm::mat4 transfor = glm::scale(Matrix_projection, glm::vec3{ scaleX,scaleY,1 }); transfor = glm::rotate(transfor, rotateX, glm::vec3( 1,0,0 )); transfor = glm::rotate(transfor, rotateY, glm::vec3(0, 1, 0)); shaderwrap->setUniformmat4("transf",false, transfor); switch (mode) { case 0: // fill shaderwrap->setUniformfv3("color", glm::vec3{ 1,0,0 }); break; case 1: // hor case 2: // vert shaderwrap->setUniform1i("width", 30); shaderwrap->setUniformfv3("color1", {1,0,0}); shaderwrap->setUniformfv3("color2", { 0,1,0 }); break; case 5: // 2 tex shaderwrap->setUniform1i("ourTexture2", 1); glActiveTexture(GL_TEXTURE1); glBindTexture(GL_TEXTURE_2D, tex2); shaderwrap->setUniform1f("mix_coef", 0.5); case 4: // texcolor case 3: // tex1 shaderwrap->setUniform1i("ourTexture", 0); glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, tex1); break; default: break; } objectwrap->drawObject(); glUseProgram(0); glFlush(); glutSwapBuffers(); } void keyboard(unsigned char key, int x, int y) { switch (key) { case 'w': rotateX += 0.1; break; case 's': rotateX -= 0.1; break; case 'a': rotateY -= 0.1; break; case 'd': rotateY += 0.1; break; default: break; } glutPostRedisplay(); } void next_mode() { mode = (mode + 1) % 6; switch (mode) { case 0: 
// fill color shaderwrap->linkProgram(VertShader, FragShader0); objectwrap->BindAttributesToShader(*shaderwrap); break; case 1: // hor line shaderwrap->linkProgram(VertShader, FragShader1); objectwrap->BindAttributesToShader(*shaderwrap); break; case 2: // vert line shaderwrap->linkProgram(VertShader, FragShader2); objectwrap->BindAttributesToShader(*shaderwrap); break; case 3: // 1tex shaderwrap->linkProgram(VertShader, FragShader3); objectwrap->BindAttributesToShader(*shaderwrap); break; case 4: // texcolor shaderwrap->linkProgram(VertShader, FragShader4); objectwrap->BindAttributesToShader(*shaderwrap); break; case 5: // 2tex shaderwrap->linkProgram(VertShader, FragShader5); objectwrap->BindAttributesToShader(*shaderwrap); break; default: break; } } void specialKeys(int key, int x, int y) { switch (key) { case GLUT_KEY_UP: scaleX += 0.1; break; case GLUT_KEY_DOWN: scaleX -= 0.1; break; case GLUT_KEY_LEFT: scaleY -= 0.1; break; case GLUT_KEY_RIGHT: scaleY += 0.1; break; case GLUT_KEY_F1: next_mode(); default: break; } glutPostRedisplay(); } void LoadShaders() { VertShader = shaderwrap->load_shader(pathsVert[0], GL_VERTEX_SHADER); FragShader0 = shaderwrap->load_shader(pathsFrag[0], GL_FRAGMENT_SHADER); FragShader1 = shaderwrap->load_shader(pathsFrag[1], GL_FRAGMENT_SHADER); FragShader2 = shaderwrap->load_shader(pathsFrag[2], GL_FRAGMENT_SHADER); FragShader3 = shaderwrap->load_shader(pathsFrag[3], GL_FRAGMENT_SHADER); FragShader4 = shaderwrap->load_shader(pathsFrag[4], GL_FRAGMENT_SHADER); FragShader5 = shaderwrap->load_shader(pathsFrag[5], GL_FRAGMENT_SHADER); } int main(int argc, char **argv) { glutInit(&argc, argv); glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH); glutInitWindowPosition(100, 100); glutInitWindowSize(800, 600); glutCreateWindow("OpenGL"); glEnable(GL_DEPTH_TEST); glutDisplayFunc(Update); glutReshapeFunc(Reshape); glutKeyboardFunc(keyboard); glutSpecialFunc(specialKeys); GLenum err = glewInit(); if (GLEW_OK != err) { /* Problem: glewInit failed, something is seriously wrong. */ std::cout << glewGetErrorString(err) << std::endl; } Init(); shaderwrap = new GLShader(); objectwrap = new GLobject("cat.obj"); load_textures(); LoadShaders(); mode = -1; next_mode(); shaderwrap->checkOpenGLerror(); glutMainLoop(); return 0; }
c674f01b73df5a1116d2f87b6fdd6fc206a950c5
[ "C++" ]
2
C++
iolanta/ShadersIntro
cf66fb7f972c504901365683a7a28a98797dd230
d94ad60c5bceae6e717fe64dfbb6d37961f95e5e
refs/heads/master
<repo_name>Jaimin7632/Attendence_System<file_sep>/classnames.php <?php require_once 'connection.php'; $ac =$conn->query("SELECT * FROM nameclass"); ?><file_sep>/asd.php <?php require_once 'connection.php'; $sql="SELECT `COLUMN_NAME` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = 'attendence1' AND `TABLE_NAME` = 'a5comp'"; $names =$conn->query($sql); $post = array(); while($row = mysqli_fetch_assoc($names)) { $post[] = $row; } array_splice($post, 0, 1); $data='( date1 date,ck int(2)'; foreach ($post as $post1) { foreach ($post1 as $post2) { $data=$data.', '.$post2.' int( 2 )'; } } $data=$data.')'; $sqlt = 'CREATE TABLE tname1'.$data; echo $sqlt; $conn -> query($sqlt) or die("erroe"); ?><file_sep>/student1111.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8" /> <title>Materil | Angular Material Design Admin Template</title> <meta name="description" content="app, web app, responsive, responsive layout, admin, admin panel, admin dashboard, flat, flat ui, ui kit, AngularJS, ui route, charts, widgets, components" /> <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1" /> <link rel="stylesheet" href="../libs/assets/animate.css/animate.css" type="text/css" /> <link rel="stylesheet" href="../libs/assets/font-awesome/css/font-awesome.css" type="text/css" /> <link rel="stylesheet" href="../libs/jquery/waves/dist/waves.css" type="text/css" /> <link rel="stylesheet" href="styles/material-design-icons.css" type="text/css" /> <link rel="stylesheet" href="../libs/jquery/bootstrap/dist/css/bootstrap.css" type="text/css" /> <link rel="stylesheet" href="styles/font.css" type="text/css" /> <link rel="stylesheet" href="styles/app.css" type="text/css" /> <style type="text/css"> .panel-card{ height: 250px; } .m-b{font-size: 15px; } </style> </head> <body> <div class="ng-app container"> <?php require_once 'connection.php'; $student = "16cl02"; $qv = $conn->query("SELECT name from nameclass"); while($ow = mysqli_fetch_assoc($qv)){ $g= $ow['name']; $pp = $conn->query("SELECT a$student from a$g"); if($pp){ if(mysqli_num_rows($pp) != 0){ $class = $g; } } } $sql11="SELECT name from subnames WHERE class='$class'"; $subname =$conn->query($sql11); $sname = array(); while($row = mysqli_fetch_assoc($subname)) { $sname[] = $row; } $sum =0; $cnt = 0; $attesname1 = array(); $attesname2 = array(); $v = 0; foreach ($sname as $key => $value) { foreach ($value as $ke ) { $leccheck = $conn->query("SELECT lec FROM subnames WHERE name = '$ke'"); $rw = mysqli_fetch_assoc($leccheck); $laccheck = $conn->query("SELECT lab FROM subnames WHERE name = '$ke'"); $sw = mysqli_fetch_assoc($laccheck); if($rw["lec"] && $sw["lab"]){ $asd = $ke; $attedq= $conn->query("Select (Count(A$student)* 100 / (Select Count(A$student) From $asd WHERE A$student=0 OR A$student=1 )) as Score From $asd WHERE A$student = 1"); $resatted = mysqli_fetch_assoc($attedq); $attesname1[] = $resatted["Score"]; $asa = $ke."lab"; $attedq= $conn->query("Select (Count(A$student)* 100 / (Select Count(*) From $asa)) as Score From $asa WHERE A$student = 1"); $resatted2 = mysqli_fetch_assoc($attedq); $attesname2[] = $resatted2["Score"]; $fgh = $conn->query("Select Count(A$student) AS cnt From $asd WHERE A$student=0 OR A$student=1"); $fgj= $conn->query("Select Count(A$student) AS cnt From $asa WHERE A$student=0 OR A$student=1"); $fgh1 = mysqli_fetch_assoc($fgh); $fgj1 = mysqli_fetch_assoc($fgj); $tolec = $fgh1['cnt'] + $fgj1['cnt']; $hh = $conn->query("Select Count(A$student) AS cnt From $asd WHERE A$student=1"); $hh1= 
$conn->query("Select Count(A$student) AS cnt From $asa WHERE A$student=0 OR A$student=1"); $ff = mysqli_fetch_assoc($hh); $ff1 = mysqli_fetch_assoc($hh1); $plec = $ff['cnt'] + $ff1['cnt']; if ($tolec == 0) { $tolec =1; $nolec = ' [ NO LEC/LAB ] ';} else{$cnt++;} $totalatte = ($plec * 100)/$tolec; $sum = $sum + $totalatte; ?> <div class="col-sm-4"> <div class="panel panel-card p m-b-sm"> <h5 class="no-margin m-b"><?php echo strtoupper($ke);?></h5> <div class="text-center"> <div class="inline"> <div ui-jp="easyPieChart" ui-options="{ percent: <?php echo round($totalatte); ?>, lineWidth: 12, trackColor: '#f1f2f3', barColor: '#4caf50', scaleColor: '#fff', size: 167, lineCap: 'butt', color: '', animate: 3000, rotate: 0 }" ng-init="color = getColor(app.setting.theme.primary, 400)"> <div class="font-bold text-primary" style="font-size: 20px;"> <?php echo round($totalatte); ?>% </div> </div> </div> </div> <div> <div><small>Lec : <?php echo round($resatted["Score"]); ?> % || Lab : <?php echo round($resatted2["Score"]); ?> %</small><?php if(isset($nolec)){ echo $nolec;}?></div> </div> </div> </div> <?php } elseif($rw["lec"]){ $asd = $ke; $attedq= $conn->query("Select (Count(A$student)* 100 / (Select Count(A$student) From $asd WHERE A$student=0 OR A$student=1 )) as Score From $asd WHERE A$student = 1"); $resatted = mysqli_fetch_assoc($attedq); $attesname1[] = $resatted["Score"]; $sum = $sum + $resatted["Score"]; $cnt++; ?> <div class="col-sm-4"> <div class="panel panel-card p m-b-sm"> <h5 class="no-margin m-b"><?php echo strtoupper($asd); ?></h5> <div class="text-center"> <div class="inline"> <div ui-jp="easyPieChart" ui-options="{ percent: <?php echo round($resatted["Score"]); ?>, lineWidth: 12, trackColor: '#f1f2f3', barColor: '#4caf50', scaleColor: '#fff', size: 167, lineCap: 'butt', color: '', animate: 3000, rotate: 0 }" ng-init="color = getColor(app.setting.theme.primary, 400)"> <div class="font-bold text-primary" style="font-size: 20px;"> <?php echo round($resatted["Score"]); ?>% </div> </div> </div> </div> <div> <div></div> </div> </div> </div> <?php } elseif($sw["lab"]){ $asa = $ke."lab"; $attedq= $conn->query("Select (Count(A$student)* 100 / (Select Count(A$student) From $asa WHERE A$student=0 OR A$student=1 )) as Score From $asa WHERE A$student = 1"); $resatted2 = mysqli_fetch_assoc($attedq); $attesname2[] = $resatted2["Score"]; $sum = $sum + $resatted2["Score"]; $cnt++; ?> <div class="col-sm-4"> <div class="panel panel-card p m-b-sm"> <h5 class="no-margin m-b"><?php echo strtoupper($asa); ?></h5> <div class="text-center"> <div class="inline"> <div ui-jp="easyPieChart" ui-options="{ percent: <?php echo round($resatted2["Score"]); ?>, lineWidth: 12, trackColor: '#f1f2f3', barColor: '#2196F3', scaleColor: '#fff', size: 167, lineCap: 'butt', color: '', animate: 3000, rotate: 0 }" ng-init="color = getColor(app.setting.theme.primary, 400)"> <div class="font-bold text-info" style="font-size: 20px;"> <?php echo round($resatted2["Score"]); ?>% </div> </div> </div> </div> <div> <div></div> </div> </div> </div> <?php } else{} } } $conn->close(); ?> <div class="col-sm-4"> <div class="panel panel-card p m-b-sm"> <h5 class="no-margin m-b">TOTAL </h5> <div class="text-center"> <div class="inline"> <div ui-jp="easyPieChart" ui-options="{ percent: <?php if($cnt != 0){echo round($sum/$cnt);} ?>, lineWidth: 12, trackColor: '#f1f2f3', barColor: '#2196F3', scaleColor: '#fff', size: 167, lineCap: 'butt', color: '', animate: 3000, rotate: 0 }" ng-init="color = getColor(app.setting.theme.primary, 400)"> <div 
class="font-bold text-primary" style="font-size: 20px;"> <?php if($cnt != 0){echo round($sum/$cnt);} ?>% </div> </div> </div> </div> <div> <div></div> </div> </div> </div> </div> <script src="../libs/jquery/jquery/dist/jquery.js"></script> <script src="../libs/jquery/bootstrap/dist/js/bootstrap.js"></script> <script src="../libs/jquery/waves/dist/waves.js"></script> <script src="scripts/ui-load.js"></script> <script src="scripts/ui-jp.config.js"></script> <script src="scripts/ui-jp.js"></script> <script src="scripts/ui-nav.js"></script> <script src="scripts/ui-toggle.js"></script> <script src="scripts/ui-form.js"></script> <script src="scripts/ui-waves.js"></script> <script src="scripts/ui-client.js"></script> </body> </html> <file_sep>/chng2.php <?php require_once 'connection.php'; $dat = $_POST['pdate']; $subn =$_POST['sname']; $rn = $_POST['rwno']; $class = $_POST['class']; $part = $_POST['part']; $xx = 0; $conn->query("UPDATE $subn SET ck = 0 WHERE date1 = '$dat'") or die("subject not found"); $sql="SELECT `COLUMN_NAME` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = 'attendence1' AND `TABLE_NAME` = 'a$class' "; $names =$conn->query($sql); $post = array(); while($row = mysqli_fetch_assoc($names)) { $x = $row['COLUMN_NAME']; $a=$conn->query("SELECT $x FROM A$class WHERE part='$part'") or die("error 1"); $c=mysqli_fetch_assoc($a); foreach ($c as $key => $m) { if($m == 1){ $post[] = $row; } } } if($post[0] == 'part'){array_splice($post, 0,1);} foreach ($post as $k) { foreach ($k as $kk) { $conn->query("UPDATE $subn SET $kk = 0 WHERE date1 = '$dat' AND ck = 0"); } } if(!empty($_POST['check_list'])) { foreach($_POST['check_list'] as $check) { $a =$check; $que = "UPDATE $subn SET A$a = 1 WHERE date1 = '$dat' AND ck=0"; $conn->query($que) or die("error for add data"); } } $conn->query("UPDATE $subn SET ck = 1 WHERE date1 = '$dat'"); $conn->close(); echo "<center><h2>Attendence Updated</h2></center>"; header( "refresh:2;url=changatte.php" ); ?><file_sep>/README.md # Attendence-System database create automatically in phpmyadmin . 
<br> admin : class.php<br> faculty : faculty1.php<br> student : student1111.php<br> <file_sep>/getsubname.php <?php require_once 'connection.php'; $classname = "class1"; $sql1="SELECT subname1 from $classname"; $subname =$conn->query($sql1); $sname = array(); $saname = array(); $saname1 = array(); while($row = mysql_fetch_assoc($subname)) { $sname[] = $row; $b = $row['subname1']; $a = "SELECT lec FROM $classname WHERE subname1 = '$b'"; $rs = $conn->query($a); $res = mysqli_fetch_assoc($rs); if($res == 1){$saname = $key ."lec";} $a = "SELECT lab FROM $classname WHERE subname1 = '$b'"; $es = $conn->query($a); $ees = mysqli_fetch_assoc($es); if($ees == 1){$saname1 = $key ."lab";} } $conn->close(); ?><file_sep>/fpdf/tuto5.php <?php $class = $_POST['cname']; $limit = $_POST['limit']; require('fpdf.php'); require_once 'connection.php'; class PDF extends FPDF { function addt($qt,$w){ $this->SetFillColor(255,0,0); $this->SetDrawColor(128,0,0); $this->Cell($w,5,$qt,1,0,'C',true); } function linebreak(){ $this->Ln(); } } $pdf = new FPDF(); $pdf->SetFont('Arial','',6); $pdf->SetFont('','B'); $pdf->SetFillColor(255); $pdf->AddPage(); $todaydate= 'Date : '.date("d / m / Y"); $pdf->Cell(40,5,$todaydate,0); $pdf->Ln(); $pdf->Ln(); $pdf->Cell(20,5,' ',1,0,'L'); $ac=$conn->query("SELECT * from subnames where class='$class'"); $ar = array(); $l=0; while ($qv = mysqli_fetch_assoc($ac)) { $ar[$l][0]=$qv['name']; $ar[$l][1]=$qv['lec']; $ar[$l][2]=$qv['lab']; $l++; $qt=$qv['name']; $pdf->Cell(27,5,$qt,1,0,'L'); } $pdf->Cell(15,5,'Total(%)',1,0,'R'); $pdf->Ln(); $pdf->Cell(20,5,'#',1,0,'L'); for($g=0;$g<$l;$g++)//second line { $pdf->Cell(10,5,'lec',1,0,'L'); $pdf->Cell(10,5,'lab',1,0,'L'); $pdf->Cell(7,5,'%',1,0,'L'); } $pdf->Cell(15,5,' ',1,0,'R'); $pdf->Ln(); $sql="SELECT `COLUMN_NAME` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = 'attendence1' AND `TABLE_NAME` = 'a$class' "; $names =$conn->query($sql); $post = array(); while($row = mysqli_fetch_assoc($names)) { $post[]=$row['COLUMN_NAME']; } array_splice($post, 0,1); $pdf->SetFont(''); foreach ($post as $key => $value) { $data = array(); $data[0][0] = substr($value,1); $gplec =0; $gtlec =0; for($g=0;$g<$l;$g++) { if($ar[$g][1] == 1)//lec { $df =$ar[$g][0]; $ct=$conn->query("Select Count($value) as a From $df WHERE $value=0 OR $value=1"); $ctt=mysqli_fetch_assoc($ct); $ht=$conn->query("Select Count($value) as a From $df WHERE $value=1"); $htt=mysqli_fetch_assoc($ht); $at = $htt['a']." / ".$ctt['a']; $data[$g+1][0]= $at; $plec = $htt['a']; $tlec = $ctt['a']; $gplec += $plec; $gtlec += $tlec; }else{ $data[$g+1][0]='-'; $plec = 0; $tlec = 0;} if($ar[$g][2] == 1)//lab { $df =$ar[$g][0]."lab"; $ch=$conn->query("Select Count($value) as a From $df WHERE $value=0 OR $value=1"); $chh=mysqli_fetch_assoc($ch); $hh=$conn->query("Select Count($value) as a From $df WHERE $value=1"); $hhh=mysqli_fetch_assoc($hh); $at = $hhh['a']." 
/ ".$chh['a']; $data[$g+1][1]=$at; $plec = $hhh['a']; $tlec = $chh['a']; $gplec += $plec; $gtlec += $tlec; }else{ $data[$g+1][1] ='-'; $pplec = 0; $ttlec = 0;} // % if($tlec !=0 || $ttlec !=0){ $satte =(($plec+$pplec)*100)/($tlec+$ttlec);} else{ $satte=0;} $data[$g+1][2]=round($satte); } if($gtlec !=0){ $atte =($gplec*100)/($gtlec); }else{$atte=0;} $data[$l+1][0]=round($atte); if($data[$l+1][0] < $limit){ //check it less than limit $pdf->Cell(20,5,$data[0][0],1,0,'L',true); for($qq=1; $qq<=$l;$qq++){ $pdf->Cell(10,5,$data[$qq][0],1,0,'L',true); $pdf->Cell(10,5,$data[$qq][1],1,0,'L',true); $pdf->Cell(7,5,$data[$qq][2],1,0,'L',true); } $pdf->SetFillColor(240,240,245); $pdf->Cell(15,5,$data[$l+1][0],1,0,'R',true); $pdf->SetFillColor(255); $pdf->Ln(); } } $pdf->AddPage(); $pdf->output(); ?> <file_sep>/anglr/classdelete.php <?php require_once 'connection.php'; // Delete record by id. $postdata = file_get_contents("php://input"); if(isset($postdata) && !empty($postdata)) { $request = json_decode($postdata); $id = $request->recordId;//class name $sql = "DELETE FROM `nameclass` WHERE `name` = '$id' LIMIT 1"; mysqli_query($conn,$sql); $conn->query("DROP TABLE a$id"); $df=$conn->query("SELECT * FROM subnames WHERE class='$id'"); while($row=mysqli_fetch_assoc($df)) { $temp=$row['name']; if($row['lec']==1){$conn->query("DROP TABLE $temp");} if($row['lab']==1){$temp = $temp."lab"; $conn->query("DROP TABLE $temp");} $conn->query("DELETE FROM `subnames` WHERE `class` = '$id' LIMIT 1"); } } ?><file_sep>/faculty1.php <!-- <?php session_start(); $fid = $_SESSION['user_id']; ?> --> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <script src="js/jquery-1.8.3.js"></script> <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jqueryui/1.8.14/jquery-ui.min.js"></script> <link rel="stylesheet" type="text/css" media="screen" href="http://ajax.googleapis.com/ajax/libs/jqueryui/1.8.14/themes/base/jquery-ui.css"> <link rel="stylesheet" href="bootstrap.min.css"> <script src="bootstrap/js/bootstrap.js"></script> <script src="js/my.js"> </script> <style type="text/css"> body{} .setrelative{ position: relative; } .inline{ display: inline-block; } .blk{ height: 50px; border: 1px solid #000; position: relative; margin: 2.5px; padding: 0 10px; width: 100%; } .blkin{ height: 20px; position: absolute; top: 50%; width: 100%; margin-top: -10px; text-align: center; word-wrap: break-word; font-size: 15px; letter-spacing: 1px; margin-left: -10px; cursor:default; } .sblk{ height: 50px; border: 1px solid #000; margin: 2.5px; padding: 0 10px; font-size: 15px; width: 100%; font-weight: bold; letter-spacing: 1px; position: relative; } .hd{ font-size: 15px; font-weight: bold; } .t_submit{position: absolute;top: 0px; left:0px; right:0px; height: 100% !important; width: 100% !important; opacity: 0; border: none; background: #ff0; } .btn-none{background: none ; border: none; font-weight: normal;} .blk:hover{ background-color: #4d88ff; color: #fff; } .n-hover{ height: 60px; } .n-hover:hover{ background: #FFF; color: #000; } </style> </head> <body> <center> <div class="week-picker"></div></center> <br> <div class="setrelative" id="ttable" style="opacity: 0; visibility: hidden;"> <center> <div class="container row"> <div class="col-md-1" style="padding: 2px;"> <div class="sblk n-hover">#</div> <div class="sblk" > <div class="blkin">1</div> </div> <div class="sblk" > <div 
class="blkin">2</div> </div> <br> <div class="sblk" > <div class="blkin">3</div> </div> <div class="sblk" > <div class="blkin">4</div> </div> <br> <div class="sblk" > <div class="blkin">5</div> </div> <div class="sblk" > <div class="blkin">6</div> </div> </div> <div class="col-md-11" style="padding: 0;"> <?php require_once 'connection.php'; $ar = array("mon", "tue", "wed" , "thu", "fri", "sat"); $arf = array("Monday", "Tuesday", "Wednasday" , "Thusday", "Friday", "Saturday"); for ($j=0; $j < 6 ; $j++) { $x= $ar[$j]; ?> <div class="col-sm-2" style="padding: 2px;"> <?php $i = 1; ?> <div class="hd blk n-hover"><?php echo $arf[$j]; ?><br> <input type="button" <?php if($j !=5){ echo 'id="startDate'; echo $j+1; echo '"';}else{ echo 'id="endDate"';} ?> class="btn-none" disabled> </div> <?php while($i != 7){ $rowno = $i; $sql = $conn->query("SELECT $x from c0111 where id = $i"); $row = mysqli_fetch_assoc($sql); $s = $row[$x]; $rt = array_pad(explode('-', $s, 3), 3, null); $mn1 = $rt[0].'-'.$rt[1]; // display $passname = array_pad(explode('[', $mn1, 2), 2, null); $mn = $passname[0]; $pn = $passname[1]; //check lec or lab $ss = $rt[2]; //class name ?> <div class="blk" <?php if (strpos($pn, 'LAB') !== false) { echo 'style="height:100px; margin: 3.8px 2.5px;"'; $i++; } ?> > <div class="blkin"><?php if($s != ""){echo $mn1;} ?></div> <?php if($s != ""){ ?> <form action="faculty.php" method="post" > <input type="hidden" name="sname" value="<?php echo $mn; if (strpos($pn, 'LAB') !== false) { echo 'lab';} ?>"> <input type="hidden" name="pdate" <?php if($i !=6){ echo 'class="startDate'; echo $j+1; echo '"';}else{ echo 'class="endDate"';} ?>> <input type="hidden" name="rwno" value="<?php echo $rowno; ?>" > <input type="hidden" name="cname" value="<?php echo $ss; ?>"> <input type="submit" name="" class="t_submit" > </form> <?php } ?> </div> <?php if($i ==2 || $i==4){echo '<br>';} ?> <?php $i++; } ?> </div> <?php } ?> </div> </div> </center> </div> <br> <br> </body> </html> <file_sep>/anglr/post.php <?php file_get_contents("php://input"); require_once 'connection.php'; $postdata = file_get_contents("php://input"); $request = json_decode($postdata); $newName = $request->newName; $people = array(); $sql = "SELECT part as name FROM a$newName"; $result = $conn->query($sql); $count = mysqli_num_rows($result); $cr = 0; while($row = mysqli_fetch_assoc($result)) { $div[$cr]['name'] = $row['name']; $cr++; } $json = json_encode($div); echo $json; exit; ?><file_sep>/faculty.php <!DOCTYPE html> <html> <head> <link rel="stylesheet" href="bootstrap.min.css"> <script src="js/jquery-1.8.3.js"></script> <script src="bootstrap/js/bootstrap.js"></script> </head> <body > <?php require_once 'connection.php'; $d = $_POST['pdate']; $s = strtolower($_POST['sname']); $r = $_POST['rwno']; $c = strtolower($_POST['cname']); $cc = $c; $rt = explode('-', $s); if(isset($rt[0])){$fpar = $rt[0]; $fpart = $fpar;} if(isset($rt[1])){$ss = $rt[1]; $s = $ss;} $conn->query("CREATE TABLE IF NOT EXISTS attecheck ( date1 date,lecno int(3),sname varchar(20))"); $check = $conn->query("SELECT lecno FROM attecheck WHERE date1='$d' AND sname='$s'"); $rw = mysqli_fetch_assoc($check); if($rw['lecno'] == $r){ echo "you already filled attendence"; header( "refresh:3;url=faculty1.php" ); } else{ ?> <?php $sql="SELECT `COLUMN_NAME` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = 'attendence1' AND `TABLE_NAME` = 'A$c' "; $names =$conn->query($sql) or die("error 1"); $post = array(); while($row = mysqli_fetch_assoc($names)) { $x = $row['COLUMN_NAME']; 
if(isset($ss)){ $a=$conn->query("SELECT $x FROM A$cc WHERE part='$fpart'") or die("error 2"); $c=mysqli_fetch_assoc($a); foreach ($c as $key => $m) { if($m == 1){ $post[] = $row; } } } else{ $post[] = $row; } } if(!isset($ss)){ array_splice($post, 0,1);} ?> <center> <form action="setstudents.php" method="post"> <input type="hidden" name="pdate" value="<?php echo $d; ?>"> <input type="hidden" name="sname" value="<?php echo $s; ?>"> <input type="hidden" name="rwno" value="<?php echo $r; ?>"> <input type="hidden" name="class" value="<?php echo $cc; ?>"> <input type="hidden" name="part" value="<?php echo $fpart; ?>"> <?php $a = 0; ?> <div class="container"> <?php foreach($post as $qw): ?> <?php foreach($qw as $key): ?> <div class="col-md-4" style="margin-bottom: 10px;"> <div class="input-group"> <span class="input-group-addon"> <input type="checkbox" id="yourBox<?php echo $a; ?>" checked> </span> <input type="text" name="check_list[]" class="form-control" id="yourText<?php echo $a; ?>" value="<?php echo substr($key,1); ?>" ><br> </div> </div> <script type="text/javascript"> document.getElementById('yourBox<?php echo $a; ?>').onchange = function() { document.getElementById('yourText<?php echo $a++; ?>').disabled = !this.checked; }; </script> <?php endforeach; ?> <?php endforeach; ?> </div> <br><input type="submit" value="Submit" class="btn btn-primary"> </form> </center> <?php } $conn->close(); ?> <script type="text/javascript" src="./jquery/jquery-1.8.3.min.js" charset="UTF-8"></script> <script type="text/javascript" src="./bootstrap/js/bootstrap.min.js"></script> <script type="text/javascript" src="./js/bootstrap-datetimepicker.js" charset="UTF-8"></script> <script type="text/javascript" src="./js/locales/bootstrap-datetimepicker.es.js" charset="UTF-8"></script> <script type="text/javascript"> $('.form_date').datetimepicker({ language: 'fr', format: 'yyyy-mm-dd', weekStart: 1, todayBtn: 1, autoclose: 1, todayHighlight: 1, startView: 2, minView: 2, forceParse: 0 }); </script> </body> </html> <file_sep>/division.php <!DOCTYPE html> <html> <head> <link rel="stylesheet" href="bootstrap.min.css"> <script src="js/jquery-1.8.3.js"></script> <script src="bootstrap/js/bootstrap.js"></script> <script src="js/angular.min.js"></script> <script src="valid.js"></script> <title>DIVISION</title> </head> <body ng-app="ngPatternExample"> <nav class="navbar navbar-default"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="#">After320</a> </div> <ul class="nav navbar-nav"> <li ><a href="class.php">ClASS</a></li> <li class="active"><a href="division.php">DIVISION</a></li> <li><a href="batch.php">BATCH</a></li> <li><a href="addsub.php">SUBJECT</a></li> <li><a href="table.php">FACULTY TABLE</a></li> </ul> </div> </nav> <?php if(!isset($_POST['submit1'])) { ?> <?php require_once 'classnames.php'; ?> <center> <form action="division.php" method="post" style="width: 300px;"> <select name='cname' class="form-control" required><?php while ($qv = mysqli_fetch_assoc($ac)) { ?> <option><?php echo $qv['name']; ?></option> <?php } ?> </select> <br> <input type="submit" name="submit1" value="submit" class="btn btn-primary"> </form> </center> <?php } ?> <?php if(isset($_POST['submit1'])){ ?> <center> <form action="pstore.php" method="post" ng-controller="ExampleController" > <?php $s=$_POST['cname']; ?> <input type="hidden" name="cname" value="<?php echo $s; ?>"> <?php require_once 'connection.php'; $sql="SELECT `COLUMN_NAME` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = 
'attendence1' AND `TABLE_NAME` = 'a$s'"; $names =$conn->query($sql); $post = array(); while($row = mysqli_fetch_assoc($names)) { $post[] = $row; } array_splice($post, 0, 1); ?> <h2>DIVISION</h2> <input type="text" name="parta" placeholder="division_name" class="form-control" style="width: 300px;" ng-model="model" id="input" my-directive required> * if division exists then it auto update <br><br> <!--student display--> <?php $a = 0; ?> <div class="container"> <?php foreach($post as $qw): ?> <?php foreach($qw as $key): ?> <div class="col-md-4" style="margin-bottom: 10px;"> <div class="input-group"> <span class="input-group-addon"> <input type="checkbox" id="yourBox<?php echo $a; ?>" checked> </span> <input type="text" name="check_list[]" class="form-control" id="yourText<?php echo $a; ?>" value="<?php echo substr($key,1); ?>" ><br> </div> </div> <script type="text/javascript"> document.getElementById('yourBox<?php echo $a; ?>').onchange = function() { document.getElementById('yourText<?php echo $a++; ?>').disabled = !this.checked; }; </script> <?php endforeach; ?> <?php endforeach; ?> </div> <input type="submit" name="submit222" value="submit" class="btn btn-primary"> </form> </center> <?php } ?> </body> </html><file_sep>/anglr/classget.php <?php require_once 'connection.php'; // Get the data $people = array(); $sql = "SELECT name FROM nameclass"; if($result = mysqli_query($conn,$sql)) { $count = mysqli_num_rows($result); $cr = 0; while($row = mysqli_fetch_assoc($result)) { $people[$cr]['name'] = $row['name']; $cr++; } } $json = json_encode($people); echo $json; exit; ?><file_sep>/anglr/post1.php <?php file_get_contents("php://input"); require_once 'connection.php'; $postdata = file_get_contents("php://input"); $request = json_decode($postdata); $newName = $request->newName; $lol = $request->lol; if($lol ==1){ $sql = "SELECT name FROM subnames where class='$newName' AND lec=1"; } if($lol ==2){ $sql = "SELECT name FROM subnames where class='$newName' AND lab=1"; } $people = array(); $result = $conn->query($sql); $count = mysqli_num_rows($result); $cr = 0; while($row = mysqli_fetch_assoc($result)) { $sub[$cr]['sub'] = $row['name']; $cr++; } $json = json_encode($sub); echo $json; exit; ?><file_sep>/pstore.php <?php require_once 'connection.php'; $class=$_POST['cname']; $part=strtolower($_POST['parta']); $ck = $conn->query("SELECT part from a$class where part = '$part'"); if(mysqli_num_rows($ck) != 1){ $sql="SELECT `COLUMN_NAME` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = 'attendence1' AND `TABLE_NAME` = 'a$class'"; $names =$conn->query($sql); $post = array(); while($row = mysqli_fetch_assoc($names)) { $post[] = $row; } array_splice($post, 0, 1); foreach ($post as $k) { foreach ($k as $e) { $conn->query("update a$class set $e = 0 where part = '$part'"); } } } $conn->query("INSERT INTO a$class (part) VALUES ('$part')");// optional foreach($_POST['check_list'] as $check) { $a =$check; $que = "UPDATE A$class SET A$a = 1 WHERE part = '$part'"; $conn->query($que) or die("error for add data"); } echo "DIVISION SUCCESSFUL"; $conn->close(); header( "refresh:2;url=division.php" ); ?><file_sep>/report.php <html> <head> <script src="https://code.jquery.com/jquery-2.1.1.js"></script> <link rel="stylesheet" href="bootstrap.min.css"> <script src="bootstrap/js/bootstrap.js"></script> <link rel="stylesheet" type="text/css" href="rangeslider/rangeslider.css"> <script type="text/javascript" src="rangeslider/rangeslider.js"></script> <style> body{ padding: 30px; } </style> </head> <body> <?php 
require_once 'connection.php'; $ac =$conn->query("SELECT * FROM nameclass");?> <form action="fpdf/tuto5.php" method="post"> <select name="cname" class="form-control" style="width:200px;"> <?php while($row=mysqli_fetch_assoc($ac)){ echo '<option value="'.$row['name'].'">'.$row['name'].'</option>'; }?> </select> <br> <label>Limit Value</label> <input type="text" style="width:200px;" value="75" name="limit" class="form-control" required> <br> <input type="submit" class="btn btn-success"> </body> </html><file_sep>/anglr/connection.php <?php $servername = "localhost"; $username = "root"; $password = ""; $conn = new mysqli($servername, $username, $password) or die($conn->connect_error); $sql = "CREATE DATABASE IF NOT EXISTS Attendence1"; $conn->query($sql) or die("databease not created"); mysqli_select_db($conn,"Attendence1"); ?><file_sep>/valid.js angular.module('ngPatternExample', []) .controller('ExampleController', ['$scope', function($scope) { $scope.regex = /^[^`~!@#$%\^&*()_+={}|[\]\\:';"<>?,./1-9]*$/; }]) .directive('myDirective', function() { function link(scope, elem, attrs, ngModel) { ngModel.$parsers.push(function(viewValue) { var reg = /^[^`~!@#$%\^&*()_+={}|[\]\\:';"<>?,./-]*$/; // if view values matches regexp, update model value if (viewValue.match(reg)) { return viewValue; } // keep the model value as it is var transformedValue = ngModel.$modelValue; ngModel.$setViewValue(transformedValue); ngModel.$render(); return transformedValue; }); } return { restrict: 'A', require: 'ngModel', link: link }; }); <file_sep>/table.php <!DOCTYPE html> <html ng-app="ajaxExample"> <head> <title></title> <link rel="stylesheet" href="bootstrap.min.css"> <script src="js/jquery-1.8.3.js"></script> <script src="bootstrap/js/bootstrap.js"></script> <script src="js/angular.min.js"></script> <style type="text/css"> .inline{ display: inline-block; } .a{ margin: 0 3px; background: #f1f1f1; padding: 6px 10px; border: none; } .main > table { z-index: 0; } td , th { padding: 10px 10px; } .intin{position: absolute; background: #FFF; box-shadow: 0 0 3px 1px #ccc; z-index: 2; visibility: hidden; padding: 10px 20px; transition: all ease-out .1s; } .intin:after { bottom: 100%; left: 40%; border: solid transparent; content: " "; height: 0; width: 0; position: absolute; pointer-events: none; border-color: rgba(255, 255, 255, 0); border-bottom-color: #ffffff; border-width: 10px; margin-left: -10px; } .intin > .icon-close{ position: absolute; right: 0px; top: 0px; height: 20px; width: 20px; background : #FF408C; } .op{opacity: 0;} .csel, .dsel{ width: 150px; margin-bottom: 10px; } .active{ background: #19A3FF;color: #FFF; } .inner-addon { position: relative; width: 200px; } /* style icon */ .inner-addon .glyphicon { position: absolute; padding-top: 12px; padding-right: 10px; cursor:default; } /* align icon */ .left-addon .glyphicon { left: 0px;} .right-addon .glyphicon { right: 0px;} /* add padding */ .left-addon input { padding-left: 30px; } .right-addon input { padding-right: 30px; } .int{ box-shadow: none; border: none; padding: 20px 20px; } </style> </head> <body ng-controller="mainController" > <nav class="navbar navbar-default"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="#">After320</a> </div> <ul class="nav navbar-nav"> <li><a href="class.php">ClASS</a></li> <li><a href="division.php">DIVISION</a></li> <li><a href="batch.php">BATCH</a></li> <li><a href="addsub.php">SUBJECT</a></li> <li class="active"><a href="table.php">FACULTY TABLE</a></li> </ul> </div> </nav> 
<div class="main"> <form action="#" id="tform"> <center><h2>falculty id from database [ffid]</h2> <table border="" > <tbody> <tr> <th><center>#</center></th> <th><center>Monday</center></th> <th><center>Tuesday</center></th> <th><center>Wednasday</center></th> <th><center>Thusday</center></th> <th><center>Friday</center></th> <th><center>Saturday</center></th> </tr> <?php $row =1; while($row != 7){ ?> <tr> <td><center><?php echo $row;?></center></td> <?php for ($i=1; $i <=6 ; $i++) { ?> <td> <center> <div class="par<?php echo $row; echo $i; ?>"> <div class="inner-addon right-addon"> <i class="glyphicon glyphicon-remove"></i> <input type="text" id="input" class="int form-control" readonly> </div> <input type="hidden" id="input" class="int2" name="blk<?php echo $row; echo $i; ?>" value=""> <div class="intin op"> <img src="close.png" class="icon-close"> <center> <div class="btn2"> <input type="button" value="LEC" class="a inline" ng-click="count = 1" > <input type="button" value="LAB" class="a inline" ng-click="count = 2" > </div> <br> <select class="csel form-control inline" ng-change="addPerson()" ng-model="newName"> <option value="" selected>Select Class</option> <?php include 'classnames.php'; while ($qv = mysqli_fetch_assoc($ac)) { ?> <option value="<?php echo $qv['name']; ?>"><?php echo $qv['name']; ?></option> <?php } ?> </select> <select class="dsel form-control inline" > <option value="" selected="true">divison / batch</option> <option ng-repeat="p in div" value="{{p.name}}">{{p.name}}</option> </select> <br> <select class="ssel form-control " > <option value="" selected="true">Select Subject</option> <option ng-repeat="s in subject" value="{{s.sub}}">{{s.sub}}</option> </select> <br> <input type="button" class="kbtn btn btn-success" value="Submit" ng-click="clear()"> </center> </div> </div> </center> </td> <?php } ?> </tr> <?php $row++; } ?> </tbody> </table> <br> <br> <input type="submit" class="btn btn-success" style="padding: 10px 40px; font-size: 20px;" id="submit16" value="Submit Table"> <br> <br> <span class="wrn"><strong>*</strong> if table exists then it auto update to this new table</span> </center> </form> </div> <script> $(document).ready(function(){ $('#submit16').click(function() { $.ajax({ type:"post", url:"tablestore.php", data: $("#tform").serialize(), success: function(response){ alert("successful"); } }); return false; }); }); </script> <script> $(document).ready(function () { var $ida = ''; var l =""; var cls = ""; var div = ""; var sub = ""; $('.int').click(function(e) { var $parent = $(this).parent(); $ida = $($parent).parent(); $($ida).find('.intin').css({"opacity": "1"}); $($ida).find('.intin').css({"visibility": "visible"}); $( $ida ).find( "select" ).prop('disabled', true); $( $ida ).find( ".kbtn" ).prop('disabled', true); $('.main div .int').css("pointer-events","none"); $($ida).find('.btn2 input').removeClass('active'); }); $('.btn2 input').click(function(e) { $('.btn2 input').removeClass('active'); var $parent = $(this).parent(); $(this).addClass('active'); var aa =$(this).val(); if(aa == 'LEC'){ l =""; } if(aa == 'LAB'){ l =aa; } $( $ida ).find( ".csel" ).prop('disabled', false); e.preventDefault(); }); $('.csel').click( function() { var val = $(this).val(); if(val != ""){ cls = val; $( $ida ).find( ".dsel" ).prop('disabled', false); } }); $('.dsel').click( function() { var val2 = $(this).val(); if(val2 != ""){ div= val2; $( $ida ).find( ".ssel" ).prop('disabled', false); } }); $('.ssel').click( function() { var val3 = $(this).val(); if(val3 != ""){ sub 
= val3; $( $ida ).find( ".kbtn" ).prop('disabled', false); } }); $('.kbtn').click(function(){ $($ida).find('.intin').css({"opacity": "0"}); $('.main div .int').css("pointer-events","auto"); $($ida).find('.intin').css({"visibility": "hidden"}); $($ida).find('.int2').val(div+"-"+sub+"["+l+"]-"+cls); $($ida).find('.int').val(div+"-"+sub+"["+l+"]"); $( $ida ).find( ".csel" ).val(""); $ida = ''; cls = ""; div = ""; sub = ""; l =""; }); $('.icon-close').click(function(e){ $($ida).find('.intin').css({"opacity": "0"}); $('.main div .int').css("pointer-events","auto"); $($ida).find('.intin').css({"visibility": "hidden"}); $(rw).attr("rowspan","1"); $ida = ''; cls = ""; div = ""; sub = ""; l =""; }); $('.glyphicon').click(function(e){ var parent = $(this).parent(); $(parent).find('.int').val(""); var parent2 = $(parent).parent(); $(parent2).find('.int2').val(""); var parent3 = $(parent2).parent(); var parent4 = $(parent3).parent(); $(parent4).attr("rowspan","1"); }); }); </script> <script type="text/javascript"> var ajaxExample = angular.module('ajaxExample', []); ajaxExample.controller('mainController',function($scope,$http){ $scope.people; $scope.addPerson = function() { $http({ method: 'POST', url: 'anglr/post.php', data: {newName: $scope.newName} }).then(function (response) {// on success $scope.div = response.data; }); $http({ method: 'POST', url: 'anglr/post1.php', data: {newName: $scope.newName,lol : $scope.count} }).then(function (response) {// on success $scope.subject = response.data; }); }; $scope.clear = function(){ $scope.newName = ""; $scope.count = 0; }; }); </script> </body> </html><file_sep>/tablestore.php <?php include 'connection.php'; $ffid = 'c0111'; $blk11 = $_POST['blk11']; $blk12 = $_POST['blk12']; $blk13 = $_POST['blk13']; $blk14 = $_POST['blk14']; $blk15 = $_POST['blk15']; $blk16 = $_POST['blk16']; $blk21 = $_POST['blk21']; $blk22 = $_POST['blk22']; $blk23 = $_POST['blk23']; $blk24 = $_POST['blk24']; $blk25 = $_POST['blk25']; $blk26 = $_POST['blk26']; $blk31 = $_POST['blk31']; $blk32 = $_POST['blk32']; $blk33 = $_POST['blk33']; $blk34 = $_POST['blk34']; $blk35 = $_POST['blk35']; $blk36 = $_POST['blk36']; $blk41 = $_POST['blk41']; $blk42 = $_POST['blk42']; $blk43 = $_POST['blk43']; $blk44 = $_POST['blk44']; $blk45 = $_POST['blk45']; $blk46 = $_POST['blk46']; $blk51 = $_POST['blk51']; $blk52 = $_POST['blk52']; $blk53 = $_POST['blk53']; $blk54 = $_POST['blk54']; $blk55 = $_POST['blk55']; $blk56 = $_POST['blk56']; $blk61 = $_POST['blk61']; $blk62 = $_POST['blk62']; $blk63 = $_POST['blk63']; $blk64 = $_POST['blk64']; $blk65 = $_POST['blk65']; $blk66 = $_POST['blk66']; $conn->query("DROP TABLE $ffid"); $conn->query("CREATE TABLE IF NOT EXISTS $ffid (id int(2),mon varchar(32) NOT NULL,tue varchar(32) NOT NULL,wed varchar(32) NOT NULL,thu varchar(32) NOT NULL,fri varchar(32) NOT NULL,sat varchar(32) NOT NULL)"); $a = $conn->query("INSERT INTO $ffid(id,mon,tue,wed,thu,fri,sat) VALUES (1,'$blk11','$blk12','$blk13','$blk14','$blk15','$blk16')") or die("error 1"); $b = $conn->query("INSERT INTO $ffid(id,mon,tue,wed,thu,fri,sat) VALUES (2,'$blk21','$blk22','$blk23','$blk24','$blk25','$blk26')") or die("error 2"); $c = $conn->query("INSERT INTO $ffid(id,mon,tue,wed,thu,fri,sat) VALUES (3,'$blk31','$blk32','$blk33','$blk34','$blk35','$blk36')") or die("error 3"); $d = $conn->query("INSERT INTO $ffid(id,mon,tue,wed,thu,fri,sat) VALUES (4,'$blk41','$blk42','$blk43','$blk44','$blk45','$blk46')") or die("error 4"); $e = $conn->query("INSERT INTO $ffid(id,mon,tue,wed,thu,fri,sat) VALUES 
(5,'$blk51','$blk52','$blk53','$blk54','$blk55','$blk56')") or die("error 5"); $d = $conn->query("INSERT INTO $ffid(id,mon,tue,wed,thu,fri,sat) VALUES (6,'$blk61','$blk62','$blk63','$blk64','$blk65','$blk66')") or die("error 6"); ?><file_sep>/class.php <!DOCTYPE html> <html> <head> <link rel="stylesheet" href="bootstrap.min.css"> <script src="js/jquery-1.8.3.js"></script> <script src="bootstrap/js/bootstrap.js"></script> <script src="js/angular.min.js"></script> <script src="valid.js"></script> <title></title> <style type="text/css"> .inline{ display: inline-block; } .wrn{ color: #fff; } #contact-form{ padding: 20px 20px; } </style> </head> <body ng-app='myApp' > <nav class="navbar navbar-default"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="#">After320</a> </div> <ul class="nav navbar-nav"> <li class="active"><a href="class.php">ClASS</a></li> <li><a href="division.php">DIVISION</a></li> <li><a href="batch.php">BATCH</a></li> <li><a href="addsub.php">SUBJECT</a></li> <li><a href="table.php">FACULTY TABLE</a></li> </ul> </div> </nav> <center><h2>ADD CLASSES</h2></center> <form action="#" method="post" id="contact-form" > <center> <span class="wrn"><strong>*</strong>only Numberic and Alphbates</span> <br> <br> <label>CLASS NAME</label> <input list="browsers" name="classname" class="form-control" style="width: 300px;" placeholder="ex. 5comp , 3civil , 2mech" required> <datalist id="browsers"> <?php require_once 'classnames.php'; ?> <?php while ($qv = mysqli_fetch_assoc($ac)) { ?> <option><?php echo $qv['name']; ?></option> <?php } ?> </datalist> <br> <div > <label>COMMON PATTEN</label> <input type="text" class="form-control" style="width: 300px; " ng-model="ptn" placeholder="eg. 15ce , 17me , 14cl"> <br> <label>HOW MANY STUDENTS </label> <input type="text" class="form-control" style="width: 300px; " ng-model="x" placeholder="ex. 
50 , 60"> <br> <div class="container"> <div class="col-md-4" style="margin-bottom: 10px;" ng-repeat="n in [] | range:x"> <div class="input-group"> <span class="input-group-addon"> <input type="checkbox" ng-model="aa" ng-init="aa=true" > </span> <input type="text" class="form-control" name="check_list[]" ng-disabled="!aa" value="{{ptn+($index+1 | a)}}" > </div> </div> </div> </div> <input type="submit" value="Submit" class="btn btn-success" style="padding: 8px 30px; font-size: 17px;" id="submit-button"> </center> </form> <br> <br> <div ng-app="ajaxExample" ng-controller="mainController"> <center> <h4>Already Added Classes</h4> <table class="table" style="width:400px;"> <tr ng-repeat="per in p"> <td><center>{{ per.name }}</center></td> <td><center><button ng-click="delete( per.name )" style="border: none; background: #4C4CB8; color: #fff; border-radius: 4px; padding: 5px 10px;">Delete</button></center></td> </tr> </table> </center> </div> <script> var myApp = angular.module('myApp', []); myApp.filter('range', function() { return function(input, total) { total = parseInt(total); for (var i=0; i<total; i++) input.push(i); return input; }; }); myApp.filter('a', function() { return function(inp) { if(inp <10 ) return "0"+inp; return inp; }; }); myApp.controller('mainController',function($scope,$http){ $scope.people; $scope.get = function() { $http({ method: 'GET', url: 'anglr/classget.php' }).then(function (response) { // on success $scope.p = response.data; }); }; $scope.delete = function( id ) { $http({ method: 'POST', url: 'anglr/classdelete.php', data: { recordId : id } }).then(function (response) { $scope.get(); }); }; $scope.get(); }); </script> <script> $(document).ready(function(){ $('#submit-button').click(function() { $.ajax({ type:"post", url:"dbconnect.php", data: $("#contact-form").serialize(), success: function(response){ alert("successful"); } }); return false; }); }); </script> </html><file_sep>/dbconnect.php <?php require_once 'connection.php'; $class = preg_replace('/[^a-zA-Z0-9]/','',$_POST['classname']); $conn -> query("CREATE TABLE IF NOT EXISTS nameclass ( name varchar(32) PRIMARY KEY)") or die("create erroe") or die("error 1"); $ck =$conn-> query("INSERT INTO nameclass (name) VALUES ('$class')") or die("insert error"); $conn -> query("CREATE TABLE IF NOT EXISTS a$class(part varchar(20) PRIMARY KEY)") or die("class table error"); if(!empty($_POST['check_list'])) { foreach($_POST['check_list'] as $check) { $conn->query("ALTER TABLE A$class ADD A$check int( 2 )") or die("error 4"); } } $conn->query("INSERT INTO a$class(part) VALUES ('$class')"); $sql="SELECT `COLUMN_NAME` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = 'attendence1' AND `TABLE_NAME` = 'a$class'"; $names =$conn->query($sql); $post = array(); while($row = mysqli_fetch_assoc($names)) { $post[] = $row; } array_splice($post, 0, 1); foreach ($post as $k) { foreach ($k as $e) { $conn->query("update a$class set $e = 1 where part = '$class'"); } } $conn->close(); echo "SUCCESSFUL"; ?><file_sep>/addsub.php <!DOCTYPE html> <html> <head> <link rel="stylesheet" href="bootstrap.min.css"> <script src="js/jquery-1.8.3.js"></script> <script src="js/angular.min.js"></script> <script src="bootstrap/js/bootstrap.js"></script> <script > angular.module('ngPatternExample', []) .controller('ExampleController', ['$scope', function($scope) { $scope.regex = /^[^`~!@#$%\^&*()_+={}|[\]\\:';"<>?,./0-9]*$/; }]) .directive('myDirective', function() { function link(scope, elem, attrs, ngModel) { 
ngModel.$parsers.push(function(viewValue) { var reg = /^[^`~!@#$%\^&*()_+={}|[\]\\:';"<>?,./0-9]*$/; // if view values matches regexp, update model value if (viewValue.match(reg)) { return viewValue; } // keep the model value as it is var transformedValue = ngModel.$modelValue; ngModel.$setViewValue(transformedValue); ngModel.$render(); return transformedValue; }); } return { restrict: 'A', require: 'ngModel', link: link }; }); </script> <style type="text/css"> table > tr,td,th{ padding: 10px 10px; height: 50px; } .x{ width: 80%; } </style> <title>ADD_SUBJECT</title> </head> <body ng-app="ngPatternExample"> <nav class="navbar navbar-default"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="#">After320</a> </div> <ul class="nav navbar-nav"> <li ><a href="class.php">ClASS</a></li> <li><a href="division.php">DIVISION</a></li> <li><a href="batch.php">BATCH</a></li> <li class="active"><a href="addsub.php">SUBJECT</a></li> <li><a href="table.php">FACULTY TABLE</a></li> </ul> </div> </nav> <center> <h2>ADD SUBJECTS</h2><br> <form action="addsub.php" method="post" ng-app="app" style="width: 500px;"> <label style="display: inline-block;">CLASS :</label> <select name="cname" required class="form-control" style="width: 300px; display: inline-block;"> <?php require_once 'classnames.php'; ?> <?php while ($qv = mysqli_fetch_assoc($ac)) { ?> <option><?php echo $qv['name']; ?></option> <?php } ?> </select> <br><br> <table border=""> <tr> <th><center>#</center></th> <th><center>SUBJECT NAME</center></th> <th><center>LEC</center></th> <th><center>LAB</center></th> </tr> <tr> <td><center>1</center></td> <td><center><input type="text" name="fname1" class="form-control x" ng-model="model" id="input" my-directive placeholder="subject" required></center></td> <td><center><input type="checkbox" name="lec1" value="lec"></center></td> <td><center><input type="checkbox" name="lab1" value="lab"></center></td> </tr> <tr> <td><center>2</center></td> <td><center><input type="text" name="fname2" class="form-control x" ng-model="model2" id="input" my-directive placeholder="subject" ></center></td> <td><center><input type="checkbox" name="lec2" value="lec"></center></td> <td><center><input type="checkbox" name="lab2" value="lab"></center></td> </tr> <tr> <td><center>3</center></td> <td><center><input type="text" name="fname3" class="form-control x" ng-model="model3" id="input" my-directive placeholder="subject" ></center></td> <td><center><input type="checkbox" name="lec3" value="lec"></center></td> <td><center><input type="checkbox" name="lab3" value="lab"></center></td> </tr> <tr> <td><center>4</center></td> <td><center><input type="text" name="fname4" class="form-control x" ng-model="model4" id="input" my-directive placeholder="subject" ></center></td> <td><center><input type="checkbox" name="lec4" value="lec"></center></td> <td><center><input type="checkbox" name="lab4" value="lab"></center></td> </tr> <tr> <td><center>5</center></td> <td><center><input type="text" name="fname5" class="form-control x" ng-model="model5" id="input" my-directive placeholder="subject" ></center></td> <td><center><input type="checkbox" name="lec5" value="lec"></center></td> <td><center><input type="checkbox" name="lab5" value="lab"></center></td> </tr> <tr> <td><center>6</center></td> <td><center><input type="text" name="fname6" class="form-control x" ng-model="model6" id="input" my-directive placeholder="subject" ></center></td> <td><center><input type="checkbox" name="lec6" 
value="lec"></center></td> <td><center><input type="checkbox" name="lab6" value="lab"></center></td> </tr> <tr> <td><center>7</center></td> <td><center><input type="text" name="fname7" class="form-control x" ng-model="model7" id="input" my-directive placeholder="subject" ></center></td> <td><center><input type="checkbox" name="lec7" value="lec"></center></td> <td><center><input type="checkbox" name="lab7" value="lab"></center></td> </tr> </table> <br> <br> <input type="submit" name="submit76" value="ADD" class="btn btn-primary" style="padding: 8px 30px;"> </center> </form> <?php require_once 'connection.php'; if(isset($_POST['submit76'])){ $cname = strtolower($_POST['cname']); $conn -> query("CREATE TABLE IF NOT EXISTS subnames (class varchar(30), name varchar(32) PRIMARY KEY,lec int(2),lab int(2))") or die("create erroe"); $lec =0 ; $lab=0; $sql="SELECT `COLUMN_NAME` FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE `TABLE_SCHEMA` = 'attendence1' AND `TABLE_NAME` = 'a$cname'"; $names =$conn->query($sql); $post = array(); while($row = mysqli_fetch_assoc($names)) { $post[] = $row; } array_splice($post, 0, 1); $data='( date1 date,ck int(2)'; foreach ($post as $post1) { foreach ($post1 as $post2) { $data=$data.', '.$post2.' int( 2 )'; } } $data=$data.')'; if($_POST['fname1'] != ""){ $a = $_POST['fname1']; if(isset($_POST['lec1'])){ $lec =1 ; $tname1 = $_POST['fname1'];} if(isset($_POST['lab1'])){ $lab =1 ; $tname2 = $_POST['fname1'] ."lab";} $conn-> query("INSERT INTO subnames (class,name,lec,lab) VALUES ('$cname','$a',$lec,$lab)") or die("insert error"); if(isset($tname1)){ $sqlt = 'CREATE TABLE '.$tname1.$data; $conn -> query($sqlt) or die($conn->connect_error); } if(isset($tname2)){ $sqlt = 'CREATE TABLE '.$tname2.$data; $conn -> query($sqlt) or die($conn->connect_error); } } //sub if($_POST['fname2'] != ""){ $a = $_POST['fname2']; $tname1 = ''; $tname2 = ''; $lec = 0; $lab = 0; if(isset($_POST['lec2'])){ $lec =1 ; $tname1 = $_POST['fname2'];} if(isset($_POST['lab2'])){ $lab =1 ; $tname2 = $_POST['fname2'] ."lab";} $conn-> query("INSERT INTO subnames (class,name,lec,lab) VALUES ('$cname','$a',$lec,$lab)") or die("insert error"); if($tname1 != ''){ $sqlt = 'CREATE TABLE '.$tname1.$data; $conn -> query($sqlt) or die($conn->connect_error); } if($tname2 != ''){ $sqlt = 'CREATE TABLE '.$tname2.$data; $conn -> query($sqlt) or die($conn->connect_error); } } //sub if($_POST['fname3'] != ""){ $a = $_POST['fname3']; $tname1 = ''; $tname2 = ''; $lec = 0; $lab = 0; if(isset($_POST['lec3'])){ $lec =1 ; $tname1 = $_POST['fname3'];} if(isset($_POST['lab3'])){ $lab =1 ; $tname2 = $_POST['fname3'] ."lab";} $conn-> query("INSERT INTO subnames (class,name,lec,lab) VALUES ('$cname','$a',$lec,$lab)") or die("insert error"); if($tname1 != ''){ $sqlt = 'CREATE TABLE '.$tname1.$data; $conn -> query($sqlt) or die($conn->connect_error); } if($tname2 != ''){ $sqlt = 'CREATE TABLE '.$tname2.$data; $conn -> query($sqlt) or die($conn->connect_error); } } //sub if($_POST['fname4'] != ""){ $a = $_POST['fname4']; $tname1 = ''; $tname2 = ''; $lec = 0; $lab = 0; if(isset($_POST['lec4'])){ $lec =1 ; $tname1 = $_POST['fname4'];} if(isset($_POST['lab4'])){ $lab =1 ; $tname2 = $_POST['fname4'] ."lab";} $conn-> query("INSERT INTO subnames (class,name,lec,lab) VALUES ('$cname','$a',$lec,$lab)") or die("insert error"); if($tname1 != ''){ $sqlt = 'CREATE TABLE '.$tname1.$data; $conn -> query($sqlt) or die($conn->connect_error); } if($tname2 != ''){ $sqlt = 'CREATE TABLE '.$tname2.$data; $conn -> query($sqlt) or die($conn->connect_error); } 
} //sub if($_POST['fname5'] != ""){ $a = $_POST['fname5']; $tname1 = ''; $tname2 = ''; $lec = 0; $lab = 0; if(isset($_POST['lec5'])){ $lec =1 ; $tname1 = $_POST['fname5'];} if(isset($_POST['lab5'])){ $lab =1 ; $tname2 = $_POST['fname5'] ."lab";} $conn-> query("INSERT INTO subnames (class,name,lec,lab) VALUES ('$cname','$a',$lec,$lab)") or die("insert error"); if($tname1 != ''){ $sqlt = 'CREATE TABLE '.$tname1.$data; $conn -> query($sqlt) or die($conn->connect_error); } if($tname2 != ''){ $sqlt = 'CREATE TABLE '.$tname2.$data; $conn -> query($sqlt) or die($conn->connect_error); } } //sub if($_POST['fname6'] != ""){ $a = $_POST['fname6']; $tname1 = ''; $tname2 = ''; $lec = 0; $lab = 0; if(isset($_POST['lec6'])){ $lec =1 ; $tname1 = $_POST['fname6'];} if(isset($_POST['lab6'])){ $lab =1 ; $tname2 = $_POST['fname6'] ."lab";} $conn-> query("INSERT INTO subnames (class,name,lec,lab) VALUES ('$cname','$a',$lec,$lab)") or die("insert error"); if($tname1 != ''){ $sqlt = 'CREATE TABLE '.$tname1.$data; $conn -> query($sqlt) or die($conn->connect_error); } if($tname2 != ''){ $sqlt = 'CREATE TABLE '.$tname2.$data; $conn -> query($sqlt) or die($conn->connect_error); } } //sub if($_POST['fname7'] != ""){ $a = $_POST['fname7']; $tname1 = ''; $tname2 = ''; $lec = 0; $lab = 0; if(isset($_POST['lec7'])){ $lec =1 ; $tname1 = $_POST['fname7'];} if(isset($_POST['lab7'])){ $lab =1 ; $tname2 = $_POST['fname7'] ."lab";} $conn-> query("INSERT INTO subnames (class,name,lec,lab) VALUES ('$cname','$a',$lec,$lab)") or die("insert error"); if($tname1 != ''){ $sqlt = 'CREATE TABLE '.$tname1.$data; $conn -> query($sqlt) or die($conn->connect_error); } if($tname2 != ''){ $sqlt = 'CREATE TABLE '.$tname2.$data; $conn -> query($sqlt) or die($conn->connect_error); } } echo "SUCCESSFULLY SUBJECTS ADDED"; } ?> </center> </body> </html>
f4ad6181753afcba2d7ef9b2540afa07f748345c
[ "Markdown", "JavaScript", "PHP" ]
23
PHP
Jaimin7632/Attendence_System
9eb52eefd9be865ec14b39af1074f0c43f52344c
32ae50f1942bef3eb96b5701d22d8f291a6d2267
refs/heads/master
<file_sep><?php

namespace App\Providers;

use App\Helpers\Contracts\PhotoFileContract;
use App\Helpers\PhotoFileNativePhp;
use Illuminate\Support\ServiceProvider;

class PhotoFileServiceProvider extends ServiceProvider
{
    /**
     * Bootstrap services.
     *
     * @return void
     */
    public function boot()
    {
        //
    }

    /**
     * Register services.
     *
     * @return void
     */
    public function register()
    {
        // Register a contract for saving photo files and their cutting.
        $this->app->bind(PhotoFileContract::class, function () {
            return new PhotoFileNativePhp();
        });
    }
}
<file_sep><?php

namespace App\Http\Controllers;

use App\Photo;
use Illuminate\Http\Request;
use App\Http\Requests\PhotoRequest;
use Illuminate\Support\Facades\Auth;
use Illuminate\Support\Facades\Gate;
use App\Helpers\Contracts\PhotoFileContract;

class PhotoController extends Controller
{
    /**
     * Create a new controller instance.
     *
     * @return void
     */
    public function __construct()
    {
        $this->middleware('auth', ['except' => ['show', 'showOriginal']]);
    }

    /**
     * Show the form for creating a new resource.
     */
    public function create()
    {
        return view('photo.create');
    }

    /**
     * Store a newly created resource in storage.
     */
    public function store(PhotoRequest $request, PhotoFileContract $photo_file)
    {
        $request->flash();
        $user = Auth::user();
        if ($image = $request->file('img')) {
            // Save the photo file and its cut-down versions
            $photo_file->storePhoto($image);
            // Save the photo in the db, bound to the current user
            $data = $request->validated();
            $data['img'] = $photo_file->image_name;
            $user->photos()->create($data);
        }
        return redirect()->route('user', ['id' => $user->id])->with('status', 'Photo was uploaded');
    }

    /**
     * Display the specified resource.
     */
    public function show($id)
    {
        $photo = Photo::find($id);
        return view('photo.show', compact('photo'));
    }

    /**
     * Show the form for editing the specified resource.
     */
    public function edit($id)
    {
        // Check whether the user has access to edit
        if (Gate::denies('author-policy', Photo::find($id))) {
            return redirect()->back()->with('message', 'You don\'t have access');
        }
        $photo = Photo::find($id);
        return view('photo.edit', compact('photo'));
    }

    /**
     * Update the specified resource in storage.
     */
    public function update(PhotoRequest $request, PhotoFileContract $photo_file, $id)
    {
        // Check whether the user has access to edit
        if (Gate::denies('author-policy', Photo::find($id))) {
            return redirect()->back()->with('message', 'You don\'t have access');
        }
        $user = Auth::user();
        $photo = Photo::find($id);
        if ($image = $request->file('img')) {
            // Save the photo file and its cut-down versions
            $photo_file->storePhoto($image);
            // Save the photo in the db
            $data = $request->validated();
            $photo->img = $photo_file->image_name;
            $photo->name = $data['name'];
            if (isset($data['description'])) $photo->description = $data['description'];
            $photo->save();
        }
        return redirect()->route('user', ['id' => $user->id])->with('status', 'Photo was updated');
    }

    /**
     * Remove the specified resource from storage.
     */
    public function destroy($id)
    {
        // Check whether the user has access to delete
        if (Gate::denies('author-policy', Photo::find($id))) {
            return redirect()->back()->with('message', 'You don\'t have access');
        }
        $photo = Photo::find($id);
        if (isset($photo)) {
            $photo->delete();
            return response()->json(['succeed' => 'true', 'message' => 'Photo was deleted']);
        }
        return response()->json(['succeed' => 'false', 'message' => 'Photo has not been deleted']);
    }

    /**
     * Display the original size Photo.
     */
    public function showOriginal($id)
    {
        $photo = Photo::find($id);
        return view('photo.show_original', compact('photo'));
    }
}
<file_sep><?php

use Illuminate\Database\Seeder;
use App\Photo;
use Illuminate\Filesystem\Filesystem;
use Illuminate\Support\Facades\Storage;

class PhotosSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        for ($i = 1; $i <= 5; $i++) {
            Photo::create([
                'img' => 'photo.jpg',
                'name' => 'Photo' . $i,
                'description' => 'Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. Nemo enim ipsam voluptatem, quia voluptas sit, aspernatur aut odit aut fugit'
            ]);
        }
        // Copy the images shipped as dummy data into storage
        $this->storePhotoDummyData();
    }

    public function storePhotoDummyData()
    {
        $filesystem = new Filesystem();
        $from = public_path('dummy-photo');
        $images = $filesystem->allFiles($from);
        foreach ($images as $image) {
            $path = '/public/photos/' . $image->getFilename();
            Storage::put($path, $filesystem->get($image->getPathname()));
        }
    }
}
<file_sep><?php

namespace App\Helpers;

use App\Helpers\Contracts\PhotoFileContract;

class PhotoFileNativePhp implements PhotoFileContract
{
    /**
     * The attribute that holds the image name.
     *
     * @var string
     */
    public $image_name;

    /**
     * Saves the image to the storage.
     */
    public function storePhoto($image, $slice = true, $path = 'storage/photos/')
    {
        $this->image_name = time() . '-' . $image->getClientOriginalName();
        $image->move(public_path($path), $this->image_name);
        // If necessary, cut the image
        if ($slice === true) {
            $this->slicePhoto($this->image_name, $slice, $path);
        }
    }

    /**
     * Cuts the image into the configured sizes.
     */
    public function slicePhoto($image_name, $slice, $path)
    {
        if (file_exists($file = $path . $image_name)) {
            list($width, $height) = getimagesize($file);
            $ratio = $width / $height;
            $src = imagecreatefromjpeg($file);
            $pathinfo_name = pathinfo($file, PATHINFO_FILENAME);
            $slice = [
                'md' => [1200, 1200],
                'sm' => [320, 240],
            ];
            foreach ($slice as $size => $new_wh) {
                // Keep the proportions when cutting the image
                list($width_new, $height_new) = $new_wh;
                if ($ratio >= ($width_new / $height_new)) {
                    // Calculate the new height, keeping proportions
                    $height_new = round($height / ($width / $width_new));
                } else {
                    // Calculate the new width, keeping proportions
                    $width_new = round($width / ($height / $height_new));
                }
                $dest = imagecreatetruecolor($width_new, $height_new);
                imagecopyresampled($dest, $src, 0, 0, 0, 0, $width_new, $height_new, $width, $height);
                imagejpeg($dest, $path . str_replace($pathinfo_name, $pathinfo_name . '-' . $size, $image_name));
                // destroy each resized canvas (originally only the last one was freed, after the loop)
                imagedestroy($dest);
            }
            imagedestroy($src);
        }
    }
}
<file_sep><?php

use Illuminate\Database\Seeder;
use App\User;

class UsersSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        for ($i = 1; $i <= 3; $i++) {
            User::create([
                'name' => 'User ' . $i,
                'email' => 'user' . $i . '@mail.com',
                'password' => <PASSWORD>('<PASSWORD>')
            ]);
        }
    }
}
<file_sep><?php

namespace App\Helpers\Contracts;

interface PhotoFileContract
{
    /**
     * Saves the image to the storage.
     */
    public function storePhoto($image, $slice, $path);

    /**
     * Cuts the image.
     */
    public function slicePhoto($image_name, $slice, $path);
}
<file_sep><?php

namespace App\Http\Controllers;

use Illuminate\Http\Request;
use App\User;

class UserController extends Controller
{
    /**
     * Create a new controller instance.
     *
     * @return void
     */
    public function __construct()
    {
        $this->middleware('user.has.table');
    }

    public function index()
    {
        $users = User::all();
        return view('home', compact('users'));
    }

    public function show($id)
    {
        $user = User::with('photos')->where('id', $id)->first();
        return view('user.show', compact('user'));
    }
}
<file_sep><?php

namespace App\Providers;

use Illuminate\Support\ServiceProvider;
use Illuminate\Support\Facades\Blade;

class AppServiceProvider extends ServiceProvider
{
    /**
     * Bootstrap any application services.
     *
     * @return void
     */
    public function boot()
    {
        /**
         * New directive for displaying a Photo.
         *
         * $arguments = ['name_photo', 'size_photo', 'path_photo']
         */
        Blade::directive('getPhoto', function ($arguments) {
            $arr_args = explode(',', str_replace(' ', '', $arguments));
            $image = empty($arr_args[0]) ? null : $arr_args[0];
            $size = empty($arr_args[1]) ? null : $arr_args[1];
            $path = empty($arr_args[2]) ? 'storage/photos/' : str_replace(["'", '"'], '', $arr_args[2]);
            if (!is_null($size) && !is_null($image)) {
                $result = '<?php $pathinfo_name = pathinfo(' . $image . ', PATHINFO_FILENAME); ?>' .
                    '<?php $image_name = str_replace($pathinfo_name, $pathinfo_name . \'-\' . ' . $size . ', ' . $image . '); ?>' .
                    '<?php if (file_exists("' . ($path . '$image_name') . '")) { ?>' .
                    '<img src="' . asset($path . '<?php echo $image_name ?>') . '" class="img-thumbnail">' .
                    '<?php } else { ?>' .
                    '<img src="' . asset($path . '<?php echo '. $image .' ?>') . '" class="img-thumbnail">' .
                    '<?php } ?>';
            } else if (!is_null($image)) {
                $result = '<img src="' . asset($path . '<?php echo '. $image .' ?>') . '" class="img-thumbnail">';
            } else {
                $result = '<div>Image not found</div>';
            }
            return $result;
        });
    }

    /**
     * Register any application services.
     *
     * @return void
     */
    public function register()
    {
        //
    }
}
<file_sep><?php

namespace App\Http\Middleware;

use Closure;
use Illuminate\Support\Facades\Schema;

class UserHasTableMiddleware
{
    /**
     * Handle an incoming request.
     *
     * @param  \Illuminate\Http\Request  $request
     * @param  \Closure  $next
     * @return mixed
     */
    public function handle($request, Closure $next)
    {
        if (!Schema::hasTable('users')) {
            return redirect()->route('register')->with('message', 'Please create a table of users in the database or use migrations');
        }
        return $next($request);
    }
}
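/*
 * Usage sketch for the @getPhoto Blade directive registered above (an
 * editor's illustration, not a file from the repository). The size argument
 * must match a suffix produced by PhotoFileNativePhp::slicePhoto ('md' or
 * 'sm'); $photo is assumed to be a Photo model passed to the view:
 *
 *   @getPhoto($photo->img, 'sm')
 *   @getPhoto($photo->img, 'md', 'storage/photos/')
 *
 * With no size argument the directive falls back to the original image,
 * and with no image it renders an "Image not found" placeholder.
 */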
718865fd621b309590fb4c1fa18bba26dcf0e920
[ "PHP" ]
9
PHP
AlexandersGitHubWorks/PhotoGallery
2a31f5fb67fe0f23377c5b301ad7f1eb1e34087e
d257839a6058a9840994cc6a239e33379d925469
refs/heads/master
<file_sep>'use strict'

module.exports = function(app, db) {
  var request = require('request');

  app.get('/api/imagesearch/:id', searchImage);
  app.get('/api/latest/imagesearch', getSearches);
  app.route('/*')
    .get(function(req, res) {
      res.render('index', { err: "Error: You need to add a proper url" });
    });
  //app.get('/new/:url*', handleNewURL);

  //#/api/imagesearch/rerer?offset=dfmjdf
  function searchImage(req, res){
    // credentials come from the environment (they were declared here but
    // unused, with a key hard-coded into the URL instead)
    var key = process.env.KEY;
    var cx = process.env.CX;
    var query = req.params.id;
    var offset = req.query.offset;
    var num = Boolean(req.query.num) ? req.query.num : 10;
    var url = "https://www.googleapis.com/customsearch/v1?q="+query+"&offset="+offset+"&num="+num+"&searchType=image&cx="+cx+"&key="+key;
    var toSave = {
      "query": query,
      "time": new Date()
    };
    request.get(
      {
        uri: url,
        json: true,
        headers: {
          'Content-Type' : 'application/x-www-form-urlencoded',
        }
      },
      function (error, response, body) {
        if (!error && response.statusCode == 200) {
          // map the raw Google results down to the fields we expose
          var items = body.items.map(function(obj){
            var rObj = {};
            rObj["title"] = obj.title;
            rObj["image-link"] = obj.link;
            rObj["context"] = obj.image.contextLink;
            rObj["thumbnail"] = obj.image.thumbnailLink;
            return rObj;
          })
          res.send(items);
          save(toSave, db);
        }else{
          res.send(body);
        }
      });
  }

  function save(obj, db) {
    // Save object into db.
    var searches = db.collection('searches');
    searches.save(obj, function(err, result) {
      if (err) throw err;
      console.log('Saved ' + result);
    });
  }

  function getSearches(req, res) {
    var searches = db.collection('searches');
    searches.find({}).toArray(function(err, result){
      if (err) throw err; // was "throw error", a ReferenceError
      var out = result.map(function(o){
        var n_o = {};
        n_o["query"] = o.query;
        n_o["time"] = o.time;
        return n_o;
      })
      res.send(out);
    });
    //var searches = db.searches.find();
  }
};
<file_sep># fcc-image-search
FreeCodeCamp API Basejump: Image Search Abstraction Layer
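
## Usage (sketch)

Once the app is running, the two endpoints defined in the route module above can be exercised like this (the query, `offset`, and `num` values are examples; `offset` and `num` are optional):

    GET /api/imagesearch/lolcats?offset=10&num=5
    GET /api/latest/imagesearch

The first returns image results for the query, the second returns the most recently stored searches.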
b48e98f0d5763ab550a1ec2de5827a4f00bcfd42
[ "JavaScript", "Markdown" ]
2
JavaScript
darlingtonamz/fcc-image-search
69ea1ba54174ca83599614c7af7b598ed8b38584
96f8608dd6e444e1749c2dac560848d3194127fc
refs/heads/master
<repo_name>jolsby/Twitter-Analysis<file_sep>/twitter_config.py
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 12 12:29:32 2018

@author: jacob
"""

'''
This is a basic config file.
Please use your own credentials to test twitter_master_jake_olsby.py.
Have a great day!
'''

consumer_key = ''
consumer_secret = ''
access_token_key = ''
access_token_secret = ''
<file_sep>/twitter_master_jake_olsby.py
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 12 11:41:07 2018

@author: jacob
"""

import tweepy
import pandas as pd
import collections
import datetime
from textblob import TextBlob

# place twitter_config in the same directory
# please use your own keys and secrets
import twitter_config

###############################################################################
# - Access and required initialization (Please use your own credentials)
###############################################################################
consumer_key = twitter_config.consumer_key
consumer_secret = twitter_config.consumer_secret
access_token_key = twitter_config.access_token_key
access_token_secret = twitter_config.access_token_secret

auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token_key, access_token_secret)
api = tweepy.API(auth)
##############################################################################
# Ideally credentials would be put into a referenced file for security
##############################################################################

#-----------------------------------------------------------------------------
##############################################################################
# Screen_Name's Most Recent Tweets
##############################################################################
#-----------------------------------------------------------------------------

# Get tweets function, defaults to the 100 most recent
def get_tweets(screen_name='@elonmusk', number_of_tweets=100):
    '''
    A function to retrieve data around a user's tweets for the most recent
    number of tweets, defaulting to 100 tweets.
    Screen name, Tweet ID, Created Date Time, and Tweet text are loaded into
    newline-delimited JSON format.
    '''
    # Had to add tweet_mode='extended' to get the full length of tweets
    tweets = api.user_timeline(screen_name=screen_name,
                               count=number_of_tweets,
                               tweet_mode='extended')

    # the tweet and some details on the topic
    # (keep full_text as str; encoding it to bytes left a b'...' prefix
    # behind when the tweet was later passed through str())
    tweets_master = [[screen_name, tweet.id_str, tweet.created_at,
                      tweet.full_text, tweet.favorite_count]
                     for tweet in tweets]

    for j in tweets_master:  # looping through all the tweets
        # stripping quotes and \ within text to make the JSON function as expected.
        # I realize I could scale this out better with Regex and the re package
        # but this seemed more direct, and less verbose for this need.
        j3 = str(j[3]).replace('"', '').replace('\\', '').replace("'", '')
        #
        # get the sentiment analysis score of each tweet's text with TextBlob
        # simple Natural Language Processing (NLP) demonstration below
        # to demonstrate more interesting analysis
        sentiment_score = round(float(TextBlob(j3).sentiment.polarity), 2)
        #
        # JSON format below
        json = (f'''{{"screen_name": "{j[0]}","tweet_id": {j[1]},"created_datetime": "{j[2]}","tweet_favorite_count": {j[4]},"tweet_sentiment_score": {sentiment_score},"tweet_full_text": "{j3}"}},\n''')
        # append the JSON within the file for each tweet's data
        # write it as JSON file type
        with open(f"{screen_name}_by_jake_olsby.json", "a") as myfile:
            myfile.write(json)

    print(f'Successfully wrote to JSON. Please see {screen_name}_by_jake_olsby.json\n')

#-----------------------------------------------------------------------------
##############################################################################
# Hashtag Analytics Finder Functions
##############################################################################
#-----------------------------------------------------------------------------

# function to find hashtags
def find_hashtags(tweet):
    '''
    A function to find the hashtags associated with our target hashtags.
    Cleans data and places them into a list for proper capture.
    '''
    tweet_text = tweet
    # leading spaces
    tweet_text = tweet_text.replace('#', ' #')
    # remove clutter in tweet
    for punct in '.!",;:%<>/~`()[]{{}}?':
        tweet_text = tweet_text.replace(punct, '')
    # split out the tweet into a list of words
    tweet_text = tweet_text.split()
    # initialize empty to capture hashtags
    hashtag_list = []
    # loop over the words in the tweet
    for word in tweet_text:
        # find words that begin with a 'hashtag'=#
        if word[0] == '#':
            # Lower-case the word
            hashtag = word.lower()
            # Correct for possessives
            hashtag = hashtag.split('\'')[0]
            # remove the 'hashtag'=# symbol
            hashtag = hashtag.replace('#', '')
            if len(hashtag) > 0:
                hashtag_list.append(hashtag)
    return hashtag_list

# find associated hashtags
def hashtag_searcher(target_hashtag='#Seattle',
                     count_of_tweets=100,
                     # default date is today's date (YYYY-MM-DD)
                     # uses the datetime module to get today's date
                     to_date=datetime.datetime.today().strftime('%Y-%m-%d')
                     ):
    '''
    A function to analyze tweets associated with a target hashtag.
    Defaults to the most recent 100 tweets and to today's date as YYYY-MM-DD.
    The target hashtag defaults to #Seattle but this can be easily changed.
    Essentially, a kind of market basket analysis but for hashtags.
    '''
    # lower-case the hashtag
    simple_hashtag = target_hashtag.lower()
    # remove the # to get just the string
    simple_hashtag = simple_hashtag.replace('#', '')
    # create an empty list to capture new hashtags
    shared_tags = []
    # enable the Cursor to capture your parameters
    tweets = tweepy.Cursor(api.search,
                           q=f"{target_hashtag}",
                           count=count_of_tweets,
                           lang="en",
                           since=to_date,
                           tweet_mode='extended'
                           ).items()
    # loop through tweets
    for tweet in tweets:
        # clean the tweet to get just the hashtags
        hashtags = find_hashtags(tweet.full_text)
        # iterate through the captured hashtags
        for tag in hashtags:
            # skip our inputted hashtag
            if simple_hashtag not in tag:
                shared_tags.append(tag)
    # get the distribution of the items in the list
    tag_counts = collections.Counter(shared_tags)
    # turn into a dataframe
    df = pd.DataFrame.from_dict(tag_counts, orient='index')
    # sort by value
    df.columns = ['value']
    df = df.sort_values(by='value', ascending=False)
    # show the DataFrame in the console
    print(df)
    # Exported the same data to Excel to analyze and make decisions on!
    # Create a Pandas Excel writer using XlsxWriter as the engine.
    writer = pd.ExcelWriter(f'{target_hashtag}_analysis_by_jake_olsby.xlsx',
                            engine='xlsxwriter')
    # Convert the dataframe to an XlsxWriter Excel object.
    df.to_excel(writer, sheet_name='Sheet1')
    # Close the Pandas Excel writer and output the Excel file.
    writer.save()
    print(f'Successfully wrote to excel. Please see {target_hashtag}_analysis_by_jake_olsby.xlsx\n')

#-----------------------------------------------------------------------------
##############################################################################
# if __name__ == "__main__"
##############################################################################
#-----------------------------------------------------------------------------
if __name__ == "__main__":
    print("Executing as main program")
    print("Value of __name__ is: ", __name__)
    # verify_credentials is a method, so it must be called; the original only
    # referenced it, which is always truthy
    if api.verify_credentials():
        print('We successfully logged in.\n')
    # Part 1
    print('Starting Part 1 of Twitter Python Script by <NAME>...\n')
    get_tweets()
    # Part 2
    print('Starting Part 2 of Twitter Python Script by <NAME>...\n')
    hashtag_searcher()
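
# Usage sketch (editor's example, not part of the original script): both
# functions can also be called with explicit arguments, e.g.
#
#   get_tweets('@nasa', number_of_tweets=50)
#   hashtag_searcher('#python', count_of_tweets=200, to_date='2018-08-01')
#
# The handles and hashtags above are illustrative; valid credentials must be
# set in twitter_config.py first.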
f4f27d8cdb787ebbe104d15528795b19446d4e3c
[ "Python" ]
2
Python
jolsby/Twitter-Analysis
b84bd60b027ae264913eeff02d45e64c1e7ac6e2
b532ce4b2b3e75443688ed6b4484b588652d222b
refs/heads/master
<file_sep>/**
 *
 */
package brick.view;

import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;

import brick.model.Game;
import brick.model.Game.STATE;

/**
 * @author BigRam
 */
public class Menu extends MouseAdapter {

	private Game game;

	public Menu(Game game) {
		this.game = game;
	}

	@Override
	public void mousePressed(MouseEvent e) {
		int mx = e.getX();
		int my = e.getY();
		if (mouseOver(mx, my, 300, 200, 200, 80)) {
			// clicked on Play
			game.setState(STATE.game);
			game.setIsGameStarted(true);
			System.out.print("In the Play button"); // was a French debug message ("Dans le bouton Play")
			game.initGame();
			game.createBricks();
			game.createPaddle();
			game.createBall();
			game.createHud();
		} else if (mouseOver(mx, my, 300, 300, 200, 80)) {
			// clicked on Help
		} else if (mouseOver(mx, my, 300, 400, 200, 80)) {
			// clicked on Quit
			System.exit(1);
		}
	}

	/* (non-Javadoc)
	 * @see java.awt.event.MouseAdapter#mouseReleased(java.awt.event.MouseEvent)
	 */
	@Override
	public void mouseReleased(MouseEvent e) {
	}

	public boolean mouseOver(int mx, int my, int x, int y, int width, int height) {
		if (mx > x && mx < x + width) {
			if (my > y && my < y + height) {
				return true;
			}
		}
		return false;
	}

	public void render(Graphics g) {
		g.setColor(Color.WHITE);
		Font font = new Font("Arial", Font.BOLD, 62);
		g.setFont(font);
		g.drawString("MENU", 300, 120);
		// draw buttons
		Font font2 = new Font("Arial", Font.BOLD, 40);
		g.setFont(font2);
		g.drawRect(300, 200, 200, 80);
		g.drawString("Play", 360, 250);
		g.drawRect(300, 300, 200, 80);
		g.drawString("Help", 360, 350);
		g.drawRect(300, 400, 200, 80);
		g.drawString("Quit", 360, 450);
	}

	public void tick() {
		// nothing to update yet (the previous comment here was a leftover
		// "auto-generated constructor stub" from the IDE)
	}
}
<file_sep>package brick.utils;

public enum Direction {
	RIGHT, LEFT, UP, DOWN;
}
<file_sep>/**
 *
 */
package brick.utils;

/**
 * @author BigRam
 */
public enum ID {
	Paddle(),
	Ball(),
	Brick(),
	BonusLargerPaddle(),
	Menu(),
	Hud();
}
<file_sep>
package brick.controller;

import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.util.ArrayList;

import brick.model.Game;
import brick.model.Game.STATE;
import brick.model.GameObject;
import brick.utils.ID;
import brick.view.Ball;
import brick.view.Bonus;
import brick.view.BonusLargerPaddle;
import brick.view.Brick;
import brick.view.Fenetre;

/**
 * @author BigRam
 */
public class Controller {

	private boolean lifeLost = false;
	private Fenetre fenetre;
	private Game game;
	private boolean isGameOver = false;
	public static ArrayList<GameObject> objects = new ArrayList<>();
	public static ArrayList<GameObject> toRemove = new ArrayList<>();

	public Controller(Game game, Fenetre fenetre) {
		this.game = game;
		this.fenetre = fenetre;
	}

	/**
	 * @return the game
	 */
	public Game getGame() {
		return game;
	}

	/**
	 * @return the fenetre
	 */
	public Fenetre getFenetre() {
		return fenetre;
	}

	private void gameLost() {
		System.out.println("Game over");
		game.setIsGameStarted(false);
		clearObjects();
		fenetre.repaint();
		isGameOver = true;
	}

	/**
	 * @param b
	 */
	public void setLifeLost(boolean b) {
		lifeLost = b;
		if (game.getLife() >= 0) {
			// remove the balls and the paddle from the GameObject array
			// and create them for a new try
			game.newTry();
		} else {
			// game over
			gameLost();
		}
	}

	public void tick() {
		for (GameObject object : objects) {
			object.tick();
			// check if ball is lost
			if (object.getId() == ID.Ball) {
				if (((Ball) object).isLost()) {
					looseLife();
					return;
				} else {
					collisionWithBrick((Ball) object);
				}
			}
			// check collision between paddle and bonus
			if
(object.getId() == ID.BonusLargerPaddle) { Bonus bonus = (BonusLargerPaddle)object; if ((game.getPaddle().getBounds().toRectBounds().intersects((bonus.getBounds().toRectBounds())))){ game.getPaddle().setWidth(game.getPaddle().getWidth() + 20); game.getPaddle().setX(game.getPaddle().getX() - 10); toRemove.add(bonus); System.out.println("bonus removed : "+ bonus.toString()); } } } objects.removeAll(toRemove); // check if total number of bricks equals zero if (game.getNbBricks() == 0 && game.getScore() > 0) { // Level won setLevelWon(); } } /** * */ private void setLevelWon() { game.setIsGameStarted(false); game.setLevelWon(true); clearObjects(); fenetre.repaint(); } /** * */ private void looseLife() { game.looseLife(); game.getHud().setLife(game.getLife()); } /** * @param object * @return */ private void collisionWithBrick(Ball ball) { for (GameObject object : objects) { if (object.getId() == ID.Brick) { Brick brick = (Brick) object; if (ball.getX() >= brick.getX() && ball.getX() < brick.getX() + Brick.WIDTH && ball.getY() + ball.HEIGHT >= brick.getY() && ball.getY() < brick.getY() + Brick.HEIGHT) { /* * ball has touched a brick * * check which direction the ball should bounce to */ if (ball.getX() <= brick.getX()) { // touched the left side of the brick ball.setVitX(ball.getVitX() * -1); } else if (ball.getX() >= brick.getX() + Brick.WIDTH) { // touched the right side of the brick ball.setVitX(ball.getVitX() * -1); } else if (ball.getY() + ball.HEIGHT >= brick.getY()) { // touched the top side of the brick ball.setVitY(ball.getVitY() * -1); } else if (ball.getY() <= brick.getY() + Brick.WIDTH) { // touched the bottom side of the brick ball.setVitY(ball.getVitY() * -1); } // increment score score(); // if brick contains a bonus, let the bonus fall if (brick.getBonus() != null) { brick.getBonus().setDiscovered(true); } ((Brick) object).decreaseLife(); if (((Brick) object).getLife() == 0) { // remove the brick toRemove.add(object); // decrease total number of bricks game.setNbBricks(game.getNbBricks() - 1); } } } } } /** * */ private void score() { // update the variable score for the whole game game.addScore(); // update the hud game.getHud().setScore(game.getScore()); } public void render(Graphics g) { if (game.getState() == STATE.game) { for (GameObject object : objects) { object.render(g); } if (game.isLevelWon()) { g.setColor(Color.WHITE); Font font = new Font("Arial", Font.BOLD, 82); g.setFont(font); g.drawString("LEVEL WON", 160, 200); } if (isGameOver) { g.setColor(Color.WHITE); Font font = new Font("Arial", Font.BOLD, 82); g.setFont(font); g.drawString("GAME OVER", 160, 200); } } else if (game.getState() == STATE.menu) { game.getMenu().render(g); } } public void addObject(GameObject o) { this.objects.add(o); } public void removeObject(GameObject o) { this.objects.remove(o); } /** * @return */ public ArrayList<GameObject> getObjects() { return objects; } /** * */ public void clearObjects() { objects.clear(); } } <file_sep>package brick.model; import java.awt.Canvas; import java.awt.Color; import java.awt.Graphics; import java.awt.image.BufferStrategy; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Random; import com.sun.org.glassfish.external.statistics.BoundaryStatistic; import brick.controller.Controller; import brick.controller.KeyInput; import brick.utils.ID; import brick.view.Ball; import brick.view.Bonus; import brick.view.BonusLargerPaddle; import brick.view.Brick; import brick.view.Fenetre; import brick.view.Hud; import 
brick.view.Menu; import brick.view.Paddle; /** * @author BigRam * */ public class Game extends Canvas implements Runnable { private static final long serialVersionUID = -3291259578636120204L; private static final int NB_COL = 10; private static final int NB_LINES = 2; Thread thread; Fenetre fenetre; public static int PADDLE_Y; private boolean running = false; private Controller controller; private int gridStartX, gridStartY, space; private boolean isGameStarted; private int paddleStepX, paddleStepY; int pause = 10; private boolean lifeLost = false; private int life; private int score; private boolean readyToPlay; public static Paddle paddle; private Ball lastBall; public Hud hud; private int nbBricks; private boolean isGameOver = false; private int currentLevel; private boolean levelWon; private Menu menu; private STATE state = STATE.menu; private ArrayList<Integer> brickWithBonuses = new ArrayList<>(); Random r = new Random(); private int nbOfBonuses; public Game() { fenetre = new Fenetre(); controller = new Controller(this, fenetre); fenetre.getFrame().add(this); PADDLE_Y = fenetre.getFrame().getHeight() - 100; this.addKeyListener(new KeyInput(controller)); this.menu = new Menu(this); this.addMouseListener(menu); this.start(); // if (state == STATE.game) { // initGame(); // createBricks(); // createPaddle(); // createBall(); // createHud(); // } } public void initGame() { gridStartX = 100; gridStartY = 100; space = 5; isGameStarted = false; paddleStepX = 20; paddleStepY = 0; life = 1; currentLevel = 1; levelWon = false; isGameOver = false; } public void createPaddle() { // calculate center X paddle = new Paddle(0, PADDLE_Y, ID.Paddle, Color.gray, controller); int centerX = fenetre.getFrame().getWidth() / 2 - paddle.getWidth() / 2; paddle.setX(centerX); paddle.setY(paddle.getY()); controller.addObject(paddle); } public void createBall() { Ball ball = new Ball(0, 0, ID.Ball, Color.WHITE); lastBall = ball; centerBall(ball); controller.addObject(ball); } public void createHud() { hud = new Hud(0, 0, ID.Hud, controller); hud.setLife(life); hud.setScore(score); controller.addObject(hud); } public void createBricks() { generateBonuses(currentLevel); int k = 0; Brick br; for (int i = 0; i < NB_LINES; i++) { // line gridStartY += Brick.HEIGHT + space; for (int j = 0; j < NB_COL; j++) { if (((i +1) * j) == brickWithBonuses.get(k)) { Bonus bonus = new BonusLargerPaddle(gridStartX + 10, gridStartY + 5, ID.BonusLargerPaddle); controller.addObject(bonus); br = new Brick(gridStartX, gridStartY, ID.Brick, 1, bonus); if (k < 2) k++; }else br = new Brick(gridStartX, gridStartY, ID.Brick, 1, null); gridStartX += Brick.WIDTH + space; controller.addObject(br); nbBricks++; } gridStartX = 100; } } public void centerBall(Ball b) { int posX = paddle.getX() + (paddle.getWidth() / 2) - b.getWidth() / 2; int posY = paddle.getY() - b.getHeight(); b.setX(posX); b.setY(posY); } public void setIsGameStarted(boolean state) { isGameStarted = state; if (isGameStarted == true) { // ball has been launched // increase ball velocity moveBall(); } } /** * @return */ public boolean isGameStarted() { return isGameStarted; } /** * @return the nbBricks */ public int getNbBricks() { return nbBricks; } /** * @param nbBricks the nbBricks to set */ public void setNbBricks(int nbBricks) { this.nbBricks = nbBricks; } /** * @return the paddleStepX */ public int getPaddleStepX() { return paddleStepX; } /** * @return the paddleStepY */ public int getPaddleStepY() { return paddleStepY; } /** * @param paddleStepX the paddleStepX to set */ 
public void setPaddleStepX(int paddleStepX) { this.paddleStepX = paddleStepX; } /** * @param paddleStepY the paddleStepY to set */ public void setPaddleStepY(int paddleStepY) { this.paddleStepY = paddleStepY; } /** * @return the endPaddleX */ public float getEndPaddleX() { return paddle.getX() + paddle.getWidth(); } /** * */ public void looseLife() { System.out.println("You lose a life"); life--; controller.setLifeLost(true); } public boolean isReadyToPlay() { return readyToPlay; } public void setReadyToPlay(boolean readyToPlay) { this.readyToPlay = readyToPlay; } public int getLife() { return life; } /** * @param life the life to set */ public void setLife(int life) { this.life = life; } /** * @return the score */ public int getScore() { return score; } /** * @param score the score to set */ public void setScore(int score) { this.score = score; } public void gameOver() { stopThread(); } public synchronized void start() { thread = new Thread(this); thread.start(); running = true; } public synchronized void stopThread() { try { thread.join(); running = false; } catch (InterruptedException e) { e.printStackTrace(); } } @Override public void run() { long lastTime = System.nanoTime(); double amountOfTicks = 60.0; double ns = 1000000000 / amountOfTicks; double delta = 0.0; long timer = System.currentTimeMillis(); int frames = 0; while (running) { long now = System.nanoTime(); delta += (now - lastTime) / ns; lastTime = now; while (delta >= 1) { tick(); delta--; } if (running) { render(); } frames++; if (System.currentTimeMillis() - timer > 1000) { timer += 1000; frames = 0; } } stopThread(); } private void tick() { if (getState() == STATE.game) { controller.tick(); } else if (getState() == STATE.menu) { menu.tick(); } } private void render() { BufferStrategy bs = this.getBufferStrategy(); if (bs == null) { this.createBufferStrategy(3); return; } Graphics g = bs.getDrawGraphics(); // background color g.setColor(Color.BLACK); g.fillRect(0, 0, fenetre.getFrame().getWidth(), fenetre.getFrame().getHeight()); controller.render(g); g.dispose(); bs.show(); } public Paddle getPaddle() { return paddle; } /** * @return the hud */ public Hud getHud() { return hud; } public void moveBall() { for (GameObject object : controller.getObjects()) if (object.getId() == ID.Ball) { object.setVitX(((Ball) object).getXSpeed()); object.setVitY(((Ball) object).getYSpeed()); } } /** * @return */ public Ball getLastBall() { return lastBall; } /** * */ public void newTry() { for (GameObject object : controller.objects) { if (object.getId() == ID.Ball || object.getId() == ID.Paddle || object.getId() == ID.Hud) { // remove controller.toRemove.add(object); setIsGameStarted(false); } } controller.objects.removeAll(controller.toRemove); // now prepare the game for a new try // new paddle createPaddle(); createBall(); createHud(); } /** * */ public void addScore() { score += 10; } public int clamp(int x, int min, int max) { if (x < min) x = min; else if (x > max) x = max; return x; } /** * @return the isGameOver */ public boolean isGameOver() { return isGameOver; } /** * @param isGameOver the isGameOver to set */ public void setGameOver(boolean isGameOver) { this.isGameOver = isGameOver; } /** * @param levelWon the levelWon to set */ public void setLevelWon(boolean levelWon) { this.levelWon = levelWon; currentLevel++; } /** * @return the levelWon */ public boolean isLevelWon() { return levelWon; } /** * @param state the state to set */ public void setState(STATE state) { this.state = state; } /** * @return the state */ public 
STATE getState() { return state; } public enum STATE { game, menu; } /** * @return the menu */ public Menu getMenu() { return menu; } private void generateBonuses(int currentLevel){ if (currentLevel == 1) { // 3 bonuses to generate nbOfBonuses = 3; int value; for (int i = 0; i < nbOfBonuses; i++) { do { value = r.nextInt(NB_LINES * NB_COL +1); }while (brickWithBonuses.contains(value)); brickWithBonuses.add(value); System.out.println(brickWithBonuses.get(i)); } Collections.sort(brickWithBonuses); System.out.println(Arrays.asList(brickWithBonuses)); } } } <file_sep>/** * */ package brick.view; import java.awt.Color; import java.awt.Graphics; import com.sun.javafx.geom.Rectangle; import com.sun.org.apache.bcel.internal.generic.IXOR; import brick.controller.Controller; import brick.model.GameObject; import brick.utils.ID; /** * @author BigRam * */ public class Paddle extends GameObject{ private Color color; public static int WIDTH = 80; public static int HEIGHT = 8; public static int STEP = 8; private Controller controller; /** * * @param posX position in X * @param posY position in Y */ public Paddle(int posX, int posY, ID id, Color col, Controller controller) { super(posX, posY, id); this.color = col; setWidth(WIDTH); setHeight(HEIGHT); this.controller = controller; vitX = 0; vitY = 0; } public Color getColor() { return color; } @Override public void tick() { x += vitX; y += vitY; int x = controller.getGame().clamp(getX(), 0, Fenetre.WINDOW_WIDTH - getWidth()); setX(x); if (!controller.getGame().isGameStarted()) { // stick the ball centered on the paddle controller.getGame().centerBall(controller.getGame().getLastBall()); } } @Override public void render(Graphics g) { g.setColor(color); g.fillRect(x, y, getWidth(), getHeight()); } /* (non-Javadoc) * @see brick.model.GameObject#getBounds() */ @Override public Rectangle getBounds() { return new Rectangle(x,y,getWidth(),getHeight()); } } <file_sep>/** * */ package brick.view; import java.awt.Color; import java.awt.Font; import java.awt.Graphics; import com.sun.javafx.geom.Rectangle; import brick.controller.Controller; import brick.model.GameObject; import brick.utils.ID; /** * @author BigRam * */ public class Hud extends GameObject { private int life; private int score; private Controller controller; /** * @param x * @param y * @param id */ public Hud(int x, int y, ID id, Controller controller) { super(x, y, id); this.controller = controller; } /* * (non-Javadoc) * * @see brick.model.GameObject#tick() */ @Override public void tick() { } /* * (non-Javadoc) * * @see brick.model.GameObject#render(java.awt.Graphics) */ @Override public void render(Graphics g) { String strLife = "Life: " + life; String strScore = "Score:" + score; g.setColor(Color.BLACK); g.setFont(new Font("Sans Serif", Font.BOLD, 28)); g.drawString(strLife, 10, 560); g.drawString(strScore, 600, 560); g.setColor(Color.decode("#a93e5a")); g.setFont(new Font("Sans Serif", Font.BOLD, 28)); g.drawString(strLife, 10, 560); g.drawString(strScore, 600, 560); } /** * @param life the life to set */ public void setLife(int life) { this.life = life; } /** * @param score the score to set */ public void setScore(int score) { this.score = score; } /* (non-Javadoc) * @see brick.model.GameObject#getBounds() */ @Override public Rectangle getBounds() { // TODO Auto-generated constructor stub return null; } } <file_sep>/** * */ package brick.view; import java.awt.Color; import java.awt.Font; import java.awt.Graphics; import com.sun.javafx.geom.Rectangle; import brick.utils.ID; /** * @author BigRam * 
*/
public class BonusLargerPaddle extends Bonus {

	public BonusLargerPaddle(int x, int y, ID id) {
		super(x, y, id);
	}

	@Override
	public void tick() {
		if (isDiscovered()) {
			y += vitY;
		}
	}

	@Override
	public void render(Graphics g) {
		if (isDiscovered()) {
			g.setColor(new Color(187, 100, 36));
			g.setFont(new Font("arial", Font.BOLD, 22));
			g.drawString("<====>", x, y);
		}
	}

	@Override
	public Rectangle getBounds() {
		return new Rectangle(x, y, width, height);
	}
}
ef0f02f272c8a081698446af63fab658945420f0
[ "Java" ]
8
Java
RamsesZH/Brick-Game
1d250dc55c886b2644c11b458918ada74aee7a9d
50d6c9f4ca432b31f492ebcf4b0fc430ad61fd31
refs/heads/master
<repo_name>ritvikdave/VoiceControlledSwitchBoard<file_sep>/smartswitch.py
import time
import urllib2
import RPi.GPIO as GPIO

# Use BCM pin numbering; pin 2 drives the light relay, pin 3 the fan relay
GPIO.setmode(GPIO.BCM)
GPIO.setup(2, GPIO.OUT)
GPIO.setup(3, GPIO.OUT)

light = GPIO.input(2)
fan = GPIO.input(3)

# Poll the cloud endpoint with the current switch states and apply the
# command code it returns (1/2 = both on/off, 3/4 = light on/off, 5/6 = fan on/off)
while True:
    response = urllib2.urlopen('https://listenuswitch.eu-gb.mybluemix.net/status?light=' + str(light) + '&fan=' + str(fan)).read()
    light = GPIO.input(2)
    fan = GPIO.input(3)
    print response
    print 'light=' + str(light)
    print 'fan =' + str(fan)
    if response == '1':
        GPIO.output(2, GPIO.HIGH)
        GPIO.output(3, GPIO.HIGH)
    elif response == '2':
        GPIO.output(2, GPIO.LOW)
        GPIO.output(3, GPIO.LOW)
    elif response == '3':
        GPIO.output(2, GPIO.HIGH)
    elif response == '4':
        GPIO.output(2, GPIO.LOW)
    elif response == '5':
        GPIO.output(3, GPIO.HIGH)
    elif response == '6':
        GPIO.output(3, GPIO.LOW)
<file_sep>/README.md
# VoiceControlledSwitchBoard
Upload the Python code onto a Raspberry Pi 3. The Node-RED file contains JSON data which needs to be uploaded to the Internet of Things Starter Application on IBM Cloud.
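For reference, smartswitch.py expects the cloud endpoint to answer `GET /status?light=<0|1>&fan=<0|1>` with one of the command codes `1`-`6` (both on/off, light on/off, fan on/off). The real endpoint is the Node-RED flow on IBM Cloud, which is not included in this listing; the sketch below is only a hypothetical stand-in written with Flask to illustrate that contract (the route name comes from the script above, while the `pending_command` store and everything else are assumptions):

```python
# Hypothetical stand-in for the Node-RED /status endpoint (illustration only).
from flask import Flask, request

app = Flask(__name__)

# Command code the voice front end last requested: '1'..'6', or '' for none.
pending_command = {'code': ''}

@app.route('/status')
def status():
    # The Pi reports its current relay states on every poll.
    light = request.args.get('light', '0')
    fan = request.args.get('fan', '0')
    print('pi reports light=%s fan=%s' % (light, fan))
    code = pending_command['code']
    pending_command['code'] = ''  # deliver each command only once
    return code

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080)
```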
b4a592046864d827c595d330b51ea4ad5c2ee73f
[ "Markdown", "Python" ]
2
Python
ritvikdave/VoiceControlledSwitchBoard
82acfc588111df752a1c309a8efeed95e586616f
f84c4c130d5dca7db7fe139c5a100e79237b8b43
refs/heads/master
<repo_name>dawidja94/CIS2561-IntroToCSHARP<file_sep>/WFMovieDB/WFMovieDB/MovieDBXmlMappings.cs using System; using System.Xml.Serialization; using System.Collections.Generic; namespace WFMovieDB { [XmlRoot(ElementName="Movie")] public class Movie { [XmlElement(ElementName="Id")] public int Id { get; set; } [XmlElement(ElementName="Title")] public string Title { get; set; } [XmlElement(ElementName="Released")] public int Released { get; set; } [XmlElement(ElementName="Gross")] public int Gross { get; set; } [XmlElement(ElementName="Studio")] public int Studio { get; set; } } [XmlRoot(ElementName="MovieTable")] public class MovieTable { [XmlElement(ElementName="Movie")] public List<Movie> Movies { get; set; } } [XmlRoot(ElementName="Studio")] public class Studio { [XmlElement(ElementName="Id")] public int Id { get; set; } [XmlElement(ElementName="Name")] public string Name { get; set; } } [XmlRoot(ElementName="StudioTable")] public class StudioTable { [XmlElement(ElementName="Studio")] public List<Studio> Studios { get; set; } } [XmlRoot(ElementName="MovieDB")] public class MovieDB { [XmlElement(ElementName="MovieTable")] public MovieTable MovieTable { get; set; } [XmlElement(ElementName="StudioTable")] public StudioTable StudioTable { get; set; } } } <file_sep>/WFMovieDB/MySQLDB/Class1.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using MySql.Data.MySqlClient; namespace MySQLDB { public class MySQLDB { public static void RunQuery(string connStr, string query, Func<string[], int> handleRecord) { MySqlConnection conn = null; try { conn = new MySqlConnection(connStr); conn.Open(); MySqlCommand command = new MySqlCommand(query, conn); MySqlDataReader reader = command.ExecuteReader(); //loop over each record in result set while (reader.Read()) { //roll up fields into a string array string[] fields = new string[reader.FieldCount]; for (int i = 0; i < reader.FieldCount; i++) { fields[i] = reader.GetString(i); } //we now have a record handleRecord(fields); } } catch (Exception e) { throw new Exception("Query failed: ", e); } finally { if (conn != null) conn.Close(); } } } } <file_sep>/Hangman/Hangman/Form1.cs using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; namespace Hangman { public partial class Form1 : Form { const int MAX_WRONG_GUESSES = 6; const int MAX_LETTERS = 10; TextBox[] letters = new TextBox[MAX_LETTERS]; String secretWord; int incorrectGuesses = 0; //initialize the set which stores the guessed letters HashSet<char> guessedLetters; public Form1() { InitializeComponent(); //initialize the set which stores the guessed letters guessedLetters = new HashSet<char>(); InitTextBoxes(); } void InitTextBoxes() { letters[0] = textBox1; letters[1] = textBox2; letters[2] = textBox3; letters[3] = textBox4; letters[4] = textBox5; letters[5] = textBox6; letters[6] = textBox7; letters[7] = textBox8; letters[8] = textBox9; letters[9] = textBox10; } private void exitToolStripMenuItem_Click(object sender, EventArgs e) { this.Close(); } private void Form1_Load(object sender, EventArgs e) { NewGame(); } void NewGame() { //Set secretWord to a new secret word. 
secretWord = WordList.WordList.GetWord(); //secretWord = "coffee"; //Call SetupTextBoxes() SetupTextBoxes(); //Clear the set of guessed letters(call its //Clear() function) guessedLetters.Clear(); //Clear the text box which displays the //guessed letters. guessedLettersText.Text = ""; // Set incorrectGuesses to 0 incorrectGuesses = 0; //Set the image in the big picture box to //image1. pictureBox1.Image = global::Hangman.Properties.Resources.image1; } void SetupTextBoxes() { for (int i = 0; i < MAX_LETTERS; i++) { if (i < secretWord.Length) { letters[i].Visible = true; letters[i].Text = "_"; } else { letters[i].Visible = false; } } } private void guessButton_Click(object sender, EventArgs e) { if (!(PlayerWon() || incorrectGuesses > MAX_WRONG_GUESSES)) { HandleGuess(); } } void HandleGuess() { if (guessBox.Text.Length > 0) //make sure something was entered { char ch = guessBox.Text[0]; //get the 1st character in the text //see if the player guessed it, if not, see if it's in the word if (guessedLetters.Contains(ch)) { MessageBox.Show("You've already guessed " + ch + "."); } else { //put the letter in the set of used letters guessedLetters.Add(ch); //append the character to the guessed letters text box guessedLettersText.Text += ch; //now use CheckGuess() to see if the that letter was in the secret word. //hint: if (CheckGuess(ch)) if (CheckGuess(ch)) { if (PlayerWon()) { MessageBox.Show("You win!"); } } else { incorrectGuesses++; NextPicture(); if (incorrectGuesses > MAX_WRONG_GUESSES) { MessageBox.Show("You lose! the word is " + secretWord); } } } } else { MessageBox.Show("Please enter a letter a-z"); return; } } bool CheckGuess(char ch) { bool found = false; for (int i = 0; i < secretWord.Length; i++) { //does char i in the word match char i in the word if (secretWord[i] == ch) { // we found a match! 
// FOR YOU TO DO: put the ch in letters array at positin i letters[i].Text = "" + ch; // FOR YOU TO DO: set found to true found = true; } } return found; } //displays the picture of the character in the PictureBox control public void NextPicture() { if (incorrectGuesses == 1) { pictureBox1.Image = global::Hangman.Properties.Resources.image2; } else if (incorrectGuesses == 2) { pictureBox1.Image = global::Hangman.Properties.Resources.image3; } else if (incorrectGuesses == 3) { pictureBox1.Image = global::Hangman.Properties.Resources.image4; } else if (incorrectGuesses == 4) { pictureBox1.Image = global::Hangman.Properties.Resources.image5; } else if (incorrectGuesses == 5) { pictureBox1.Image = global::Hangman.Properties.Resources.image6; } else if (incorrectGuesses == 6) { pictureBox1.Image = global::Hangman.Properties.Resources.image7; } } private bool PlayerWon() { //look at each char in the word and see //if it has been guessed for (int i = 0; i < secretWord.Length; i++) { if (!guessedLetters.Contains(secretWord[i])) { //found a character that hasn't been guessed yet return false; } } return true; //if we reached here, all the characters must have been guessed } private void newGameToolStripMenuItem_Click(object sender, EventArgs e) { NewGame(); } } } <file_sep>/Pokemon/Pokemon/Pokemon/XmlMappings.cs /* Licensed under the Apache License, Version 2.0 http://www.apache.org/licenses/LICENSE-2.0 */ using System; using System.Xml.Serialization; using System.Collections.Generic; namespace Pokemon { [XmlRoot(ElementName="Player")] public class Player { [XmlElement(ElementName="Id")] public int Id { get; set; } [XmlElement(ElementName="Name")] public string Name { get; set; } [XmlElement(ElementName="Username")] public string Username { get; set; } [XmlElement(ElementName = "City")] public string City { get; set; } [XmlElement(ElementName="Paid")] public Boolean Paid { get; set; } } [XmlRoot(ElementName="PlayerTable")] public class PlayerTable { [XmlElement(ElementName="Player")] public List<Player> Players { get; set; } } [XmlRoot(ElementName="Pokemon")] public class Pokemon { [XmlElement(ElementName="Id")] public int Id { get; set; } [XmlElement(ElementName="Name")] public string Name { get; set; } [XmlElement(ElementName="Attack")] public int Attack { get; set; } [XmlElement(ElementName="Defense")] public int Defense { get; set; } } [XmlRoot(ElementName="PokemonTable")] public class PokemonTable { [XmlElement(ElementName="Pokemon")] public List<Pokemon> Pokemon { get; set; } } [XmlRoot(ElementName="Ownership")] public class Ownership { [XmlElement(ElementName="PlayerId")] public int PlayerId { get; set; } [XmlElement(ElementName="PokemonId")] public int PokemonId { get; set; } [XmlElement(ElementName="Level")] public int Level { get; set; } [XmlElement(ElementName="NumberOwned")] public int NumberOwned { get; set; } } [XmlRoot(ElementName="OwnershipTable")] public class OwnershipTable { [XmlElement(ElementName="Ownership")] public List<Ownership> Ownership { get; set; } } [XmlRoot(ElementName="PokemonDB")] public class PokemonDB { [XmlElement(ElementName="PlayerTable")] public PlayerTable PlayerTable { get; set; } [XmlElement(ElementName="PokemonTable")] public PokemonTable PokemonTable { get; set; } [XmlElement(ElementName="OwnershipTable")] public OwnershipTable OwnershipTable { get; set; } } } <file_sep>/WFMovieDB/WFMovieDB/Form1.cs using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; 
using System.Threading.Tasks; using System.Windows.Forms; using System.Net; using System.Xml.Serialization; using System.IO; namespace WFMovieDB { public partial class Form1 : Form { /* List<Movie> movieTable = new List<Movie>(); List<Studio> studioTable = new List<Studio>(); string connStr = "server=10.32.45.68;uid=dbuser;pwd=<PASSWORD>;database=MovieDB"; string movieSQL = "SELECT ID, TITLE, RELEASED, GROSS, STUDIO FROM Movies"; //db name is case sensitive string studioSQL = "SELECT ID, NAME FROM Studios"; */ MovieDB movieDB = new MovieDB(); public Form1() { InitializeComponent(); /* //load the movies MySQLDB.MySQLDB.RunQuery(connStr, movieSQL, MakeMovie); dataGridView1.DataSource = movieTable; //has to be a list to use dataGridView MySQLDB.MySQLDB.RunQuery(connStr, studioSQL, MakeStudio); var studioNames = (from s in studioTable select s.Name).Distinct(); //filters out the duplicates comboBox1.Items.AddRange(studioNames.ToArray()); */ LoadXml(); } /* public int MakeStudio(string[] fields) { Studio s = new Studio() { Id = Convert.ToInt32(fields[0]), Name = fields[1] }; studioTable.Add(s); return 0; }*/ /* public int MakeMovie(string[] fields) { Movie m = new Movie() { Id = Convert.ToInt32(fields[0]), Title = fields[1], Released = Convert.ToInt32(fields[2]), Gross = Convert.ToInt32(fields[3]), Studio = Convert.ToInt32(fields[4]) }; movieTable.Add(m); return 0; //Func has to return an integer }*/ private void comboBox1_SelectedIndexChanged(object sender, EventArgs e) { string studio = comboBox1.Items[comboBox1.SelectedIndex].ToString(); var movies = from m in movieDB.MovieTable.Movies join s in movieDB.StudioTable.Studios on m.Studio equals s.Id //compare contents of an object where s.Name == studio //compare the references select new { m.Title, m.Released, s.Name }; dataGridView2.DataSource = movies.ToList(); } void LoadXml() { WebRequest request = WebRequest.Create("http://mrwrightteacher.net/CIS2561/MovieDB.php"); WebResponse resp = request.GetResponse(); Stream dataStr = resp.GetResponseStream(); XmlSerializer serializer = new XmlSerializer(typeof(MovieDB)); movieDB = (MovieDB)serializer.Deserialize(dataStr); //populate movie table dataGridView1.DataSource = movieDB.MovieTable.Movies; //populate studio names var studioNames = (from s in movieDB.StudioTable.Studios select s.Name).Distinct(); //populate combo box comboBox1.Items.AddRange(studioNames.ToArray()); } private void dataGridView1_CellContentClick(object sender, DataGridViewCellEventArgs e) { } } /* class Movie { public int Id { get; set; } public string Title { get; set; } public int Released { get; set; } public int Gross { get; set; } public int Studio { get; set; } } class Studio { public int Id { get; set; } public string Name { get; set; } } */ } <file_sep>/Pokemon/Pokemon/Pokemon/Form1.cs using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; using System.Net; using System.Xml.Serialization; using System.IO; namespace Pokemon { public partial class Form1 : Form { PokemonDB pokemonDB; public Form1() { InitializeComponent(); LoadXml(); } private void Form1_Load(object sender, EventArgs e) { } private void dataGridView1_CellContentClick(object sender, DataGridViewCellEventArgs e) { } private void textBox1_TextChanged(object sender, EventArgs e) { } void LoadXml() { WebRequest request = WebRequest.Create("http://mrwrightteacher.net/NianticCorp/PokemonDB.php"); 
WebResponse resp = request.GetResponse(); Stream dataStr = resp.GetResponseStream(); XmlSerializer serializer = new XmlSerializer(typeof(PokemonDB)); pokemonDB = (PokemonDB)serializer.Deserialize(dataStr); //populate lists for players dataGridView4.DataSource = pokemonDB.PlayerTable.Players; dataGridView5.DataSource = pokemonDB.PokemonTable.Pokemon; dataGridView6.DataSource = pokemonDB.OwnershipTable.Ownership; var cityName = (from player in pokemonDB.PlayerTable.Players select player.City).Distinct(); //populate combo box //populate combobox for cities comboBox1.Items.AddRange(cityName.ToArray()); } private void textBox2_TextChanged(object sender, EventArgs e) { } private void tabPage1_Click(object sender, EventArgs e) { } private void button1_Click(object sender, EventArgs e) { var result = from pokemon in pokemonDB.PokemonTable.Pokemon join ownership in pokemonDB.OwnershipTable.Ownership on pokemon.Id equals ownership.PokemonId join player in pokemonDB.PlayerTable.Players on ownership.PlayerId equals player.Id where player.Name == textBox1.Text select new { pokemon.Name, ownership.Level, ownership.NumberOwned }; dataGridView1.DataSource = result.ToList(); } private void button2_Click(object sender, EventArgs e) { var result = from pokemon in pokemonDB.PokemonTable.Pokemon join ownership in pokemonDB.OwnershipTable.Ownership on pokemon.Id equals ownership.PokemonId join player in pokemonDB.PlayerTable.Players on ownership.PlayerId equals player.Id where player.Id.ToString() == textBox2.Text select new { pokemon.Name, ownership.Level, ownership.NumberOwned }; dataGridView1.DataSource = result.ToList(); } private void dataGridView2_CellContentClick(object sender, DataGridViewCellEventArgs e) { } private void comboBox1_SelectedIndexChanged(object sender, EventArgs e) { string city = comboBox1.Items[comboBox1.SelectedIndex].ToString(); var result = from player in pokemonDB.PlayerTable.Players where player.City == city select new { player.Id, player.Name, player.Username, player.City, player.Paid }; dataGridView2.DataSource = result.ToList(); } private void textBox3_TextChanged(object sender, EventArgs e) { } private void textBox4_TextChanged(object sender, EventArgs e) { } private void button4_Click(object sender, EventArgs e) { var results = from pokemon in pokemonDB.PokemonTable.Pokemon where pokemon.Attack >= Convert.ToInt32(textBox4.Text) orderby pokemon.Attack descending select pokemon; dataGridView3.DataSource = results.ToList(); } private void dataGridView3_CellContentClick(object sender, DataGridViewCellEventArgs e) { } private void dataGridView5_CellContentClick(object sender, DataGridViewCellEventArgs e) { } private void dataGridView6_CellContentClick(object sender, DataGridViewCellEventArgs e) { } private void menuStrip1_ItemClicked(object sender, ToolStripItemClickedEventArgs e) { // MessageBox.Show("By <NAME>, 2018"); } private void exitToolStripMenuItem1_Click(object sender, EventArgs e) { Application.Exit(); } private void aboutToolStripMenuItem1_Click(object sender, EventArgs e) { MessageBox.Show("By <NAME>, 2018", caption: "About"); } private void dataGridView7_CellContentClick(object sender, DataGridViewCellEventArgs e) { } private void textBox5_TextChanged(object sender, EventArgs e) { } private void button5_Click(object sender, EventArgs e) { var results = from pokemon in pokemonDB.PokemonTable.Pokemon where pokemon.Defense <= Convert.ToInt32(textBox5.Text) orderby pokemon.Defense descending select pokemon; dataGridView7.DataSource = results.ToList(); } } } 
<file_sep>/README.txt
This repository contains projects for CIS2561 - Intro to C#
<file_sep>/Pokemon/Pokemon/MySqlDB/Class1.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using MySql.Data.MySqlClient;

namespace MySqlDB
{
    public class Class1
    {
    }
}
<file_sep>/Hangman/Hangman/WordList.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;
using System.Windows;
using System.Reflection;
using System.Windows.Forms;

namespace WordList
{
    class WordList
    {
        private static string[] words = {
            "admiral", "anaconda", "beekeeper", "blabbering", "bummer", "buzzer", "difficult", "dizzy",
            "fizzy", "fuzzy", "fluffy", "hound", "joking", "joker", "overjoyed", "poncho",
            "yellow", "zigzagging", "sausage", "blubber", "pencil", "cloud", "moon", "water",
            "computer", "school", "network", "hammer", "walking", "mediocre", "literature", "chair",
            "window", "cords", "musical", "zebra", "xylophone", "penguin", "home", "hound",
            "final", "inked", "teacher", "funny", "website", "banana", "uncle", "softly",
            "awesome", "attach", "blue", "internet", "bottle", "tight", "zone", "tomato",
            "prison", "hydro", "cleaning", "television", "send", "frog", "coffee", "book",
            "zooming", "falling", "evilly", "gamer", "juice", "monitor", "captain", "bonding",
            "loudly", "thudding", "guitar", "shaving", "hair", "soccer", "water", "racket",
            "table", "late", "media", "desktop", "flipper", "club", "flying", "smooth",
            "monster", "purple", "guardian", "bold", "hyperlink", "presenter", "world", "national",
            "comment", "element", "magic", "lion", "sand", "crust", "toast", "hunter",
            "forest", "foraging", "silent", "pong", "waves"
        };

        public static string GetWord()
        {
            Random r = new Random();
            return words[r.Next(0, words.Length)];
        }
    }
}
1c532f3b8e8644edd337791386374d2f302a7c65
[ "C#", "Text" ]
9
C#
dawidja94/CIS2561-IntroToCSHARP
dfe8a09ae4d014d29da04e3bc28a06fd12fa4da0
155c4b26fc530578fee630de547b27a9b6094f12
refs/heads/master
<file_sep># Laravel test helpers

## Installation

Include the required files in your `composer.json`:

```
"autoload-dev": {
    "psr-4": {
        "Tests\\": "tests/"
    },
    "files": [
        "tests/utilities/factory_helpers.php"
    ]
},
```
<file_sep><?php

namespace Tests;

use App\User;
use Illuminate\Foundation\Testing\TestCase as BaseTestCase;

abstract class TestCase extends BaseTestCase
{
    // Declared explicitly so login() does not rely on a dynamic property
    protected $user;

    protected function login(User $user = null)
    {
        if ($user) {
            return $this->actingAs($user);
        }

        if ( ! $this->user) {
            $this->user = create(User::class);
        }

        return $this->actingAs($this->user);
    }
}
cb5fb35cee8138a79311765c35a5519fc88548d6
[ "Markdown", "PHP" ]
2
Markdown
ians88/laravel-test-helpers
9185c7ee452dc0145750317df70b0bd7b42efa57
fb9f98e3643cc48ed18222f57577b2880e88a2df
refs/heads/master
<file_sep>var ExtractText = require('extract-text-webpack-plugin');
var LessClean = require('less-plugin-clean-css');
var HtmlFile = require('html-webpack-plugin');
var Copy = require('copy-webpack-plugin');
var webpack = require('webpack');

var config = {
    cache: true,
    entry: {
        android: './src/android/main.less',
        ios: './src/ios/main.less'
    },
    output: {
        path: 'build',
        filename: '[name].js',
        pathinfo: false
    },
    module: {
        loaders: [
            {
                test: /\.(png|jpe?g|gif|svg)$/,
                loaders: [
                    'url?limit=8192&name=asset/[name].[ext]',
                    'image-webpack?{progressive:true, optimizationLevel: 7, interlaced: false, pngquant:{quality: "75-90", speed: 4}}'
                ]
            },
            {
                test: /\.less$/,
                loader: ExtractText.extract(
                    'css!autoprefixer?browsers=Android >= 4 iOS >= 7' +
                    '!less?config=lessLoaderCustom'
                )
            }
        ]
    },
    lessLoader: {
        lessPlugins: [
            new LessClean({advanced: true})
        ]
    },
    plugins: [
        new ExtractText('[name].css'),
        new Copy([
            { from: './asset', to: 'asset' }
        ])
    ]
};

var k;
for (k in config.entry) {
    config.plugins.push(
        new HtmlFile({
            filename: k + '.html',
            template: 'index.html',
            hash: true,
            inject: 'head',
            chunks: [k]
        })
    );
}

module.exports = config;
<file_sep>WEBPACK='node_modules/webpack/bin/webpack.js'

.PHONY: build
build:
	$(WEBPACK) -p
	mv build/android.css build/android.min.css
	mv build/ios.css build/ios.min.css
	$(WEBPACK)
	rm build/android.js
	rm build/ios.js
<file_sep>Mobile Application UI
======

Mobile CSS UI for hybrid application development

## What is it?

It is a minimalistic mobile application UI, intended mainly for hybrid application development. But you can also use it on your mobile site. The main idea is to use as little as you can while covering the most widely used mobile elements. Need more? Develop it yourself for your needs.

## Demo

Check out how it looks online.

Android example: http://dmitrykuzmenkov.github.io/mob-app-ui/android.html

IOS example: http://dmitrykuzmenkov.github.io/mob-app-ui/ios.html

## Installation

You can clone the repository directly with git or use npm.

```
npm install mob-app-ui
```

## Usage

Just clone the git repository, get android.css or ios.css from the build directory and start marking up your native-looking HTML page!

You can also use the source code in your project. Just import the main.less file you need from the src folder into your less. For example:

```less
@import 'src/android/main.less';
```

or

```less
@import 'src/ios/main.less';
```

If you installed the package using npm, just require the less file in your application with webpack:

```javascript
require('mob-app-ui/src/ios/main.less');
```

## Webpack configuration

If you use the npm package, I recommend this example webpack config to handle less files:

```javascript
{
    test: /\.less$/,
    loader: ExtractText.extract(
        'css!autoprefixer?browsers=Android >= 4 iOS >= 7' +
        '!less?config=lessLoaderCustom'
    )
}
```

## Building distribution

To build a distribution for use as a single css file in your project, just run

```bash
make build
```

In the build folder you will find android.css and ios.css along with minified versions of them. There are also android.html and ios.html as examples of marking up the supported elements. Just include the CSS files in your project and start developing a native-looking app.
671611f1295294e249ddf7a36dc7ba5a916c91eb
[ "JavaScript", "Makefile", "Markdown" ]
3
JavaScript
dmitrykuzmenkov/mob-app-ui
afe8654cf481d50d41484ba9b64d977cca567d96
bd8ba8cd75cc82e81cf1b097f4d86807318e92d0
refs/heads/master
<file_sep>/*
 * jMatTcpJade
 * <NAME>, 2011
 * Establishes a TCP connection with a JADE agent in Java
 */
package javafiles;

import jade.core.AID;
import jade.core.Agent;
import jade.core.behaviours.SimpleBehaviour;
import jade.lang.acl.ACLMessage;
import jade.lang.acl.StringACLCodec;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;

public class TcpTestAgent extends Agent {

	private static final long serialVersionUID = -4394243932169660776L;

	// Class variables
	ServerSocket srvr = null;
	Socket skt = null;
	BufferedReader in;
	PrintWriter out;
	int value = 0;

	// Constructor
	public TcpTestAgent() {
		super();
	}

	// Setup method
	protected void setup() {
		System.out.println("Agent started");

		// Create the TCP connection
		try {
			// Create server and socket
			srvr = new ServerSocket(1234);
			skt = srvr.accept();
			System.out.println("Server connection initiated");

			// Create writer and reader to send and receive data
			out = new PrintWriter(skt.getOutputStream(), true);
			in = new BufferedReader(new InputStreamReader(skt.getInputStream()));
		} catch (IOException e) {
			e.printStackTrace();
		}

		// Run behavior
		CommWithMatlab commWithMatlab = new CommWithMatlab();
		addBehaviour(commWithMatlab);
	} // End setup

	class CommWithMatlab extends SimpleBehaviour {

		private static final long serialVersionUID = 8966535884137111965L;

		@Override
		public void action() {
			// Prepare the message to send
			String msgContentTest = "" + value;
			System.out.println("Message sent to Matlab: " + msgContentTest);

			// Get the answer and display it
			String msgReceived;
			msgReceived = callMatlab(msgContentTest);
			System.out.println("Message received from Matlab: " + msgReceived);

			// Increment the test variable
			value++;
		} // End action

		@Override
		public boolean done() {
			return false;
		}
	} // End behavior

	@Override
	protected void takeDown() {
		System.out.println("Agent being taken down");

		// Close writer and socket
		try {
			out.close();
			in.close();
			skt.close();
			srvr.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	public String callMatlab(String msgContent) {
		ACLMessage msg;
		String matlabAnswer = "";
		while (!matlabAnswer.equals(msgContent)) {
			String ack = "";
			while (!ack.equals("ok")) {
				// Send the message to Matlab via JADE
				msg = new ACLMessage(ACLMessage.INFORM);
				msg.addReceiver(new AID("localhost:1234", AID.ISGUID));
				msg.setContent(msgContent);

				// Encode message to send as an ACL Message
				StringACLCodec codec = new StringACLCodec(in, out);
				codec.write(msg);
				out.flush();

				// Wait for the ACK message; readLine() blocks until a line arrives
				try {
					ack = in.readLine();
					System.out.println("ack = " + ack);
				} catch (IOException e1) {
					e1.printStackTrace();
				}
			}

			// Wait for its answer
			try {
				while (!in.ready()) {} // busy-wait until data is available
				matlabAnswer = matlabAnswer + in.readLine();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		return matlabAnswer;
	} // End callMatlab
}
As we could not find any satisfactory existing solution, we developed an interface between two/three software to achieve it: [JADE](http://jade.tilab.com/) / [Matlab](http://www.mathworks.com/products/matlab/) / [PowerWorld Simulator](http://www.powerworld.com/). This interface was developed in collaboration with Colorado State University's [Dr. Suryanarayanan](http://www.engr.colostate.edu/~ssuryana). ![Interface between JADE, Matlab and PowerWorld Simulator](http://robinroche.com/webpage/images/Jadepw.png) mat2jade is only a portion of the whole interface, as it only concerns the interface between Matlab and JADE. See [mat2pws](https://github.com/robinroche/mat2pws) and [jade2pws](https://github.com/robinroche/jade2pws) for the other parts of the whole interface. ### Interface concept This script is a basic interface between Matlab and JADE agents. It can be easily tested and modified. ### Code structure - In JADE: - LauncheJade.java: A simple class that runs JADE and the test agent. - TcpTestAgent.java: An agent that exchanges data with Matlab through TCP. - In Matlab: - tcpTest.m: The main file for the Matlab side. - tcp_send_function.m: A function that sends data through TCP. - tcp_receive_function.m: A function that receives data through TCP. ### Using the code The following software are required: - Matlab (tested with R2011b) with the Instrument Control Toolbox. This toolbox is only used for the TCP functions. If you find another way to use TCP communication with Matlab, you may not need this toolbox. - JADE (tested with 4.0) libraries. Code use instructions are as follows: 1. Get the mat2jade files. 2. Import them to your favorite IDE, like Eclipse. 3. Get JADE jar libraries. 4. Include the libraries to the mat2jade project. 6. Run the JADE program with the Launcher class. 7. In Matlab, open the tcpTest.m file and run it. 8. The communication should then be established and data should be exchanged. You should see things displaying in the console. If not, well, there is a problem somewhere. Please cite one of my papers if you use it, especially for research: http://dx.doi.org/10.1109/DEXA.2012.9 ### Sample output When running the test, you should see in the java console: Agent started Server connection initiated Message sent to Matlab: 0 Message received from Matlab: 0 Message sent to Matlab: 1 Message received from Matlab: 1 etc. And in Matlab: Connection established Message from JADE: 0 Message from JADE: 1 Message from JADE: 2 etc. ### Limitations - No extensive testing has been done, so use it at your own risk. - If you find bugs, errors, etc., or want to contribute, please let me know. ### Contact <NAME> - <EMAIL>
2fda89e9de9976eebf87541e702590e077dfcd11
[ "Markdown", "Java" ]
2
Java
ShiyaoLi95/mat2jade
dce3f71295599b9961af109a5204133b67f48aa0
5d158422178f49de0736c7ba04a2025bd6e0c503
refs/heads/master
<repo_name>kozhyx/Json_java<file_sep>/Test/src/Cameras.java
// Simple data holder for a camera record
public class Cameras {
    String id;
    String urlType;
    String videoUrl;
    String value;
    String ttl;

    Cameras(String id, String urlType, String videoUrl, String value, String ttl) {
        this.id = id;
        this.urlType = urlType;
        this.videoUrl = videoUrl;
        this.value = value;
        this.ttl = ttl;
    }
}
0122fa77d3f51aef3cec237bbe82ca02931a6a6c
[ "Java" ]
1
Java
kozhyx/Json_java
df379bfd8cfbe7ee968a3e78aa32c1a9a8b6eb0f
98a48e1386b12d5b0ea85216e51c31400f6bac45
refs/heads/master
<file_sep>package commaciejprogramuje.facebook.solarsystem;

import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;

/**
 * Created by m.szymczyk on 2017-09-14.
 */
public class MoonsFragmentAdapter extends FragmentStatePagerAdapter {

    private final SolarObject[] objectWithMoons;

    public MoonsFragmentAdapter(FragmentManager fm, SolarObject[] objectWithMoons) {
        super(fm);
        this.objectWithMoons = objectWithMoons;
    }

    @Override
    public Fragment getItem(int position) {
        return SolarObjectsFragment.newInstance(objectWithMoons[position].getMoons());
    }

    @Override
    public int getCount() {
        return objectWithMoons.length;
    }

    @Override
    public CharSequence getPageTitle(int position) {
        return objectWithMoons[position].getName();
    }
}
7369882ffa46c649acbafe9839c8255ca93a7a7b
[ "Java" ]
1
Java
maciejprogramuje/SolarSystem
b55c32b3a4e4346396b03f003767ff44fbf985c2
5f097abbe5b73f9e949b63d744ebf8a32a7f5b12
refs/heads/master
<file_sep>#!/usr/bin/python
# Point the web UI's baseURL at the address extracted from DOCKER_HOST.
import json
import sys
import os

ADDRSTRING = os.environ["DOCKER_HOST"]
s = ADDRSTRING.split("//")
t = s[1].split(":")
CTRL_ADDR = t[0]

# Rewrite baseURL in the config file passed as the first argument.
with open(sys.argv[1], 'r+') as j:
    data = json.load(j)
    data['default']['baseURL'] = "http://" + CTRL_ADDR + ":"
    j.seek(0)
    json.dump(data, j)
    j.truncate()
<file_sep>FROM ubuntu:latest
MAINTAINER <NAME> "<EMAIL>"

RUN apt-get -y update
RUN apt-get -y install curl unzip git zip npm python-pip htop
RUN curl -sL https://deb.nodesource.com/setup | sudo bash -
RUN apt-get install -y nodejs
RUN npm update -g npm

ADD bin/bvc-core-odl-web-1.2.0.zip bvc-core-odl-web-1.2.0.zip
ADD bin/bvc-core-bvc-web-1.2.0.zip bvc-core-bvc-web-1.2.0.zip
ADD bin/bvc-app-vyatta-ems-web.zip bvc-app-vyatta-ems-web.zip
ADD bin/bvc-app-path-explorer-web.zip bvc-app-path-explorer-web.zip

RUN mkdir -p /opt/bvc/versions
RUN unzip bvc-core-odl-web-1.2.0.zip -d /opt/bvc
RUN unzip -uo bvc-core-bvc-web-1.2.0.zip -d /opt/bvc
RUN unzip -uo bvc-app-vyatta-ems-web.zip -d /opt/bvc
RUN unzip -uo bvc-app-path-explorer-web.zip -d /opt/bvc

#ADD config.json /opt/bvc/web/config.json
ADD fix.py fix.py
ADD startnode.sh /opt/bvc/startnode.sh
RUN chmod u+x /opt/bvc/startnode.sh
<file_sep>FROM ubuntu:latest
MAINTAINER <NAME> "<EMAIL>"

RUN apt-get -y update
RUN apt-get -y install curl gcc git golang
RUN curl -sL https://deb.nodesource.com/setup | sudo bash -
RUN apt-get install -y nodejs
RUN npm update -g npm

RUN mkdir -p /opt/bvc
ENV GOPATH /opt/bvc
RUN cd $GOPATH && go get github.com/grafana/grafana
RUN cd $GOPATH/src/github.com/grafana/grafana && go run build.go setup
# Each RUN starts in a fresh shell, so set the working directory explicitly
# before running godep and the build.
WORKDIR $GOPATH/src/github.com/grafana/grafana
RUN godep restore
RUN go build .

#RUN apt-get install -y apt-transport-https
#RUN apt-get -y update & apt-get -y install wget
#RUN echo "deb https://packagecloud.io/grafana/stable/debian/ wheezy main" >> /etc/apt/sources.list
#RUN curl https://packagecloud.io/gpg.key | sudo apt-key add -
#RUN apt-get update
#RUN apt-get install grafana
#RUN wget https://grafanarel.s3.amazonaws.com/builds/grafana_2.0.2_amd64.deb
#RUN apt-get install -y adduser libfontconfig
#RUN dpkg -i grafana_2.0.2_amd64.deb
<file_sep>#!/usr/bin/python
#
# Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
# # import re,os, sys, shlex, glob, zipfile, subprocess, time,shutil,datetime,getopt,json,re,sys from pprint import pprint from subprocess import Popen, PIPE from bvc_install_common import logAndPrintMessage,unzip,moveFile,decorateDirectoryName from os.path import expanduser #from install_bvc import unzip # the install-bvc script is in the same directory as the karaf.zip # # Setup global variables and process commandline arguments gArchiveDir = "" acceptLicense = False NODE_REQ_MAJOR_VERSION = 0; NODE_REQ_MINOR_VERSION = 10; NODE_REQ_REV_VERSION = 29; modulesCue = 'begin_bvc_modules' packagesCue = 'begin_bvc_packages' controller ='localhost' baseUIComponentsZipFilePatterns= ["bvc-core-odl-web*.zip","bvc-core-bvc-web*.zip"] def checkForNode(): nodeVersionPassed = False proc = subprocess.Popen(["node --version"], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) tmp = proc.communicate()[0] #tmp = 'v0.11.2' if tmp: version = tmp.split('.') if(len(version)) >= 3: majorVersion = int(re.sub("[^0-9]","",version[0])) minorVersion = int(version[1]) revVersion = int(version[2]) if(majorVersion > NODE_REQ_MAJOR_VERSION) : nodeVersionPassed = True if(majorVersion == NODE_REQ_MAJOR_VERSION and minorVersion > NODE_REQ_MINOR_VERSION): nodeVersionPassed = True if(majorVersion == NODE_REQ_MAJOR_VERSION and minorVersion == NODE_REQ_MINOR_VERSION and revVersion >= NODE_REQ_REV_VERSION): nodeVersionPassed = True if nodeVersionPassed == True: printMsg(" NODEJS Check: ......................... [ OK ]") else: printMsg(" NODEJS Check: ......................... [ FAILED ]") return nodeVersionPassed def getHostName(controllerNode='localhost'): hostName = controllerNode if(controllerNode == 'localhost' or controllerNode == '127.0.0.1'): command = "ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1'" #command = "hostname" proc = subprocess.Popen([command], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) hostNameLines = proc.communicate()[0] hostName = hostNameLines.splitlines() if len(hostName) > 0: hostName = hostName[0] else: hostName = controllerNode return hostName def replaceHostNameInConfFile(hostName,destinationDir): replaceHostName( hostName,destinationDir, 'config.json') def replaceHostName( hostName, destinationDir, file ): fileName = destinationDir + file newFileName = fileName+'.new' if os.path.exists(newFileName): os.remove(newFileName) infile = open(fileName) outfile = open(newFileName, 'w') for line in infile: line = line.replace('localhost',hostName) outfile.write(line) infile.close() outfile.close() os.remove(fileName) os.rename(newFileName,fileName) # This method should be invoked when UI installer is driven by an external master installer def installUI(topInstallDir,destinationDir,zipFileDir,archiveDir,controller): returnCode = 0 controllerNode = getHostName(controller) global gArchiveDir gArchiveDir = archiveDir stopNodeJs() baseInstallSuccess = True replaceHostNameInConfFile(controllerNode,decorateDirectoryName(destinationDir)) configureExtensionForWeb( decorateDirectoryName( destinationDir )) startNodeJs(destinationDir) if(baseInstallSuccess == True): printMsg(' UI Installation: ...................... [ OK ]') else: returnCode = -1 return returnCode def copyFiles(zipFileDir): if(len(glob.glob( zipFileDir +"*.zip")) > 0): print "Copying required files..." 
for zFile in glob.glob('../*ODLUI*.zip'): shutil.copy(zFile, "./") def printMsg(message): #print message logAndPrintMessage(message) def unzipBaseUIFiles(zipFileDir,destinationDir): for zipPattern in baseUIComponentsZipFilePatterns: filePattern = zipFileDir + zipPattern files = glob.glob(filePattern) if( len(files) > 0 ): unzipFile(files[0],destinationDir) def moveBaseUIZipFiles(zipFileDir,archiveDirForFiles): for zipPattern in baseUIComponentsZipFilePatterns: filePattern = zipFileDir + zipPattern files = glob.glob(filePattern) if( len(files) > 0 ): moveFile( files[0], archiveDirForFiles ) def unzipFile(fileName,destinationDir): unzip(fileName,destinationDir); def restartNodeJs(): stopNodeJs() startNodeJs() def startNodeJs(destinationDir): os.chdir(destinationDir) command = 'echo "Starting NODEJS server - $(date)" >> ../log/web.log && node server.js' logfile = open( "../log/web.log", 'a' ) inputdevnull = open( "/dev/null" ); proc = subprocess.Popen([command], shell=True, stdout=logfile, stderr=logfile, stdin=inputdevnull ) logfile.close() inputdevnull.close() # hostNameLines = proc.communicate()[0] printMsg(" Starting NODEJS: ...................... [ OK ]") printMsg(" Server @ http://"+getHostName()+":9000/") def stopNodeJs(): command = "kill -9 $(ps aux | grep '\snode\s' | awk '{print $2}')" proc = subprocess.Popen([command], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) hostNameLines = proc.communicate()[0] printMsg(" Stopping NODEJS: ...................... [ OK ]") def configureExtensionForWeb(baseWebDirectory): installFiles = glob.glob(baseWebDirectory +"install-*.json") globalAppConfigFile = baseWebDirectory + 'config.json' for installFile in installFiles: appConfigFile = open(installFile) appConfig = json.load(appConfigFile) appConfigFile.close() for item in appConfig['install']: srcFileName = baseWebDirectory + item['src'] #print "processing file: " + srcFileName srcFile = open(srcFileName) fileTextAsList = re.compile("\r\n|\n").split(srcFile.read()) srcFile.close() if '\n'.join(item['data']) not in fileTextAsList: index = next((index for index,value in enumerate(fileTextAsList) if value.strip() == item['cue']),-1) if (index > -1): fileTextAsList.insert(index, '\n'.join(item['data'])) outputFile = open(srcFileName, "w") outputFile.write('\n'.join(fileTextAsList)) outputFile.close() #Updating the config section if 'config' in appConfig: configSection = appConfig['config'] configFile = open(globalAppConfigFile) configFileJson = json.load(configFile) configFile.close() #print json.dumps(configFileJson) configFileJson["default"].update(configSection) #print json.dumps(configFileJson) configFile = open(globalAppConfigFile,"w") json.dump(configFileJson, configFile) configFile.close() <file_sep>#!/bin/bash # start kibana /opt/kibana/bin/kibana -e "http://$ELASTICSEARCH_1_PORT_9200_TCP_ADDR:9200" <file_sep>FROM ubuntu:latest MAINTAINER <NAME> "<EMAIL>" ENV LS logstash-1.4.2 RUN apt-get update RUN apt-get -y install wget python RUN apt-get install -y software-properties-common RUN add-apt-repository ppa:webupd8team/java RUN echo oracle-java7-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections RUN apt-get update RUN apt-get -y install --no-install-recommends oracle-java7-installer oracle-java7-set-default RUN wget -nv https://download.elastic.co/logstash/logstash/${LS}.tar.gz RUN tar -xf ${LS}.tar.gz -C /opt RUN ln -fs /opt/${LS} /opt/logstash RUN /opt/logstash/bin/plugin install contrib #RUN /opt/logstash/bin/plugin install 
logstash-input-jmx --no-verify <file_sep>FROM ubuntu:latest MAINTAINER <NAME> "<EMAIL>" RUN apt-get -y update && apt-get -y install curl #ADD https://s3.amazonaws.com/influxdb/influxdb_latest_amd64.deb /influxdb_latest_amd64.deb ADD http://get.influxdb.org/influxdb_0.8.8_amd64.deb /influxdb_0.8.8_amd64.deb RUN dpkg -i /influxdb_0.8.8_amd64.deb ADD startinfluxdb.sh /opt/startinfluxdb.sh RUN chmod u+x /opt/startinfluxdb.sh <file_sep>FROM ubuntu:latest MAINTAINER <NAME> "<EMAIL>" RUN apt-get autoclean $$ apt-get clear cache RUN apt-get update RUN apt-get clean RUN apt-get -y upgrade RUN apt-get --fix-missing install -y build-essential ADD http://download.redis.io/releases/redis-3.0.0.tar.gz redis-3.0.0.tar.gz RUN mkdir -p /opt/monitor RUN tar -zxvf redis-3.0.0.tar.gz -C /opt/monitor WORKDIR /opt/monitor/redis-3.0.0 RUN make WORKDIR /opt/monitor/redis-3.0.0/src <file_sep>#!/bin/bash # start node python fix.py /opt/bvc/web/config.json cd /opt/bvc/web nohup node server.js >/bvc/system/logs/node.log 2>&1 <file_sep>#!/bin/bash # start redis /opt/kibana/bin/kibana -e "http://$ELASTICSEARCH_1_PORT_9200_TCP_ADDR:9200" <file_sep>#!/usr/bin/python #Copyright: # # Copyright (c) 2012-2014 by Brocade Communications Systems, Inc. # All Rights Reserved. # #License: # #This software is licensed, and not freely redistributable. See the #license agreement for details. import os, sys, shlex, glob, zipfile, subprocess, time,shutil,datetime,getopt import re import fnmatch path = os.path.abspath(os.path.join(os.path.dirname(__file__), '.lib')) if not path in sys.path: sys.path.insert(1, path) del path from subprocess import Popen, PIPE from controller_prereq import getValidJAVAHOME, checkMemoryCPU ,setupSetenv from bvc_install_common import setupLog,logMessage, logAndPrintMessage, unzip, moveFile from controller_install import isControllerRunning, installBaseExtensions, installExtensions, getZipList,mergeFeatureCfg, runCustomConfigInstall, stopController, startController, setupKarafLogging from install_bvc_ui import checkForNode, installUI, unzipBaseUIFiles, moveBaseUIZipFiles # Setup global variables and process commandline arguments relDir = sys.argv[0].split('install')[0] os.chdir(relDir) installDir=os.getcwd()+'/' noService=False ignoreErrors = False shortOptsIn = "ih" longOptsIn = ["ignore-prereqs","help" ] usageStr = 'USAGE: install --ignore-prereqs --help' try: opts, args = getopt.getopt(sys.argv[1:],shortOptsIn,longOptsIn) except getopt.GetoptError: print usageStr sys.exit(2) for opt,arg in opts: if opt in ('-i', '--ignore-prereqs'): ignoreErrors = True elif opt in ( '-h', '--help'): print usageStr sys.exit() else: print usageStr sys.exit() # setup .archives directory & log start message archiveDir = installDir+'/.archive' if not os.path.exists( archiveDir ): os.makedirs( archiveDir ) logDir = installDir+'/log' logFileName = 'install_log' if not os.path.exists( logDir ): os.makedirs( logDir ) setupLog( logDir, logFileName ) logAndPrintMessage( ' Brocade Vyatta Controller Installation\n') logAndPrintMessage( ' Starting @ : '+str(datetime.datetime.now())) # check for the base install file and if found run the controller installation karafDistName = 'bvc-core-odl-controller' baseUXDistName = 'bvc-core-odl-web' bvcBaseDistName = 'bvc-core-bvc-controller' depDir = '.dependencies' dependenciesDir = installDir+depDir+'/' karafExtensionsDir = '.extensions' extensionsDir=installDir+karafExtensionsDir versionsDir=installDir+'versions' karafInstallDir=installDir+'controller/' nodeInstallDir=installDir+'web/' def 
getBaseNames( dirName, suffix ): names = [] for n in fnmatch.filter(os.listdir(dirName),'*.'+suffix): if os.path.isfile (os.path.join( dirName,n )): name = n.split(re.search('-[0-9]*\.[0-9]*', n).group(0))[0] names.append( name ) return names def convertVersionNames( vlist ): convNames = [] for vName in vlist: if '-' in vName: name = vName.split(vName.split('-')[0])[1] name = re.search('(?<=-).*',name).group(0) convNames.append(name) return convNames extBaseNames = getBaseNames(extensionsDir, 'zip') depBaseNames = getBaseNames(dependenciesDir, 'zip') versionsBaseNames = convertVersionNames( getBaseNames(versionsDir, 'properties') ) archivedBaseNames = getBaseNames( archiveDir,'zip') def exitInstall(): logAndPrintMessage( ' Install Exiting @ : '+str(datetime.datetime.now())+'\n') sys.exit(1) if os.path.exists(installDir+'bvc'): logAndPrintMessage('Found a \"'+installDir+'bvc'+'\" directory, it appears that bvc-dependencies-<version>.zip was improperly unzipped.\nPlease remove this directory and recheck your installation steps.') exitInstall() if 'bvc-dependencies' not in versionsBaseNames: logAndPrintMessage('Error: Missing bvc-dependencies which indicates you haven\'t properly unzipped\n the bvc-dependencies-<version>.zip file prior to running install.\nPlease recheck your installation steps.') exitInstall() installingBaseKaraf = karafDistName in depBaseNames installingBaseUX = baseUXDistName in depBaseNames installingBvcBase = bvcBaseDistName in depBaseNames upgradeNames = [] for bName in extBaseNames: if bName in archivedBaseNames: upgradeNames.append(bName) for bName in depBaseNames: if bName in archivedBaseNames: upgradeNames.append(bName) if installingBaseKaraf == True or installingBaseUX == True or installingBvcBase == True: if installingBaseKaraf == True: validJRE, validJAVA_HOME = getValidJAVAHOME(installDir) validMemoryCPU, javaMaxMem, javaMaxPermSize = checkMemoryCPU( ignoreErrors, installDir, validJAVA_HOME) if installingBaseUX == True : validNode = checkForNode() if ( ( installingBaseKaraf == True and validJRE != True ) or ( installingBaseKaraf == True and validMemoryCPU != True ) or ( installingBaseUX == True and validNode != True)): if ignoreErrors == False: sys.exit(1) if len(upgradeNames) > 0: logAndPrintMessage(' Re-Installing one or more .zip files:') for name in upgradeNames: logAndPrintMessage(' '+name) stopController(karafInstallDir) if os.path.exists(karafInstallDir+'/data'): try: logAndPrintMessage( ' Removing the karaf data directory ...') origDir = os.getcwd() os.chdir(karafInstallDir+'/data') for file in glob.glob("*"): if file != 'log': if os.path.isfile(file): os.remove( file ) else: shutil.rmtree(file) os.chdir(origDir) logAndPrintMessage( ' ............... [ OK ]') except OSError, e: logAndPrintMessage( ' Error removing directory '+karafInstallDir+'/data') logAndPrintMessage( ' Install cannot recover, exiting.') logAndPrintMessage( ' ............... 
[ FAILED ]') exitInstall() def unzipAndMove( dirName, installDir): for zFile in getZipList( dirName, installDir): fullName = installDir+dirName+'/'+zFile unzip( fullName, installDir) moveFile( fullName, archiveDir ) unzipBaseUIFiles( dependenciesDir, installDir ) moveBaseUIZipFiles( dependenciesDir, archiveDir ) unzipAndMove( depDir, installDir ) if installingBaseKaraf == True: os.chmod(karafInstallDir+'/bin/start', 0555) os.chmod(karafInstallDir+'/bin/client', 0555) os.chmod(karafInstallDir+'/bin/stop', 0555) os.chmod(karafInstallDir+'/bin/status', 0555) os.chmod(karafInstallDir+'/bin/karaf', 0555) # The data directory doesn't exist if the controller hasn't yet been started if not os.path.exists(installDir+'/controller/data/log'): os.makedirs(installDir+'/controller/data/log') if not os.path.exists(installDir+'/controller/data/log/controller_logs'): try: os.symlink( '../controller/data/log',installDir+'log/controller_logs') except OSError, e: fileRemoved =False if not os.path.exists(installDir+'/controller/data/log/karaf.out'): try: os.symlink( '../controller/data/karaf.out',installDir+'log/karaf.out') except OSError, e: fileRemoved =False # Setup KarafMBeans os.rename(karafInstallDir+'/bin/karaf',karafInstallDir+'/bin/karaf.old' ) karafScript = open(karafInstallDir+'/bin/karaf.old') newKarafFile = open(karafInstallDir+'/bin/karaf',"w") for line in karafScript: if 'KarafMBeanServerBuilder' in line: lineTokens = line.split(" ") for token in lineTokens: if not 'KarafMBeanServerBuilder' in token: newKarafFile.write( ' '+token) else: newKarafFile.write( line ) karafScript.close() newKarafFile.close() os.chmod(karafInstallDir+'/bin/karaf', 0555) setupSetenv( karafInstallDir, validJAVA_HOME, javaMaxMem, javaMaxPermSize ) setupKarafLogging( karafInstallDir, '10', '100' ) else: logMessage( ' No '+karafDistName+'*.zip found. Not installing the base controller.' ) # Extensions Installation Section controllerRestartRequested = False if len(extBaseNames) == 0: logMessage( 'No extensions to install') else: unzipAndMove( karafExtensionsDir, installDir ) controllerRestartRequested = installExtensions( ' Extension Install', installDir, karafInstallDir, karafExtensionsDir, archiveDir ) if controllerRestartRequested == True: logAndPrintMessage( ' Restarting controller to complete extension install') stopController( installDir ) startController(installDir) if controllerRestartRequested == True: logAndPrintMessage( ' Restart controller .................... [ OK ]') if installingBaseUX == True or len(extBaseNames) > 0: installUI( installDir,nodeInstallDir,extensionsDir,archiveDir,'localhost') logAndPrintMessage( ' Install completed @ : '+str(datetime.datetime.now())+'\n') <file_sep> #!/usr/bin/python # #Copyright: # # Copyright (c) 2012-2014 by Brocade Communications Systems, Inc. # All Rights Reserved. # #License: # #This software is licensed, and not freely redistributable. See the #license agreement for details. 
import os, sys, shlex, glob, zipfile, subprocess, time,shutil,datetime,getopt
from subprocess import Popen, PIPE

logFile = None

def setupLog( logDir,logFileName ):
    global logFile
    logFile = logDir+'/'+logFileName
    if not os.path.exists( logDir ):
        os.makedirs(logDir)  # bug fix: the original called os.path.mkdir, which does not exist

def logMessage( line ):
    global logFile
    if logFile is None:
        print 'Error: logMessage called before setupLog'
        sys.exit(1)
    with open( logFile, "a") as logfile:
        if not line.endswith('\n') :
            logfile.write(line+'\n')
        else:
            logfile.write(line)

def logAndPrintMessage( line ):
    logMessage(line)
    print line

def unzip(zipFilePath, destDir):
    if not os.path.exists( destDir ):
        os.makedirs( destDir )
    zfile = zipfile.ZipFile(zipFilePath)
    for name in zfile.namelist():
        (dirName, fileName) = os.path.split(name)
        if fileName == '':
            # directory
            newDir = destDir + '/' + dirName
            if not os.path.exists(newDir):
                os.mkdir(newDir)
        else:
            # file ... effectively overwrite existing files
            if os.path.isfile(destDir + '/' + name):
                os.remove(destDir + '/' + name)
            fd = open(destDir + '/' + name, 'wb')
            fd.write(zfile.read(name))
            fd.close()
    zfile.close()

def moveFile(srcFile,destDir):
    fileMoved = True
    fileExisted = False
    fileName = srcFile.split('/')[-1]
    if(destDir.endswith("/") == False):
        destDir = destDir + "/"
    destFileName = destDir + fileName
    # keep the fileExisted flag reported by removeExistingFile; the original
    # unconditionally forced it to True whenever the removal call succeeded
    success, fileExisted = removeExistingFile(destFileName)
    if success == True:
        try:
            shutil.move( srcFile, destDir )
        except OSError, e:
            fileMoved = False
    else :
        fileMoved = False
    return fileMoved, fileExisted

def removeExistingFile(filename):
    fileRemoved = True
    fileExisted = False
    if os.path.isfile(filename):
        fileExisted = True
        try:
            os.remove(filename)
        except OSError, e:
            fileRemoved = False
    return fileRemoved, fileExisted

def decorateDirectoryName(directoryName):
    if(directoryName.endswith("/") == False):
        directoryName = directoryName + "/"
    return directoryName
<file_sep>FROM ubuntu:14.04
MAINTAINER <NAME> "<EMAIL>"
ENV JAVA_HOME /usr
ENV JAVA_OPTS -Xmx3g
RUN apt-get install -y software-properties-common
RUN add-apt-repository ppa:webupd8team/java
RUN apt-get -y install wget
RUN echo oracle-java7-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections
RUN apt-get update
RUN apt-get -y install --no-install-recommends oracle-java7-installer oracle-java7-set-default maven python-pip
ADD bvc-1.2.0.zip bvc-1.2.0.zip
ADD bvc-dependencies-1.2.0.zip bvc-dependencies-1.2.0.zip
RUN unzip bvc-1.2.0.zip -d /opt
RUN unzip bvc-dependencies-1.2.0.zip -d /opt
ADD installer/.lib /opt/bvc/.lib/
ADD installer/install /opt/bvc/install
WORKDIR /opt/bvc/
RUN /opt/bvc/install -i
ADD start /opt/bvc/controller/bin/start
RUN chmod u+x /opt/bvc/controller/bin/start
RUN sed -i "s|log4j.appender.out.file=\${karaf.data}/log/karaf.log|log4j.appender.out.file=/bvc/logs/karaf.log|" /opt/bvc/controller/etc/org.ops4j.pax.logging.cfg
ADD org.ops4j.pax.logging.cfg /opt/bvc/controller/etc/org.ops4j.pax.logging.cfg
<file_sep># bvcdocker
Dockerized Brocade Controller

Repository for the Brocade Controller Operations Dashboard
<file_sep>FROM ubuntu:latest
MAINTAINER <NAME> "<EMAIL>"
RUN apt-get install -y software-properties-common
RUN add-apt-repository ppa:webupd8team/java
RUN apt-get -y install wget
RUN echo oracle-java7-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections
RUN apt-get update
RUN apt-get -y install --no-install-recommends oracle-java7-installer oracle-java7-set-default maven
RUN wget
https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.5.2.tar.gz
RUN tar -xvf elasticsearch-1.5.2.tar.gz -C /opt
RUN ln -fs /opt/elasticsearch-1.5.2 /opt/elasticsearch
<file_sep>FROM ubuntu:latest
MAINTAINER <NAME> "<EMAIL>"
ENV KIBANA kibana-4.0.2-linux-x64
RUN apt-get -y install wget
RUN apt-get update
RUN wget -nv https://download.elastic.co/kibana/kibana/${KIBANA}.tar.gz
RUN tar -xf ${KIBANA}.tar.gz -C /opt
RUN ln -fs /opt/${KIBANA} /opt/kibana
#ADD kibana.yml /opt/kibana-4.0.2-linux-x64/config/kibana.yml
ADD startkibana.sh /opt/kibana/bin/startkibana.sh
RUN chmod u+x /opt/kibana/bin/startkibana.sh
<file_sep>#!/bin/bash
#/usr/bin/influxdb -config=/bvc/configs/influxdb/config1.toml
#sleep 30
curl -X POST 'http://192.168.59.103:8086/db?u=root&p=root' -d '{"name": "cadvisor"}'
curl -X POST 'http://192.168.59.103:8086/db?u=root&p=root' -d '{"name": "logstash"}'
curl -X POST 'http://192.168.59.103:8086/db/cadvisor/users?u=root&p=root' -d '{"name": "user", "password": "<PASSWORD>"}'
curl -X POST 'http://192.168.59.103:8086/db/logstash/users?u=root&p=root' -d '{"name": "user", "password": "<PASSWORD>"}'
tail -f /usr/bin/nohup.out
<file_sep>#!/usr/bin/python
import os

ES = os.environ["BVC_ELASTICSEARCH_1_PORT_9200_TCP_ADDR"]

# The template below was garbled in the original source (stray closing braces,
# a floating date filter); it is reconstructed here so the braces balance.
# Double braces escape literal braces for str.format().
lsconf = """
input {{
  stdin {{
    type => "stdin-type"
  }}
  file {{
    type => "syslog"
    path => [ "/var/log/*.log", "/var/log/messages", "/var/log/syslog" ]
  }}
  file {{
    type => "logstash"
    path => [ "/var/log/logstash/logstash.log" ]
    start_position => "beginning"
  }}
}}

filter {{
  if [type] == "docker" {{
    json {{
      source => "message"
    }}
    mutate {{
      rename => [ "log", "message" ]
    }}
    date {{
      match => [ "time", "ISO8601" ]
    }}
  }}
}}

output {{
  stdout {{
    codec => rubydebug
  }}
  elasticsearch {{
    host => "{ES_HOST}"
    port => {ES_PORT}
    protocol => "http"
  }}
}}
"""

def main():
    with open('/etc/logstash.conf', 'w') as f:
        # bug fix: the original called print(conf, f), which prints both
        # arguments to stdout instead of writing the config to the file
        f.write(lsconf.format(ES_HOST=ES, ES_PORT='9200'))

if __name__ == "__main__":
    main()
<file_sep>#!/usr/bin/python
#Copyright:
#
# Copyright (c) 2012-2014 by Brocade Communications Systems, Inc.
# All Rights Reserved.
#
#License:
#
#This software is licensed, and not freely redistributable. See the
#license agreement for details.

import os, sys, shlex, glob, zipfile, subprocess, time,shutil,datetime,getopt
from subprocess import Popen, PIPE
from bvc_install_common import logMessage, logAndPrintMessage

# gets the version number of the java installed on the arg path
# Sets the check_JDK boolean to true if the java version is equal to or greater than the recommended version
def checkJDKVersion( JAVA_HOME,req_ver,recommended_rev):
    check_JDK = False
    if JAVA_HOME:
        proc = subprocess.Popen([JAVA_HOME + "/bin/java -version"], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        tmp = proc.communicate()[1]
        if tmp:
            if tmp.split('"'):
                if len(tmp.split('"')) > 1:
                    curr_ver = tmp.split('"')[1]
                    if curr_ver:
                        if curr_ver.split('_'):
                            if curr_ver.split('_')[0] == req_ver:
                                check_JDK = True
                                if len(curr_ver.split('_')) >= 2:
                                    if curr_ver.split('_')[1] < recommended_rev:
                                        logAndPrintMessage( 'WARNING : Recommended JAVA '+req_ver+' revision is '+recommended_rev+' or greater')
    return check_JDK

def getValidJAVAHOME(installDir):
    REQ_VER = '1.7.0'
    RECOMMENDED_REV = '67'
    JAVA_HOME = os.getenv("JAVA_HOME")
    check_JDK = False
    # This method takes the JAVA_HOME path as a string arg
    while (check_JDK == False):
        if JAVA_HOME:
            check_JDK = checkJDKVersion( JAVA_HOME, REQ_VER, RECOMMENDED_REV)
        if(check_JDK == True):
            logAndPrintMessage( "\n JDK Check .............................
[ OK ]") else: JAVA_HOME = raw_input("\nRequired JAVA version is \"1.7.0\" Recommended revision is 67 or greater\nEnter the path to the Required JAVA Version:\n") return check_JDK, JAVA_HOME def setupSetenv( distDir, JAVA_HOME, xmxValue, maxPS): logMessage( 'Updating JAVA_MAX_MEM, JAVA_MAX_PERM_SIZE in bin/setenv ...' ) setenvFilename = distDir+'/bin/setenv' # can we open the new file with truncate? delete for now if os.path.isfile( setenvFilename+'.new' ): os.remove( setenvFilename+'.new') newf = open(setenvFilename+'.new', "w") with open( setenvFilename ) as f: for line in f: # the stock file checks for env vars already set to allow override # but we don't allow override so strip out the conditional code. if 'x$JAVA_MAX_PERM_MEM' in line or 'x$JAVA_MAX_MEM' in line: for line in f: # skip the next line for line in f: break; break; elif ( 'export JAVA_HOME' in line ): newf.write( 'export JAVA_HOME='+JAVA_HOME+'\n' ) elif ( 'export JAVA_MAX_MEM' in line ): newf.write('export JAVA_MAX_MEM='+str(xmxValue)+'m\n' ) elif ( 'export JAVA_MAX_PERM_MEM' in line ): newf.write('export JAVA_MAX_PERM_MEM='+str(maxPS)+'m\n' ) else: newf.write(line) newf.close() os.remove( setenvFilename ) os.rename( setenvFilename+'.new',setenvFilename) # This function should work for both Centos6 and Ubuntu 14.04 def checkMemoryCPU( ignoreErrors, installDir, JAVA_HOME): maxXmxValue = 12 * 1024 minCPUspeed = 2.0 minCPUcount = 2 minMemory = 4 * 1024 xmxValue = 2 * 1024 maxPS = int(512) availableMemory = '1024' CPUspeed = 0 CPUcount = 0 cmd = "lscpu" process = Popen(shlex.split(cmd), stdout=PIPE) out, err = process.communicate() outLines = out.split('\n') for aLine in outLines: if 'CPU MHz:' in aLine: CPUspeed = float(aLine.split(':')[1]) if ( 'CPU(s):' in aLine and 'NUMA' not in aLine ): CPUcount = aLine.split(':')[1] with open( '/proc/meminfo' ) as f: for line in f: if 'MemTotal:' in line: availableMemory = int(line.split('k')[0].split(':')[1]) availableMemory = availableMemory/1024 f.close() sufficient = True cpuSpeedStr = ' CPU Speed Check: ...................... [' cpuCountStr = ' CPU Count Check: ...................... [' memoryStr = ' Memory Size Check: .................... [' if int(CPUspeed) < minCPUspeed: logMessage( '\nYour systems CPU speed is '+str(CPUspeed)+' but should be '+str(minCPUspeed)+' or more to ensure acceptable performance.') cpuSpeedStr += ' FAILED ]' sufficient = False else: cpuSpeedStr += ' OK ]' if int(CPUcount) < minCPUcount: logMessage( '\nYour systems CPU count is '+str(CPUcount)+' but should be '+str(minCPUcount)+' or more to ensure acceptable performance.') cpuCountStr += ' FAILED ]' sufficient = False else: cpuCountStr += ' OK ]' if int(availableMemory) < minMemory: logMessage( '\nYour system\'s available memory is '+str(availableMemory)+'m. This does not meet the minimum memory requirements. 
Please refer to the system requirements.') sufficient = False memoryStr += ' FAILED ]' else: memoryStr += ' OK ]' logAndPrintMessage ( cpuSpeedStr) logAndPrintMessage ( cpuCountStr) logAndPrintMessage ( memoryStr) if sufficient == False: logAndPrintMessage( '\nThis system will not sustain a controller, please increase') logAndPrintMessage( 'the required resources for best results.') if ignoreErrors == False: return False, 0, 0 xmxValue = int(0.85 * availableMemory) if int(xmxValue) >= int(maxXmxValue): logAndPrintMessage( 'Java processes exceeding '+str(maxXmxValue)+' MB may run into') logAndPrintMessage( 'GC issues, so we cap memory at that limit.') xmxValue = maxXmxValue return True, xmxValue, maxPS <file_sep>#!/usr/bin/python #Copyright: # # Copyright (c) 2012-2014 by Brocade Communications Systems, Inc. # All Rights Reserved. # #License: # #This software is licensed, and not freely redistributable. See the #license agreement for details. import os, sys, shlex, glob, zipfile, subprocess, time,shutil,datetime,getopt from subprocess import Popen, PIPE from controller_prereq import getValidJAVAHOME, checkMemoryCPU from bvc_install_common import logMessage, logAndPrintMessage, unzip, moveFile def runCommand( cmd): process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) outputStr = [] for line in process.stdout: logMessage(line) outputStr.append(line) for line in process.stderr: logMessage(line) returnCode = process.wait() return outputStr, returnCode def runSilentCommand( cmd): process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) def isControllerRunning(): process = subprocess.Popen(['ps','-elf'], stdout=subprocess.PIPE) running = False for line in process.stdout: if 'org.apache.karaf' in line: running = True return running def startController( controllerDir): if isControllerRunning() == False: logAndPrintMessage( ' Starting controller ... please wait ...' ) os.chmod(controllerDir+'/bin/start', 0555) cmd = [ controllerDir+'/bin/start'] runCommand( cmd ) retries = 0 while isControllerRunning() == False and retries < 10: retries += 1 time.sleep(2) logAndPrintMessage( ' ............... [ OK ]') return def stopController( controllerDir): if isControllerRunning() == True: logAndPrintMessage( ' Stopping controller ... please wait ...') os.chmod(controllerDir+'/bin/stop', 0555) cmd = [ controllerDir+'/bin/stop'] runCommand( cmd ) retries = 0 while isControllerRunning() == True and retries < 30: retries += 1 time.sleep(2) logAndPrintMessage( ' ............... [ OK ]') return def installBaseExtensions( msg, installDir, karafInstallDir, karafExtensionsDir, archiveDir ): if not isControllerRunning(): startController( karafInstallDir ) # extract repos, features & merge to new features.cfg file extList, repoList, origFeaturesName, newFeaturesName = mergeFeatureCfg( karafInstallDir ) installFeatures( karafInstallDir, extList, repoList ) logAndPrintMessage( msg + ' ... please wait ...') logAndPrintMessage( ' ............... [ OK ]') def installExtensions( msg, installDir, karafInstallDir, karafExtensionsDir, archiveDir ): if not isControllerRunning(): startController( karafInstallDir ) # extract repos, features & merge to new features.cfg file extList, repoList, origFeaturesName, newFeaturesName = mergeFeatureCfg( karafInstallDir ) installFeatures( karafInstallDir, extList, repoList ) logAndPrintMessage( msg + ' ... 
please wait ...')
    restartControllerRequested = runCustomConfigInstall( karafInstallDir )
    # replace the original features.cfg with the merged file if it exists
    if ( newFeaturesName is not None and os.path.exists( origFeaturesName) ) :
        os.remove(origFeaturesName)
        os.rename(newFeaturesName,origFeaturesName)
    logAndPrintMessage( ' ............... [ OK ]')
    return restartControllerRequested

def getZipList( extDir, topDir ):
    origDir = os.getcwd()
    os.chdir( topDir + extDir )
    extensionList = []
    for file in glob.glob("*.zip"):
        extensionList.append(file)
    os.chdir( origDir )
    return extensionList

def runCustomConfigInstall( karafHomeDir ):
    restartRequested=False
    if os.path.exists( karafHomeDir + '/etc/bvc-extensions' ):
        originalDir = os.getcwd()
        os.chdir(karafHomeDir + '/etc/bvc-extensions' )
        for file in glob.glob("*.install"):
            restartReturnCode = None
            # bug fix: str.rstrip(".install") strips a *character set* and can
            # eat trailing letters of the base name; slice the suffix off instead
            configFileName = file[:-len(".install")] + ".cfg"
            if os.path.exists( configFileName ):
                with open( configFileName ) as cfgfile:
                    for line in cfgfile:
                        if 'restartControllerOnCustomInstallRC =' in line:
                            # bug fix: str.lstrip() has the same character-set
                            # pitfall, so split on '=' to get the value
                            restartReturnCode = int( line.split('=')[1] )
                            break
            os.chmod(file, 0555)
            cmd = ['./' + file]
            logMessage( "Running custom install script: " + file )
            output, rc = runCommand( cmd )
            if restartReturnCode != None:
                if int( rc ) == restartReturnCode:
                    logMessage( "Install script requested restart of controller." )
                    restartRequested = True
            logMessage( "Custom install script complete with code: " + str(rc) + ".\n**********Standard Output*******\n" + str(output) + "\n*************************************\n")
        os.chdir( originalDir )
    return restartRequested

def setupKarafLogging( karafInstallDir, maxFileSize, maxBackupIndex ):
    cfgFName = karafInstallDir+'/etc/org.ops4j.pax.logging.cfg'
    newFName = cfgFName+'.new'
    newfile = open( newFName, 'w')
    with open( cfgFName ) as cfgfile:
        for line in cfgfile:
            if 'log4j.appender.out.maxFileSize=' in line:
                newfile.write( 'log4j.appender.out.maxFileSize='+maxFileSize+'MB\n')
            elif 'log4j.appender.out.maxBackupIndex=' in line:
                newfile.write( 'log4j.appender.out.maxBackupIndex='+maxBackupIndex+'\n')
            else:
                newfile.write( line )
    newfile.close()
    os.remove( cfgFName )
    os.rename( newFName, cfgFName )

######
# Method to collect existing and new extension karaf features & repos
# and merge into a single etc/org.apache.karaf.features.cfg file.
# Parameters: home directory of karaf
# Looks for any file in the subdirectory karafhome/etc/bvc-extensions which
# must contain two lists, one of features and one of repos:
#
# featuresBoot = token1,token2,token3... tokenN
# featuresRepositories = repoA,reposB,repoC ... repoZ
#
# Returns: list of new features, list of new repos, the original and new features
# filenames. The caller must replace the old with the new.
###### def mergeFeatureCfg( karafHomeDir): originalDir = os.getcwd() extensionFeatures = [] extensionRepos = [] newFeaturesFileName = None featuresFileName = karafHomeDir+'/etc/org.apache.karaf.features.cfg' if os.path.exists( karafHomeDir + '/etc/bvc-extensions' ): newFeaturesFileName = karafHomeDir+'/etc/org.apache.karaf.features.cfg.new' if os.path.exists( newFeaturesFileName ): os.remove( newFeaturesFileName ) currentFeatures = [] currentRepos = [] extensionList = [] os.chdir(karafHomeDir + '/etc/bvc-extensions' ) for file in glob.glob("*"): extensionList.append(file) extFile = open(file) for line in extFile: if 'featuresBoot' in line: featuresExt = line.split("=") features = featuresExt[1] features = features.replace(' ','').replace('\n','') features = features.split(",") for feature in features: if not feature in extensionFeatures: extensionFeatures.append(feature) else: logMessage( 'already have feature: '+ feature) elif 'featuresRepositories =' in line: reposExt = line.split("=") repos = reposExt[1] repos = repos.replace(' ','').replace('\n','') repos = repos.split(",") for repo in repos: if not repo in extensionRepos: extensionRepos.append(repo) else: logMessage( 'already have repo: '+ repo) extFile.close() os.chdir( karafHomeDir ) newFile = open (newFeaturesFileName,'w') with open( featuresFileName ) as cfgfile: for line in cfgfile: if 'featuresBoot=' in line: newFile.write( '\nfeaturesBoot= ' ) baseFeatures = line.split("=") baseFeatures = baseFeatures[1] baseFeatures = baseFeatures.split(",") for feat in baseFeatures: feat = feat.replace('\n','') feat = feat.replace(' ','') currentFeatures.append(feat) for feat in extensionFeatures: if not feat in currentFeatures: currentFeatures.append(feat) comma = False for feature in currentFeatures: if comma == True: newFile.write( ',' ) newFile.write( feature ) comma = True newFile.write( '\n' ) elif 'featuresRepositories =' in line: newFile.write( '\nfeaturesRepositories = ' ) baseRepos = line.split("=") baseRepos = baseRepos[1] baseRepos = baseRepos.split(",") for repo in baseRepos: repo = repo.replace('\n','') repo = repo.replace(' ','') currentRepos.append(repo) logMessage( str(extensionRepos)) for repo in extensionRepos: if not repo in currentRepos: currentRepos.append(repo) comma = False for repo in currentRepos: if comma == True: newFile.write( ',' ) newFile.write( repo ) comma = True newFile.write( '\n' ) else: newFile.write( line ) newFile.close( ) else: logMessage( 'No bvc-extensions to be installed') os.chdir( originalDir ) return extensionFeatures, extensionRepos, featuresFileName, newFeaturesFileName def installFeatures( karafInstallDir, extList, repoList ): print " Adding Repositories ... please wait ..." for repo in repoList: logMessage( ' Adding repo '+repo+' ...') cmd = [karafInstallDir+'/bin/client','-r','60','-d','5','feature:repo-add',repo] runCommand( cmd ) print " ................. [ OK ]" print " Installing Features ... please wait ..." for ext in extList: logMessage( ' Installing '+ext+' ...') cmd = [karafInstallDir+'/bin/client','-r','60','-d','5','feature:install',ext] runCommand( cmd ) print " ................. [ OK ]"
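
# ----------------------------------------------------------------------
# Illustrative sketch (not part of the original source): what the merge
# performed by mergeFeatureCfg()/installFeatures() looks like. The entry
# names below are made up for the example.
#
# A file dropped into etc/bvc-extensions/ might contain:
#
#   featuresBoot = my-extension-feature
#   featuresRepositories = mvn:com.example/my-extension/1.0/xml/features
#
# After the merge, etc/org.apache.karaf.features.cfg gains any entries it
# was missing, appended to the existing comma-separated lists:
#
#   featuresBoot= config,standard,region,my-extension-feature
#   featuresRepositories = mvn:org.apache.karaf.features/standard/xml/features,mvn:com.example/my-extension/1.0/xml/features
#
# Each feature is then installed through the running karaf client:
#
#   bin/client -r 60 -d 5 feature:repo-add mvn:com.example/my-extension/1.0/xml/features
#   bin/client -r 60 -d 5 feature:install my-extension-feature
# ----------------------------------------------------------------------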
4bdf6c36144d93d815475944dc3cbd8d3e2cf2e5
[ "Markdown", "Python", "Dockerfile", "Shell" ]
20
Python
PlumpMath/bvcdocker
ca5b7642140a2ba40b2488ef4a4e771435157fca
c3713d1185b26b1808e239f9949a22089f04fd76
refs/heads/master
<file_sep># -*- coding: utf-8 -*-
import xlsxwriter

class HtmlOutputer(object):

    def save_to_excel(self, results, file_name):
        # xlsxwriter always emits xlsx-format content, so use an .xlsx
        # extension (the original '.xls' name would confuse Excel)
        book = xlsxwriter.Workbook(R'E:/2018python/%s.xlsx' % file_name)
        tmp = book.add_worksheet()
        # row_num = len(results)+1
        row = 0
        col = 0
        for version, Vtime, Vinfo in (results):
            tmp.write(row, col, version)
            tmp.write(row, col+1, Vtime)
            tmp.write(row, col+2, Vinfo)
            row += 1
        book.close()
<file_sep># -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
import numpy as np

class HtmlParser(object):

    def parse(self, html_content):
        if html_content is None:
            return None
        # name an explicit parser to avoid bs4's "no parser specified" warning
        soup = BeautifulSoup(html_content, "html.parser")
        # grab the version numbers, update times and related info
        versionNums = soup.find_all('li', class_="sne")
        versionNumsArry = []
        for VN in versionNums:
            res = self.get_versionNums(VN)
            versionNumsArry.append(res)
        # grab the detailed description of each version
        versionDetails = soup.find_all('div', class_="softqx")
        versionDetailsArry = []
        for VD in versionDetails:
            det = self.get_versionDetails(VD)
            versionDetailsArry.append(det)
        versionInfoCon = np.hstack((versionNumsArry, versionDetailsArry))
        # print(versionDetailsArry)
        versionAllInfo = []
        for VI in versionInfoCon:
            VAI = []
            VAI.append(VI[0])
            VAI.append(VI[1])
            VAI.append("|".join(VI[2]))
            versionAllInfo.append(VAI)
        # print(versionAllInfo)
        return versionAllInfo

    def get_versionNums(self, soup):
        # version number
        vnVersion = soup['title']
        # update time and other info
        vnInfo = soup.font.contents[0]
        return [vnVersion, vnInfo]

    def get_versionDetails(self, soup):
        vdDetailsResult = soup.find_all("p")
        vdDetailsArry = []
        for vd in vdDetailsResult:
            vdstr = vd.get_text()
            vdDetailsArry.append(vdstr)
        # print(vdDetailsArry)
        return [vdDetailsArry]
<file_sep># -*- coding: utf-8 -*-
import html_downloader, html_parser, html_outputer

# program entry point
if __name__ == "__main__":
    # URL of the app's version-update data we want to scrape
    baseUrl = "https://soft.shouji.com.cn/down/18684.html"
    # download the page
    pageLoader = html_downloader.HtmlDownloader()
    pageContent = pageLoader.get_page(baseUrl)
    # parse and recombine the downloaded data
    pageParser = html_parser.HtmlParser()
    versionNumResult = pageParser.parse(pageContent)
    # save the extracted data to Excel
    pageOutputer = html_outputer.HtmlOutputer()
    pageOutputer.save_to_excel(versionNumResult, 'test1')
<file_sep># -*- coding: utf-8 -*-
import requests

class HtmlDownloader(object):

    def get_page(self, baseUrl):
        try:
            # set request headers to mimic a browser visit
            header = {
                'User-Agent': r'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko)'
                              r'Chrome/45.0.2454.85 Safari/537.36 115Browser/6.0.3',
                'Referer': r'https://soft.shouji.com.cn/',
                'Connection': 'keep-alive'
            }
            result = requests.get(baseUrl, headers=header)
            # the site mis-declares its encoding, so re-decode the latin1 bytes as utf-8
            data = result.text.encode("latin1").decode("utf-8")
            # print(data)
            return data
        except Exception as err:
            print(err)
            print("Failed to fetch the data")
            return None
5ad385fc40a43bf3991ac6d938d9c49059d429ac
[ "Python" ]
4
Python
henya/phoneLy
b1c932d1b5187f63687d764f34c3bc38ac7ca2f8
5185c7d76854c477b7acd7a155047c8747aacc16
refs/heads/master
<repo_name>kayoxu/app<file_sep>/src/main/java/com/kayo/app/handle/ExceptionHandle.java package com.kayo.app.handle; import com.kayo.app.bean.RetData; import com.kayo.app.exception.KayoException; import com.kayo.app.utils.RetDataUtil; import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.ResponseBody; @ControllerAdvice public class ExceptionHandle { @ExceptionHandler(value = Exception.class) @ResponseBody public RetData handle(Exception e) { Integer state = 100; if (e instanceof KayoException) { state = ((KayoException) e).getState(); } return RetDataUtil.error(state, e.getMessage()); } } <file_sep>/src/main/resources/application.properties spring.jpa.database-platform=org.hibernate.dialect.MySQL5InnoDBDialect spring.datasource.driver-class-name=com.mysql.jdbc.Driver spring.datasource.url=jdbc:mysql://:3306/demo?characterEncoding=utf8&useSSL=false spring.datasource.username= spring.datasource.password= spring.jpa.hibernate.ddl-auto=update spring.jpa.show-sql=true <file_sep>/src/main/java/com/kayo/app/controller/UserController.java package com.kayo.app.controller; import com.kayo.app.bean.RetData; import com.kayo.app.bean.User; import com.kayo.app.repository.UserRepository; import com.kayo.app.service.UserService; import com.kayo.app.utils.RetDataUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.*; import javax.validation.Valid; import java.util.List; import java.util.Optional; @RestController @RequestMapping("/user") public class UserController { @Autowired UserRepository userRepository; @Autowired UserService userService; @GetMapping("/get") public RetData getUserList() { List<User> all = userRepository.findAll(); return RetDataUtil.success(all); } @GetMapping("/id/{id}") public Optional<User> getUserList(@PathVariable("id") Integer id) { return userRepository.findById(id); } @GetMapping("/name/{userName}") public List<User> getUserByName(@PathVariable("userName") String userName) { return userRepository.findByUserName(userName); } @GetMapping("/age/{age}") public List<User> getUserByAge(@PathVariable("age") Integer age) { return userRepository.findByAge(age); } @PostMapping("/save") public RetData saveUser(@Valid User user, BindingResult bindingResult) { Object obj = null; String msg = ""; Integer state = 0; if (bindingResult.hasErrors()) { msg = bindingResult.getFieldError().getDefaultMessage(); state = 1; return RetDataUtil.error(state, msg); } else { obj = userRepository.save(user); return RetDataUtil.success(obj); } } @PostMapping("/update") public RetData updateUser(User user) { User save = userRepository.save(user); return RetDataUtil.success(save); } @PostMapping("/delete") public void deleteUser(User user) { userRepository.delete(user); } @PostMapping("/two") public RetData userTwo() { List<User> users = userService.insertTwo(); return RetDataUtil.success(users); } @GetMapping("/size/{id}") public void userSize(@PathVariable("id") Integer id) throws Exception { userService.getAge(id); } }
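The dump omits the entity, repository and service classes the controller depends on. A hypothetical reconstruction of two of them follows; only the names actually used by UserController (id, userName, age, findByUserName, findByAge) come from the source, and every annotation and constraint below is an assumption.

// com/kayo/app/bean/User.java -- hypothetical sketch, not in this dump
package com.kayo.app.bean;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.validation.constraints.Min;

@Entity
public class User {

    @Id
    @GeneratedValue
    private Integer id;

    private String userName;

    // placeholder constraint so that @Valid in the controller has
    // something to report through bindingResult.getFieldError()
    @Min(value = 18, message = "age must be at least 18")
    private Integer age;

    public Integer getId() { return id; }
    public void setId(Integer id) { this.id = id; }
    public String getUserName() { return userName; }
    public void setUserName(String userName) { this.userName = userName; }
    public Integer getAge() { return age; }
    public void setAge(Integer age) { this.age = age; }
}

// com/kayo/app/repository/UserRepository.java -- hypothetical sketch
package com.kayo.app.repository;

import com.kayo.app.bean.User;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.List;

// Spring Data JPA derives the queries from the method names, which is
// why findByUserName/findByAge need no @Query annotations.
public interface UserRepository extends JpaRepository<User, Integer> {
    List<User> findByUserName(String userName);
    List<User> findByAge(Integer age);
}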
ca157acb3c7216032fb0bcf0ae78d8091b981730
[ "Java", "INI" ]
3
Java
kayoxu/app
2d6132d66db874d59b41e56ae02e293364665db4
25111d6529d56acb1324b9a61d668f55eaad208c
refs/heads/master
<repo_name>nuxt-community/gooogle-analytics-module<file_sep>/docs/content/en/options.md --- title: Options description: Learn how to configure the Google Analytics module in Nuxt position: 3 category: Guide fullscreen: false --- To configure the module, you can use `googleAnalytics` section in `nuxt.config.js`. ```js{}[nuxt.config.js] export default { googleAnalytics: { // Options } } ``` <alert type="info"> `router` instance is added out of the box. You can refer [here](https://github.com/MatteoGabriele/vue-analytics/blob/master/docs/page-tracking.md#disable-page-auto-tracking) on to how to disable it if needed. </alert> ## Google Analytics options ### `id` * Type: `String` * **Required** The tracking ID of your Google Analytics account. It is required to have Google Analytics (GA) know which account and property to send the data to. ```js[nuxt.config.js] export default { googleAnalytics: { id: 'UA-XXX-X' } } ``` <alert type="info"> For backwards compatibilities, use `ua` instead. </alert> ### `asyncID` * Type: `Function` * Should return a `String` as tracking `id` for GA account. Allow an asynchronous function to load the `id` ```js[nuxt.config.js] export default { googleAnalytics: { asyncID: async (context) => { /* do something */ return 'UA-XXX-X' } } } ``` <alert type="warning"> If both `id` and `asyncID` are present, the returned value from `asyncID` will override the value of `id`. </alert> ### `debug` * Type: `Object` * `enabled`: `Boolean` - to enable the debug mode * `sendHitTask`: `Boolean` - to sent GA hits. Default is `false` for development mode. ### `dev` * Type: `Boolean` Turn on the development mode and disable the module. ### `checkDuplicatedScript` * Type: `Boolean` It will detect if any analytics script has been added in your HTML page. ```js[nuxt.config.js] export default { googleAnalytics: { checkDuplicatedScript: true } } ``` ### `disableScriptLoader` * Type: `Boolean` Disable the script loader ```js[nuxt.config.js] export default { googleAnalytics: { disableScriptLoader: true } } ``` <alert type="info"> For a full list of options, please see [Vue Analytics](https://matteogabriele.gitbooks.io/vue-analytics) documentation. </alert> <file_sep>/docs/nuxt.config.js import theme from '@nuxt/content-theme-docs' export default theme({ docs: { primaryColor: '#E37400' }, buildModules: ['nuxt-ackee'], ackee: { server: 'https://ackee.nuxtjs.com', domainId: '79213d80-7ac2-47c6-ba6b-0025a7a0ee35', detailed: true }, pwa: { manifest: { name: 'Nuxt Google Analytics' } } }) <file_sep>/lib/plugin.js import Vue from 'vue' import VueAnalytics from 'vue-analytics' export default async (ctx, inject) => { const runtimeConfig = ctx.$config && ctx.$config.googleAnalytics || {} const moduleOptions = <%= serialize(options) %> const options = {...moduleOptions, ...runtimeConfig} if (typeof options.asyncID === 'function') { options.id = await options.asyncID(ctx) } Vue.use(VueAnalytics, {...{ router: ctx.app.router }, ...options}) ctx.$ga = Vue.$ga inject('ga', Vue.$ga) } <file_sep>/docs/content/en/setup.md --- title: Setup description: Learn how to setup the Google Analytics module in Nuxt position: 2 category: Guide --- Check the [Nuxt.js documentation](https://nuxtjs.org/guides/configuration-glossary/configuration-modules) for more information about installing and using modules in Nuxt.js. ## Installation 1. 
Add `@nuxtjs/google-analytics` dependency to your project: <code-group> <code-block label="Yarn" active> ```bash yarn add --dev @nuxtjs/google-analytics ``` </code-block> <code-block label="NPM"> ```bash npm install --save-dev @nuxtjs/google-analytics ``` </code-block> </code-group> <alert type="warning"> If you are using Nuxt **< v2.9**, you have to install the module as `dependency` (**without** `--dev` or `--save-dev`) </alert> 2. Add `@nuxtjs/google-analytics` to the `buildModules` section of `nuxt.config.js`: ```js[nuxt.config.js] { buildModules: [ '@nuxtjs/google-analytics' ], } ``` <alert type="warning"> If you are using Nuxt **< v2.9**, you have to add it to `modules` section instead of `buildModules`. </alert> ### Configure Add `googleAnalytics` section in `nuxt.config.js` to set the module options: ```js[nuxt.config.js] export default { googleAnalytics: { // Options } } ``` Then pass your Google Analytics ID to `id` field of `googleAnalytics`: ```js[nuxt.config.js] export default { googleAnalytics: { id: 'UA-XXX-X' } } ``` <alert type="info"> `router` instance is added out of the box. You can refer [here](https://github.com/MatteoGabriele/vue-analytics/blob/master/docs/page-tracking.md#disable-page-auto-tracking) on to how to disable it if needed. </alert> ### Runtime Config You can use `publicRuntimeConfig`(from [runtime config](https://nuxtjs.org/guide/runtime-config)) to have dynamic environment variables available in production. Otherwise, the configuration options passed in `nuxt.config.js` will be read and hard-coded during the build once only. ```js[nuxt.config.js] export default { buildModules: [ '@nuxtjs/google-analytics' ], googleAnalytics: { id: process.env.GOOGLE_ANALYTICS_ID, // Use as fallback if no runtime config is provided }, publicRuntimeConfig: { googleAnalytics: { id: process.env.GOOGLE_ANALYTICS_ID } } } ``` <alert type="info"> For a full list of usage, refer to [Vue Analytics Documentation](https://matteogabriele.gitbooks.io/vue-analytics). </alert> <file_sep>/README.md [![@nuxtjs/google-analytics](https://google-analytics.nuxtjs.org/preview.png)](https://google-analytics.nuxtjs.org) # @nuxtjs/google-analytics [![npm version][npm-version-src]][npm-version-href] [![npm downloads][npm-downloads-src]][npm-downloads-href] [![Github Actions CI][github-actions-ci-src]][github-actions-ci-href] [![Codecov][codecov-src]][codecov-href] [![License][license-src]][license-href] > [Google Analytics](https://analytics.google.com/analytics/web/) integration for [Nuxt](https://nuxtjs.org) using [vue-analytics](https://github.com/MatteoGabriele/vue-analytics). - [✨ &nbsp;Release Notes](./CHANGELOG.md) - [📖 &nbsp;Documentation](https://google-analytics.nuxtjs.org) ## Features - Automatic page tracking - Event batching - User timings - Screen view - Multiple domain ID - Automatic Google Analytics script loading - E-commerce support [📖 &nbsp;Read more](https://google-analytics.nuxtjs.org) ## Contributing 1. Clone this repository 2. Install dependencies using `yarn install` or `npm install` 3. 
Start development server using `yarn dev` or `npm run dev`

## License

[MIT License](./LICENSE)

Copyright (c) Nuxt Community

<!-- Badges -->
[npm-version-src]: https://img.shields.io/npm/v/@nuxtjs/google-analytics/latest.svg
[npm-version-href]: https://npmjs.com/package/@nuxtjs/google-analytics
[npm-downloads-src]: https://img.shields.io/npm/dm/@nuxtjs/google-analytics.svg
[npm-downloads-href]: https://npmjs.com/package/@nuxtjs/google-analytics
[github-actions-ci-src]: https://github.com/nuxt-community/analytics-module/workflows/ci/badge.svg
[github-actions-ci-href]: https://github.com/nuxt-community/analytics-module/actions?query=workflow%3Aci
[codecov-src]: https://img.shields.io/codecov/c/github/nuxt-community/analytics-module.svg
[codecov-href]: https://codecov.io/gh/nuxt-community/analytics-module
[license-src]: https://img.shields.io/npm/l/@nuxtjs/google-analytics.svg
[license-href]: https://npmjs.com/package/@nuxtjs/google-analytics
<file_sep>/docs/content/en/usage/event-tracking.md
---
title: Event tracking
description: Learn how to send events to Google Analytics with the Nuxt module
position: 5
category: Usage
---

This module injects the `$ga` instance globally. You can access the instance anywhere using:

- `this.$ga` within a component
- `context.$ga` for plugins, `asyncData`, `nuxtServerInit` and [middleware](https://nuxtjs.org/guides/directory-structure/middleware)

## `event()`

The received parameters can be:

* Event `Object` contains:
  * `eventCategory` - the object the user interacted with.
    * Type: `String`
    * `required`
  * `eventAction` - the type of interaction (`click`, `play`, etc.)
    * Type: `String`
    * `required`
  * `eventLabel` - for categorizing events
    * Type: `String`
  * `eventValue` - a numeric value associated with the event
    * Type: `Number`

```js
this.$ga.event({
  eventCategory: 'category',
  eventAction: 'action',
  eventLabel: 'label',
  eventValue: 123
})
```

Or the event's information can be passed directly as separate arguments, in this exact order:

```js
event(eventCategory, eventAction, eventLabel, eventValue)
```

For example:

```js
this.$ga.event('category', 'action', 'label', 123)
```
<file_sep>/docs/content/en/usage/page-tracking.md
---
title: Page tracking
description: Learn how to track a page in Google Analytics with the Nuxt module
position: 4
category: Usage
---

This module injects the `$ga` instance globally. You can access the instance anywhere using `this.$ga` (within a component), or `context.$ga` (for plugins, `asyncData`, `fetch`, `nuxtServerInit`, and middleware)

## Automatic page tracking

Since the `router` instance is added out of the box during installation of the module, it will handle page tracking automatically for you.

## Manual page tracking

### `page(options)`

* Type: `String` | `Object` | `VueRouter instance`

Track and send a pageview event for a single page.

<alert type="info">

You can read more about page tracking in the [Google Analytics Documentation](https://developers.google.com/analytics/devguides/collection/analyticsjs/pages)

</alert>

The most common usage is to pass the page path:

```js
this.$ga.page('/')
```

Or to pass an `Object` containing the page details:

```js
this.$ga.page({
  page: '/',
  title: 'Home page',
  location: window.location.href
})
```

Or to pass a `VueRouter` instance existing in the component. The module will auto-detect related information about the page.
```js
this.$ga.page(this.$router)
```

<alert type="info">

For other page-tracking setup options, please refer to the [Vue Analytics Documentation - Page tracking](https://matteogabriele.gitbooks.io/vue-analytics/content/docs/page-tracking.html)

</alert>
<file_sep>/docs/content/en/usage/time-tracking.md
---
title: User timings tracking
description: Learn how to track and measure user interaction time with the Google Analytics module for Nuxt
position: 7
category: Usage
---

This module injects the `$ga` instance globally. You can access the instance anywhere using `this.$ga` (within a component), or `context.$ga` (for plugins, `asyncData`, `fetch`, `nuxtServerInit`, and middleware)

## `time()`

The received parameters can be:

* Event `Object` contains:
  * `timingCategory` - for categorizing all user timing variables into logical groups
    * Type: `String`
    * `required`
  * `timingVar` - identifies the variable being recorded ('load' for instance)
    * Type: `String`
    * `required`
  * `timingValue` - number of milliseconds in elapsed time to report
    * Type: `Number`
    * `required`
  * `timingLabel` - used to add flexibility in visualizing user timings in the reports
    * Type: `String`

```js
this.$ga.time({
  timingCategory: 'category',
  timingVar: 'variable',
  timingValue: 123,
  timingLabel: 'label'
})
```

Or the timing event's information can be passed directly as separate arguments, in this exact order:

```js
time(timingCategory, timingVar, timingValue, timingLabel)
```

For example:

```js
this.$ga.time('category', 'variable', 123, 'label')
```
<file_sep>/CHANGELOG.md
# Changelog

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

## [2.4.0](https://github.com/nuxt-community/analytics-module/compare/v2.3.0...v2.4.0) (2020-07-16)

### Features

* **plugin:** support runtimeConfig ([#81](https://github.com/nuxt-community/analytics-module/issues/81)) ([8248e60](https://github.com/nuxt-community/analytics-module/commit/8248e602d881e2451c5b3e07f12acd99628c8260))

## [2.3.0](https://github.com/nuxt-community/analytics-module/compare/v2.2.3...v2.3.0) (2020-05-19)

### Features

* typescript typings ([#78](https://github.com/nuxt-community/analytics-module/issues/78)) ([5c75f37](https://github.com/nuxt-community/analytics-module/commit/5c75f37ae97d0fa4beb58d9a58befb224b38b2fc))

### [2.2.3](https://github.com/nuxt-community/analytics-module/compare/v2.2.2...v2.2.3) (2020-01-10)

### [2.2.2](https://github.com/nuxt-community/analytics-module/compare/v2.2.1...v2.2.2) (2019-12-03)

### [2.2.1](https://github.com/nuxt-community/analytics-module/compare/v2.2.0...v2.2.1) (2019-10-25)

### Bug Fixes

* send hits in production mode by default ([#60](https://github.com/nuxt-community/analytics-module/issues/60)) ([0871f84](https://github.com/nuxt-community/analytics-module/commit/0871f848051ce8f5646493552c405b0c4dd44e0a))

<a name="2.2.0"></a>
# [2.2.0](https://github.com/nuxt-community/analytics-module/compare/v2.1.0...v2.2.0) (2019-03-08)

### Features

* **module:** inject $ga in ctx ([#40](https://github.com/nuxt-community/analytics-module/issues/40)) ([2c42bf3](https://github.com/nuxt-community/analytics-module/commit/2c42bf3))
* **plugin:** support id as a function (asyncID) ([#43](https://github.com/nuxt-community/analytics-module/issues/43)) ([6b6809e](https://github.com/nuxt-community/analytics-module/commit/6b6809e))

<a name="2.1.0"></a>
# [2.1.0](https://github.com/nuxt-community/analytics-module/compare/v2.0.4...v2.1.0)
(2019-02-17) ### Bug Fixes * default dev option to true to prevent breaking change ([860a27c](https://github.com/nuxt-community/analytics-module/commit/860a27c)) ### Features * support `googleAnalytics` in `nuxt.config` ([bda825e](https://github.com/nuxt-community/analytics-module/commit/bda825e)) <a name="2.0.4"></a> ## [2.0.4](https://github.com/nuxt-community/analytics-module/compare/v2.0.3...v2.0.4) (2019-02-17) ### Bug Fixes * disable on dev unless specified ([63e22e5](https://github.com/nuxt-community/analytics-module/commit/63e22e5)) <a name="2.0.3"></a> ## [2.0.3](https://github.com/nuxt-community/analytics-module/compare/v2.0.2...v2.0.3) (2019-02-08) <a name="2.0.2"></a> ## [2.0.2](https://github.com/nuxt-community/analytics-module/compare/v2.0.1...v2.0.2) (2017-11-24) <a name="2.0.1"></a> ## [2.0.1](https://github.com/nuxt-community/analytics-module/compare/2.0.0...2.0.1) (2017-10-27) <file_sep>/docs/content/en/usage/screen-tracking.md --- title: Screen tracking description: Learn how to track screen views using the Google Analytics module for Nuxt position: 6 category: Usage --- This module injects `$ga` instance globally. You can access the instance anywhere using `this.$ga` (within a component), or `context.$ga` (for plugins, `asyncData`, `fetch`, `nuxtServerInit` ,and middleware) ## `screenview(options)` * `options` * Type: `String` | `Object` * `required` * The screen view event of a component Track the screen hits of a page or a component. You can pass a string as the `screenName` property for the screenview event sent to GA. ```js this.$ga.screenview('home') ``` Or you can pass the event object with customized fields ```js this.$ga.screenview({ screenName: 'home', ... // other properties }) ``` <alert type="info"> You can turn on auto-tracking for screen view by adding `screenview` property to `autoTracking` field in `googleAnalytics` section. ```js[nuxt.config.js] export { googleAnalytics: { id: 'UA-XXX-X', autoTracking: { screenview: true } } } ``` </alert> <file_sep>/docs/content/en/index.md --- title: Introduction description: The Google Analytics module for Nuxt position: 1 category: '' features: - Automatic page tracking - Event batching - User timings - Screen view - Multiple domain ID - Automatic Google Analytics script loading - E-commerce support --- <img src="/preview.png" class="light-img" width="1280" height="640" alt=""/> <img src="/preview-dark.png" class="dark-img" width="1280" height="640" alt=""/> [Google Analytics](https://analytics.google.com/analytics/web/) integration for [Nuxt](https://nuxtjs.org) using [vue-analytics](https://github.com/MatteoGabriele/vue-analytics). Track the visitors to your sites and applications, measure your ROI and provide in-depth analysis details about your visitors' behaviors. ## Features <list :items="features"></list> <p class="flex items-center">Enjoy light and dark mode:&nbsp;<app-color-switcher class="inline-flex ml-2"></app-color-switcher></p>
a4d8fa67d91a229fbf3f2f89f3640a0031f7da83
[ "Markdown", "JavaScript" ]
11
Markdown
nuxt-community/gooogle-analytics-module
e01065d25ccd1e5096656ee89619823cc807afd7
0d58624e010bf4d3e81f451ea045712fbf01b7e4
refs/heads/master
<file_sep> import {RouterModule,Routes} from '@angular/router'; import { HomeComponent } from './components/home/home.component'; import { AboutComponent } from './components/about/about.component'; import { HeroesComponent } from './components/heroes/heroes.component'; import { HeroeComponent } from './components/heroe/heroe.component'; import { ResultadosComponent } from './components/resultados/resultados.component'; const APP_ROUTES: Routes = [ {path:'home', component:HomeComponent}, {path:'about', component:AboutComponent}, {path:'heroes', component:HeroesComponent}, {path:'heroe/:id', component:HeroeComponent}, {path:'resultados/:termino', component:ResultadosComponent}, {path:'**', pathMatch:'full', redirectTo:'home'} ]; export const APP_ROUTING = RouterModule.forRoot(APP_ROUTES); <file_sep>import { Component, OnInit } from '@angular/core'; import { Router, ActivatedRoute, } from '@angular/router'; import { HeroesService } from '../../services/heroes.service'; @Component({ selector: 'app-resultados', templateUrl: './resultados.component.html' }) export class ResultadosComponent implements OnInit { heroes:any[]; termino:string; constructor(private activatedRouter:ActivatedRoute, private _heroesService:HeroesService, private router:Router) { } ngOnInit() { this.activatedRouter.params.subscribe( params =>{ this.termino =params['termino']; this.heroes = this._heroesService.buscarHeroes( params['termino'] ); }) } verHeroe( idx:number ){ this.router.navigate( ['/heroe',idx] ); } }
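The HeroesService referenced by ResultadosComponent is not included in this dump. A hypothetical sketch follows, reconstructed only from how the components call it: buscarHeroes(termino) returns the heroes whose name contains the search term, tagging each with its array index so verHeroe(idx) can navigate to '/heroe/:id'. The hero fields and sample data are assumptions.

```typescript
// heroes.service.ts -- hypothetical sketch, not part of this dump
import { Injectable } from '@angular/core';

@Injectable()
export class HeroesService {

  // placeholder data; the real service presumably holds a full hero list
  private heroes: any[] = [
    { nombre: 'Aquaman', bio: '...', casa: 'DC' },
    { nombre: 'Batman', bio: '...', casa: 'DC' }
  ];

  buscarHeroes(termino: string): any[] {
    const heroesArr: any[] = [];
    termino = termino.toLowerCase();

    for (let i = 0; i < this.heroes.length; i++) {
      // attach the index so the components can route to /heroe/:id
      const heroe = { ...this.heroes[i], idx: i };
      if (heroe.nombre.toLowerCase().indexOf(termino) >= 0) {
        heroesArr.push(heroe);
      }
    }
    return heroesArr;
  }
}
```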
941d707cbf27b2dfb88bf2cbb568e54b0f974cc4
[ "TypeScript" ]
2
TypeScript
EnriqueMB3/HeroApp
aae171c0e312d99e2f253853321a9c5e91e30c0a
ac2fb42923a10b5a146a9028a216083a5418ed43
refs/heads/master
<file_sep>#include "thread.h" Thread::Thread(QString com, QObject *parent) :QThread(parent) { myCom = new Posix_QextSerialPort(com, QextSerialBase::Polling); myCom->open(QIODevice::ReadWrite); stopped = 1; myCom->setBaudRate(BAUD9600); myCom->setDataBits(DATA_8); myCom->setParity(PAR_NONE); myCom->setStopBits(STOP_1); myCom->setFlowControl(FLOW_OFF); myCom->setTimeout(100); } Thread::~Thread() { } void Thread::run() { while(stopped) { //msleep(5000); QByteArray temp = myCom->readAll(); //if(temp.size() == 8) //{ emit this->serialFinished(temp); // } } } void Thread::stopThread() { stopped = 0; } <file_sep>#include <iostream> #include <string> #include <fstream> #include "weatherobj.h" using namespace std; void weatherobj::print() { cout<<"zthreshold = "<<zthreshold<<endl; cout<<"cthreshold = "<<cthreshold<< endl; cout<<"threshold = "<<threshold<<endl; cout<<"thresholdline = "<<thresholdline<< endl; cout<<"trackbar_alpha = "<<trackbar_alpha<< endl; } weatherobj::weatherobj() { } weatherobj::weatherobj(int z,int ct,int t,int l,int a) { zthreshold=z; cthreshold=ct; threshold=t; thresholdline=l; trackbar_alpha=a; } int weatherobj::getz(){ return zthreshold; } int weatherobj::getct(){ return cthreshold; } int weatherobj::gett(){ return threshold; } int weatherobj::getl(){ return thresholdline; } int weatherobj::geta(){ return trackbar_alpha; } <file_sep>#ifndef COM_H #define COM_H #include <QCom> #include <posix_qextserialport.h> #include <QTimer> namespace Ui { class Com; } class Com : public QCom { Q_OBJECT public: explicit Com(QWidget *parent = 0); ~Com(); private: Ui::Com *ui; Posix_QextSerialPort *myCom; QTimer *readTimer; private slots: void on_pushButton_clicked(); void readMyCom(); }; #endif // COM_H <file_sep>#ifndef Thread_H #define Thread_H #include <QThread> #include <QString> #include "posix_qextserialport.h" #include "qextserialbase.h" class Thread : public QThread { Q_OBJECT public: Thread(QString com, QObject *parent); ~Thread(); void run(); void stopThread(); signals: void serialFinished(QByteArray temp); private: Posix_QextSerialPort *myCom; int stopped; }; #endif // Thread_H <file_sep>#ifndef DLPY_FRAMES_H #define DLPY_FRAMES_H #include <QWidget> #include "opencv2/imgproc/imgproc.hpp" #include "opencv2/highgui/highgui.hpp" #include <string.h> #include <QTimer> #include <QPaintEvent> #include <QPainter> #include <QGraphicsScene> #include "utils.h" namespace Ui { class dlpy_frames; } class dlpy_frames : public QWidget { Q_OBJECT public: dlpy_frames(int state,string filename,QWidget *parent = 0); ~dlpy_frames(); stdline lineA_l,lineA_r,lineB,lineC,lineD,lineE; //存左右A线的变量。绿线 signals: void setalertsignal(int alert); public slots: void setparaslot(int z,int c,int t,int l,int a); void setlineslot(int l); protected: void changeEvent(QEvent *e); private slots: void nextFrame(); private: Ui::dlpy_frames *ui; cv::VideoCapture captRefrnc; //读取视频类 cv::Mat frameReference; //创建mat,用于保存图像内容 cv::VideoWriter writer; //将图片序列保存成视频文件 QGraphicsScene *scene; std::string Reference; QImage *qimage; QTimer *timer; stdline line_l,line_r,line_mid; //识别出来的左右线。红线 Mat img1,img2,img3,img3_l,img3_r,mask_image_l,mask_image_r,img_alert,img_alert_1,img_alert_2,img_temp; Rect roi_l,roi_r; vector<Vec4i> vec1,vec2; Point mask_points[2][4],s_point; static const double PI=3.1415926; // 阈值化(零)【zthreshold_value】:主要在opencv中的threshold函数使用,好像是去噪(具体请查opencv函数) // 对比度【alpha_value】:增加或降低对比度,为了突出图像用的。 //粒子筛选【cthreshold_value】:也是起到突出图像的作用,用的opencv中的Canny函数(具体请查opencv函数) 
    // threshold_value - edge-detection granularity: used in HoughLinesP to decide
    //   which candidate line segments qualify (see the OpenCV docs for details)
    // thresholdline_value - minimum segment length: also used in HoughLinesP, to
    //   filter out the shortest segments (see the OpenCV docs for details)
    int zthreshold_value,threshold_value,cthreshold_value,thresholdline_value,alpha_value,times,flag;
    Mat drawmaxline(Mat src,vector<Vec4i> lines,int whl);
    // The functions below judge whether the vehicle is drifting in the lane.
    // Functions named with "2L" judge from both lane lines; "1L" from a single line.
    void compareall();
    void compare2L_mid(stdline linemid,Point pmid,int py);
    int compare2L(stdline a,stdline b,int alerter,int py);
    void compare1L(stdline a,stdline b,int alerter,int py);
};

#endif // DLPY_FRAMES_H
<file_sep>#include <QApplication>
#include "dlpy_main.h"

int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    // if(argc==3&&(string)argv[1]=="-r"){
    //     std::cout<<"read"<<std::endl;
    //     dlpy_main *w=new dlpy_main(1,(string)argv[2]);
    //     w->show();
    // }else if(argc==3&&(string)argv[1]=="-w"){
    //     dlpy_main *w=new dlpy_main(2,(string)argv[2]);
    //     w->show();
    // }else{
    //     std::cout<<"example: ./dlpy_qt -r xxx.avi"<<std::endl;
    //     return 0;
    // }
    // sensor_data *data = new sensor_data();
    // data->show();
    // widget *wid = new widget();
    // wid->show();
    dlpy_main *w = new dlpy_main(1,"/home/hugo/code/bin/test12.avi");
    w->setWindowFlags(Qt::FramelessWindowHint);
    w->show();
    return a.exec();
}
<file_sep>#include "dlpy_frames.h"
#include "ui_dlpy_frames.h"

dlpy_frames::dlpy_frames(int state,string filename,QWidget *parent) :
    QWidget(parent),
    ui(new Ui::dlpy_frames)
{
    setGeometry(0, 50, 640, 480);
    ui->setupUi(this);
    Reference=filename; // name of the video file to read from or to record into
    if(state==2){ // camera mode
        flag=2;
        captRefrnc=cv::VideoCapture(0);
        captRefrnc.set(CV_CAP_PROP_FRAME_WIDTH,640); // frame width of the video stream
        captRefrnc.set(CV_CAP_PROP_FRAME_HEIGHT,480); // frame height of the video stream
        captRefrnc.set(CV_CAP_PROP_FPS,30); // frame rate
        writer=VideoWriter(Reference, CV_FOURCC('P', 'I', 'M', '1'), 30.0, Size(640, 480)); // record the camera images into an avi file
    }else if(state==1){ // video playback mode
        flag=1;
        captRefrnc=cv::VideoCapture(Reference); // open the video file
    }else{
        exit(-1);
    }
    // check that the video opened correctly: isOpened() returns true on success
    if (captRefrnc.isOpened())
    {
        captRefrnc.read(frameReference);
        this->setFixedSize(frameReference.cols+6,frameReference.rows+6);
        // initial values of the tuning parameters
        zthreshold_value = 10; // thresholding value
        threshold_value = 0; // particle filtering
        cthreshold_value = 60; // edge-detection granularity
        thresholdline_value = 42; // minimum segment length
        alpha_value = 100; // contrast
        times=0;
        mask_image_l = Mat::zeros(frameReference.size(),CV_8UC1);
        mask_image_r = Mat::zeros(frameReference.size(),CV_8UC1);
        // red line of the right lane? (note: these coordinates actually cover the left half of the frame)
        mask_points[0][0]=Point((frameReference.cols)/2-1,(frameReference.rows)*13/20-1);
        mask_points[0][1]=Point((frameReference.cols)*50/120-1,(frameReference.rows)*13/20-1);
        mask_points[0][2]=Point(frameReference.cols*5/160-1,(frameReference.rows)*55/60-1);
        mask_points[0][3]=Point((frameReference.cols)*35/120-1,(frameReference.rows)*45/50-1);
        mask_points[0][4]=Point((frameReference.cols)/2-1,(frameReference.rows)*13/20-1);
        // red line of the left lane? (note: these coordinates actually cover the right half of the frame)
        mask_points[1][0]=Point((frameReference.cols)/2,(frameReference.rows)*13/20-1);
        mask_points[1][1]=Point((frameReference.cols)*70/120,(frameReference.rows)*13/20-1);
        mask_points[1][2]=Point(frameReference.cols*155/160,(frameReference.rows)*55/60-1);
        mask_points[1][3]=Point((frameReference.cols)*85/120,(frameReference.rows)*45/50-1);
        mask_points[1][4]=Point((frameReference.cols)/2,(frameReference.rows)*13/20-1);
        const Point* ppt1[1] = {mask_points[0]};
        const Point* ppt2[1] = {mask_points[1]};
        int npt[] = {5,5};
        /**
         * @brief fillPoly
         * the polygon is drawn onto mask_image_l
         * ppt1 is the polygon's vertex set
         * npt gives the number of vertices of the polygon to draw
         * 1 is the number of polygons to draw
         * the polygon color is Scalar(255)
         */
        fillPoly(mask_image_l,ppt1,npt,1,Scalar(255),8); // fill the given closed polygon with the given color
        fillPoly(mask_image_r,ppt2,npt,1,Scalar(255),8); // Scalar(B,G,R)
        // the code above is image preprocessing (setting up image-typed locals etc.);
        // it only calls OpenCV built-ins -- see the OpenCV docs for the meaning of each call.
        this->qimage=new QImage;
        *qimage=mat2qimage(frameReference);
        timer = new QTimer(this);
        timer->setInterval(60); // interval between displaying consecutive frames
        connect(timer,SIGNAL(timeout()),this,SLOT(nextFrame()));
        timer->start();
    }
}

dlpy_frames::~dlpy_frames()
{
    delete ui;
}

void dlpy_frames::changeEvent(QEvent *e)
{
    QWidget::changeEvent(e);
    switch (e->type()) {
    case QEvent::LanguageChange: // language changed
        ui->retranslateUi(this);
        break;
    default:
        break;
    }
}

// process the next frame
void dlpy_frames::nextFrame()
{
    this->scene = new QGraphicsScene;
    captRefrnc.read(frameReference);
    if(flag==2)writer << frameReference; // save the camera image into the video file
    img1= frameReference;
    img1.copyTo(img3);
    img2=pre(img1,alpha_value,zthreshold_value,cthreshold_value); // preprocess the current image
    img3_l = Mat::zeros(img1.size(),CV_8UC1);
    img3_r = Mat::zeros(img1.size(),CV_8UC1);
    img2.copyTo(img3_l,mask_image_l);
    img2.copyTo(img3_r,mask_image_r);
    vec1=detline(img3_l,threshold_value,thresholdline_value); // threshold_value controls which segments qualify
    vec2=detline(img3_r,threshold_value,thresholdline_value); // thresholdline_value filters out the shortest segments
    img3=drawmaxline(img1,vec1,1);
    img3=drawmaxline(img3,vec2,2);
    compareall(); // "road tilt"?? no -- lane departure, rather!
    // roughly: compares the green reference midline with the detected lane midline to decide whether to raise an alert
    *qimage=mat2qimage(img3);
    scene->addPixmap(QPixmap::fromImage(*qimage));
    ui->graphicsView->setScene(scene);
    ui->graphicsView->resize(qimage->width()+6, qimage->height()+6);
    ui->graphicsView->show();
}

// draw the longest detected line
Mat dlpy_frames::drawmaxline(Mat src,vector<Vec4i> lines,int whl)
{
    Mat dst;
    Point mp1,mp2;
    src.copyTo(dst);
    double max=0;
    for( size_t i = 0; i < lines.size(); i++ )
    {
        Vec4i l = lines[i];
        double c=sqrt((l[0]-l[2])*(l[0]-l[2]) + (l[1]-l[3])*(l[1]-l[3]));
        if(c>=max){
            mp1=Point(l[0], l[1]);mp2=Point(l[2], l[3]);max=c;
        }
    }
    if(max>0){
        if(whl==1){ // left red line
            line_l.x=mp1.x;
            line_l.y=mp1.y;
            line_l.k=getlineK(mp1.x,mp1.y,mp2.x,mp2.y); // slope
            line_l.b=getlineB(mp1.x,mp1.y,mp2.x,mp2.y); // intercept
            line_l.x2=mp2.x;
            line_l.y2=mp2.y;
            line_l.ifcp=1;
            line(dst, Point(lineA_l.x, lineA_l.y), Point(240, getlineY(lineA_l,240)), Scalar(0,255,0), 2, CV_AA); // A-line anchor -- left green line
            // line(): draws onto dst from the start point to the end point, color G=255, given thickness
        }
        if(whl==2){ // right red line
            line_r.x=mp1.x;
            line_r.y=mp1.y;
            line_r.k=getlineK(mp1.x,mp1.y,mp2.x,mp2.y);
            line_r.b=getlineB(mp1.x,mp1.y,mp2.x,mp2.y);
            line_r.x2=mp2.x;
            line_r.y2=mp2.y;
            line_r.ifcp=1;
            line(dst, Point(lineA_r.x, lineA_r.y), Point(getlineX(lineA_r,420), 420), Scalar(0,255,0), 2, CV_AA); // B-line anchor -- right green line
        }
        line(dst, mp1, mp2, Scalar(0,0,255), 2, CV_AA);
    }
    return dst;
}

// unused?
/* int dlpy_frames::compare2L(stdline a,stdline b,int alerter,int py){ int rs=0; circle(img3,s_point,1,Scalar( 0, 255, 0 ),-1,8); double arc=atan((a.k-b.k)/(1+a.k*b.k))*180/PI; if(alerter==2){ } if(arc<-2||arc>2){ int tmpy=getlineY(b,s_point.x)+py; Point k_point=Point(s_point.x,tmpy); if(alerter==2){ if(tmpy>=(s_point.y+8)){emit setalertsignal(4);rs=1;} else if(tmpy>=(s_point.y+3)){emit setalertsignal(2);rs=1;} else{} circle(img3,k_point,1,Scalar( 255, 0, 0 ),-1,8); } if(alerter==1){ if(tmpy>=(s_point.y+8)){emit setalertsignal(3);rs=1;} else if(tmpy>=(s_point.y+3)){emit setalertsignal(1);rs=1;} else{} circle(img3,k_point,1,Scalar( 0, 0, 255 ),-1,8); } } return rs; } */ //比较一条车道线情况 void dlpy_frames::compare1L(stdline a,stdline b,int alerter,int py){ double arc=atan((a.k-b.k)/(1+a.k*b.k))*180/PI; //arctan? if(arc<-2||arc>2){ int tmpy=getlineY(b,s_point.x)+py; //py可能是控制误差? Point k_point=Point(s_point.x,tmpy); circle(img3,s_point,1,Scalar( 0, 255, 0 ),-1,8); circle(img3,k_point,1,Scalar( 255, 0, 0 ),-1,8); if(alerter==1){ if(tmpy<=(s_point.y-8)){emit setalertsignal(4);times=0;} else if(tmpy<=(s_point.y-3)&&times>2){emit setalertsignal(2);times=0;} else if(tmpy>=(s_point.y+8)){emit setalertsignal(3);times=0;} else if(tmpy>=(s_point.y+3)&&times>2){emit setalertsignal(1);times=0;} else{} } if(alerter==2){ if(tmpy<=(s_point.y-8)){emit setalertsignal(3);times=0;} else if(tmpy<=(s_point.y-3)&&times>2){emit setalertsignal(1);times=0;} else if(tmpy>=(s_point.y+8)){emit setalertsignal(4);times=0;} else if(tmpy>=(s_point.y+3)&&times>2){emit setalertsignal(2);times=0;} else{} } } times++; //times的作用? } //比较两条左右红线的中间线和两条绿线的交点 void dlpy_frames::compare2L_mid(stdline linemid,Point pmid,int py){ int tmpx=getlineX(linemid,pmid.y); //计算与绿线交点y坐标相同的红线中线上的x坐标 //通过比对同一y坐标下x坐标的距离,来判断报警程度 if(tmpx<pmid.x){ //红线中线x<绿线交点x 说明道路红线中线跑到车的左边=====>车压到了右线 if((tmpx+20)<pmid.x){emit setalertsignal(4);times=0;} // else if((tmpx+5)<pmid.x&&times>2){emit setalertsignal(2);times=0;} else{} } if(tmpx>pmid.x){ //压左线 if((tmpx-20)>pmid.x){emit setalertsignal(3);times=0;} else if((tmpx-5)>pmid.x&&times>2){emit setalertsignal(1);times=0;} else{} times++; } circle(img3,s_point,1,Scalar( 0, 255, 0 ),-1,8); //画出绿线交点 circle(img3,Point(tmpx,pmid.y),1,Scalar( 255, 0, 0 ),-1,8); //画出红线中线同一y坐标点 } //比较绿线和红线 void dlpy_frames::compareall(){ if(line_l.ifcp==1&&line_r.ifcp==1){ //左右红线都存在 line_mid=getmidline(line_l,line_r); //获取左右标线的中间线 Point c1=Point(line_mid.x,line_mid.y); //中间线起点 Point c2=Point(line_mid.x2,line_mid.y2); //中间线终点 Point cl1=getmidpoint(lineA_l.x,lineA_l.y,getlineX(lineA_r,lineA_l.y),lineA_l.y); //计算右侧绿线的X坐标,然后算出与左侧起点的中点 Point cl2=getmidpoint(lineA_l.x2,lineA_l.y2,getlineX(lineA_r,lineA_l.y2),lineA_l.y2); line(img3, c1, c2, Scalar(255,0,0), 2, CV_AA); //画出红线中间线 line(img3, cl1, cl2, Scalar(0,255,0), 2, CV_AA); //画出绿线中间线 compare2L_mid(line_mid,s_point,0); //比较左右标线的中间线和两条绿线的交点 } else if(line_l.ifcp==1&&line_r.ifcp==0){ //只存在左红线 compare1L(lineA_l,line_l,1,3); //比较左侧绿线、左侧红线、py=3 //py是啥意思? } else if(line_l.ifcp==0&&line_r.ifcp==1){ //只存在右红线 compare1L(lineA_r,line_r,2,3); //比较右侧绿线、右侧红线、py=3 } else{} line_l.ifcp=0; //为何要重新赋0? 
line_r.ifcp=0; } //设置环境参数 void dlpy_frames::setparaslot(int z,int c,int t,int l,int a){ zthreshold_value=z; cthreshold_value = c; threshold_value = t; thresholdline_value = l; alpha_value =a; } //将左右绿线直线各个参数设置为红线参数 //返回左右两条绿线的交点x、y坐标 void dlpy_frames::setlineslot(int l){ if(l==1) //l=1 A线定位 { lineA_l.x=line_l.x; lineA_l.y=line_l.y; lineA_l.x2=line_l.x2; lineA_l.y2=line_l.y2; lineA_l.k=line_l.k; lineA_l.b=line_l.b; lineA_r.x=line_r.x; lineA_r.y=line_r.y; lineA_r.x2=line_r.x2; lineA_r.y2=line_r.y2; lineA_r.k=line_r.k; lineA_r.b=line_r.b; s_point=getlinespoint(lineA_l,lineA_r); } } <file_sep># dlpy_qt ## 基于QT4+Opencv的道路道路偏移检测与预警系统 *开发环境:Ubuntu14.04+QT4.8.5+Opencv2.4.8* 已经实现的功能: - 道路偏移检测 - 道路偏移预警 - 串口读取外部传感器数据 <file_sep>#ifndef DLPY_MAIN_H #define DLPY_MAIN_H #include <QMainWindow> #include "dlpy_frames.h" #include "weatherobj.h" #include <fstream> #include <iostream> #include "widget.h" namespace Ui { class dlpy_main; } class dlpy_main : public QMainWindow { Q_OBJECT public: dlpy_main(int state,string filename,QWidget *parent = 0); ~dlpy_main(); dlpy_frames *frame; widget *wid; signals: void setparasignal(int z,int c,int t,int l,int a); void setlinesignal(int l); public slots: void setalertslot(int alert); protected: void changeEvent(QEvent *e); private: Ui::dlpy_main *ui; Mat img_alert,img_alert_1,img_alert_2,img_temp; Rect roi_l,roi_r; int times_alert,last_alert; QTimer *timer; weatherobj w0,w1,w2,w3,w4,w5,w6,w7; private slots: void on_button_exit_clicked(); void on_button_save_clicked(); void on_button_load_clicked(); void on_horizontalScrollBar_sliderReleased(); void on_button_A_clicked(); void alertFrame(); void on_hs_alpha_sliderReleased(); void on_hs_thresholdline_sliderReleased(); void on_hs_threshold_sliderReleased(); void on_hs_cthreshold_sliderReleased(); void on_hs_alpha_valueChanged(int value); void on_hs_thresholdline_valueChanged(int value); void on_hs_threshold_valueChanged(int value); void on_hs_zthreshold_sliderReleased(); void on_hs_cthreshold_valueChanged(int value); void on_hs_zthreshold_valueChanged(int value); void on_button_ext_clicked(); //void on_button_receive_clicked(); // void readMyCom(); }; #endif // DLPY_MAIN_H <file_sep>#include "dlpy_main.h" #include "ui_dlpy_main.h" #include "utils.h" dlpy_main::dlpy_main(int state,string filename,QWidget *parent) : QMainWindow(parent), ui(new Ui::dlpy_main) { ui->setupUi(this); setGeometry(697, 50, 220, 480); wid = new widget(); wid->setWindowFlags(Qt::FramelessWindowHint); wid->show(); frame = new dlpy_frames(state,filename); frame->setWindowFlags(Qt::FramelessWindowHint); //隐藏标题栏 frame->show(); connect(this,SIGNAL(setparasignal(int,int,int,int,int)),frame,SLOT(setparaslot(int,int,int,int,int))); connect(frame,SIGNAL(setalertsignal(int)),this,SLOT(setalertslot(int))); //每一帧图像信号槽初始化警报参数 connect(this,SIGNAL(setlinesignal(int)),frame,SLOT(setlineslot(int))); ///////////////////////////////////////////////////////////////////////////////////////////// //img_alert: 显示所有 //img_alert_1: 红色警报 //img_alert_3: 无警报 //img_temp:所有 //img:所有 //////////////////////////////////////////////////////////////////////////////////////////// img_alert = cv::imread("/home/hugo/code/bin/alert.bmp"); img_alert_1 = cv::imread("/home/hugo/code/bin/alert_2.bmp"); img_alert_2 = cv::imread("/home/hugo/code/bin/alert_3.bmp"); img_temp = cv::imread("/home/hugo/code/bin/alert.bmp"); roi_l=Rect(0, 0, (img_alert.cols-1)/2, img_alert.rows-1); roi_r=Rect((img_alert.cols-1)/2, 0, (img_alert.cols-1)/2, img_alert.rows-1); QGraphicsScene *scene = new QGraphicsScene; 
QImage img; img.load("/home/hugo/code/bin/alert.bmp"); QPixmap mp; mp=mp.fromImage(img); times_alert=0; last_alert=0; w0=weatherobj(10,20,30,40,50); w1=weatherobj(60,70,80,90,100); w2=weatherobj(100,110,120,130,140); w3=weatherobj(10,60,0,42,100); w4=weatherobj(150,160,170,180,190); w5=weatherobj(200,210,220,230,240); w6=weatherobj(10,25,10,32,90); w7=weatherobj(200,240,220,250,123); QImage *qimg=new QImage; *qimg=mat2qimage(img_temp); /*cvtColor(img_temp, img_temp, CV_BGR2RGB); qimg = new QImage((unsigned char*)img_temp.data, // uchar* data img_temp.cols, img_temp.rows, // width height img_temp.step, //bytesPerLine QImage::Format_RGB888); //format*/ scene->addPixmap(QPixmap::fromImage(*qimg)); ui->graphicsView->setScene(scene); ui->graphicsView->resize(qimg->width() + 10, qimg->height() + 10); ui->graphicsView->show(); timer = new QTimer(this); timer->setInterval(30); connect(timer,SIGNAL(timeout()),this,SLOT(alertFrame())); //每隔30s读取一次报警图片 timer->start(); } dlpy_main::~dlpy_main() { delete ui; } //设置警报参数 void dlpy_main::setalertslot(int alert){ times_alert=3; last_alert=alert; } void dlpy_main::changeEvent(QEvent *e) { QMainWindow::changeEvent(e); switch (e->type()) { case QEvent::LanguageChange: ui->retranslateUi(this); break; default: break; } } //警报显示 void dlpy_main::alertFrame() { Mat roi_alert,roi_alert_tmp,roi_temp; if(times_alert>0) { if(times_alert%2==0) { if(last_alert==1){roi_temp=img_temp(roi_l);roi_alert_tmp = img_alert_1(roi_l);} if(last_alert==2){roi_temp=img_temp(roi_r);roi_alert_tmp = img_alert_1(roi_r);} if(last_alert==3){roi_temp=img_temp(roi_l);roi_alert_tmp = img_alert_2(roi_l);} if(last_alert==4){roi_temp=img_temp(roi_r);roi_alert_tmp = img_alert_2(roi_r);} roi_alert_tmp.copyTo(roi_temp); }else{ img_alert.copyTo(img_temp); } times_alert--; //控制闪烁 QGraphicsScene *scene = new QGraphicsScene; QImage *qimg=new QImage; *qimg=mat2qimage(img_temp); scene->addPixmap(QPixmap::fromImage(*qimg)); ui->graphicsView->setScene(scene); ui->graphicsView->resize(qimg->width() + 10, qimg->height() + 10); ui->graphicsView->show(); }else{ } } void dlpy_main::on_hs_zthreshold_valueChanged(int value) { ui->label_z->setText(QString::number(ui->hs_zthreshold->value(), 10)); ui->label_c->setText(QString::number(ui->hs_cthreshold->value(), 10)); ui->label_t->setText(QString::number(ui->hs_threshold->value(), 10)); ui->label_tl->setText(QString::number(ui->hs_thresholdline->value(), 10)); ui->label_a->setText(QString::number(ui->hs_alpha->value(), 10)); } void dlpy_main::on_hs_cthreshold_valueChanged(int value) { ui->label_z->setText(QString::number(ui->hs_zthreshold->value(), 10)); ui->label_c->setText(QString::number(ui->hs_cthreshold->value(), 10)); ui->label_t->setText(QString::number(ui->hs_threshold->value(), 10)); ui->label_tl->setText(QString::number(ui->hs_thresholdline->value(), 10)); ui->label_a->setText(QString::number(ui->hs_alpha->value(), 10)); } void dlpy_main::on_hs_threshold_valueChanged(int value) { ui->label_z->setText(QString::number(ui->hs_zthreshold->value(), 10)); ui->label_c->setText(QString::number(ui->hs_cthreshold->value(), 10)); ui->label_t->setText(QString::number(ui->hs_threshold->value(), 10)); ui->label_tl->setText(QString::number(ui->hs_thresholdline->value(), 10)); ui->label_a->setText(QString::number(ui->hs_alpha->value(), 10)); } void dlpy_main::on_hs_thresholdline_valueChanged(int value) { ui->label_z->setText(QString::number(ui->hs_zthreshold->value(), 10)); ui->label_c->setText(QString::number(ui->hs_cthreshold->value(), 10)); 
ui->label_t->setText(QString::number(ui->hs_threshold->value(), 10)); ui->label_tl->setText(QString::number(ui->hs_thresholdline->value(), 10)); ui->label_a->setText(QString::number(ui->hs_alpha->value(), 10)); } void dlpy_main::on_hs_alpha_valueChanged(int value) { ui->label_z->setText(QString::number(ui->hs_zthreshold->value(), 10)); ui->label_c->setText(QString::number(ui->hs_cthreshold->value(), 10)); ui->label_t->setText(QString::number(ui->hs_threshold->value(), 10)); ui->label_tl->setText(QString::number(ui->hs_thresholdline->value(), 10)); ui->label_a->setText(QString::number(ui->hs_alpha->value(), 10)); } void dlpy_main::on_hs_zthreshold_sliderReleased() { emit setparasignal(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } void dlpy_main::on_hs_cthreshold_sliderReleased() { emit setparasignal(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } void dlpy_main::on_hs_threshold_sliderReleased() { emit setparasignal(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } void dlpy_main::on_hs_thresholdline_sliderReleased() { emit setparasignal(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } void dlpy_main::on_hs_alpha_sliderReleased() { emit setparasignal(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } //A线定位 void dlpy_main::on_button_A_clicked() { emit setlinesignal(1); } void dlpy_main::on_horizontalScrollBar_sliderReleased() { if(ui->horizontalScrollBar->value()==0){ ui->hs_zthreshold->setValue(w0.getz()); ui->hs_cthreshold->setValue(w0.getct()); ui->hs_threshold->setValue(w0.gett()); ui->hs_thresholdline->setValue(w0.getl()); ui->hs_alpha->setValue(w0.geta()); ui->label_w->setText(QString::fromUtf8("晴天")); } if(ui->horizontalScrollBar->value()==1){ ui->hs_zthreshold->setValue(w1.getz()); ui->hs_cthreshold->setValue(w1.getct()); ui->hs_threshold->setValue(w1.gett()); ui->hs_thresholdline->setValue(w1.getl()); ui->hs_alpha->setValue(w1.geta()); ui->label_w->setText(QString::fromUtf8("阴天")); } if(ui->horizontalScrollBar->value()==2){ ui->hs_zthreshold->setValue(w2.getz()); ui->hs_cthreshold->setValue(w2.getct()); ui->hs_threshold->setValue(w2.gett()); ui->hs_thresholdline->setValue(w2.getl()); ui->hs_alpha->setValue(w2.geta()); ui->label_w->setText(QString::fromUtf8("雨天")); } if(ui->horizontalScrollBar->value()==3){ ui->hs_zthreshold->setValue(w3.getz()); ui->hs_cthreshold->setValue(w3.getct()); ui->hs_threshold->setValue(w3.gett()); ui->hs_thresholdline->setValue(w3.getl()); ui->hs_alpha->setValue(w3.geta()); ui->label_w->setText(QString::fromUtf8("雪天")); } if(ui->horizontalScrollBar->value()==4){ ui->hs_zthreshold->setValue(w4.getz()); ui->hs_cthreshold->setValue(w4.getct()); ui->hs_threshold->setValue(w4.gett()); ui->hs_thresholdline->setValue(w4.getl()); ui->hs_alpha->setValue(w4.geta()); ui->label_w->setText(QString::fromUtf8("自定义1")); } if(ui->horizontalScrollBar->value()==5){ ui->hs_zthreshold->setValue(w5.getz()); ui->hs_cthreshold->setValue(w5.getct()); ui->hs_threshold->setValue(w5.gett()); ui->hs_thresholdline->setValue(w5.getl()); ui->hs_alpha->setValue(w5.geta()); ui->label_w->setText(QString::fromUtf8("自定义2")); } 
if(ui->horizontalScrollBar->value()==6){ ui->hs_zthreshold->setValue(w6.getz()); ui->hs_cthreshold->setValue(w6.getct()); ui->hs_threshold->setValue(w6.gett()); ui->hs_thresholdline->setValue(w6.getl()); ui->hs_alpha->setValue(w6.geta()); ui->label_w->setText(QString::fromUtf8("自定义3")); } if(ui->horizontalScrollBar->value()==7){ ui->hs_zthreshold->setValue(w7.getz()); ui->hs_cthreshold->setValue(w7.getct()); ui->hs_threshold->setValue(w7.gett()); ui->hs_thresholdline->setValue(w7.getl()); ui->hs_alpha->setValue(w7.geta()); ui->label_w->setText(QString::fromUtf8("自定义4")); } emit setparasignal(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } //载入保存的参数 void dlpy_main::on_button_load_clicked() { std::ifstream rconfig; rconfig.open("fconfig.txt",std::ios::binary); rconfig.read((char *)&w0,sizeof(weatherobj)); rconfig.read((char *)&w1,sizeof(weatherobj)); rconfig.read((char *)&w2,sizeof(weatherobj)); rconfig.read((char *)&w3,sizeof(weatherobj)); rconfig.read((char *)&w4,sizeof(weatherobj)); rconfig.read((char *)&w5,sizeof(weatherobj)); rconfig.read((char *)&w6,sizeof(weatherobj)); rconfig.read((char *)&w7,sizeof(weatherobj)); rconfig.read((char *)&(frame->lineA_l),sizeof(stdline)); rconfig.read((char *)&(frame->lineA_r),sizeof(stdline)); rconfig.close(); } //保存参数函数 void dlpy_main::on_button_save_clicked() { if(ui->horizontalScrollBar->value()==0){ w0=weatherobj(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } if(ui->horizontalScrollBar->value()==1){ w1=weatherobj(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } if(ui->horizontalScrollBar->value()==2){ w2=weatherobj(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } if(ui->horizontalScrollBar->value()==3){ w3=weatherobj(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } if(ui->horizontalScrollBar->value()==4){ w4=weatherobj(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } if(ui->horizontalScrollBar->value()==5){ w5=weatherobj(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } if(ui->horizontalScrollBar->value()==6){ w6=weatherobj(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } if(ui->horizontalScrollBar->value()==7){ w7=weatherobj(ui->hs_zthreshold->value(),ui->hs_cthreshold->value(),ui->hs_threshold->value(),ui->hs_thresholdline->value(),ui->hs_alpha->value()); } //将参数写到fconfig.txt文档中 std::ofstream wconfig; wconfig.open("fconfig.txt",std::ios::binary); wconfig.write((char *)&w0,sizeof(weatherobj)); wconfig.write((char *)&w1,sizeof(weatherobj)); wconfig.write((char *)&w2,sizeof(weatherobj)); wconfig.write((char *)&w3,sizeof(weatherobj)); wconfig.write((char *)&w4,sizeof(weatherobj)); wconfig.write((char *)&w5,sizeof(weatherobj)); wconfig.write((char *)&w6,sizeof(weatherobj)); wconfig.write((char *)&w7,sizeof(weatherobj)); wconfig.write((char *)&(frame->lineA_l),sizeof(stdline)); wconfig.write((char *)&(frame->lineA_r),sizeof(stdline)); wconfig.close(); } //退出函数 void 
dlpy_main::on_button_exit_clicked() { exit(-1); } //高级按钮点击事件 void dlpy_main::on_button_ext_clicked() { if(ui->button_ext->text()==QString::fromUtf8("高级")){ setGeometry(430, 50, 420, 480); ui->button_ext->setText(QString::fromUtf8("普通")); } else{ ui->button_ext->setText(QString::fromUtf8("高级")); setGeometry(697, 50, 220, 480); } } <file_sep>#include "utils.h" Mat rgb; //将mat格式图像转换为qimage格式图像 QImage mat2qimage(cv::Mat mat){ rgb = mat.clone(); QImage *img; if(mat.channels() == 3) // RGB image { cvtColor(mat,rgb,CV_BGR2RGB); img =new QImage((unsigned char*)(rgb.data), //(const unsigned char*) rgb.cols,rgb.rows, QImage::Format_RGB888); } else // gray image { img =new QImage((unsigned char*)(mat.data), mat.cols,mat.rows, QImage::Format_Indexed8); } return *img; } //图像预处理 Mat pre(Mat src,int alpha,int zthresholdv,int cthresholdv){ Mat dst, temp; Rect rect; double brightValue = 0.00; //灰度值 cvtColor(src, dst, CV_RGB2GRAY); //src转化为灰度图像dst //车道阈值选取 dst.copyTo(temp); //rect = Rect(0,(dst.rows - 1)/2, dst.cols, (dst.rows - 1) / 2); //截取下半部分图像,保证处理的是路面区域 //temp = dst(rect); /* for (int x = 1; x < temp.rows - 1; x++) { for (int y = 1; y < temp.cols - 1; y++) { //计算每一点像素的累加和 brightValue = brightValue + temp.at<uchar>(x,y); } } brightValue = brightValue /( temp.rows - 1) /( temp.cols - 1 ); //计算平均灰度值 // zthresholdv = brightValue; //将平均灰度值赋给阈值 */ IplImage *img; img = cvCreateImage(cvSize(temp.cols, temp.rows),8,1); //int thres = 0; zthresholdv = otsu(img); cout<<"阈值: "<<zthresholdv<<endl; /* //sobel算子 int scale = 1; int delta = 0; int ddepth = CV_16S; /// 创建 grad_x 和 grad_y 矩阵 Mat grad_x, grad_y; Mat abs_grad_x, abs_grad_y; /// 求 X方向梯度 //Scharr( src_gray, grad_x, ddepth, 1, 0, scale, delta, BORDER_DEFAULT ); Sobel( dst, grad_x, ddepth, 1, 0, 3, scale, delta, BORDER_DEFAULT ); convertScaleAbs( grad_x, abs_grad_x ); /// 求Y方向梯度 //Scharr( src_gray, grad_y, ddepth, 0, 1, scale, delta, BORDER_DEFAULT ); Sobel( dst, grad_y, ddepth, 0, 1, 3, scale, delta, BORDER_DEFAULT ); convertScaleAbs( grad_y, abs_grad_y ); /// 合并梯度(近似) addWeighted( abs_grad_x, 0.5, abs_grad_y, 0.5, 0, dst ); */ //灰度图像dst进行阈值操作得到二值图像 threshold(dst, dst, zthresholdv, 255,3); //二值化 /*convertTo()函数是源数组线性变换成目标数组,第二个参数为目标数组的类型*/ dst.convertTo(dst, -1, (double)(alpha)/100, 0); equalizeHist(dst, dst); //使灰度图像直方图均衡化 GaussianBlur(dst,dst,Size(3,3),0,0); //高斯滤波 //medianBlur(dst, dst,3); Canny(dst, dst, cthresholdv, cthresholdv*3,3); //边缘检测--检测轮廓 return dst; } //返回图像中的直线集合 vector<Vec4i> detline(Mat src,int thresholdv,int thresholdline) { vector<Vec4i> lines; //检测图像中的直线 //输入图像只能是边缘二值图像 HoughLinesP(src, lines, 1, CV_PI/180, thresholdv+1, thresholdline, 10 ); //输出检测到的图像端点(x0,y0,x1,y1) return lines; } //在图像上画线,并返回画线后图像 Mat drawlines(Mat src,vector<Vec4i> lines) { Mat dst; src.copyTo(dst); for( size_t i = 0; i < lines.size(); i++ ) { Vec4i l = lines[i]; line(dst, Point(l[0], l[1]), Point(l[2], l[3]), Scalar(0,0,255), 2, CV_AA); //画线 //dst--要画的线所在的图像,直线起点,直线终点,颜色(红色),线条粗细,antialiased线条 } return dst; } //是不是算错了? //根据一条直线以及另一条直线上一点y坐标计算x坐标 int getlineX(stdline line,int Y){ //double tmp=(((double)line.x*line.k-Y+line.y)/line.k); double tmp = (Y - line.y + (double)line.x * line.k) / line.k; //改正 return (int)(tmp+0.5); } //计算y坐标 int getlineY(stdline line,int X){ //double tmp=(-line.k*(X-line.x)+line.y); double tmp = line.y - line.k * (line.x - X); //改正 return (int)(tmp+0.5); } //返回两点斜率 double getlineK(double x1,double y1,double x2,double y2){ //return -(y1-y2)/(x1-x2); //有负号? 
return (y1-y2)/(x1-x2); } //返回y轴常量 double getlineB(double x1,double y1,double x2,double y2){ return (x1*y2-x2*y1)/(x1-x2); } //返回两点中间点x、y坐标 Point getmidpoint(double x1,double y1,double x2,double y2){ double tempx=(x1+x2)/2; double tempy=(y1+y2)/2; return Point((int)tempx, (int)tempy); } //返回两条直线交点的x、y坐标 Point getlinespoint(stdline line1,stdline line2){ double tempy=(line1.b*line2.k-line1.k*line2.b)/(line2.k-line1.k); double tempx=(line1.b-tempy)/line1.k; return Point((int)tempx, (int)tempy); } //返回两线中间线 stdline getmidline(stdline linel,stdline liner){ stdline line_mid_temp; Point c1=getmidpoint(linel.x,linel.y,getlineX(liner,linel.y),linel.y); //返回左右两条直线的相同y坐标点的中间点 Point c2=getmidpoint(liner.x,liner.y,getlineX(linel,liner.y),liner.y); line_mid_temp.x=c1.x; line_mid_temp.y=c1.y; line_mid_temp.x2=c2.x; line_mid_temp.y2=c2.y; line_mid_temp.k=getlineK(c1.x,c1.y,c2.x,c2.y); line_mid_temp.b=getlineB(c1.x,c1.y,c2.x,c2.y); return line_mid_temp; } int otsu(const IplImage *src_image) //大津法求阈值 { double sum = 0.0; double w0 = 0.0; double w1 = 0.0; double u0_temp = 0.0; double u1_temp = 0.0; double u0 = 0.0; double u1 = 0.0; double delta_temp = 0.0; double delta_max = 0.0; //src_image灰度级 int pixel_count[256]={0}; float pixel_pro[256]={0}; int threshold = 0; uchar* data = (uchar*)src_image->imageData; //统计每个灰度级中像素的个数 for(int i = 0; i < src_image->height; i++) { for(int j = 0;j < src_image->width;j++) { pixel_count[(int)data[i * src_image->width + j]]++; sum += (int)data[i * src_image->width + j]; } } cout<<"平均灰度:"<<sum / ( src_image->height * src_image->width )<<endl; //计算每个灰度级的像素数目占整幅图像的比例 for(int i = 0; i < 256; i++) { pixel_pro[i] = (float)pixel_count[i] / ( src_image->height * src_image->width ); } //遍历灰度级[0,255],寻找合适的threshold for(int i = 0; i < 256; i++) { w0 = w1 = u0_temp = u1_temp = u0 = u1 = delta_temp = 0; for(int j = 0; j < 256; j++) { if(j <= i) //背景部分 { w0 += pixel_pro[j]; u0_temp += j * pixel_pro[j]; } else //前景部分 { w1 += pixel_pro[j]; u1_temp += j * pixel_pro[j]; } } u0 = u0_temp / w0; u1 = u1_temp / w1; delta_temp = (float)(w0 *w1* pow((u0 - u1), 2)) ; if(delta_temp > delta_max) { delta_max = delta_temp; threshold = i; } } return threshold; } <file_sep>#include "widget.h" #include "ui_widget.h" widget::widget(QWidget *parent) : QWidget(parent), ui(new Ui::widget) { setGeometry(0, 0, 800, 25); ui->setupUi(this); th = NULL; } widget::~widget() { delete ui; } void widget::changeEvent(QEvent *e) { QWidget::changeEvent(e); switch (e->type()) { case QEvent::LanguageChange: ui->retranslateUi(this); break; default: break; } } void widget::on_pushButton_in_clicked() { // QString text = ui->comboBox->currentText(); th = new Thread("/dev/ttyACM0", this); th->start(); connect(th, SIGNAL(serialFinished(QByteArray)), this, SLOT(ReadData(QByteArray))); } void widget::ReadData(QByteArray temp) { ui->lineEdit_in->setText(temp); //ui->textBrowser->insertPlainText(tr("\n\n")); } void widget::closeEvent(QCloseEvent *event) { if(th != NULL) { th->stopThread(); th->wait(); } event->accept(); } <file_sep>#ifndef UTILS_H #define UTILS_H #include "opencv2/imgproc/imgproc.hpp" #include "opencv2/highgui/highgui.hpp" #include "qimage.h" #include "qpixmap.h" #include <iostream> using namespace cv; using namespace std; //下面是线的类型定义,以及线的图像处理函数(求线的x,y横纵坐标的,只在线的判断中使用的,和参数调节无任何关系) typedef struct _stdline { int x; int y; int x2; int y2; double k; double b; int ifcp; }stdline; QImage mat2qimage(Mat mat); Mat pre(Mat src,int alpha,int zthresholdv,int cthresholdv); vector<Vec4i> detline(Mat src,int thresholdv,int 
thresholdline); Mat drawlines(Mat src,vector<Vec4i> lines); int getlineX(stdline line,int Y); int getlineY(stdline line,int X); double getlineK(double x1,double y1,double x2,double y2); double getlineB(double x1,double y1,double x2,double y2); Point getmidpoint(double x1,double y1,double x2,double y2); Point getlinespoint(stdline line1,stdline line2); stdline getmidline(stdline linel,stdline liner); int otsu(const IplImage *src_image); //大津法求阈值 #endif // UTILS_H <file_sep>class weatherobj { private: int zthreshold; int cthreshold; int threshold; int thresholdline; int trackbar_alpha; public: void print(); weatherobj(); weatherobj(int z,int ct,int t,int l,int a); int getz(); int getct(); int gett(); int getl(); int geta(); };
124ad903bc6d70562b43a9151d907f79c095f407
[ "Markdown", "C++" ]
14
C++
hexiangquan/dlpy_qt
841c99be1564bd530f941b0ccb577264c5fb9fe7
85e1fb91b454f380ed35f04a0fbc2c87870ec680
refs/heads/master
<repo_name>danielhdz56/liri-node-app<file_sep>/commands.js
const Twitter = require('twitter');
const Spotify = require('node-spotify-api');
const axios = require('axios');
const fs = require('fs');

// Packages made inhouse
const keys = require('./keys.js');

// User based authentication
var client = new Twitter(keys.twitterKeys);
var spotify = new Spotify(keys.spotifyKeys);

var runTwitter = () => {
    console.log('Retrieving My Tweets');
    console.log('-------------------');
    var params = {
        count: 20
    };
    client.get('statuses/user_timeline', params, (error, tweets, response) => {
        if (!error) {
            tweets.forEach((tweet) => {
                console.log(tweet.created_at)
                console.log(tweet.text);
                console.log('-------------------');
            })
        }
    });
};

var runSpotify = (query) => {
    query = query ? query : 'The Sign Ace of Base';
    console.log('Searching Through Spotify');
    console.log('-------------------');
    spotify.search({ type: 'track', query, limit: 5 }, function (err, data) {
        if (err) {
            return console.log('Error occurred: ' + err);
        }
        var spotifyObj = data.tracks.items;
        spotifyObj.forEach((song) => {
            // Ternary operators used for edge cases where property might be null/undefined
            console.log(`Artist: ${song.artists[0].name ? song.artists[0].name : 'Not Available'}`);
            console.log(`Song: ${song.name ? song.name : 'Not Available'}`);
            console.log(`Preview link: ${song.preview_url ? song.preview_url : 'Not Available'}`);
            console.log(`Album name: ${song.album.name ? song.album.name : 'Not Available'}`)
            console.log('--------------')
        });
    });
};

var runOmdb = (query) => {
    query = query ? query : 'Mr. Nobody';
    var encodedQuery = encodeURIComponent(query); //to account for spaces
    console.log('Searching Through OMDB');
    console.log('-------------------');
    var omdbUrl = `http://www.omdbapi.com/?i=tt3896198&apikey=40e9cece&t=${encodedQuery}`;
    axios.get(omdbUrl).then((response) => {
        var data = response.data;
        var ratings = data.Ratings;
        console.log(`Title: ${data.Title}\nYear: ${data.Year}\nWebsite: ${data.Website ? data.Website : 'Not Available'}`);
        console.log('**********');
        ratings.forEach((rating) => {
            console.log(`Source: ${rating.Source}\nRating: ${rating.Value}`);
            console.log('--------------');
        });
        console.log('**********');
        console.log(`Produced in: ${data.Country}\nLanguage: ${data.Language}`);
        console.log('**********');
        var plot = data.Plot;
        plot = plot.replace(/\. /g, ".\n");
        console.log(`Plot:\n${plot}`);
        console.log('**********');
        var actors = data.Actors;
        actors = actors.replace(/\, /g, "\n");
        console.log(`Actors:\n${actors}`);
        console.log('**********');
    }).catch((e) => {
        //console.log(e)
        console.log(`From throw: ${e.message}`);
    })
};

var runDoWhatItSays = () => {
    // readFileSync is synchronous, takes no callback, and throws on error
    var readFile = fs.readFileSync('random.txt', 'utf-8');
    return readFile; //this will pass the data inside of random.txt to doWhatItSays in liri.js
};

var fetchLog = () => {
    try { //Will first try to see if there is such a file
        var logString = fs.readFileSync('log.txt');
        return JSON.parse(logString);
    } catch (e) {
        return []; //if there is no file created then it will return an empty array
    }
};

var saveLog = (logs) => {
    fs.writeFileSync('log.txt', JSON.stringify(logs)); //optimal to save files as a JSON, if later we read all logs
};

var runAddLog = (command, query) => { //query is optional
    var today = new Date();
    today = today.toString();
    var logs = fetchLog(); //retrieve log to push new command/query
    var log = {
        command,
        query,
        date: today
    };
    logs.push(log);
    saveLog(logs);
    return log; //this will run undefined if no log
};

var runInvalidCommand = () => {
    console.log('Please make sure that you enter a valid command');
    console.log('----------')
    console.log('To view the list of commands available type:\nnode liri.js --help');
}

module.exports = {
    runTwitter,
    runSpotify,
    runOmdb,
    runDoWhatItSays,
    runAddLog,
    runInvalidCommand
}<file_sep>/liri2.js
const inquirer = require('inquirer');
const commands = require('./commands.js');

var question = [{
    name: 'command',
    message: 'Which command do you want to run?',
    type: 'list',
    choices: [
        'my-tweets',
        'spotify-this-song',
        'movie-this',
        'do-what-it-says'
    ]
}, {
    name: 'query',
    message: 'Type in your search.',
    when: function (answers) {
        if (answers.command === 'spotify-this-song' || answers.command === 'movie-this') {
            return true;
        }
    }
}];

var app = () => {
    inquirer.prompt(question).then(function (answers) {
        var command = answers.command;
        var query = answers.query;

        var runCommand = (command, query) => {
            if (command === 'my-tweets') {
                commands.runTwitter();
                commands.runAddLog(command);
            } else if (command === 'spotify-this-song') {
                commands.runSpotify(query);
                commands.runAddLog(command, query);
            } else if (command === 'movie-this') {
                commands.runOmdb(query);
                commands.runAddLog(command, query);
            } else if (command === 'do-what-it-says') {
                var doWhatItSays = commands.runDoWhatItSays();
                doWhatItSays = doWhatItSays.split('\n');
                var randomCommand = Math.floor(Math.random()*doWhatItSays.length);
                doWhatItSays = doWhatItSays[randomCommand];
                var doCommand = doWhatItSays.split(',')[0];
                var doQuery = doWhatItSays.split(',')[1];
                runCommand(doCommand, doQuery);
                commands.runAddLog(command);
            }
        };

        runCommand(command, query);
    });
};

app();<file_sep>/README.md
# liri-node-app

## Description
A command-line Node.js app that fetches and displays data from the Twitter, Spotify, and OMDB APIs.
* Clients can:
    * Load my twitter feed
    * Search for a song through Spotify
    * Search for a movie through Omdb
* APIs Used:
    * Omdb
    * Spotify
    * Twitter

## Setup
1. Clone this repo using the command line
```shellSession
git clone https://github.com/danielhdz56/liri-node-app.git
```
2.
Change directory and install packages using npm ```shellSession cd liri-node-app/ npm install ``` ![clone](/assets_readme/setup.gif?raw=true "Clone") ## How to Use version 1 ### Help Access help by using the `--help` or `-h` flag ```shellSession node app-promise.js --help ``` ![help](/assets_readme/help-liri-v1.gif?raw=true "Help") ### Possible entries Command | `node liri.js [inputCommandHere] [inputSearch]` :---: | :---: my-tweets | `node liri.js my-tweets` ![twitter](/assets_readme/tweets-v1.gif?raw=true "Twitter") spotify-this-song | `node liri.js spotify-this-song 'Tongue Tied'` ![spotify](/assets_readme/spotify-v1.gif?raw=true "Spotify") movie-this | `node liri.js movie-this 'Titanic'` ![omdb](/assets_readme/omdb-v1.gif?raw=true "Omdb") do-what-it-says | `node liri.js do-what-it-says` ![random](/assets_readme/random-v1.gif?raw=true "random") <file_sep>/keys.js exports.twitterKeys = { consumer_key: 'CEt0C8bsyQoTm09kzy6zbfm5Y', consumer_secret: '<KEY>', access_token_key: '<KEY>', access_token_secret: '<KEY>', }; exports.spotifyKeys = { id: '13c8eea72bec4c1d9f948866a9236932', secret: '3aea4f65b4844344a5974c25063e1d84' };
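The `keys.js` above ships credentials directly in source. A common alternative — sketched here under the assumption that the `dotenv` package is installed and a local `.env` file exists — reads them from environment variables instead; the `TWITTER_*`/`SPOTIFY_*` variable names below are hypothetical and not part of this repo:

```js
// keys.js -- a minimal sketch, assuming `npm install dotenv` and a .env file.
// All environment-variable names here are hypothetical placeholders.
require('dotenv').config();

exports.twitterKeys = {
  consumer_key: process.env.TWITTER_CONSUMER_KEY,
  consumer_secret: process.env.TWITTER_CONSUMER_SECRET,
  access_token_key: process.env.TWITTER_ACCESS_TOKEN_KEY,
  access_token_secret: process.env.TWITTER_ACCESS_TOKEN_SECRET,
};

exports.spotifyKeys = {
  id: process.env.SPOTIFY_ID,
  secret: process.env.SPOTIFY_SECRET,
};
```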
f8ccb80fdf27fdfdbdaacfec36a248be5643d66d
[ "JavaScript", "Markdown" ]
4
JavaScript
danielhdz56/liri-node-app
90b03d50f4979cdd21a3a7a52202ebc4c4cf6894
67191dc69a4524ffd2a9044f402db7a1a551e609
refs/heads/main
<file_sep>import React from 'react';
import {yes, no} from './objects.js'

class Drake extends React.Component {
    state = {
        cardView: false
    }

    // Toggle between the "yes" and "no" views on click.
    handleClick = () => {
        this.setState(prevState => {
            return {
                cardView: !prevState.cardView
            }
        })
    }

    render() {
        return (
            <div className="display">
                {this.state.cardView ? (<h1>{yes["yes-statement"]}</h1>) : (<h1>{no["no-statement"]}</h1>)}
                {this.state.cardView ? (<img onClick={this.handleClick} alt="yes-drake" src={yes["yes-image"]}/>) : (<img onClick={this.handleClick} alt="no-drake" src={no["no-image"]}/>)}
            </div>
        )
    }
}

export default Drake;
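The component above destructures `yes` and `no` from `./objects.js`, which is not included in this snapshot. A minimal sketch of the shape that the component's `render()` implies — the key names come from the component itself, while the statements and image URLs are hypothetical placeholders:

```js
// objects.js -- hypothetical sketch; only the key names are implied by Drake.
export const yes = {
  "yes-statement": "Yes.",
  "yes-image": "https://example.com/drake-yes.png" // placeholder URL
};

export const no = {
  "no-statement": "No.",
  "no-image": "https://example.com/drake-no.png" // placeholder URL
};
```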
9b4aadd679ce4f3b9fb9ce046829e8fea18915ad
[ "JavaScript" ]
1
JavaScript
billyott/react-state-self-assessment-091420
e19c20c2a5e727b6e4b4e465181aeed688a856cb
9f44cf5bcd076f6314ed5181a750fefcb9777317
refs/heads/master
<file_sep>var Table = {
  number: 0,
  modifier: 0,
  choice: null,
  token: null,
  nextRoll: true,
  // d10: returns an integer 0-9
  roll: function (){
    return Math.floor( Math.random() * 10 );
  },
  render: function( roll ) {
    roll += this.modifier;
    roll = roll >= 0 ? roll : 0;
    roll = roll < Tables[this.number].length ? roll : Tables[this.number].length - 1;
    var result = Tables[this.number][roll];
    this.token = result;
    if ( result.choice ) {
      this.renderChoice( result.choice );
    } else {
      this.renderMessage( result.message );
    }
    if( result.addToken ) {
      this.addToken(result.addToken);
    }
    if( result.advance ) {
      if( Tables.length > this.number + 1 ){
        this.number += 1;
      }
    }
  },
  addToken: function( token ) {
    var card = {
      attack: token.strength[0],
      toughness: token.strength[1],
      message: '',
      multi: false,
      type: token.type
    };
    if( token.modify != null ){
      this.modify( card, token.modify );
    }
  },
  modify: function( card, modify ){
    var roll = this.roll() + modify + this.modifier;
    if (roll < 0) {
      roll = 0;
    }
    // clamp the roll to the token table's bounds
    if ( roll >= Tokens.length ) {
      roll = Tokens.length - 1;
    }
    if ( Tokens[roll].message ) {
      var newLine = card.message == '' ? '' : '<br>';
      card.message += newLine + Tokens[roll].message;
      if ( card.message.indexOf('{color}') > -1 ) {
        card.message = card.message.replace('{color}', this.getColor() );
      }
    }
    if ( Tokens[roll].power ) {
      card.attack += Tokens[roll].power[0];
      card.toughness += Tokens[roll].power[1];
    }
    if ( Tokens[roll].additionalToken ) {
      card.multi = true;
      var mod = Tokens[roll].modifier == true ? modify : 0;
      for ( var i = 0; i < Tokens[roll].additionalToken; i++ ) {
        this.modify( card, mod );
      }
      this.renderCard( card );
    }
    if ( card.multi == false ){
      this.renderCard( card );
    }
  },
  renderCard: function( card ) {
    console.log(card);
    $card = $('<div class="card ' + card.type +'"><span class="remove">remove</span><span class="tap">tap</span></div>');
    $message = $('<p>' + card.message + '</p>');
    $stats = '';
    if ( card.attack ) {
      $stats = $('<span class="stats"><span>'+ card.attack +'</span>/<span>'+ card.toughness +'</span></span>');
    }
    $card.append($stats).append($message);
    $('.cards').append($card);
  },
  renderMessage: function( message ) {
    $message = $('<div class="message-wrapper"><div class="message"><p>' + message + '</p><button>OK</button></div></div>');
    $('body').append($message);
  },
  renderChoice: function( choice ) {
    this.choice = choice;
    $choice = $('<div class="choice-wrapper"><div class="choice"><button>' +choice[0].message+ '</button><button>' +choice[1].message+ '</button></div></div>');
    $('body').append($choice);
  },
  renderSpooky: function( modifier ) {
    var roll = this.roll() + modifier + this.modifier;
    if ( roll < 0 ) {
      roll = 0;
    }
    if ( roll >= Spooky.length ) {
      roll = Spooky.length - 1;
    }
    var spooky = Spooky[ roll ];
    if ( spooky.nextRoll == false ) {
      this.nextRoll = false;
      this.renderMessage( spooky.message );
    } else if (spooky.card) {
      this.renderCard( spooky.card );
    } else if (spooky.message ) {
      this.renderMessage( spooky.message );
    }
    if ( spooky.rollModifier ) {
      this.modifier += spooky.rollModifier;
    }
  },
  getColor: function() {
    newRoll = this.roll();
    switch(newRoll) {
      case 0: return 'black';
      case 1: return 'black';
      case 2: return 'black';
      case 3: return 'white';
      case 4: return 'white';
      case 5: return 'white';
      case 6: return 'red';
      case 7: return 'red';
      case 8: return 'blue';
      case 9: return 'green';
    }
  }
}<file_sep>$(document).ready(function(){
  $('#deep-iq').on('click', function(){
    DeepIQ.takeTurn();
  });

  $(window).keypress(function(e){
    if (e.which == 32) {
DeepIQ.takeTurn(); } }); $('.cards').on('click', '.remove', function(){ $(this).closest('.card').remove(); }); $('.cards').on('click', '.tap', function(){ $(this).closest('.card').toggleClass('tapped'); }); $('body').on('click', '.message button', function(){ if ( Table.token != null && Table.token.spooky != undefined ) { console.log('render'); Table.renderSpooky(Table.token.spooky); Table.token = null; } if ( Table.token != null && Table.token.advanceDeepIQ ) { Table.number = Table.token.advanceDeepIQ; } $(this).closest('.message-wrapper').remove(); }); $('body').on('click', '.choice button', function(){ var index = $(this).index(); var decision = Table.choice[index]; if ( decision.spooky ) { Table.renderSpooky(decision.spooky); } else if ( decision.addToken ) { Table.addToken( decision.addToken ); } else if ( decision.message ) { Table.renderMessage ( decision.message ); } $(this).closest('.choice-wrapper').remove(); }); });<file_sep>var Tables = [ [//table 1 { message: 'Do nothing.', advance: 1, }, { message: 'Do nothing.', advance: 1, }, { message: 'Do nothing.', advance: 1, }, { message: 'Do nothing.', advance: 1, }, { message: 'Do nothing.', advance: 1, }, { message: 'Do nothing.', advance: 1, }, { message: 'Do nothing.', advance: 1, }, { message: 'Sacrifice your best creature.', advance: 1, }, { message: 'Put a 1/1 token on the battlefield.', advance: 1, addToken: { type: 'creature', strength: [1, 1], modify: -4 } }, { message: 'Put a 1/1 token on the battlefield.', addToken: { type: 'creature', strength: [1, 1], modify: -4 } } ], [//table 2 { message: 'Do nothing.', advance: 1, }, { message: 'Do nothing.', advance: 1, }, { message: 'Do nothing.', advance: 1, }, { message: 'Do nothing.', advance: 1, }, { message: 'Put a 2/2 token on the battlefield.', advance: 1, addToken: { type: 'creature', strength: [2, 2], modify: 0 } }, { message: 'Put a 2/2 token on the battlefield.', advance: 1, addToken: { type: 'creature', strength: [2, 2], modify: 0 } }, { message: 'Put a 2/2 token on the battlefield.', advance: 1, addToken: { type: 'creature', strength: [2, 2], modify: 0 } }, { message: 'Move Deep IQ up to Table IV.', advanceDeepIQ: 3, }, { message: 'Exile your best creature.' }, { message: 'Exile your best creature.' } ],[//3 { message: 'Do nothing.', advance: 1 }, { message: 'Do nothing.', advance: 1 }, { message: 'Do nothing.', advance: 1 }, { message: 'Put a 2/2 token on the battlefield.', advance: 1, addToken: { type: 'creature', strength: [2, 2], modify: 2 } }, { message: 'Put a 2/1 token on the battlefield.', advance: 1, addToken: { type: 'creature', strength: [2, 1], modify: 4 } }, { message: 'Destroy your best land.', advance: 1 }, { message: 'Move Deep IQ up to Table V and put a 1/1 token on the battlefield.', addToken: { type: 'creature', strength: [1, 1], modify: 0 }, advanceDeepIQ: 5 }, { message: 'Put a 1/1 token on the battlefield and Deep IQ gets a free roll on Table II.', freeRoll: 1, addToken: { type: 'creature', strength: [1, 1], modify: 1 } }, { message: 'Sacrifice your best creature.' 
}, { message: 'Destroy your best artifact or roll on Spooky Chart.', choice: [ { message: 'Destroy your best artifact' }, { message: 'Roll on Spooky Chart (-2)', spooky: -2 } ] } ],[//4 { message: 'Do nothing.', advance: 1 }, { message: 'Do nothing.', advance: 1 }, { message: 'Do nothing.', advance: 1 }, { message: 'Put a 4/4 token on the battlefield (+3).', advance: 1, addToken: { type: 'creature', strength: [4, 4], modify: 3 } }, { message: 'Sacrifice your best creature.', advance: 1 }, { message: 'Destroy your best artifact or enchantment.', advance: 1 }, { message: 'Exile your best creature.' }, { message: 'Sacrifice your two best creatures or take 4 damage.' }, { message: 'Put a 2/4 token on the battlefield (+7) or roll on Spooky Chart (-1).', choice: [ { message: 'Put a 2/4 token on the battlefield (+7)', addToken: { type: 'creature', strength: [2, 4], modify: 7 } },{ message: 'Roll on Spooky Chart (-1)', spooky: -1 } ] }, { message: 'Roll on Spooky Chart (+0).', spooky: 0 } ],[ { message: 'Do nothing.', advance: 1 }, { message: 'Do nothing.', advance: 1 }, { message: 'Do nothing.', advance: 1 }, { message: 'Put a 3/4 token on the battlefield (+4).', advance: 1, addToken: { type: 'creature', strength: [3, 4], modify: 4 } }, { message: 'Put a 2/2 token on the battlefield (+2) and Deep IQ gets a free roll on Table III.', advance: 1, freeRoll: 2, addToken: { type: 'creature', strength: [2, 2], modify: 2 } }, { message: 'Destroy your best creature, enchantment, or artifact.' }, { message: 'Put a 4/4 token on the battlefield (+1).' , addToken: { type: 'creature', strength: [4, 4], modify: 1 } }, { message: 'Destroy all lands or put a 4/1 token on the battlefield (+3).', choice: [ { message: 'Destroy all lands' },{ message: '4/1 token on the battlefield (+3)', addToken: { type: 'creature', strength: [4, 1], modify: 3 } } ] }, { message: 'Sacrifice your best creature or roll on Spooky Chart (+1).', choice: [ { message: 'Sacrifice your best creature' },{ message: 'Roll on Spooky Chart (-1)', spooky: 1 } ] }, { message: 'Roll on Spooky Chart (+2).' , spooky: 2 } ],[ { message: 'Do nothing.' }, { message: 'Do nothing.' }, { message: 'Do nothing.' }, { message: 'Sacrifice all lands, creatures, and artifacts or put a 2/4 token on the battlefield (+3).', choice: [ { message: 'Sacrifice all lands, creatures, and artifacts.' },{ message: 'Put a 2/4 token on the battlefield (+3).', addToken: { type: 'creature', strength: [2, 4], modify: 3 } } ] }, { message: 'Put a 4/5 token on the battlefield (+6).' , addToken: { type: 'creature', strength: [4, 5], modify: 6 } }, { message: 'Destroy your best creature or you take 6 damage.' }, { message: 'Destroy your best creature or you take 6 damage.' }, { message: 'Destroy your best artifact, enchantment, or land.' }, { message: 'Exile your best creature or roll on Spooky Chart (+3).', choice: [ { message: 'Exile your best creature' },{ message: 'Roll on Spooky Chart (+3)', spooky: 3 } ] }, { message: 'Roll on Spooky Chart (+4).', spooky: 4 } ] ]; var Tokens = [ { message: 'No extra abilities.' }, { message: 'First strike. +2/+0' }, { message: 'Regeneration.' }, { message: 'Defender. +0/+3' }, { message: 'First strike.' }, { message: 'Protection from: {color}' }, { message: 'Deathtouch.' }, { message: 'Flying, lifelink. +2/+2' }, { message: 'Haste and trample.' }, { additionalToken: 2, modifier: false }, { message: 'Flying and trample.' }, { message: 'Protection from {color} and vigilance.' 
}, { message: 'When this creature enters the battlefield, sacrifice one of your creatures at random.' }, { message: 'First strike and shroud.' }, { message: 'Protection from {color}, deathtouch, and your weakest creature becomes unblockable.', additionalToken: 1, modifier: true }, { message: 'When this creature enters the battlefield, exile target permanent you control.', } ]; Spooky = [ { message: 'Deep IQ plays an enchantment token. While this is on the battlefield, all of its creature tokens gain +1/+1.', card: { type: 'enchantment', message: 'All creatures gain +1/+1' } }, { message: 'Deep IQ plays an artifact token. While this is on the battlefield, reroll the first "Do nothing" result of every turn.', card: { type: 'artifact', message: 'Reroll the first "Do nothing" result of every turn.' } }, { message: 'Deep IQ plays an enchantment token. While this is on the battlefield, Deep IQ gets +1 to all die rolls.', rollModifier: 1, card: { type: 'enchantment', message: 'Deep IQ gets +1 to all die rolls.' } }, { message: 'Destroy all of your creatures, or all of your artifacts, or all of your enchantments. Deep IQ\'s next roll will be "Do nothing."', nextRoll: false }, { message: 'Deep IQ gains 5 life and moves up to Table VI if it isn\'t already there.', advanceDeepIQ: 5 }, { message: 'You take 10 damage.' }, //{ message: 'Deep IQ plays an artifact token. While this is on the battlefield, it gets two table rolls every turn and takes the best one.' //}, { message: 'Destroy all of your lands of one basic type (whichever is most inconvenient). Deep IQ\'s next roll will be "Do nothing."', nextRoll: false }, { message: 'Exile the top twenty cards in your library.' }, { message: 'Deep IQ plays an artifact token. When this comes on the battlefield, tap your best creature and it remains tapped as long as this artifact remains on the battlefield. If you lose your best creature, the next best creature becomes tapped, and so on.', card: { type: 'artifact', message: 'When this comes on the battlefield, tap your best creature and it remains tapped as long as this artifact remains on the battlefield. If you lose your best creature, the next best creature becomes tapped, and so on.' } }, //{ message: 'All of Deep IQ\'s tokens get a free roll on the token chart (+0). These additional abilities are permanent.' //}, //{ message: 'Deep IQ plays an enchantment token. While this is on the battlefield, it gets a free roll on Table II every time one of its permanents is destroyed or exiled.' //}, { message: 'Deep IQ gains 20 life.' }, { message: 'Destroy all of your permanents. Deep IQ\'s next roll will be "Do nothing."', nextRoll: false }, ] <file_sep>var DeepIQ = { turn: 0, health: 20, rollModifier: 0, creatureModifier: [0,0], creatures: [], roll: function (){ return Math.floor( Math.random() * 10 ); }, takeTurn: function() { this.turn += 1; $('.turn').html('Turn: '+this.turn); newRoll = this.roll(); if ( Table.nextRoll ) { Table.render(newRoll); } else { Table.nextRoll = true; Table.renderMessage( "Do nothing." ); } } }
1d8c3504931c9fcb2b1e5725ccd001993124a905
[ "JavaScript" ]
4
JavaScript
gjthoman/deepiq
568c2e9667b6c50e067f1dd7b5fdd60e04a247a4
70ba7084609c6c41e96c52b8289d9dc05feb84d3
refs/heads/master
<file_sep>print('hello trung duc')
b6d69a44768f26c84f3c6ca2375028fc9002ba78
[ "Python" ]
1
Python
smile2k/test3
89d0b6783de69598e20b4d554bb61d7325485695
df6e1e946b10da2da5d78426bca958123634a6bd
refs/heads/master
<repo_name>AndreNeves97/star-wars-people-ui<file_sep>/src/app/people/presenter/pages/people-list/people-list-data-state.type.ts
import { People } from 'src/app/people/domain/entities/people.type';

export class PeopleListDataState {
  has_error: boolean;
  is_loading: boolean;
  data!: People[] | null;

  constructor(has_error: boolean, is_loading: boolean, data: People[]) {
    this.has_error = has_error;
    this.is_loading = is_loading;
    this.data = data;
  }

  public static loading(data: People[] | null = null): PeopleListDataState {
    return {
      has_error: false,
      is_loading: true,
      data,
    };
  }

  public static error(data: People[] | null = null): PeopleListDataState {
    return {
      has_error: true,
      is_loading: false,
      data,
    };
  }

  public static success(data: People[]): PeopleListDataState {
    return {
      has_error: false,
      is_loading: false,
      data,
    };
  }
}
<file_sep>/src/app/core/components/table/directives/sortable/sortable.directive.ts
import { Directive, EventEmitter, Input, Output } from '@angular/core';
import { OrderByMode } from 'src/app/core/order-by/order-by-mode.type';
import { OrderBy } from 'src/app/core/order-by/order-by.type';

@Directive({
  selector: 'th[sortable]',
  host: {
    '[class.asc]': 'direction === "asc"',
    '[class.desc]': 'direction === "desc"',
    '(click)': 'rotate()',
  },
})
export class SortableDirective {
  @Input() sortable: string = '';
  @Input() direction!: OrderByMode | null;
  @Output() sort = new EventEmitter<OrderBy<any>>();

  rotate() {
    this.direction = this.getNewDirection();

    this.sort.emit({
      order_by_attr: this.sortable,
      order_by_mode: this.direction,
    });
  }

  // Cycle the sort direction: none -> asc -> desc -> none.
  getNewDirection(): OrderByMode | null {
    if (this.direction === OrderByMode.ASC) {
      return OrderByMode.DESC;
    }

    if (this.direction === OrderByMode.DESC) {
      return null;
    }

    return OrderByMode.ASC;
  }
}
<file_sep>/src/app/people/presenter/pages/people-list/people-list-view-state.type.ts
import { OrderBy } from 'src/app/core/order-by/order-by.type';
import { PeopleDisplayAttributes } from 'src/app/people/domain/entities/people-display-attributes.type';
import { PeopleOrderableAttributes } from 'src/app/people/domain/entities/people-sort-attributes.type';

export class PeopleListViewState {
  page: number;
  order_by: OrderBy<PeopleOrderableAttributes>;
  display_attributes: PeopleDisplayAttributes;

  constructor(
    page: number,
    order_by: OrderBy<PeopleOrderableAttributes>,
    display_attributes: PeopleDisplayAttributes
  ) {
    this.page = page;
    this.order_by = order_by;
    this.display_attributes = display_attributes;
  }

  public static default(): PeopleListViewState {
    return {
      page: 1,
      order_by: OrderBy.default<PeopleOrderableAttributes>(),
      display_attributes: PeopleDisplayAttributes.default(),
    };
  }
}
<file_sep>/README.md
# Star Wars People UI

This project shows the people from the `Star Wars API (SWAPI)`: https://swapi.dev/

See project preview: https://star-wars-people-c4458.web.app/

### Components architecture

![./archtecture.png](./archtecture.png)
<file_sep>/src/app/people/presenter/pages/people-list/components/people-list-view-controllers/people-list-view-controllers.component.ts
import { Component, OnDestroy, OnInit } from '@angular/core';
import { FormControl } from '@angular/forms';
import { Subject } from 'rxjs';
import { debounceTime, startWith, takeUntil } from 'rxjs/operators';
import
{ PeopleListController } from '../../people-list.controller'; @Component({ selector: 'app-people-list-view-controllers', templateUrl: './people-list-view-controllers.component.html', styleUrls: ['./people-list-view-controllers.component.scss'], }) export class PeopleListViewControllersComponent implements OnInit, OnDestroy { private destroy$ = new Subject<void>(); nameFilterFormControl = new FormControl(); constructor(private controller: PeopleListController) {} private listenNameFilterChange() { this.nameFilterFormControl.valueChanges .pipe(takeUntil(this.destroy$), startWith(''), debounceTime(400)) .subscribe((text) => this.setNameFilter(text)); } private setNameFilter(text: string) { this.controller.setNameFilter(text); } ngOnInit(): void { this.listenNameFilterChange(); } ngOnDestroy(): void { this.destroy$.next(); this.destroy$.complete(); } } <file_sep>/src/app/people/people.module.ts import { NgModule } from '@angular/core'; import { CommonModule } from '@angular/common'; import { PeopleRoutingModule } from './people-routing.module'; import { PeopleListPageComponent } from './presenter/pages/people-list/people-list-page/people-list-page.component'; import { PeopleListTableComponent } from './presenter/pages/people-list/components/people-list-table/people-list-table.component'; import { NgbModule } from '@ng-bootstrap/ng-bootstrap'; import { PeopleListViewControllersComponent } from './presenter/pages/people-list/components/people-list-view-controllers/people-list-view-controllers.component'; import { SortableDirective } from '../core/components/table/directives/sortable/sortable.directive'; import { ReactiveFormsModule } from '@angular/forms'; @NgModule({ declarations: [ PeopleListPageComponent, PeopleListTableComponent, PeopleListViewControllersComponent, SortableDirective, ], imports: [CommonModule, PeopleRoutingModule, NgbModule, ReactiveFormsModule], }) export class PeopleModule {} <file_sep>/src/app/people/domain/entities/people-sort-attributes.type.ts export enum PeopleOrderableAttributes { NAME = 'name', HEIGHT = 'height', MASS = 'mass', } <file_sep>/src/app/people/infra/people-repository/people-request.type.ts import { OrderBy } from 'src/app/core/order-by/order-by.type'; import { PeopleOrderableAttributes } from '../../domain/entities/people-sort-attributes.type'; export class PeopleRequest { created_start: Date | null; created_end: Date | null; skin_color: string | null; name: string | null; page: number; items_per_page: number = 10; order_by: OrderBy<PeopleOrderableAttributes>; constructor( created_start: Date | null, created_end: Date | null, skin_color: string | null, name: string | null, page: number, order_by: OrderBy<PeopleOrderableAttributes> ) { this.created_start = created_start; this.created_end = created_end; this.skin_color = skin_color; this.name = name; this.page = page; this.order_by = order_by; } } <file_sep>/src/app/core/url-utils/url-utils.service.ts import { Injectable } from '@angular/core'; @Injectable({ providedIn: 'root', }) export class UrlUtilsService { public serializeObjToUrlParams(obj: { key: string }) { return Object.entries(obj) .map(([key, value]) => { return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`; }) .join('&'); } } <file_sep>/src/app/core/api/api.service.ts import { HttpClient } from '@angular/common/http'; import { Injectable } from '@angular/core'; @Injectable({ providedIn: 'root', }) export class ApiService { private baseUrl = 'https://swapi.dev/api'; constructor(private httpClient: HttpClient) {} public get(url: 
string) { return this.httpClient.get(`${this.baseUrl}/${url}`); } } <file_sep>/src/app/people/presenter/pages/people-list/people-list-filter-state.type.ts export class PeopleListFilterState { created_start: Date | null; created_end: Date | null; skin_color: string | null; name: string | null; constructor( created_start: Date | null, created_end: Date | null, skin_color: string | null, name: string | null ) { this.created_start = created_start; this.created_end = created_end; this.skin_color = skin_color; this.name = name; } static default(): PeopleListFilterState { return { created_start: null, created_end: null, skin_color: null, name: null, }; } } <file_sep>/src/app/people/domain/entities/people-display-attributes.type.ts export class PeopleDisplayAttributes { values: PeopleDisplayAttributesMap; constructor(values: PeopleDisplayAttributesMap) { this.values = values; } public static default(): PeopleDisplayAttributes { return { values: { name: true, height: true, mass: true, hair_color: true, skin_color: true, eye_color: true, birth_year: true, gender: true, created: true, }, }; } } export enum PeopleAvailableDisplayAttributes { NAME = 'name', HEIGHT = 'height', MASS = 'mass', HAIR_COLOR = 'hair_color', SKIN_COLOR = 'skin_color', EYE_COLOR = 'eye_color', BIRTH_YEAR = 'birth_year', GENDER = 'gender', CREATED = 'created', } export type PeopleDisplayAttributesMap = { [attribute in PeopleAvailableDisplayAttributes]: boolean; }; <file_sep>/src/app/people/presenter/pages/people-list/components/people-list-table/people-list-table.component.ts import { Component, Input, OnChanges, OnDestroy, OnInit, QueryList, SimpleChanges, ViewChildren, } from '@angular/core'; import { SortableDirective } from 'src/app/core/components/table/directives/sortable/sortable.directive'; import { OrderBy } from 'src/app/core/order-by/order-by.type'; import { PeopleOrderableAttributes } from 'src/app/people/domain/entities/people-sort-attributes.type'; import { People } from 'src/app/people/domain/entities/people.type'; import { PeopleListController } from '../../people-list.controller'; @Component({ selector: 'app-people-list-table', templateUrl: './people-list-table.component.html', styleUrls: ['./people-list-table.component.scss'], }) export class PeopleListTableComponent implements OnInit, OnChanges { @Input() data!: People[]; @ViewChildren(SortableDirective) headers!: QueryList<SortableDirective>; constructor(private controller: PeopleListController) {} onSort(order_by: OrderBy<PeopleOrderableAttributes>) { this.headers.forEach((header) => { if (header.sortable !== order_by.order_by_attr) { header.direction = null; } }); this.controller.setOrderBy(order_by); } ngOnInit(): void {} ngOnChanges(changes: SimpleChanges): void {} } <file_sep>/src/app/people/infra/people-repository/people-repository.service.ts import { Injectable } from '@angular/core'; import { Observable } from 'rxjs'; import { map } from 'rxjs/operators'; import { OrderByMode } from 'src/app/core/order-by/order-by-mode.type'; import { PeopleDatasourceService } from 'src/app/people/external/people-datasource/people-datasource.service'; import { People } from '../../domain/entities/people.type'; import { PeopleRequest } from './people-request.type'; @Injectable({ providedIn: 'root', }) export class PeopleRepositoryService { constructor(private peopleDatasourceService: PeopleDatasourceService) {} public getPeople(request: PeopleRequest): Observable<People[]> { return this.peopleDatasourceService .getPeople(request.page, request.name) .pipe( map((data) => 
this.getPeopleWithId(data, request)), map((data) => this.sortPeople(data, request)) ); } private getPeopleWithId(data: People[], request: PeopleRequest): People[] { const page_offset = (request.page - 1) * 10; return data.map((people, index) => { const id = page_offset + index; return { ...people, id, }; }); } private sortPeople(data: People[], request: PeopleRequest): People[] { const order_by = request.order_by; if (!order_by.order_by_mode) { return data; } return data.sort((p1, p2) => { if (!order_by.order_by_attr) { return 0; } const v1 = p1[order_by.order_by_attr]; const v2 = p2[order_by.order_by_attr]; if (order_by.order_by_mode === OrderByMode.ASC) { return v1 < v2 ? -1 : 1; } return v1 < v2 ? 1 : -1; }); } } <file_sep>/src/app/core/order-by/order-by.type.ts import { OrderByMode } from './order-by-mode.type'; export class OrderBy<T> { order_by_attr!: T | null; order_by_mode!: OrderByMode | null; constructor(order_by_attr: T | null, order_by_mode: OrderByMode | null) { this.order_by_attr = order_by_attr; this.order_by_mode = order_by_mode; } public static default<T>(): OrderBy<T> { return { order_by_attr: null, order_by_mode: null }; } } <file_sep>/src/app/people/domain/entities/people.type.ts export class People { id!: number; name: string; height: number; mass: number; hair_color: string; skin_color: string; eye_color: string; birth_year: string; gender: string; created: Date; constructor( name: string, height: number, mass: number, hair_color: string, skin_color: string, eye_color: string, birth_year: string, gender: string, created: Date ) { this.name = name; this.height = height; this.mass = mass; this.hair_color = hair_color; this.skin_color = skin_color; this.eye_color = eye_color; this.birth_year = birth_year; this.gender = gender; this.created = created; } } <file_sep>/src/app/people/presenter/pages/people-list/people-list-page/people-list-page.component.ts import { Component, OnDestroy, OnInit } from '@angular/core'; import { Observable, Subject } from 'rxjs'; import { PeopleListDataState } from '../people-list-data-state.type'; import { PeopleListController } from '../people-list.controller'; @Component({ selector: 'app-people-list-page', templateUrl: './people-list-page.component.html', styleUrls: ['./people-list-page.component.scss'], }) export class PeopleListPageComponent implements OnInit, OnDestroy { private destroy$ = new Subject<void>(); public dataState$: Observable<PeopleListDataState>; constructor(private controller: PeopleListController) { this.dataState$ = this.controller.dataState$; } ngOnInit(): void { this.controller.load(); } ngOnDestroy(): void { this.destroy$.next(); this.destroy$.complete(); } } <file_sep>/src/app/people/presenter/pages/people-list/people-list.controller.ts import { Injectable } from '@angular/core'; import { BehaviorSubject, Subject } from 'rxjs'; import { debounceTime, takeUntil } from 'rxjs/operators'; import { OrderBy } from 'src/app/core/order-by/order-by.type'; import { PeopleOrderableAttributes } from 'src/app/people/domain/entities/people-sort-attributes.type'; import { People } from 'src/app/people/domain/entities/people.type'; import { PeopleRepositoryService } from 'src/app/people/infra/people-repository/people-repository.service'; import { PeopleRequest } from 'src/app/people/infra/people-repository/people-request.type'; import { PeopleListDataState } from './people-list-data-state.type'; import { PeopleListFilterState } from './people-list-filter-state.type'; import { PeopleListViewState } from 
'./people-list-view-state.type'; @Injectable({ providedIn: 'root', }) export class PeopleListController { filterState$: BehaviorSubject<PeopleListFilterState> = new BehaviorSubject<PeopleListFilterState>(PeopleListFilterState.default()); viewState$: BehaviorSubject<PeopleListViewState> = new BehaviorSubject<PeopleListViewState>(PeopleListViewState.default()); dataState$: BehaviorSubject<PeopleListDataState> = new BehaviorSubject<PeopleListDataState>(PeopleListDataState.loading()); loadRequest$: Subject<void> = new Subject<void>(); constructor(private peopleRepositoryService: PeopleRepositoryService) { this.listenLoadRequest(); this.listenFilterState(); this.listenViewState(); } public load() { this.loadRequest$.next(); } public setNameFilter(text: string) { const filterState = this.filterState$.value; this.filterState$.next({ ...filterState, name: text, }); } public setOrderBy(order_by: OrderBy<PeopleOrderableAttributes>) { const viewState = this.viewState$.value; this.viewState$.next({ ...viewState, order_by, }); } private setLoading() { const data = this.dataState$.value.data; this.dataState$.next(PeopleListDataState.loading(data)); } private setError() { const data = this.dataState$.value.data; this.dataState$.next(PeopleListDataState.error(data)); } private setData(data: People[]) { this.dataState$.next(PeopleListDataState.success(data)); } private listenLoadRequest() { this.loadRequest$.pipe(debounceTime(50)).subscribe(() => { this.setLoading(); const request = this.getLoadRequest(); this.peopleRepositoryService.getPeople(request).subscribe( (data) => this.setData(data), () => this.setError() ); }); } private listenFilterState() { this.filterState$.subscribe(() => { this.load(); }); } private listenViewState() { this.viewState$.subscribe(() => { this.load(); }); } private getLoadRequest(): PeopleRequest { const filterState = this.filterState$.value; const viewState = this.viewState$.value; return new PeopleRequest( filterState.created_start, filterState.created_end, filterState.skin_color, filterState.name, viewState.page, viewState.order_by ); } } <file_sep>/src/app/people/external/people-datasource/people-datasource.service.ts import { Injectable } from '@angular/core'; import { Observable, of } from 'rxjs'; import { delay, map, tap } from 'rxjs/operators'; import { ApiService } from 'src/app/core/api/api.service'; import { UrlUtilsService } from 'src/app/core/url-utils/url-utils.service'; import { People } from '../../domain/entities/people.type'; @Injectable({ providedIn: 'root', }) export class PeopleDatasourceService { private resource = 'people'; private dataCache: { [key: string]: People[] } = {}; constructor( private apiService: ApiService, private urlUtilsService: UrlUtilsService ) {} public getPeople( page: number, search: string | null = null ): Observable<People[]> { const cache_key = this.getCacheKey(page, search); if (!!this.dataCache[cache_key]) { return of(this.dataCache[cache_key]); } return this.requestGetPeople(page, search).pipe( tap((data) => { this.dataCache[cache_key] = data; }) ); } private getCacheKey(page: number, search: string | null = null) { if (!!search) { return `search=${search};page=${page}`; } return `page=${page}`; } private requestGetPeople( page: number, search: string | null ): Observable<People[]> { const params: any = { page: encodeURIComponent(page), }; if (!!search) { params.search = encodeURIComponent(search); } const queryString = this.urlUtilsService.serializeObjToUrlParams(params); return this.apiService 
.get(`${this.resource}/?${queryString}`) .pipe(map((response: any) => this.transformPeopleData(response.results))); } private transformPeopleData(data: any[]): People[] { return data.map((people) => { return new People( people.name, parseInt(people.height), parseInt(people.mass), people.hair_color, people.skin_color, people.eye_color, people.birth_year, people.gender, new Date(people.created) ); }); } }
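A note on `PeopleDatasourceService` above: `getPeople` memoizes each response in `dataCache`, keyed by page number and (when present) the search term, so repeated identical requests never hit the API twice. The sketch below is illustrative only — it mirrors the private `getCacheKey` logic from the service so the key scheme can be seen in isolation; the sample names and data are assumed, not part of the repository.

```ts
// Illustrative sketch only -- mirrors PeopleDatasourceService.getCacheKey().
function getCacheKey(page: number, search: string | null = null): string {
  // A non-empty search term becomes part of the key, so "page 1 of all
  // people" and "page 1 of people matching 'luke'" are cached separately.
  if (!!search) {
    return `search=${search};page=${page}`;
  }
  return `page=${page}`;
}

// Hypothetical cache contents after two different requests:
const cache: { [key: string]: string[] } = {};
cache[getCacheKey(1)] = ['Luke Skywalker', 'C-3PO'];
cache[getCacheKey(1, 'luke')] = ['Luke Skywalker'];

console.log(Object.keys(cache)); // ["page=1", "search=luke;page=1"]
```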
<repo_name>louistjchen/BranchBound-Partitioner<file_sep>/assignment3/Net.cpp /* * Net.cpp * * Created on: Nov 23, 2018 * Author: chentuju */ #include "Net.h" Net::Net() { m_id = -1; } Net::~Net() { } void Net::setId(int id) { m_id = id; } int Net::getId() { return m_id; } void Net::insertBlock(Block *block) { m_blocks.push_back(block); } std::vector<Block*> &Net::getBlocks() { return m_blocks; } bool Net::containBlock(Block *block) { for(unsigned int i=0; i<m_blocks.size(); i++) if(m_blocks[i] == block) return true; return false; } void Net::printNet() { std::cout << "Net ID: " << m_id << "\tConnected Blocks:"; for(unsigned int i=0; i<m_blocks.size(); i++) std::cout << "\t" << m_blocks[i]->getId(); } <file_sep>/assignment3/Solution.cpp /* * Solution.cpp * * Created on: Nov 23, 2018 * Author: chentuju */ #include "Solution.h" // global variables from Display.cpp extern std::vector<int> displayBlocks; extern float xWindowSize; extern float yWindowSize; extern float yOffset; extern int drawDepth; std::vector<std::vector<std::vector<int>>> partitionAdjMatrix; std::vector<Net*> partitionNetList; Solution::Solution() { m_numBlock = 12; m_numNet = 18; m_maxBlockPerPartition = m_numBlock / Solution::NUMPARTITION; m_lastBlockIndex = -1; m_lastBlockId = -1; m_cutSize = 0; m_drawNode.order = 0; m_drawNode.id = -1; m_drawNode.x = xWindowSize / 2.0; m_drawNode.y = 0.0; } Solution::Solution(int numBlock, int numNet) { m_numBlock = numBlock; m_numNet = numNet; m_maxBlockPerPartition = m_numBlock / Solution::NUMPARTITION; m_partitions.resize(Solution::NUMPARTITION); m_blocks.resize(m_numBlock+1); for(unsigned int i=0; i<m_blocks.size(); i++) m_blocks[i] = Solution::UNSET; m_lastBlockIndex = -1; m_lastBlockId = -1; m_cutSize = 0; m_drawNode.order = 0; m_drawNode.id = -1; m_drawNode.x = xWindowSize / 2.0; m_drawNode.y = 0.0; } Solution::Solution(const Solution &solution) { m_numBlock = solution.m_numBlock; m_numNet = solution.m_numNet; m_maxBlockPerPartition = solution.m_maxBlockPerPartition; m_partitions.resize(Solution::NUMPARTITION); for(unsigned int i=0; i<m_partitions.size(); i++) m_partitions[i] = solution.m_partitions[i]; m_blocks.resize(solution.m_blocks.size()); for(unsigned int i=0; i<m_blocks.size(); i++) m_blocks[i] = solution.m_blocks[i]; m_lastBlockIndex = solution.m_lastBlockIndex; m_lastBlockId = solution.m_lastBlockId; m_cutSize = solution.m_cutSize; m_cutNets = solution.m_cutNets; m_drawNode.order = solution.m_drawNode.order; m_drawNode.id = solution.m_drawNode.id; m_drawNode.x = solution.m_drawNode.x; m_drawNode.y = solution.m_drawNode.y; } Solution::~Solution() { } void Solution::pushBlock(int block, int region) { // exit if block is already partitioned if(m_blocks[block] != Solution::UNSET) { if(m_blocks[block] == region) std::cout << "[ERROR Solution::pushBlock] Block " << block << " is already partitioned to same partition " << region << std::endl; else std::cout << "[ERROR Solution::pushBlock] Block " << block << " is already partitioned to partition " << region << std::endl; exit(-1); } // exit if partition already contains block if(m_partitions[region].find(block) != m_partitions[region].end()) { std::cout << "[ERROR Solution::pushBlock] Partition " << region << "already contains block " << block << std::endl; exit(-1); } // update m_cutSize and m_cutNets by comparing with existing blocks in different region for(int i=0; i<(int)m_partitions.size(); i++) { if(i != region) { for(auto it=m_partitions[i].begin(); it!=m_partitions[i].end(); it++) { int block2 = *it; for(unsigned int 
j=0; j<partitionAdjMatrix[block][block2].size(); j++) { int net = partitionAdjMatrix[block][block2][j]; // insert cut net and increment cut size if current net is new if(m_cutNets.find(net) == m_cutNets.end()) { m_cutNets.insert(net); m_cutSize++; } } } } } // update m_partitions and m_blocks and keep track of last added block m_lastBlockIndex++; m_lastBlockId = block; m_blocks[block] = region; m_partitions[region].insert(block); } std::vector<int> &Solution::getPartitionBlocks() { return m_blocks; } int Solution::getLowerBound() { int lowerBound = 0; // simply return m_cutSize if solution is complete if(!isSolutionComplete()) { std::vector<std::vector<int>> lastBlockConnectedUnset; for(unsigned int i=0; i<partitionAdjMatrix[m_lastBlockId].size(); i++) if(partitionAdjMatrix[m_lastBlockId][i].size() > 0 && m_blocks[i] == Solution::UNSET) lastBlockConnectedUnset.push_back(partitionAdjMatrix[m_lastBlockId][i]); int lastPartitionAvail = m_maxBlockPerPartition - m_partitions[m_blocks[m_lastBlockId]].size(); int top = (int)lastBlockConnectedUnset.size() - lastPartitionAvail; if(top > 0) { std::vector<int> temp; for(unsigned int i=0; i<lastBlockConnectedUnset.size(); i++) { int count = 0; for(unsigned int j=0; j<lastBlockConnectedUnset[i].size(); j++) // if the net connects last block and any unset block is uncut if(m_cutNets.find(lastBlockConnectedUnset[i][j]) == m_cutNets.end()) count++; temp.push_back(count); } // sort potential cut cost in ascending order sort(temp.begin(), temp.end()); for(int i=0; i<top; i++) lowerBound += temp[i]; } } return m_cutSize + lowerBound; } int Solution::getUpperBound() { int upperBound = 0; // simply return m_cutSize if solution is complete if(!isSolutionComplete()) { std::unordered_set<int> tempNetSet; // go through netList, for each uncut net if any block is not partitioned, upperBound++ for(unsigned int i=0; i<partitionNetList.size(); i++) { if(m_cutNets.find(partitionNetList[i]->getId()) == m_cutNets.end() && tempNetSet.find(partitionNetList[i]->getId()) == tempNetSet.end()) for(unsigned int j=0; j<partitionNetList[i]->getBlocks().size(); j++) { if(m_blocks[partitionNetList[i]->getBlocks()[j]->getId()] == Solution::UNSET) { upperBound++; tempNetSet.insert(partitionNetList[i]->getId()); break; } } } } return m_cutSize + upperBound; } int Solution::getLastBlockIndex() { return m_lastBlockIndex; } void Solution::sanityCheck() { // check if each block has valid partition region for(unsigned int i=1; i<m_blocks.size(); i++) if(m_blocks[i] > Solution::RIGHT) { std::cout << "[ERROR Solution::sanityCheck] Block " << i << " having invalid partition region " << m_blocks[i] << std::endl; exit(-1); } // check if each partition has size equal to m_maxBlockPerPartition for(unsigned int i=0; i<m_partitions.size(); i++) { int partitionSize = (int)m_partitions[i].size(); if(partitionSize != m_maxBlockPerPartition) { std::cout << "[ERROR Solution::sanityCheck] Partition region " << i << " have invalid size " << partitionSize << " (should be " << m_maxBlockPerPartition << ")\n"; exit(-1); } } // check if m_cutSize has exceeded number of nets if(m_cutSize > m_numNet) { std::cout << "[ERROR Solution::sanityCheck] Partition cut size of " << m_cutSize << " has exceeded total number of nets " << m_numNet << std::endl; exit(-1); } // check if m_cutNets has unequal size to m_cutSize if((int)m_cutNets.size() != m_cutSize) { std::cout << "[ERROR Solution::sanityCheck] Partition cut nets have " << m_cutNets.size() << " nets unequal to current cut size" << m_cutSize << std::endl; 
exit(-1); } } void Solution::updateDrawNode(int region) { m_drawNode.id = displayBlocks[m_drawNode.order]; int depth = m_drawNode.order + 1; float numGap = pow(3, depth); float xOffset = xWindowSize / numGap; float x; float y = m_drawNode.y + yOffset; if(region == Solution::LEFT) x = m_drawNode.x - xOffset; else if(region == Solution::MIDDLE) x = m_drawNode.x; else if(region == Solution::RIGHT) x = m_drawNode.x + xOffset; else { std::cout << "[ERROR Solution::updateDrawNode] " << "Partition number " << region << " is invalid" << std::endl; exit(-1); } // draw LEFT, MIDDLE, RIGHT branches and cutsize char str[8]; setcolor(RED); if(m_drawNode.order <= drawDepth) { drawline(m_drawNode.x, m_drawNode.y, x, y); sprintf(str, "%d", m_cutSize); drawtext((m_drawNode.x+x)/2.0-7.0, (m_drawNode.y+y)/2.0, str, 100.0); } // push 3 branched nodes onto queue m_drawNode.order = depth; m_drawNode.id = displayBlocks[m_drawNode.order]; m_drawNode.y = y; m_drawNode.x = x; } bool Solution::isBlockPartitioned(int block) { return (m_blocks[block] <= Solution::RIGHT); } bool Solution::isPartitionFull(int region) { if(region > Solution::RIGHT) { std::cout << "[ERROR Solution::isPartitionFull] Input region " << region << " is invalid" << std::endl; exit(-1); } int avail = m_maxBlockPerPartition - (int)m_partitions[region].size(); if(avail > 0) return false; else if(avail == 0) return true; else { std::cout << "[ERROR Solution::isPartitionFull] Solution has previously " << "exceeded maximum allowed blocks " << m_maxBlockPerPartition << " by " << abs(avail) << " in partition " << region << std::endl; exit(-1); } } bool Solution::isSolutionComplete() { int currentNumBlock = 0; for(unsigned int i=0; i<m_partitions.size(); i++) currentNumBlock += m_partitions[i].size(); return (currentNumBlock == m_numBlock); } void Solution::printBlocks() { for(unsigned int i=1; i<m_blocks.size(); i++) std::cout << "Block " << i << " is in partition region " << m_blocks[i] << std::endl; } void Solution::printPartitions(std::unordered_map<int,Block*> &blockMap) { for(unsigned int i=0; i<m_partitions.size(); i++) { std::cout << "Partition region " << i << " contains:"; for(auto it=m_partitions[i].begin(); it!=m_partitions[i].end(); it++) std::cout << "\t" << *it; std::cout << std::endl; } std::cout << "Solution cut size = " << m_cutSize << std::endl; } <file_sep>/assignment3/Display.h /* * Display.h * * Created on: Nov 23, 2018 * Author: chentuju */ #ifndef DISPLAY_H_ #define DISPLAY_H_ #include <cmath> #include <queue> #include "graphics.h" #include "Solution.h" typedef struct _Node { int order; int id; float x; float y; } Node; void openDisplay(); void closeDisplay(); void drawScreen(); void act_on_button_press(float x, float y); void act_on_mouse_move(float x, float y); void act_on_key_press(char c); #endif /* DISPLAY_H_ */ <file_sep>/assignment3/Display.cpp /* * Display.cpp * * Created on: Nov 23, 2018 * Author: chentuju */ #include "Display.h" std::vector<int> displayBlocks; float xWindowSize = 1200.0; float yWindowSize = 600.0; float yOffset = 0.0; int drawDepth; bool drawn = false; void openDisplay() { // drawDepth = (int)displayBlocks.size(); drawDepth = 13; yOffset = yWindowSize / (float)drawDepth; init_graphics("ECE1387 Assignment 3 - B&B Partitioner (developed by <NAME>)", WHITE); init_world (0.0, 0.0, xWindowSize, yWindowSize); update_message("Press \"Proceed\" to start branch-and-bound partitioner."); event_loop(act_on_button_press, NULL, NULL, drawScreen); drawn = true; } void closeDisplay() { update_message("Press 
\"Proceed\" to exit."); event_loop(act_on_button_press, NULL, NULL, drawScreen); } void drawScreen() { if(drawn) return; set_draw_mode(DRAW_NORMAL); clearscreen(); std::queue<Node> q; Node a; a.order = 0; a.id = displayBlocks[a.order]; a.x = xWindowSize / 2.0; a.y = 0.0; q.push(a); setfontsize(15); setcolor(BLACK); while(!q.empty()) { Node node = q.front(); q.pop(); if(node.order == drawDepth) continue; // get LEFT, MIDDLE, RIGHT points int depth = node.order + 1; float numGap = pow(3, depth); float xOffset = xWindowSize / numGap; float x_left = node.x - xOffset; float x_right = node.x + xOffset; float y = node.y + yOffset; // draw node char str[8]; if(depth == 1) { sprintf(str, "%d in LEFT", node.id); drawtext(node.x, node.y-7.0, str, 100.0); } else { sprintf(str, "%d", node.id); drawtext(node.x-7.0, node.y, str, 100.0); } // draw LEFT, MIDDLE, RIGHT branches drawline(node.x, node.y, x_left, y); drawline(node.x, node.y, node.x, y); drawline(node.x, node.y, x_right, y); // push 3 branched nodes onto queue node.order = depth; node.id = displayBlocks[node.order]; node.y = y; q.push(node); node.x = x_left; q.push(node); node.x = x_right; q.push(node); } } void act_on_button_press(float x, float y) { /* Called whenever event_loop gets a button press in the graphics * * area. Allows the user to do whatever he/she wants with button * * clicks. */ // printf("User clicked a button at coordinates (%f, %f)\n", x, y); } void act_on_mouse_move (float x, float y) { /* function to handle mouse move event, the current mouse position * * in the current world coordinate as defined as MAX_X and MAX_Y in * * init_world is returned. */ // printf ("Mouse move at (%f,%f)\n", x, y); } void act_on_key_press (char c) { /* function to handle keyboard press event, the ASCII character is returned */ // printf ("Key press: %c\n", c); } <file_sep>/assignment3/makefile # This is the makefile for ECE1387 Assignment 3 Branch & Bound Partitioner # Developer: <NAME> (1000303502) # Date: Nov 29, 2018 all: g++ -O0 -g3 -Wall -c -fmessage-length=0 -std=c++0x -o main.o main.cpp g++ -O0 -g3 -Wall -c -fmessage-length=0 -std=c++0x -o Partitioner.o Partitioner.cpp g++ -O0 -g3 -Wall -c -fmessage-length=0 -std=c++0x -o Block.o Block.cpp g++ -O0 -g3 -Wall -c -fmessage-length=0 -std=c++0x -o Net.o Net.cpp g++ -O0 -g3 -Wall -c -fmessage-length=0 -std=c++0x -o Solution.o Solution.cpp g++ -O0 -g3 -Wall -c -fmessage-length=0 -std=c++0x -o Display.o Display.cpp g++ -O0 -g3 -Wall -c -fmessage-length=0 -std=c++0x -o graphics.o graphics.cpp g++ -o Partitioner-LouisChen main.o Partitioner.o Block.o Net.o Solution.o Display.o graphics.o -lX11 clean: rm -f main.o Partitioner.o Block.o Net.o Solution.o Display.o graphics.o Partitioner-LouisChen <file_sep>/assignment3/Partitioner.h /* * Partitioner.h * * Created on: Nov 22, 2018 * Author: chentuju */ #ifndef PARTITIONER_H_ #define PARTITIONER_H_ #include <iostream> #include <fstream> #include <sys/time.h> #include <cstdlib> #include <cstring> #include <iomanip> #include <algorithm> #include <vector> #include <unordered_map> #include <stack> #include <queue> #include "Block.h" #include "Net.h" #include "Solution.h" #include "Display.h" class Partitioner { public: Partitioner(); ~Partitioner(); // main methods void init(const char *inputFile, const char *traversal, const char *display); void run(); protected: // functional methods - init() void constructBlockNet(const char *inputFile); void constructAdjMatrix(); // functional methods - run() void initialSolution(); void branchBoundDFS(); void 
branchBoundLBF(); int computeCutSize(Solution &solution); // sort methods static bool sortBlockList(Block *a, Block *b); static bool sortNetList(Net *a, Net *b); // debugging methods void printBlockList(); void printNetList(); void printAdjMatrix(); private: // block-net connectivity info std::vector<Block*> m_blockList; std::vector<Net*> m_netList; std::unordered_map<int,Block*> m_blockMap; std::unordered_map<int,Net*> m_netMap; std::vector<std::vector<std::vector<int>>> m_adjMatrix; // partition info Solution m_bestSolution; int m_bestCost; long long m_visitCount; // flag to select depth-first traversal or lowest-bound-first traversal bool m_useDFSNotLBF; // flag for display bool m_display; }; #endif /* PARTITIONER_H_ */ <file_sep>/assignment3/Block.h /* * Block.h * * Created on: Nov 23, 2018 * Author: chentuju */ #ifndef BLOCK_H_ #define BLOCK_H_ #include <iostream> #include <iomanip> #include <vector> #include "Net.h" class Net; class Block { public: Block(); ~Block(); // functional methods void setId(int id); int getId(); void insertNet(Net* net); std::vector<Net*> &getNets(); std::vector<int> isConnected(Block *block); // debugging methods void printBlock(); private: int m_id; std::vector<Net*> m_nets; }; #endif /* BLOCK_H_ */ <file_sep>/assignment3/main.cpp /* * main.cpp * * Created on: Nov 22, 2018 * Author: chentuju */ #include <iostream> #include "Partitioner.h" int main(int argc, char **argv) { if(argc != 4) { std::cout << "[ERROR main] Number of input argument is not correct" << std::endl; exit(-1); } Partitioner partitioner; partitioner.init(argv[1], argv[2], argv[3]); partitioner.run(); return 0; } <file_sep>/assignment3/Block.cpp /* * Block.cpp * * Created on: Nov 23, 2018 * Author: chentuju */ #include "Block.h" Block::Block() { m_id = -1; } Block::~Block() { } void Block::setId(int id) { m_id = id; } int Block::getId() { return m_id; } void Block::insertNet(Net *net) { m_nets.push_back(net); } std::vector<Net*> &Block::getNets() { return m_nets; } std::vector<int> Block::isConnected(Block *block) { std::vector<int> connectedNets; for(unsigned int i=0; i<m_nets.size(); i++) if(m_nets[i]->containBlock(block)) connectedNets.push_back(m_nets[i]->getId()); return connectedNets; } void Block::printBlock() { std::cout << "Block ID: " << m_id << "\tConnected Nets:"; for(unsigned int i=0; i<m_nets.size(); i++) std::cout << "\t" << m_nets[i]->getId(); } <file_sep>/assignment3/Solution.h /* * Solution.h * * Created on: Nov 23, 2018 * Author: chentuju */ #ifndef SOLUTION_H_ #define SOLUTION_H_ #include <iostream> #include <cmath> #include <vector> #include <unordered_set> #include <unordered_map> #include <queue> #include <algorithm> #include "Block.h" #include "graphics.h" class Solution { public: Solution(); Solution(int numBlock, int numNet); Solution(const Solution &solution); ~Solution(); // functional methods void pushBlock(int block, int region); std::vector<int> &getPartitionBlocks(); int getLowerBound(); int getUpperBound(); int getLastBlockIndex(); void sanityCheck(); void updateDrawNode(int region); // query methods bool isBlockPartitioned(int block); bool isPartitionFull(int region); bool isSolutionComplete(); // debugging methods void printBlocks(); void printPartitions(std::unordered_map<int,Block*> &blockMap); // enums for partition regions enum { LEFT, MIDDLE, RIGHT, NUMPARTITION, UNSET }; private: int m_numBlock; int m_numNet; int m_maxBlockPerPartition; int m_lastBlockIndex; int m_lastBlockId; std::vector<int> m_blocks; std::vector<std::unordered_set<int>> 
m_partitions; int m_cutSize; std::unordered_set<int> m_cutNets; struct _Node { int order; int id; float x; float y; } m_drawNode; }; #endif /* SOLUTION_H_ */ <file_sep>/assignment3/Partitioner.cpp /* * Partitioner.cpp * * Created on: Nov 22, 2018 * Author: chentuju */ #include "Partitioner.h" extern std::vector<std::vector<std::vector<int>>> partitionAdjMatrix; extern std::vector<Net*> partitionNetList; extern std::vector<int> displayBlocks; Partitioner::Partitioner() { m_visitCount = 0; m_useDFSNotLBF = true; m_display = false; } Partitioner::~Partitioner() { // free all blocks for(unsigned int i=0; i<m_blockList.size(); i++) if(m_blockList[i]) delete m_blockList[i]; // free all nets for(unsigned int i=0; i<m_netList.size(); i++) if(m_netList[i]) delete m_netList[i]; } void Partitioner::init(const char *inputFile, const char *traversal, const char *display) { if(strcmp(traversal, "-traversal=dfs") == 0) m_useDFSNotLBF = true; else if(strcmp(traversal, "-traversal=lbf") == 0) m_useDFSNotLBF = false; else { std::cout << "[ERROR Partitioner::init] Invalid traversal order <" << traversal << ">" << std::endl; exit(-1); } if(strcmp(display, "-display=on") == 0) m_display = true; else if(strcmp(display, "-display=off") == 0) m_display = false; else { std::cout << "[ERROR Partitioner::init] Invalid display mode <" << display << ">" << std::endl; exit(-1); } constructBlockNet(inputFile); constructAdjMatrix(); } void Partitioner::run() { // sort m_blockList in descending fan-out order (for best pruning) std::sort(m_blockList.begin(), m_blockList.end(), Partitioner::sortBlockList); // sort m_netList in ascending fan-out order (for best initial solution) std::sort(m_netList.begin(), m_netList.end(), Partitioner::sortNetList); // find initial solution and compute best cost initialSolution(); std::cout << "-------------------- Initial Partition --------------------" << std::endl; m_bestSolution.printPartitions(m_blockMap); std::cout << "-----------------------------------------------------------" << std::endl; // invoke graphics if(m_display) { for(unsigned int i=0; i<m_blockList.size(); i++) displayBlocks.push_back(m_blockList[i]->getId()); openDisplay(); } // start timer struct timeval t1, t2; gettimeofday(&t1, NULL); // use iterative rather than recursive traversal if(m_useDFSNotLBF) branchBoundDFS(); else branchBoundLBF(); // stop timer gettimeofday(&t2, NULL); double time1 = t1.tv_sec * 1000.0 + t1.tv_usec / 1000.0; double time2 = t2.tv_sec * 1000.0 + t2.tv_usec / 1000.0; std::cout << "--------------------- Final Partition ---------------------" << std::endl; m_bestSolution.printPartitions(m_blockMap); std::cout << "-----------------------------------------------------------" << std::endl; std::cout << "---> Verified cut size = " << computeCutSize(m_bestSolution) << std::endl; std::cout << "---> # nodes expanded = " << m_visitCount << std::endl; // print total run-time std::cout << "---> Partitioner took " << (time2-time1) << " ms to finish" << std::endl; // close graphics if(m_display) closeDisplay(); } void Partitioner::constructBlockNet(const char *inputFile) { char buf[8]; std::ifstream file; file.open(inputFile, std::ifstream::in); // loop through every line in the input file while(file >> buf) { int num = atoi(buf); if(num == -1) break; // allocate, push, and hash a new block Block *block = new Block(); block->setId(num); m_blockList.push_back(block); m_blockMap[num] = block; // loop through every netnum in a line while(file >> buf) { num = atoi(buf); if(num == -1) break; // allocate, push, 
and hash the net if it does not exist Net *net = NULL; if(m_netMap.find(num) == m_netMap.end()) { net = new Net(); net->setId(num); m_netList.push_back(net); m_netMap[num] = net; } else net = m_netMap[num]; // link block and net net->insertBlock(block); block->insertNet(net); } } file.close(); partitionNetList = m_netList; } void Partitioner::constructAdjMatrix() { m_adjMatrix.resize(m_blockList.size()+1); for(unsigned int i=0; i<m_adjMatrix.size(); i++) m_adjMatrix[i].resize(m_blockList.size()+1); for(unsigned int i=1; i<m_adjMatrix.size(); i++) { for(unsigned int j=1; j<=i; j++) { if(i != j && i != 0 && j != 0) { Block *block1 = m_blockMap[i]; Block *block2 = m_blockMap[j]; std::vector<int> connectedNets = block1->isConnected(block2); for(unsigned int k=0; k<connectedNets.size(); k++) { m_adjMatrix[i][j].push_back(connectedNets[k]); m_adjMatrix[j][i].push_back(connectedNets[k]); } } } } partitionAdjMatrix = m_adjMatrix; } void Partitioner::initialSolution() { m_bestSolution = Solution(m_blockList.size(), m_netList.size()); // traverse through all nets in descending fan-out order // try to group as many as possible within one net for(unsigned int i=0; i<m_netList.size(); i++) { std::vector<Block*> blockList = m_netList[i]->getBlocks(); for(unsigned int j=0; j<blockList.size(); j++) { Block *block = blockList[j]; int blockId = block->getId(); // if current block not partitioned yet if(!m_bestSolution.isBlockPartitioned(blockId)) { bool done = false; for(int k=0; k<Solution::NUMPARTITION && !done; k++) { if(!m_bestSolution.isPartitionFull(k)) { m_bestSolution.pushBlock(blockId, k); done = true; } } if(!done) { std::cout << "[ERROR Partitioner::initialSolution] " << "Solution::blockPartitioned shows unpartitioned " << "but Solution::partitionFull gives unavailability" << std::endl; exit(-1); } } } } m_bestSolution.sanityCheck(); m_bestCost = m_bestSolution.getLowerBound(); } void Partitioner::branchBoundDFS() { // create a stack to perform depth-first traversal std::stack<Solution*> st; Solution *startSolution = new Solution(m_blockList.size(), m_netList.size()); // lock 1st block to only 1 partition by symmetry int nextBlockIndex = startSolution->getLastBlockIndex() + 1; int nextBlockId = m_blockList[nextBlockIndex]->getId(); startSolution->pushBlock(nextBlockId, Solution::LEFT); // lock 2nd block to only 2 partitions by symmetry Solution *startSolution2 = new Solution(*startSolution); nextBlockIndex = startSolution->getLastBlockIndex() + 1; nextBlockId = m_blockList[nextBlockIndex]->getId(); startSolution->pushBlock(nextBlockId, Solution::LEFT); startSolution2->pushBlock(nextBlockId, Solution::MIDDLE); if(m_display) { startSolution->updateDrawNode(Solution::LEFT); startSolution2->updateDrawNode(Solution::MIDDLE); } st.push(startSolution); st.push(startSolution2); while(!st.empty()) { Solution *solution = st.top(); st.pop(); m_visitCount++; int solutionLowerBound = solution->getLowerBound(); // int solutionUpperBound = solution->getUpperBound(); // stop expansion and update best if current solution is complete if(solution->isSolutionComplete() && solutionLowerBound < m_bestCost) { m_bestSolution = *solution; m_bestCost = solutionLowerBound; delete solution; continue; } // update m_bestCost if upper bound of current solution < m_bestCost // if(solutionUpperBound < m_bestCost) { // std::cout << "[DEBUG Partitioner::branchBoundDFS] Upper bound pruned" // << std::endl; // m_bestCost = solutionUpperBound; // } // prune if lower bound of current solution >= best solution if(solutionLowerBound >= 
m_bestCost) { delete solution; continue; } // create and push solution object while preserving balance constraint nextBlockIndex = solution->getLastBlockIndex() + 1; if(nextBlockIndex < (int)m_blockList.size()) { nextBlockId = m_blockList[nextBlockIndex]->getId(); for(int i=0; i<Solution::NUMPARTITION; i++) { if(!solution->isPartitionFull(i)) { Solution *nextSolution = new Solution(*solution); nextSolution->pushBlock(nextBlockId, i); if(m_display) nextSolution->updateDrawNode(i); st.push(nextSolution); } } } delete solution; } } class Compare { public: bool operator()(Solution *&a, Solution*&b) { return a->getLowerBound() > b->getLowerBound(); } }; void Partitioner::branchBoundLBF() { // create a priority queue to perform lowest-bound-first traversal std::priority_queue<Solution*,std::vector<Solution*>,Compare> pq; Solution *startSolution = new Solution(m_blockList.size(), m_netList.size()); // lock 1st block to only 1 partition by symmetry int nextBlockIndex = startSolution->getLastBlockIndex() + 1; int nextBlockId = m_blockList[nextBlockIndex]->getId(); startSolution->pushBlock(nextBlockId, Solution::LEFT); // lock 2nd block to only 2 partitions by symmetry Solution *startSolution2 = new Solution(*startSolution); nextBlockIndex = startSolution->getLastBlockIndex() + 1; nextBlockId = m_blockList[nextBlockIndex]->getId(); startSolution->pushBlock(nextBlockId, Solution::LEFT); startSolution2->pushBlock(nextBlockId, Solution::MIDDLE); if(m_display) { startSolution2->updateDrawNode(Solution::MIDDLE); startSolution->updateDrawNode(Solution::LEFT); } pq.push(startSolution); pq.push(startSolution2); while(!pq.empty()) { Solution *solution = pq.top(); pq.pop(); m_visitCount++; int solutionLowerBound = solution->getLowerBound(); // int solutionUpperBound = solution->getUpperBound(); // stop expansion and update best if current solution is complete if(solution->isSolutionComplete() && solutionLowerBound < m_bestCost) { m_bestSolution = *solution; m_bestCost = solutionLowerBound; delete solution; continue; } // update m_bestCost if upper bound of current solution < m_bestCost // if(solutionUpperBound < m_bestCost) { // std::cout << "[DEBUG Partitioner::branchBoundLBF] Upper bound pruned" // << std::endl; // m_bestCost = solutionUpperBound; // } // prune if lower bound of current solution >= best solution if(solutionLowerBound >= m_bestCost) { delete solution; continue; } // create and push solution object while preserving balance constraint nextBlockIndex = solution->getLastBlockIndex() + 1; if(nextBlockIndex < (int)m_blockList.size()) { nextBlockId = m_blockList[nextBlockIndex]->getId(); for(int i=0; i<Solution::NUMPARTITION; i++) { if(!solution->isPartitionFull(i)) { Solution *nextSolution = new Solution(*solution); nextSolution->pushBlock(nextBlockId, i); if(m_display) nextSolution->updateDrawNode(i); pq.push(nextSolution); } } } delete solution; } } int Partitioner::computeCutSize(Solution &solution) { int cutSize = 0; std::vector<int> partitionBlocks = solution.getPartitionBlocks(); // loop through all nets for(unsigned int i=0; i<m_netList.size(); i++) { Net *net = m_netList[i]; int partition[Solution::NUMPARTITION] = {0}; std::vector<Block*> blocks = net->getBlocks(); // for each net see if it contains blocks in different partitions for(unsigned int j=0; j<blocks.size(); j++) { int region = partitionBlocks[blocks[j]->getId()]; if(partition[region] == 0) partition[region] = 1; int count = 0; for(int k=0; k<Solution::NUMPARTITION && count<=1; k++) count += partition[k]; if(count > 1) { 
cutSize++; break; } } } return cutSize; } bool Partitioner::sortBlockList(Block *a, Block *b) { return a->getNets().size() > b->getNets().size(); } bool Partitioner::sortNetList(Net *a, Net *b) { return a->getBlocks().size() < b->getBlocks().size(); } void Partitioner::printBlockList() { std::cout << "# Blocks = " << m_blockList.size() << "; # Nets = " << m_netList.size() << std::endl; for(unsigned int i=0; i<m_blockList.size(); i++) { m_blockList[i]->printBlock(); std::cout << std::endl; } } void Partitioner::printNetList() { std::cout << "# Blocks = " << m_blockList.size() << "; # Nets = " << m_netList.size() << std::endl; for(unsigned int i=0; i<m_netList.size(); i++) { m_netList[i]->printNet(); std::cout << std::endl; } } void Partitioner::printAdjMatrix() { for(unsigned int i=1; i<m_adjMatrix.size(); i++) for(unsigned int j=1; j<m_adjMatrix[i].size(); j++) if(i != j) { std::cout << "Blocks " << std::setw(3) << i << " and " << std::setw(3) << j << " are connected via nets: "; for(unsigned int k=0; k<m_adjMatrix[i][j].size(); k++) std::cout << std::setw(3) << m_adjMatrix[i][j][k] << " "; std::cout << std::endl; } }
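Both traversals above share one loop shape: pop a partial `Solution`, prune it when `getLowerBound() >= m_bestCost`, record complete solutions, and otherwise push one child per non-full partition (with the first two blocks locked by symmetry). The following self-contained sketch shows that lowest-bound-first control flow on a deliberately trivial toy problem — assign items to bins, cost = number of bins used — and is illustrative only, not the partitioner's bound or cost model.

```cpp
// Illustrative sketch only: minimal lowest-bound-first branch and bound,
// in the same shape as Partitioner::branchBoundLBF() above.
#include <iostream>
#include <queue>
#include <set>
#include <vector>

struct State {
    std::vector<int> assignment;          // bin chosen for each item so far
    int lowerBound() const {              // bins used so far can only grow
        std::set<int> bins(assignment.begin(), assignment.end());
        return (int)bins.size();
    }
};

struct Compare {
    bool operator()(const State &a, const State &b) const {
        return a.lowerBound() > b.lowerBound();   // min-heap on the bound
    }
};

int main() {
    const int numItems = 4, numBins = 3;
    int bestCost = numBins + 1;                   // worse than any real cost
    long visited = 0;

    std::priority_queue<State, std::vector<State>, Compare> pq;
    pq.push(State{{0}});                          // symmetry: item 0 -> bin 0

    while (!pq.empty()) {
        State s = pq.top(); pq.pop();
        ++visited;
        if (s.lowerBound() >= bestCost) continue; // prune, as in the partitioner
        if ((int)s.assignment.size() == numItems) {
            bestCost = s.lowerBound();            // complete solution: update best
            continue;
        }
        for (int bin = 0; bin < numBins; ++bin) { // branch on the next item
            State child = s;
            child.assignment.push_back(bin);
            pq.push(child);
        }
    }
    std::cout << "best cost = " << bestCost
              << ", nodes expanded = " << visited << std::endl;
    return 0;
}
```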
<repo_name>WeiWang0368/Simulation-of-AR-Re-provision<file_sep>/AR-Reprovisioning/src/main/java/weiw/topo/Topology.java package weiw.topo; import org.jgrapht.graph.SimpleWeightedGraph; import org.jgrapht.graph.builder.DirectedWeightedGraphBuilder; import org.jgrapht.graph.builder.UndirectedWeightedGraphBuilder; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.util.ArrayList; import java.util.List; /** * Created by Wei on 2016/4/29. */ public class Topology { public static SimpleWeightedGraph<Integer,AccessEdge> graph; public static List<Link> linkList; public static List<Integer> nodeList; public static final int ServiceNum=50000; public static void TopoReader(String topoFilePath){ linkList=new ArrayList<Link>(); nodeList=new ArrayList<Integer>(); File file = new File(topoFilePath); BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(file)); String tempString = null; int line = 0; while ((tempString = reader.readLine()) != null) { System.out.println("line " + line + ": " + tempString); nodeList.add(line); String[] nodeArray=tempString.split("\t"); for(int i=0;i<nodeArray.length;i++){ if(nodeArray[i].equals("0")||nodeArray[i].equals("999")){ continue; } else{ boolean existFlag=false; for(Link itemLink:linkList){ if(itemLink.src==i&&itemLink.dst==line){ existFlag=true; } } if(existFlag){ continue; } else{ linkList.add(new Link(line,i,1.0)); } } } line++; } reader.close(); } catch (IOException e) { e.printStackTrace(); } finally { if (reader != null) { try { reader.close(); } catch (IOException e1) { } } } } public static void initUndirectedWeightedGraph(){ UndirectedWeightedGraphBuilder<Integer, AccessEdge,SimpleWeightedGraph<Integer, AccessEdge>> builder = new UndirectedWeightedGraphBuilder<Integer, AccessEdge, SimpleWeightedGraph<Integer, AccessEdge>>( new SimpleWeightedGraph<Integer, AccessEdge>(AccessEdge.class) ); // add vertex for (Integer vertex : nodeList){ builder.addVertex(vertex); } // add undirected weighted edges for(Link edge : linkList){ builder.addEdge(edge.src, edge.dst, edge.weight); } graph = builder.build(); } public static Link obtainEdge(int src, int dst){ for(Link edge : linkList){ if((edge.src==src && edge.dst==dst) || (edge.src==dst && edge.dst==src)){ return edge; } } return null; } } <file_sep>/AR-Reprovisioning/src/main/java/weiw/topo/AccessEdge.java package weiw.topo; import org.jgrapht.graph.DefaultEdge; import org.jgrapht.graph.DefaultWeightedEdge; /** * Created by Wei on 2016/4/28. */ public class AccessEdge<V> extends DefaultWeightedEdge { public V obtainSource(){ return (V) super.getSource(); } public V obtainTarget(){ return (V)super.getTarget(); } } <file_sep>/AR-Reprovisioning/src/main/java/weiw/service/HeavyLoadService.java package weiw.service; import weiw.topo.Link; import java.util.Set; /** * Created by Wei on 2016/5/1. 
*/ public class HeavyLoadService { int serviceId; Set<Integer> occupiedTimeSlots; Set<Link> occupiedLinks; Integer heavyLoadMetric; public HeavyLoadService(int serviceId, Set<Integer> occupiedTimeSlots, Set<Link> occupiedLinks) { this.serviceId=serviceId; this.occupiedTimeSlots = occupiedTimeSlots; this.occupiedLinks = occupiedLinks; this.heavyLoadMetric=occupiedLinks.size()*occupiedTimeSlots.size(); } public int getServiceId() { return serviceId; } public void setServiceId(int serviceId) { this.serviceId = serviceId; } public Set<Integer> getOccupiedTimeSlots() { return occupiedTimeSlots; } public void setOccupiedTimeSlots(Set<Integer> occupiedTimeSlots) { this.occupiedTimeSlots = occupiedTimeSlots; } public Set<Link> getOccupiedLinks() { return occupiedLinks; } public void setOccupiedLinks(Set<Link> occupiedLinks) { this.occupiedLinks = occupiedLinks; } public Integer getHeavyLoadMetric() { return heavyLoadMetric; } public void setHeavyLoadMetric(Integer heavyLoadMetric) { this.heavyLoadMetric = heavyLoadMetric; } } <file_sep>/AR-Reprovisioning/src/main/java/weiw/ARReprovisioning.java package weiw; /** * Created by Wei on 2016/4/28. */ public class ARReprovisioning { public static void main(String[] args){ Simulation ARSim=new Simulation(); ARSim.simRun(50,20,300,0.025); } } <file_sep>/AR-Reprovisioning/src/main/java/weiw/topo/Link.java package weiw.topo; import java.util.List; /** * Created by Wei on 2016/4/28. */ public class Link { public int src; public int dst; public double weight; public int occupiedWavelengthNum; public List<Lambda> wavelengths; public Link(int src, int dst,double weight) { this.src = src; this.dst = dst; this.weight=weight; } public Link(int src, int dst, double weight, int occupiedWavelengthNum, List<Lambda> wavelengths) { this.src = src; this.dst = dst; this.weight = weight; this.occupiedWavelengthNum = occupiedWavelengthNum; this.wavelengths = wavelengths; } public int getSrc() { return src; } public void setSrc(int src) { this.src = src; } public int getDst() { return dst; } public void setDst(int dst) { this.dst = dst; } public double getWeight() { return weight; } public void setWeight(double weight) { this.weight = weight; } public int getOccupiedWavelengthNum() { return occupiedWavelengthNum; } public void setOccupiedWavelengthNum(int occupiedWavelengthNum) { this.occupiedWavelengthNum = occupiedWavelengthNum; } public List<Lambda> getWavelengths() { return wavelengths; } public void setWavelengths(List<Lambda> wavelengths) { this.wavelengths = wavelengths; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Link link = (Link) o; if (src != link.src) return false; return dst == link.dst; } @Override public int hashCode() { int result = src; result = 31 * result + dst; return result; } } <file_sep>/AR-Reprovisioning/src/main/java/weiw/time/Timer.java package weiw.time; /** * Created by Wei on 2016/4/29. */ public class Timer { public static int currentTime=0; public static void resetTime(){ currentTime=0; } } <file_sep>/AR-Reprovisioning/src/main/java/weiw/topo/Path.java package weiw.topo; import org.jgrapht.GraphPath; import java.util.List; /** * Created by Wei on 2016/4/28. 
 */
public class Path {
    GraphPath<Integer, AccessEdge> nodes; // linked list; each node has a next hop
    List<Link> links;

    public GraphPath<Integer, AccessEdge> getNodes() {
        return nodes;
    }

    public void setNodes(GraphPath<Integer, AccessEdge> nodes) {
        this.nodes = nodes;
    }

    public void setLinks(List<Link> links) {
        this.links = links;
    }

    public List<Link> getLinks() {
        return links;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Path path = (Path) o;

        if (nodes != null ? !nodes.equals(path.nodes) : path.nodes != null) return false;
        return !(links != null ? !links.equals(path.links) : path.links != null);
    }

    @Override
    public int hashCode() {
        int result = nodes != null ? nodes.hashCode() : 0;
        result = 31 * result + (links != null ? links.hashCode() : 0);
        return result;
    }
}
<file_sep>/README.md
# Simulation-of-AR-Re-provision
Simulation of the provision and re-provision strategies of Advanced Reservation applications
<file_sep>/AR-Reprovisioning/src/main/java/weiw/Simulation.java
package weiw;

import weiw.service.ARServiceEvent;
import weiw.service.ARServiceGenerator;
import weiw.time.Timer;
import weiw.topo.NetworkResource;
import weiw.topo.Topology;

import java.util.List;

/**
 * Created by Wei on 2016/4/28.
 */
public class Simulation {

    private final String topoFilePath="C:\\Users\\Wei\\AR-Reprovisioning\\src\\main\\resources\\map_NSFNet.data";
    private final int minSlotNum=1;
    private final int maxSlotNum=5;
    private final int nodeNum=14;

    public List<ARServiceEvent> AReventQueue;
    public NetworkResource networkResource;
    public ARServiceGenerator ARserviceGenerator;
    public SimResults simResults;

    // The topology is created only once, no matter how many runs are made;
    // the network resources and the service generator must be re-initialized for every run.
    public Simulation(){
        Topology.TopoReader(topoFilePath);
        Topology.initUndirectedWeightedGraph();
    }

    public void simRun(int rouStart, int rouStep, int rouEnd, double mu){
        for(int mode=-1;mode<3;mode++){
            for(int i=rouStart;i<=rouEnd;i+=rouStep){
                System.out.println("mode is: " + mode + ", rou is " + i);
                Timer.resetTime();
                ARserviceGenerator=new ARServiceGenerator(nodeNum,mu,i,minSlotNum,maxSlotNum);
                AReventQueue=ARserviceGenerator.genEventQueue(Topology.ServiceNum);
                networkResource=new NetworkResource();
                simResults=new SimResults(i,mu);
                for(ARServiceEvent ARserviceEvent:AReventQueue){
                    int arrivalTime=(int)ARserviceEvent.getArriveTime();
                    int serviceId=ARserviceEvent.getEventId();
                    double WCR;
                    if(arrivalTime-Timer.currentTime==0){
                        WCR=-1;
                    }
                    else {
                        WCR=networkResource.refreshTimeResource(arrivalTime - Timer.currentTime);
                        Timer.currentTime=arrivalTime;
                    }
                    // trim head and tail: exclude warm-up and cool-down services from the statistics
                    if (serviceId<5000||serviceId>45000){
                        WCR=-1;
                    }
                    AccommodationResult result=networkResource.handleArrivalEvent(ARserviceEvent);
                    if(result.resultFlag){
                        simResults.handleSuccessResult(result,WCR);
                    }
                    else{
                        simResults.handleFailedResult(result, WCR);
                        if(mode!=-1){
                            simResults.handleReprovisionedResult(networkResource.handleReProvisioningRequest(mode));
                        }
                    }
                }
                simResults.printResults();
            }
        }
    }
}
<file_sep>/AR-Reprovisioning/src/main/java/weiw/topo/Lambda.java
package weiw.topo;

/**
 * Created by Wei on 2016/4/28.
 */
public class Lambda {
    int identifier;  // index of this wavelength
    boolean isUsed;  // whether the wavelength is occupied
    int serviceID;   // ID of the service occupying this wavelength

    public Lambda(int identifier){
        this.identifier=identifier;
        isUsed=false;
        serviceID=-1;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Lambda lambda = (Lambda) o;

        if (identifier != lambda.identifier) return false;
        if (isUsed != lambda.isUsed) return false;
        return serviceID == lambda.serviceID;
    }

    @Override
    public int hashCode() {
        int result = identifier;
        result = 31 * result + (isUsed ? 1 : 0);
        result = 31 * result + serviceID;
        return result;
    }
}
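`ARServiceGenerator`'s source is not part of this dump, so the arrival model behind `genEventQueue` can only be guessed at. As a hedged illustration of how such an event queue is conventionally produced — exponential inter-arrival times drawn by inverse-transform sampling — everything below (class name, parameter meanings) is an assumption, not the repository's implementation:

```java
// Illustrative sketch only -- NOT the repository's ARServiceGenerator.
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class PoissonArrivalSketch {
    public static void main(String[] args) {
        double lambda = 50.0;   // arrivals per unit time (cf. the load "rou")
        int numEvents = 10;
        Random rng = new Random(42);

        List<Double> arrivalTimes = new ArrayList<>();
        double t = 0.0;
        for (int i = 0; i < numEvents; i++) {
            // Exponential(lambda) inter-arrival: -ln(U)/lambda, U ~ Uniform(0,1]
            t += -Math.log(1.0 - rng.nextDouble()) / lambda;
            arrivalTimes.add(t);
        }
        System.out.println(arrivalTimes);
    }
}
```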
<repo_name>cordoval/try-phpcr
<file_sep>Instructions ============ Ensure your database is created before running these commands: Initialize dbal tables to support document structure in PHPCR: ```bash php vendor/bin/jackalope jackalope:init:dbal ``` This will register basic node types, namespaces, first base root node, types of nodes, types of properties and workspace: ```bash php vendor/bin/phpcrodm doctrine:phpcr:register-system-node-types ``` <file_sep><?php use Doctrine\DBAL\DriverManager; use Jackalope\Tools\Console\Helper\DoctrineDbalHelper; use Symfony\Component\Console\Helper\HelperSet; $dbConn = DriverManager::getConnection( [ 'driver' => 'pdo_mysql', 'host' => 'localhost', 'user' => 'root', 'password' => '<PASSWORD>', 'dbname' => 'try-db', ] ); $helperSet = new HelperSet([ 'connection' => new DoctrineDbalHelper($dbConn) ]); <file_sep><?php use Doctrine\DBAL\DriverManager; use Doctrine\ODM\PHPCR\Configuration; use Doctrine\ODM\PHPCR\DocumentManager; use Doctrine\ODM\PHPCR\Mapping\Driver\YamlDriver; use Doctrine\ODM\PHPCR\Tools\Console\Helper\DocumentManagerHelper; use Jackalope\RepositoryFactoryDoctrineDBAL; use PHPCR\SimpleCredentials; use PHPCR\Util\Console\Helper\PhpcrConsoleDumperHelper; use PHPCR\Util\Console\Helper\PhpcrHelper; use Symfony\Component\Console\Helper\DialogHelper; use Symfony\Component\Console\Helper\HelperSet; $dbConn = DriverManager::getConnection( [ 'driver' => 'pdo_mysql', 'host' => 'localhost', 'user' => 'root', 'password' => '<PASSWORD>', 'dbname' => 'try-db', ] ); $repository = (new RepositoryFactoryDoctrineDBAL()) ->getRepository( [ 'jackalope.doctrine_dbal_connection' => $dbConn ] ) ; $session = $repository->login( new SimpleCredentials($user = 'admin', $pass = '<PASSWORD>'), $workspace = 'default' ); $config = new Configuration(); $config->setMetadataDriverImpl(new YamlDriver(['src/Cordoval'])); $documentManager = new DocumentManager($session, $config); $helperSet = new HelperSet(array( 'dialog' => new DialogHelper(), 'phpcr' => new PhpcrHelper($session), 'phpcr_console_dumper' => new PhpcrConsoleDumperHelper(), 'dm' => new DocumentManagerHelper(null, $documentManager), )); <file_sep><?php namespace Cordoval\TreeManager;
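The two console scripts above only wire up the DBAL connection and the `DocumentManager`; nothing in the repository exercises them yet (the `Cordoval\TreeManager` file is an empty namespace stub). As a hedged sketch of what a next step could look like — the document class and path below are assumptions, not repository code; only `persist()`, `flush()` and `find()` are real Doctrine PHPCR-ODM calls:

```php
<?php
// Illustrative sketch only -- not part of the repository. It assumes the
// $documentManager assembled in the console configuration above and a
// hypothetical mapped document class under src/Cordoval.
$doc = new \Cordoval\TreeManager\SomeDocument(); // hypothetical mapped class
$doc->id = '/some-node'; // in PHPCR-ODM the repository path acts as the id

$documentManager->persist($doc); // queue the document for insertion
$documentManager->flush();       // write it through the PHPCR session

$fetched = $documentManager->find(null, '/some-node'); // read it back
```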
<repo_name>kayoung1222/KorStock<file_sep>/DjangoStock/Stocks/urls.py from django.urls import path from . import views urlpatterns = [ path('',views.base, name='base'), path('news', views.news, name='news'), path('money', views.money, name='money'), ] <file_sep>/venv/Lib/site-packages/loads/tests/test_output.py import StringIO import datetime import mock import shutil import sys import tempfile from unittest2 import TestCase from mock import patch from loads.output import (create_output, output_list, register_output, StdOutput, NullOutput, FileOutput, FunkloadOutput) from loads import output from loads.tests.support import get_tb, hush TIME1 = datetime.datetime(2013, 5, 14, 0, 51, 8) _1 = datetime.timedelta(seconds=1) class FakeTestResult(object): def __init__(self, nb_errors=0, nb_failures=0): self.nb_hits = 10 self.start_time = datetime.datetime.now() self.duration = 0 self.average_request_time = lambda: 0 self.requests_per_second = lambda: 0 self.opened_sockets = 0 self.socket_data_received = 0 self.nb_success = 0 self.nb_errors = nb_errors self.nb_failures = nb_failures self.nb_finished_tests = 0 self.errors = [] self.failures = [] self.hits = [] self.tests = {} def get_url_metrics(self): return {'http://foo': {'average_request_time': 1.234, 'hits_success_rate': 23.}, 'http://baz': {'average_request_time': 12.34, 'hits_success_rate': 2.}} def get_counters(self): return {'boo': 123} class FakeOutput(object): name = 'fake' options = {'arg1': ('Some doc', str, None, False)} def __init__(self, test_result, args): self.args = args self.test_result = test_result class TestStdOutput(TestCase): def setUp(self): super(TestStdOutput, self).setUp() self.oldstdout = sys.stdout self.oldstderr = sys.stdout def tearDown(self): sys.stdout = self.oldstdout sys.stderr = self.oldstderr super(TestStdOutput, self).tearDown() def test_std(self): sys.stdout = StringIO.StringIO() test_result = FakeTestResult() std = StdOutput(test_result, {'total': 10}) for i in range(11): test_result.nb_finished_tests += 1 std.push('stopTest') std.flush() sys.stdout.seek(0) out = sys.stdout.read() self.assertTrue('Hits: 10' in out) self.assertTrue('100%' in out, out) @hush def test_errors_are_processed(self): test_result = FakeTestResult(nb_errors=1, nb_failures=1) std = StdOutput(test_result, {'total': 10}) std._print_tb = mock.Mock() std.flush() self.assertEquals(2, std._print_tb.call_count) def test_tb_is_rendered(self): sys.stderr = StringIO.StringIO() errors = iter([[get_tb(), ]]) std = StdOutput(mock.sentinel.test_result, mock.sentinel.args) std._print_tb(errors) sys.stderr.seek(0) out = sys.stderr.read() self.assertTrue('Exception' in out) self.assertTrue('Error message' in out) def test_empty_tb_is_not_processed(self): std = StdOutput(mock.sentinel.test_result, mock.sentinel.args) std._print_tb(iter(([], []))) def test_classnames_strings_are_used_when_available(self): sys.stderr = StringIO.StringIO() std = StdOutput(mock.sentinel.test_result, mock.sentinel.args) std._print_tb(iter([[['foo', 'foobar', None]]])) sys.stderr.seek(0) out = sys.stderr.read() self.assertTrue('foo: foobar' in out) def test_relative_value(self): self.assertEquals(output.std.get_screen_relative_value(23, 80), 10) def test_url_output(self): sys.stdout = StringIO.StringIO() test_result = FakeTestResult() std = StdOutput(test_result, {'total': 10}) for i in range(11): test_result.nb_finished_tests += 1 std.push('stopTest') std.flush() sys.stdout.seek(0) out = sys.stdout.read() wanted = ['http://baz', 'Average request time: 12.34', 'Hits success 
rate: 2.0', 'http://foo', 'Average request time: 1.234', 'Hits success rate: 23.0'] for item in wanted: self.assertTrue(item in out) def test_counter(self): sys.stdout = StringIO.StringIO() test_result = FakeTestResult() std = StdOutput(test_result, {'total': 10}) for i in range(11): test_result.nb_finished_tests += 1 std.push('stopTest') std.flush() sys.stdout.seek(0) out = sys.stdout.read() wanted = ['boo', '123'] for item in wanted: self.assertTrue(item in out) class TestNullOutput(TestCase): def test_api_works(self): output = NullOutput(mock.sentinel.test_result, mock.sentinel.args) output.push('something') output.flush() class TestFileOutput(TestCase): def test_file_is_written(self): tmpdir = tempfile.mkdtemp() try: output = FileOutput(mock.sentinel.test_result, {'output_file_filename': '%s/loads' % tmpdir}) output.push('something', 1, 2, method='GET') output.flush() with open('%s/loads' % tmpdir) as f: self.assertEquals('something - {"method": "GET"}', f.read()) finally: shutil.rmtree(tmpdir) class FakeTestCase(object): def __init__(self, name): self._testMethodName = name class TestFunkloadOutput(TestCase): @patch('loads.output._funkload.format_tb', lambda x: x) def test_file_is_written(self): # Create a fake test result object test_result = FakeTestResult() tmpdir = tempfile.mkdtemp() try: output = FunkloadOutput( test_result, {'output_funkload_filename': '%s/funkload.xml' % tmpdir, 'fqn': 'my_test_module.MyTestCase.test_mytest', 'hits': 200, 'users': [1, 2, 5], 'duration': '1'}) # Drive the observer with some tests and hits output.push('startTestRun', when=TIME1) test_case, state = FakeTestCase('test_mytest_foo'), [1, 2, 3, 4] output.push('startTest', test_case, state) output.push('add_hit', state, started=TIME1, elapsed=_1, url='http://example.local/foo', method='GET', status=200) output.push('addSuccess', test_case, state) output.push('stopTest', test_case, state) test_case, state = FakeTestCase('test_mytest_bar'), [1, 2, 3, 4] output.push('startTest', test_case, state) output.push('add_hit', state, started=TIME1, elapsed=_1, url='http://example.local/bar', method='GET', status=500) output.push( 'addFailure', test_case, ( Exception, Exception('Mock Exception'), ['mock', 'traceback']), state) output.push('stopTest', test_case, [1, 2, 3, 4]) output.flush() with open('%s/funkload.xml' % tmpdir) as f: content = f.read() test = ( '<response\n cycle="001" cvus="002" thread="004" ' 'suite="" name=""\n step="001" number="001" ' 'type="get" result="Successful" ' 'url="http://example.local/foo"\n code="200" ' 'description="" time="1368492668" duration="1.0" />',) for t in test: self.assertIn(t, content) test = ( '<testResult\n cycle="001" cvus="002" thread="004" ' 'suite="FakeTestCase"\n name="test_mytest_foo" ', 'result="Successful" steps="1"\n', 'connection_duration="0" requests="1"\n pages="1" ' 'xmlrpc="0" redirects="0" images="0" links="0"\n />') for t in test: self.assertIn(t, content) test = ( '<response\n cycle="001" cvus="002" thread="004" ' 'suite="" name=""\n step="001" number="001" ' 'type="get" result="Successful" ' 'url="http://example.local/bar"\n code="500" ' 'description="" time="1368492668" duration="1.0" />',) for t in test: self.assertIn(t, content) test = ( '<testResult\n cycle="001" cvus="002" thread="004" ' 'suite="FakeTestCase"\n name="test_mytest_foo" ', 'result="Failure" steps="1"\n', 'connection_duration="0" requests="1"\n pages="1" ' 'xmlrpc="0" redirects="0" images="0" links="0"\n' ' traceback="mock&#10;traceback"/>') for t in test: self.assertIn(t, content) 
finally: shutil.rmtree(tmpdir) class TestOutputPlugins(TestCase): def test_unexistant_output_raises_exception(self): self.assertRaises(NotImplementedError, create_output, 'xxx', None, None) @mock.patch('loads.output._OUTPUTS', {}) def test_register_item_works(self): register_output(FakeOutput) self.assertTrue(FakeOutput in output_list()) @mock.patch('loads.output._OUTPUTS', {}) def test_register_multiple_times(self): register_output(FakeOutput) register_output(FakeOutput) self.assertTrue(FakeOutput in output_list()) self.assertEquals(len(output_list()), 1) @mock.patch('loads.output._OUTPUTS', {}) def test_create_output(self): register_output(FakeOutput) obj = create_output('fake', mock.sentinel.test_result, mock.sentinel.args) self.assertEquals(obj.args, mock.sentinel.args) self.assertEquals(obj.test_result, mock.sentinel.test_result) <file_sep>/venv/Lib/site-packages/loads/results/remote.py from collections import defaultdict from loads.results import TestResult from loads.transport.client import Client class RemoteTestResult(TestResult): """ This version does not store all data RemoteTestResult interacts with the broker to fetch the data when its APIs are called. """ def __init__(self, config=None, args=None): super(RemoteTestResult, self).__init__(config, args) self.counts = defaultdict(int) self.run_id = None if args is None: self.args = {} def __getattribute__(self, name): properties = {'nb_finished_tests': 'stopTest', 'nb_hits': 'add_hit', 'nb_failures': 'addFailure', 'nb_errors': 'addError', 'nb_success': 'addSuccess', 'nb_tests': 'startTest', 'socket': 'socket_open', 'socket_data_received': 'socket_message'} values = ('errors', 'failures') if name in properties: return self.counts[properties[name]] elif name in values: if self.args.get('agents') is None: raise NotImplementedError(name) return self._get_values(name) return TestResult.__getattribute__(self, name) def set_counts(self, counts): self.counts.update(counts) def _get_values(self, name): """Calls the broker to get the errors or failures. 
""" if name in 'failures': key = 'addFailure' elif name == 'errors': key = 'addError' client = Client(self.args['broker']) for line in client.get_data(self.run_id, data_type=key): line = line['exc_info'] yield [line] def sync(self, run_id): if self.args.get('agents') is None: return self.run_id = run_id # we're asking the broker about the latest counts self.counts = defaultdict(int) client = Client(self.args['broker']) for line in client.get_data(run_id, groupby=True): self.counts[line['data_type']] += line['count'] <file_sep>/venv/Lib/site-packages/loads/observers/_email.py from email.mime.text import MIMEText from email.header import Header from rfc822 import AddressList import smtplib from loads.util import logger class EMailObserver(object): name = 'email' options = [{'name': 'sender', 'type': str, 'default': '<EMAIL>'}, {'name': 'recipient', 'type': str, 'default': '<EMAIL>'}, {'name': 'host', 'type': str, 'default': 'localhost'}, {'name': 'port', 'type': int, 'default': 25}, {'name': 'user', 'type': str, 'default': None}, {'name': 'password', 'type': str, 'default': None}, {'name': 'subject', 'type': str, 'default': 'Loads Results'}] def _normalize_realname(self, field): address = AddressList(field).addresslist if len(address) == 1: realname, email = address[0] if realname != '': return '%s <%s>' % (str(Header(realname, 'utf-8')), str(email)) return field def __init__(self, sender='<EMAIL>', host='localhost', port=25, user=None, password=<PASSWORD>, subject='Loads Results', recipient='<EMAIL>', **kw): self.subject = subject self.sender = sender self.host = host self.port = port self.user = user self.password = <PASSWORD> self.recipient = recipient def __call__(self, test_results): # XXX we'll add more details in the mail later msg = 'Test over. 
%s' % str(test_results) body = msg msg = MIMEText(body.encode('utf-8'), 'plain', 'utf8') msg['From'] = self._normalize_realname(self.sender) msg['To'] = self._normalize_realname(self.recipient) msg['Subject'] = Header(self.subject, 'utf-8') logger.debug('Connecting to %s:%d' % (self.host, self.port)) server = smtplib.SMTP(self.host, self.port, timeout=5) # auth if self.user is not None and self.password is not None: logger.debug('Login with %r' % self.user) try: server.login(self.user, self.password) except (smtplib.SMTPHeloError, smtplib.SMTPAuthenticationError, smtplib.SMTPException), e: return False, str(e) # the actual sending logger.debug('Sending the mail') try: server.sendmail(self.sender, [self.recipient], msg.as_string()) finally: server.quit() if __name__ == '__main__': client = EMailObserver() client('ohay, I am the loads bot') <file_sep>/venv/Lib/site-packages/loads/db/_redis.py try: import redis except ImportError: raise ImportError("You need to install http://pypi.python.org/pypi/redis") import hashlib from loads.db import BaseDB from loads.util import json class RedisDB(BaseDB): name = 'redis' options = {'host': ('localhost', 'Redis host', str), 'port': (6379, 'Redis port', int)} def _initialize(self): self.host = self.params['host'] self.port = self.params['port'] self._redis = redis.StrictRedis(host=self.host, port=self.port, db=0) def ping(self): try: self._redis.ping() return True except redis.ConnectionError: return False # # APIs # def save_metadata(self, run_id, metadata): key = 'metadata:%s' % run_id self._redis.set(key, json.dumps(metadata)) def update_metadata(self, run_id, **metadata): existing = self.get_metadata(run_id) existing.update(metadata) self.save_metadata(run_id, existing) def get_metadata(self, run_id): key = 'metadata:%s' % run_id metadata = self._redis.get(key) if metadata is None: return {} return json.loads(metadata) def add(self, data): run_id = data['run_id'] data_type = data['data_type'] = data.get('data_type', 'unknown') size = data.get('size', 1) pipeline = self._redis.pipeline() pipeline.sadd('runs', run_id) # adding counts counter = 'count:%s:%s' % (run_id, data_type) counters = 'counters:%s' % run_id if not self._redis.sismember(counters, counter): pipeline.sadd(counters, counter) pipeline.incrby('count:%s:%s' % (run_id, data_type), size) # adding urls if 'url' in data: url = data['url'] urls = 'urls:%s' % run_id if not self._redis.sismember(urls, url): pipeline.sadd(urls, url) pipeline.incrby('url:%s:%s' % (run_id, url), 1) # adding data dumped = json.dumps(data) pipeline.lpush('data:%s' % run_id, dumped) # adding errors if data_type == 'addError': pipeline.lpush('errors:%s' % run_id, dumped) # adding group by md5 = hashlib.md5(dumped).hexdigest() pipeline.incrby('bcount:%s:%s' % (run_id, md5), size) pipeline.set('bvalue:%s:%s' % (run_id, md5), dumped) bcounters = 'bcounters:%s' % run_id if not self._redis.sismember(bcounters, md5): pipeline.sadd(bcounters, md5) pipeline.execute() def flush(self): pass def close(self): pass def get_urls(self, run_id): urls = {} for url in self._redis.smembers('urls:%s' % run_id): urls[url] = int(self._redis.get('url:%s:%s' % (run_id, url))) return urls def get_counts(self, run_id): counts = {} counters = 'counters:%s' % run_id for member in self._redis.smembers(counters): name = member.split(':')[-1] counts[name] = int(self._redis.get(member)) return counts def get_runs(self): return self._redis.smembers('runs') def get_errors(self, run_id, start=None, size=None): key = 'errors:%s' % run_id len = 
self._redis.llen(key) if len == 0: raise StopIteration() if start is None: start = 0 if size is None: end = len else: end = start + size if end > len: end = len for index in range(start, end): yield json.loads(self._redis.lindex(key, index)) def get_data(self, run_id, data_type=None, groupby=False, start=None, size=None): key = 'data:%s' % run_id len = self._redis.llen(key) if len == 0: raise StopIteration() if not groupby: if start is None: start = 0 if size is None: end = len else: end = start + size if end > len: end = len for index in range(start, end): data = json.loads(self._redis.lindex(key, index)) if data_type is None or data_type == data.get('data_type'): yield data else: # XXX not sure how to batch this yet if start is not None or size is not None: raise NotImplementedError() bcounters = 'bcounters:%s' % run_id for hash in self._redis.smembers(bcounters): data = json.loads(self._redis.get('bvalue:%s:%s' % (run_id, hash))) filtered = (data_type is not None and data_type != data.get('data_type')) if filtered: continue counter = self._redis.get('bcount:%s:%s' % (run_id, hash)) data['count'] = int(counter) yield data def prepare_run(self): pass def is_summarized(self, run_id): return False def summarize_run(self, run_id): pass def delete_run(self, run_id): pass <file_sep>/venv/Lib/site-packages/loads/tests/runner.py #! /usr/bin/python import sys import os from loads.runners import LocalRunner from loads.tests.support import get_runner_args def main(): fqn = sys.argv[1] status = [ os.environ.get('LOADS_TOTAL_HITS', '1'), os.environ.get('LOADS_TOTAL_USERS', '1'), os.environ.get('LOADS_CURRENT_HIT', '1'), os.environ.get('LOADS_CURRENT_USER', '1'), ] args = get_runner_args(fqn=fqn, hits=os.environ.get('LOADS_TOTAL_HITS'), duration=os.environ.get('LOADS_DURATION'), zmq_endpoint=os.environ['LOADS_ZMQ_RECEIVER'], agent_id=os.environ['LOADS_AGENT_ID'], run_id=os.environ['LOADS_RUN_ID'], externally_managed=True, loads_status=status, slave=True) LocalRunner(args).execute() if __name__ == '__main__': main() <file_sep>/venv/Lib/site-packages/loads/runners/__init__.py from loads.runners.distributed import DistributedRunner # NOQA from loads.runners.local import LocalRunner # NOQA from loads.runners.external import ExternalRunner # NOQA RUNNERS = (DistributedRunner, LocalRunner, ExternalRunner) <file_sep>/venv/Lib/site-packages/loads/tests/support.py import os import functools import sys import StringIO import subprocess import atexit from loads.transport.util import DEFAULT_FRONTEND from loads.util import logger _processes = [] def start_process(cmd, *args): devnull = open('/dev/null', 'w') args = list(args) process = subprocess.Popen([sys.executable, '-m', cmd] + args, stdout=devnull, stderr=devnull) _processes.append(process) return process def stop_process(proc): proc.terminate() if proc in _processes: _processes.remove(proc) def stop_processes(): for proc in _processes: try: proc.terminate() except OSError: pass _processes[:] = [] atexit.register(stop_processes) def get_runner_args(fqn, users=1, hits=1, duration=None, agents=None, broker=DEFAULT_FRONTEND, test_runner=None, server_url='http://localhost:9000', zmq_endpoint='tcp://127.0.0.1:5558', output=['null'], test_dir=None, include_file=None, python_dep=None, observer=None, slave=False, agent_id=None, run_id=None, loads_status=None, externally_managed=False, project_name='N/A', detach=False): if output is None: output = ['null'] if observer is None: observer = [] if include_file is None: include_file = [] if python_dep is None: python_dep 
= [] args = {'fqn': fqn, 'users': str(users), 'agents': agents, 'broker': broker, 'test_runner': test_runner, 'server_url': server_url, 'zmq_receiver': zmq_endpoint, 'output': output, 'observer': observer, 'test_dir': test_dir, 'include_file': include_file, 'python_dep': python_dep, 'slave': slave, 'externally_managed': externally_managed, 'project_name': project_name, 'detach': detach} if duration is not None: args['duration'] = float(duration) else: args['hits'] = str(hits) if agent_id is not None: args['agent_id'] = agent_id if run_id is not None: args['run_id'] = run_id if loads_status is not None: args['loads_status'] = loads_status return args def get_tb(): """runs an exception and return the traceback information""" try: raise Exception('Error message') except Exception: return sys.exc_info() def hush(func): """Make the passed function silent.""" @functools.wraps(func) def _silent(*args, **kw): old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = StringIO.StringIO() sys.stderr = StringIO.StringIO() debug = [] def _debug(msg): debug.append(str(msg)) old_debug = logger.debug logger.debug = _debug try: return func(*args, **kw) except: sys.stdout.seek(0) print(sys.stdout.read()) sys.stderr.seek(0) print(sys.stderr.read()) print('\n'.join(debug)) raise finally: sys.stdout = old_stdout sys.stderr = old_stderr logger.debug = old_debug return _silent _files = [] def rm_onexit(path): _files.append(path) def cleanup_files(): for _file in _files: if os.path.exists(_file): os.remove(_file) atexit.register(cleanup_files) # taken from http://emptysqua.re/blog/undoing-gevents-monkey-patching/ def patch_socket(aggressive=True): """Like gevent.monkey.patch_socket(), but stores old socket attributes for unpatching. """ from gevent import socket _socket = __import__('socket') old_attrs = {} for attr in ( 'socket', 'SocketType', 'create_connection', 'socketpair', 'fromfd' ): if hasattr(_socket, attr): old_attrs[attr] = getattr(_socket, attr) setattr(_socket, attr, getattr(socket, attr)) try: from gevent.socket import ssl, sslerror old_attrs['ssl'] = _socket.ssl _socket.ssl = ssl old_attrs['sslerror'] = _socket.sslerror _socket.sslerror = sslerror except ImportError: if aggressive: try: del _socket.ssl except AttributeError: pass return old_attrs def unpatch_socket(old_attrs): """Take output of patch_socket() and undo patching.""" _socket = __import__('socket') for attr in old_attrs: if hasattr(_socket, attr): setattr(_socket, attr, old_attrs[attr]) <file_sep>/venv/Lib/site-packages/loads/results/_unittest.py import unittest class UnitTestTestResult(unittest.TestResult): """Used to make Loads test cases compatible with unittest This class will ignore the extra options used by Loads, so tests written for loads can also be run in Nose or Unittest(2) """ def startTest(self, test, *args, **kw): unittest.TestResult.startTest(self, test) def stopTest(self, test, *args, **kw): unittest.TestResult.stopTest(self, test) def addError(self, test, exc_info, *args, **kw): unittest.TestResult.addError(self, test, exc_info) def addFailure(self, test, exc_info, *args, **kw): unittest.TestResult.addFailure(self, test, exc_info) def addSuccess(self, test, *args, **kw): unittest.TestResult.addSuccess(self, test) def incr_counter(self, test, *args, **kw): pass <file_sep>/venv/Lib/site-packages/loads/observers/_irc.py import ssl import irc.client import irc.connection class ExitError(Exception): pass class IRCObserver(object): name = 'irc' options = [{'name': 'server', 'type': str, 'default': 'irc.mozilla.org'}, 
{'name': 'channel', 'type': str, 'default': '#services-dev'}, {'name': 'port', 'type': int, 'default': 8443}, {'name': 'ssl', 'type': bool, 'default': True}, {'name': 'nickname', 'type': str, 'default': 'loads'}] def __init__(self, channel='#services-dev', server='irc.mozilla.org', nickname='loads', port=8443, ssl=True, args=None, **kw): self.channel = channel self.server = server self.nickname = nickname self.port = port self.ssl = ssl self.args = args def __call__(self, test_results): msg = '[loads] Test Over. \x1f' + str(test_results) # creating the IRC client client = irc.client.IRC() if self.ssl: connect_factory = irc.connection.Factory(wrapper=ssl.wrap_socket) else: connect_factory = irc.connection.Factory() c = client.server().connect(self.server, self.port, self.nickname, connect_factory=connect_factory) def on_connect(connection, event): connection.join(self.channel) def on_endofnames(connection, event): main_loop(connection) def main_loop(connection): connection.privmsg(self.channel, msg) connection.quit("Bye !") def on_disconnect(connection, event): raise ExitError() def on_error(connection, event): raise ExitError() c.add_global_handler("welcome", on_connect) c.add_global_handler("endofnames", on_endofnames) c.add_global_handler("disconnect", on_disconnect) c.add_global_handler("error", on_error) try: client.process_forever() except ExitError: pass if __name__ == '__main__': client = IRCObserver() client('ohay, I am the loads bot') <file_sep>/venv/Lib/site-packages/loads/transport/util.py import sys import gc import traceback import threading import atexit import time import os import socket try: import zmq.green as zmq except ImportError: import zmq from loads.transport.exc import TimeoutError from loads.util import logger DEFAULT_FRONTEND = "ipc:///tmp/loads-front.ipc" DEFAULT_SSH_FRONTEND = "tcp://127.0.0.1:7780" DEFAULT_BACKEND = "ipc:///tmp/loads-back.ipc" DEFAULT_HEARTBEAT = "ipc:///tmp/loads-beat.ipc" DEFAULT_REG = "ipc:///tmp/loads-reg.ipc" DEFAULT_PUBLISHER = "ipc:///tmp/loads-publisher.ipc" DEFAULT_SSH_PUBLISHER = "tcp://127.0.0.1:7776" DEFAULT_BROKER_RECEIVER = "ipc:///tmp/loads-broker-receiver.ipc" DEFAULT_AGENT_TIMEOUT = 60. DEFAULT_TIMEOUT = 5. DEFAULT_TIMEOUT_MOVF = 20. 
DEFAULT_TIMEOUT_OVF = 1 DEFAULT_MAX_AGE = -1 DEFAULT_MAX_AGE_DELTA = 0 _IPC_FILES = [] PARAMS = {} @atexit.register def _cleanup_ipc_files(): for file in _IPC_FILES: file = file.split('ipc://')[-1] if os.path.exists(file): os.remove(file) def register_ipc_file(file): _IPC_FILES.append(file) def send(socket, msg, max_retries=3, retry_sleep=0.1): retries = 0 while retries < max_retries: try: socket.send(msg, zmq.NOBLOCK) return except zmq.ZMQError, e: logger.debug('Failed on send()') logger.debug(str(e)) if e.errno in (zmq.EFSM, zmq.EAGAIN): retries += 1 time.sleep(retry_sleep) else: raise logger.debug('Sending failed') logger.debug(msg) raise TimeoutError() def recv(socket, max_retries=3, retry_sleep=0.1): retries = 0 while retries < max_retries: try: return socket.recv(zmq.NOBLOCK) except zmq.ZMQError, e: logger.debug('Failed on recv()') logger.debug(str(e)) if e.errno in (zmq.EFSM, zmq.EAGAIN): retries += 1 time.sleep(retry_sleep) else: raise logger.debug('Receiving failed') raise TimeoutError() if sys.platform == "win32": timer = time.clock else: timer = time.time def timed(debug=False): def _timed(func): def __timed(*args, **kw): start = timer() try: res = func(*args, **kw) finally: duration = timer() - start if debug: logger.debug('%.4f' % duration) return duration, res return __timed return _timed def decode_params(params): """Decode a string into a dict. This is mainly useful when passing a dict through the command line. The params passed in "params" should be in the form of key:value, separated by a pipe, the output is a python dict. """ output_dict = {} for items in params.split('|'): key, value = items.split(':', 1) output_dict[key] = value return output_dict def encode_params(input_dict): """Convert the dict given in input into a string of key:value separated with pipes, like spam:yeah|eggs:blah """ return '|'.join([':'.join(i) for i in input_dict.items()]) def get_params(): return PARAMS def dump_stacks(): dump = [] # threads threads = dict([(th.ident, th.name) for th in threading.enumerate()]) for thread, frame in sys._current_frames().items(): if thread not in threads: continue dump.append('Thread 0x%x (%s)\n' % (thread, threads[thread])) dump.append(''.join(traceback.format_stack(frame))) dump.append('\n') # greenlets try: from greenlet import greenlet except ImportError: return dump # if greenlet is present, let's dump each greenlet stack for ob in gc.get_objects(): if not isinstance(ob, greenlet): continue if not ob: continue # not running anymore or not started dump.append('Greenlet\n') dump.append(''.join(traceback.format_stack(ob.gr_frame))) dump.append('\n') return dump def verify_broker(broker_endpoint=DEFAULT_FRONTEND, timeout=1.): """ Return True if there's a working broker bound at broker_endpoint """ from loads.transport.client import Client client = Client(broker_endpoint) try: return client.ping(timeout=timeout, log_exceptions=False) except TimeoutError: return None finally: client.close() # let's just make the assumption it won't change # once loads is started _HOST = None def get_hostname(): global _HOST if _HOST is None: _HOST = socket.gethostname() return _HOST <file_sep>/venv/Lib/site-packages/loads/transport/exc.py class TimeoutError(Exception): pass class ExecutionError(Exception): pass class DuplicateBrokerError(Exception): pass class NoWorkerError(Exception): pass <file_sep>/venv/Lib/site-packages/loads/tests/test_heartbeat.py # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. 
If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. import unittest2 import time from zmq.green.eventloop import ioloop from loads.transport.heartbeat import Stethoscope, Heartbeat class TestHeartbeat(unittest2.TestCase): def test_working(self): loop = ioloop.IOLoop() beats = [] lost = [] def onbeat(): beats.append('.') def onbeatlost(): lost.append('.') hb = Heartbeat('ipc:///tmp/stetho.ipc', interval=0.1, io_loop=loop) stetho = Stethoscope('ipc:///tmp/stetho.ipc', onbeat=onbeat, onbeatlost=onbeatlost, delay=1., retries=5., io_loop=loop) def start(): hb.start() def start_2(): stetho.start() # it's ok to try to start it again stetho.start() # hb starts immediatly loop.add_callback(start) # stetho 0.2 seconds after loop.add_timeout(time.time() + .2, start_2) def stop(): hb.stop() stetho.stop() loop.stop() # all stops after 1s loop.add_timeout(time.time() + 1., stop) # let's go loop.start() self.assertEqual(len(lost), 0, len(lost)) self.assertTrue(len(beats) > 2, len(beats)) def test_lost(self): beats = [] lost = [] loop = ioloop.IOLoop() def _hbreg(): beats.append('o') def _onregister(): beats.append('+') def _onbeat(): beats.append('.') def _onbeatlost(): lost.append('.') hb = Heartbeat('ipc:///tmp/stetho.ipc', interval=0.1, io_loop=loop, onregister=_hbreg) stetho = Stethoscope('ipc:///tmp/stetho.ipc', onbeat=_onbeat, onbeatlost=_onbeatlost, delay=0.1, io_loop=loop, onregister=_onregister, warmup_delay=0) # scenario def start(): hb.start() stetho.start() def stop_hb(): hb.stop() def stop_st(): stetho.stop() loop.stop() # that starts the heartbeat and the client loop.add_callback(start) # the hb stops after 500ms loop.add_timeout(time.time() + .5, stop_hb) # the st stops after 1 second, then the loop loop.add_timeout(time.time() + 1., stop_st) loop.start() self.assertTrue(len(beats) > 0) self.assertEqual(beats[:2], ['o', '+']) self.assertTrue(len(lost) > 0) def test_restart(self): # we want to make sure the Stethoscope can be restarted beats = [] lost = [] loop = ioloop.IOLoop() def _hbreg(): beats.append('o') def _onregister(): beats.append('+') def _onbeat(): beats.append('.') def _onbeatlost(): lost.append('.') hb = Heartbeat('ipc:///tmp/stetho.ipc', interval=0.1, io_loop=loop, onregister=_hbreg) stetho = Stethoscope('ipc:///tmp/stetho.ipc', onbeat=_onbeat, onbeatlost=_onbeatlost, delay=0.1, io_loop=loop, onregister=_onregister, warmup_delay=0) # scenario def start(): hb.start() stetho.start() def stop_st(): stetho.stop() def restart_st(): stetho.start() beats.append('RESTARTED') def stop(): stetho.stop() loop.stop() # that starts the heartbeat and the client loop.add_callback(start) # the st stops after 500ms loop.add_timeout(time.time() + .5, stop_st) # the st starts again after 500ms loop.add_timeout(time.time() + .5, restart_st) # the st stops after 1 second, then the loop loop.add_timeout(time.time() + 1., stop) loop.start() self.assertTrue(len(beats) > 0) self.assertTrue('RESTARTED' in beats) # make sure the st gets the beats after a restart rest = beats.index('RESTARTED') self.assertTrue('o+' in ''.join(beats[rest:]), beats) <file_sep>/venv/Lib/site-packages/loads/tests/test_here.py from loads.case import TestCase class TestWebSite(TestCase): def test_something(self): pass <file_sep>/venv/Lib/site-packages/loads/tests/test_results_remote.py from unittest2 import TestCase from loads.results.remote import RemoteTestResult from loads.results import remote import mock class TestRemoteTestResult(TestCase): def setUp(self): 
self._old_client = remote.Client remote.Client = mock.MagicMock() remote.Client.get_data = mock.MagicMock() def tearDown(self): remote.Client = self._old_client def test_getattributes(self): # RemoteTestResult has some magic attribute getters remote = RemoteTestResult() self.assertRaises(NotImplementedError, getattr, remote, 'errors') args = {'agents': [], 'broker': 'tcp://example.com:999'} remote = RemoteTestResult(args=args) self.assertEqual(list(remote.errors), []) self.assertEqual(list(remote.failures), []) <file_sep>/venv/Lib/site-packages/loads/tests/test_python_db.py import unittest2 import time import os import shutil import tempfile import json from zmq.green.eventloop import ioloop from loads.db._python import BrokerDB, read_zfile, get_dir_size _RUN_ID = '8b91dee8-0aec-4bb9-b0a0-87269a9c2874' _AGENT_ID = 1727 ONE_RUN = [ {'agent_id': _AGENT_ID, 'data_type': 'startTestRun', 'run_id': _RUN_ID}, {'agent_id': _AGENT_ID, 'data_type': 'startTest', 'run_id': _RUN_ID, 'test': 'test_es (loads.examples.test_blog.TestWebSite)', 'loads_status': [1, 1, 1, 0]}, {'status': 200, 'loads_status': [1, 1, 1, 0], 'data_type': 'add_hit', 'run_id': _RUN_ID, 'started': '2013-06-26T10:11:38.838224', 'elapsed': 0.008656, 'url': 'http://127.0.0.1:9200/', 'agent_id': _AGENT_ID, u'method': u'GET'}, {'test': 'test_es (loads.examples.test_blog.TestWebSite)', 'agent_id': _AGENT_ID, 'loads_status': [1, 1, 1, 0], 'data_type': 'addSuccess', 'run_id': _RUN_ID}, {'test': 'test_es (loads.examples.test_blog.TestWebSite)', 'agent_id': _AGENT_ID, 'loads_status': [1, 1, 1, 0], 'data_type': 'addError', 'run_id': _RUN_ID}, {'test': 'test_es (loads.examples.test_blog.TestWebSite)', 'agent_id': _AGENT_ID, 'loads_status': [1, 1, 1, 0], 'data_type': 'stopTest', 'run_id': _RUN_ID}, {'agent_id': _AGENT_ID, 'data_type': 'stopTestRun', 'run_id': _RUN_ID}] class TestBrokerDB(unittest2.TestCase): def setUp(self): self.loop = ioloop.IOLoop() self.tmp = tempfile.mkdtemp() dboptions = {'directory': self.tmp} self.db = BrokerDB(self.loop, db='python', **dboptions) def tearDown(self): shutil.rmtree(self.db.directory) self.db.close() self.loop.close() def test_brokerdb(self): self.assertEqual(list(self.db.get_data('swwqqsw')), []) def add_data(): for line in ONE_RUN: data = dict(line) data['run_id'] = '1' self.db.add(data) data['run_id'] = '2' self.db.add(data) self.loop.add_callback(add_data) self.loop.add_callback(add_data) self.loop.add_timeout(time.time() + 2.1, self.loop.stop) self.loop.start() # let's check if we got the data in the file db = os.path.join(self.db.directory, '1-db.json') data = [record for record, line in read_zfile(db)] data.sort() db = os.path.join(self.db.directory, '2-db.json') data2 = [record for record, line in read_zfile(db)] data2.sort() self.assertEqual(len(data), 14) self.assertEqual(len(data2), 14) counts = self.db.get_counts('1') for type_ in ('addSuccess', 'stopTestRun', 'stopTest', 'startTest', 'startTestRun', 'add_hit'): self.assertEqual(counts[type_], 2) # we got 12 lines, let's try batching batch = list(self.db.get_data('1', size=2)) self.assertEqual(len(batch), 2) batch = list(self.db.get_data('1', start=2)) self.assertEqual(len(batch), 12) batch = list(self.db.get_data('1', start=2, size=5)) self.assertEqual(len(batch), 5) data = [self.db._uncompress_headers('1', line) for line in data] data.sort() data3 = list(self.db.get_data('1')) data3.sort() self.assertEqual(data3, data) # filtered data3 = list(self.db.get_data('1', data_type='add_hit')) self.assertEqual(len(data3), 2) # group by res = 
list(self.db.get_data('1', groupby=True)) self.assertEqual(len(res), 7) self.assertEqual(res[0]['count'], 2) res = list(self.db.get_data('1', data_type='add_hit', groupby=True)) self.assertEqual(res[0]['count'], 2) self.assertTrue('1' in self.db.get_runs()) self.assertTrue('2' in self.db.get_runs()) # len(data) < asked size batch = list(self.db.get_data('1', start=2, size=5000)) self.assertEqual(len(batch), 12) def test_metadata(self): self.assertEqual(self.db.get_metadata('1'), {}) self.db.save_metadata('1', {'hey': 'ho'}) self.assertEqual(self.db.get_metadata('1'), {'hey': 'ho'}) self.db.update_metadata('1', one=2) meta = self.db.get_metadata('1').items() meta.sort() self.assertEqual(meta, [('hey', 'ho'), ('one', 2)]) def test_get_urls(self): def add_data(): for line in ONE_RUN: data = dict(line) data['run_id'] = '1' self.db.add(data) data['run_id'] = '2' self.db.add(data) self.loop.add_callback(add_data) self.loop.add_callback(add_data) self.loop.add_timeout(time.time() + .5, self.loop.stop) self.loop.start() self.assertTrue(self.db.ping()) urls = self.db.get_urls('1') self.assertEqual(urls, {'http://127.0.0.1:9200/': 2}) def test_get_errors(self): def add_data(): for line in ONE_RUN: data = dict(line) data['run_id'] = '1' self.db.add(data) data['run_id'] = '2' self.db.add(data) self.loop.add_callback(add_data) self.loop.add_callback(add_data) self.loop.add_timeout(time.time() + .5, self.loop.stop) self.loop.start() self.assertTrue(self.db.ping()) errors = list(self.db.get_errors('2')) self.assertEqual(len(errors), 2, errors) errors = list(self.db.get_errors('1')) self.assertEqual(len(errors), 2, errors) def test_compression(self): headers_f = os.path.join(self.db.directory, 'run-id-headers.json') headers = {"1": 'one', "2": 'two'} with open(headers_f, 'w') as f: f.write(json.dumps(headers)) data = {'one': 'ok', 'two': 3, 'three': 'blah'} self.db._update_headers('run-id') self.db.add({'run_id': 'run-id', 'one': 'ok', 'two': 3, 'three': 'blah'}) result = self.db._compress_headers('run-id', data) result = result.items() result.sort() self.assertEqual(result, [(1, 'ok'), (2, 3), (3, 'blah')]) self.db.flush() with open(headers_f) as f: new_headers = json.loads(f.read()) wanted = [(1, u'one'), (2, u'two'), (3, u'three'), (4, u'run_id')] new_headers = [(int(key), value) for key, value in new_headers.items()] new_headers.sort() self.assertEquals(new_headers, wanted) @unittest2.skipIf('TRAVIS' in os.environ, '') def test_max_size(self): # adding some data for run_1 and run_2 self.db.prepare_run() for run in ('run_1', 'run_2', 'run_3'): for i in range(1000): self.db.add({'run_id': run, 'one': 'ok', 'two': 3, 'three': 'blah'}) # flushing self.db.flush() time.sleep(.1) self.assertEqual(self.db.get_runs(), ['run_1', 'run_2', 'run_3']) # setting the max size to current size self.db.max_size = get_dir_size(self.tmp) self.db.prepare_run() # adding data for run_4 for i in range(1000): self.db.add({'run_id': 'run_4', 'one': 'ok', 'two': 3, 'three': 'blah'}) # run_1 should have been wiped... 
self.db.flush() self.assertEqual(self.db.get_runs(), ['run_2', 'run_3', 'run_4']) def test_reload(self): self.assertEqual(self.db.get_metadata('1'), {}) self.db.save_metadata('1', {'hey': 'ho'}) self.assertEqual(self.db.get_metadata('1'), {'hey': 'ho'}) self.db.update_metadata('1', one=2) meta = self.db.get_metadata('1').items() meta.sort() self.assertEqual(meta, [('hey', 'ho'), ('one', 2)]) self.db.flush() # make sure we don't lose existing data when # the db client is started and writes before reads dboptions = {'directory': self.tmp} db2 = BrokerDB(self.loop, db='python', **dboptions) # this used to overwrite any existing data db2.update_metadata('1', has_data=1) meta = db2.get_metadata('1').items() meta.sort() wanted = [(u'has_data', 1), (u'hey', u'ho'), (u'one', 2)] self.assertEqual(meta, wanted) <file_sep>/venv/Lib/site-packages/loads/tests/test_functional.py # Contains functional tests for loads. # It runs the tests located in the example directory. # # Try to run loads for all the combinaison possible: # - normal local run # - normal distributed run # - run via nosetest # - run with hits / users import os import errno import time import requests import tempfile import shutil import sys import zipfile from cStringIO import StringIO from unittest2 import TestCase, skipIf from zmq.green.eventloop import ioloop from loads.db._python import BrokerDB from loads.main import run as start_runner from loads.runners import LocalRunner, DistributedRunner from loads.tests.support import (get_runner_args, start_process, stop_process, hush) from loads.transport.client import Pool, Client from loads.transport.util import DEFAULT_FRONTEND, verify_broker _EXAMPLES_DIR = os.path.join(os.path.dirname(__file__), os.pardir, 'examples') _RESULTS = os.path.join(os.path.dirname(__file__), 'observers') class Observer(object): name = 'dummy' options = {} def __init__(self, *args, **kw): pass def __call__(self, results): with open(_RESULTS, 'a+') as f: f.write(str(results) + '\n') class ObserverFail(Observer): def __call__(self, results): raise ValueError("Boom") _PROCS = [] def start_servers(): if len(_PROCS) != 0: return _PROCS _PROCS.append(start_process('loads.transport.broker', '--db', 'python', '--heartbeat', 'tcp://0.0.0.0:9876')) for x in range(10): _PROCS.append(start_process('loads.transport.agent')) _PROCS.append(start_process('loads.examples.echo_server')) # wait for the echo server to be started tries = 0 while True: try: requests.get('http://0.0.0.0:9000') break except requests.ConnectionError: time.sleep(.2) tries += 1 if tries > 20: raise # wait for the broker to be up with 3 slaves. 
client = Pool() while len(client.list()) != 10: time.sleep(.1) # control that the broker is responsive client.ping() for wid in client.list(): status = client.status(wid)['status'] assert status == {}, status client.close() if verify_broker() is None: raise ValueError('Broker seem down') return _PROCS def stop_servers(): for proc in _PROCS: stop_process(proc) _PROCS[:] = [] @skipIf('TRAVIS' in os.environ, 'deactivated in Travis') class FunctionalTest(TestCase): @classmethod def setUpClass(cls): if 'TRAVIS' in os.environ: return start_servers() cls.client = Client() cls.location = os.getcwd() cls.loop = ioloop.IOLoop() cls.dirs = [] cls.db = BrokerDB(cls.loop, db='python') @classmethod def tearDownClass(cls): if 'TRAVIS' in os.environ: return stop_servers() os.chdir(cls.location) for dir in cls.dirs: if os.path.exists(dir): shutil.rmtree(dir) if os.path.exists(_RESULTS): os.remove(_RESULTS) def tearDown(self): runs = self.client.list_runs() for run_id in runs: if not isinstance(run_id, basestring): continue self.client.stop_run(run_id) def _get_client(self): client = Pool() while len(client.list_runs()) > 0: time.sleep(.2) return client def _wait_run_started(self, client=None): if client is None: client = self.client while len(client.list_runs()) == 0: time.sleep(.2) return client.list_runs().items()[0] def _wait_run_over(self, client=None): if client is None: client = self.client while len(client.list_runs()) > 0: time.sleep(.2) def test_normal_run(self): start_runner(get_runner_args( fqn='loads.examples.test_blog.TestWebSite.test_something', output=['null'])) def test_file_output(self): fqn = 'loads.examples.test_blog.TestWebSite.test_something' args = get_runner_args(fqn=fqn, output=['file']) fd, args['output_file_filename'] = tempfile.mkstemp() os.close(fd) try: start_runner(args) finally: os.remove(args['output_file_filename']) def test_normal_run_with_users_and_hits(self): start_runner(get_runner_args( fqn='loads.examples.test_blog.TestWebSite.test_something', output=['null'], users=2, hits=2)) def test_concurent_session_access(self): runner = LocalRunner(get_runner_args( fqn='loads.examples.test_blog.TestWebSite.test_concurrency', output=['null'], users=2)) runner.execute() nb_success = runner.test_result.nb_success assert nb_success == 2, nb_success assert runner.test_result.nb_errors == 0 assert runner.test_result.nb_failures == 0 assert runner.test_result.get_counter('lavabo') == 2 assert runner.test_result.get_counter('beau') == 2 def test_duration_updates_counters(self): runner = LocalRunner(get_runner_args( fqn='loads.examples.test_blog.TestWebSite.test_concurrency', output=['null'], duration=2.)) runner.execute() nb_success = runner.test_result.nb_success assert nb_success > 2, nb_success def test_distributed_run(self): client = self._get_client() start_runner(get_runner_args( fqn='loads.examples.test_blog.TestWebSite.test_something', agents=2, project_name='test_distributed_run', output=['null'], observer=['loads.tests.test_functional.Observer', 'loads.tests.test_functional.ObserverFail'], users=1, hits=5)) runs = client.list_runs() run_id = runs.keys()[0] client.stop_run(run_id) # checking the metadata metadata = client.get_metadata(run_id) self.assertEqual(metadata['project_name'], 'test_distributed_run') # wait for the run to end self._wait_run_over(client) # checking the data # the run is over so the detailed lines where pruned self.assertRaises(ValueError, client.get_data, run_id) self.assertEqual(client.get_urls(run_id), {u'http://1192.168.127.12:9000/': 10}) counts = 
dict(client.get_counts(run_id)) self.assertEquals(counts['socket_open'], 10) self.assertEquals(counts['socket_close'], 10) # making sure the observer was called with open(_RESULTS) as f: data = f.readlines() assert len(data) > 0, data def test_distributed_run_duration(self): client = self._get_client() args = get_runner_args( fqn='loads.examples.test_blog.TestWebSite.test_something', agents=1, output=['null'], users=1, duration=2) start_runner(args) for i in range(10): runs = client.list_runs() time.sleep(.1) data = client.get_metadata(runs.keys()[0]) if len(data) > 0: return raise AssertionError('No data back') def test_distributed_run_external_runner(self): client = self._get_client() args = get_runner_args( fqn='loads.examples.test_blog.TestWebSite.test_something', agents=1, users=1, test_runner='%s -m loads.tests.runner {test}' % sys.executable) start_runner(args) # getting the run_id runs = self.client.list_runs() while runs == []: runs = self.client.list_runs() run_id = runs.keys()[0] data = client.get_metadata(run_id) self.assertTrue(len(data) > 5, len(data)) def test_distributed_detach(self): client = self._get_client() args = get_runner_args( fqn='loads.examples.test_blog.TestWebSite.test_something', agents=1, users=1, output=['null'], duration=2) # simulate a ctrl+c def _recv(self, msg): raise KeyboardInterrupt old = DistributedRunner._recv_result DistributedRunner._recv_result = _recv # simulate a 'detach' answer def _raw_input(msg): return 'detach' from loads import main main.raw_input = _raw_input # start the runner start_runner(args) # getting the run_id run_id, _ = self._wait_run_started() # we detached. time.sleep(.2) # now reattach the console DistributedRunner._recv_result = old start_runner({'attach': True, 'broker': DEFAULT_FRONTEND, 'output': ['null']}) # now waiting for the test to be over self._wait_run_over(client) # now let's see the metadata data = client.get_metadata(run_id) self.assertTrue(len(data) > 5, len(data)) @classmethod def _get_dir(self): dir = tempfile.mkdtemp() self.dirs.append(dir) return dir @hush def test_file_copy_test_file(self): client = self._get_client() test_dir = self._get_dir() os.chdir(os.path.dirname(__file__)) args = get_runner_args( fqn='test_here.TestWebSite.test_something', agents=1, users=1, hits=1, test_dir=test_dir, include_file=['test_here.py']) start_runner(args) data = [] run_id, agents = self._wait_run_started(client) self._wait_run_over(client) data = self.client.get_metadata(run_id) # check that we got in the dir real_test_dir = test_dir + agents[0][0] self.dirs.append(real_test_dir) content = os.listdir(real_test_dir) self.assertTrue('test_here.py' in content, content) if data == []: raise AssertionError('No data back') def test_sending_crap_ujson(self): test_dir = self._get_dir() os.chdir(os.path.dirname(__file__)) data = StringIO() filepath = 'test_here.py' zf = zipfile.ZipFile(data, "w", compression=zipfile.ZIP_DEFLATED) info = zipfile.ZipInfo('test_here.py') info.external_attr = os.stat(filepath).st_mode << 16L with open(filepath) as f: zf.writestr(info, f.read()) zf.close() data = data.getvalue() args = get_runner_args( fqn='test_here.TestWebSite.test_something', agents=1, users=1, hits=1, test_dir=test_dir, include_file=['test_here.py']) args['crap'] = data self.assertRaises(ValueError, start_runner, args) def test_errors(self): client = self._get_client() start_runner(get_runner_args( fqn='loads.examples.test_blog.TestWebSite.test_will_error', agents=1, project_name='test_distributed_run', output=['null'], 
observer=['loads.tests.test_functional.Observer', 'loads.tests.test_functional.ObserverFail'], users=1, hits=5)) run_id, _ = self._wait_run_started(client) client.stop_run(run_id) # checking the metadata metadata = client.get_metadata(run_id) self.assertEqual(metadata['project_name'], 'test_distributed_run') # checking the data # the run is over so the detailed lines where pruned # but we have all errors errors = list(self.db.get_errors(run_id)) self.assertTrue(len(errors) > 0) def is_running(self, pid): try: os.kill(pid, 0) except OSError as err: if err.errno == errno.ESRCH: return False return True @hush def test_die(self): # make sure a recalcitrant process gets eventually killed client = self._get_client() start_runner(get_runner_args( fqn='loads.examples.test_blog.TestWebSite.test_wontdie', agents=1, project_name='test_distributed_run', output=['null'], users=1, duration=200, detach=True)) run_id, agents = self._wait_run_started(client) agent_id = agents[0][0] # get the pid of the worker status = client.status(agent_id) while status['status'] == {}: status = client.status(agent_id) print status worker_pid = int(status['status'].keys()[0]) # force a stop client.stop_run(run_id) # we want to make sure the pid is gone in a 6s frame start = time.time() dead = not self.is_running(worker_pid) while not dead and time.time() - start < 6: dead = not self.is_running(worker_pid) self.assertTrue(dead) self._wait_run_over(client) <file_sep>/venv/Lib/site-packages/loads/transport/broker.py """ Jobs runner. """ import errno import sys import traceback import argparse import os import zmq.green as zmq from zmq.green.eventloop import ioloop, zmqstream from loads.util import set_logger, logger, json from loads.transport.util import (register_ipc_file, DEFAULT_FRONTEND, DEFAULT_BACKEND, DEFAULT_REG, verify_broker, DEFAULT_BROKER_RECEIVER, DEFAULT_PUBLISHER, DEFAULT_AGENT_TIMEOUT) from loads.transport.heartbeat import Heartbeat from loads.transport.exc import DuplicateBrokerError from loads.db import get_backends from loads.transport.brokerctrl import BrokerController DEFAULT_IOTHREADS = 1 class Broker(object): """Class that route jobs to agents. Options: - **frontend**: the ZMQ socket to receive jobs. - **backend**: the ZMQ socket to communicate with agents. - **heartbeat**: the ZMQ socket to receive heartbeat requests. - **register** : the ZMQ socket to register agents. - **receiver**: the ZMQ socket that receives data from agents. - **publisher**: the ZMQ socket to publish agents data """ def __init__(self, frontend=DEFAULT_FRONTEND, backend=DEFAULT_BACKEND, heartbeat=None, register=DEFAULT_REG, io_threads=DEFAULT_IOTHREADS, agent_timeout=DEFAULT_AGENT_TIMEOUT, receiver=DEFAULT_BROKER_RECEIVER, publisher=DEFAULT_PUBLISHER, db='python', dboptions=None, web_root=None): # before doing anything, we verify if a broker is already up and # running logger.debug('Verifying if there is a running broker') pid = verify_broker(frontend) if pid is not None: # oops. can't do this ! 
logger.debug('Ooops, we have a running broker on that socket') raise DuplicateBrokerError(pid) self.endpoints = {'frontend': frontend, 'backend': backend, 'register': register, 'receiver': receiver, 'publisher': publisher} if heartbeat is not None: self.endpoints['heartbeat'] = heartbeat logger.debug('Initializing the broker.') for endpoint in self.endpoints.values(): if endpoint.startswith('ipc'): register_ipc_file(endpoint) self.context = zmq.Context(io_threads=io_threads) # setting up the sockets self._frontend = self.context.socket(zmq.ROUTER) self._frontend.identity = 'broker-' + frontend self._frontend.bind(frontend) self._backend = self.context.socket(zmq.ROUTER) self.pid = str(os.getpid()) self._backend.identity = self.pid self._backend.bind(backend) self._registration = self.context.socket(zmq.PULL) self._registration.bind(register) self._receiver = self.context.socket(zmq.PULL) self._receiver.bind(receiver) self._publisher = self.context.socket(zmq.PUB) self._publisher.bind(publisher) # setting up the streams self.loop = ioloop.IOLoop() self._frontstream = zmqstream.ZMQStream(self._frontend, self.loop) self._frontstream.on_recv(self._handle_recv_front) self._backstream = zmqstream.ZMQStream(self._backend, self.loop) self._backstream.on_recv(self._handle_recv_back) self._regstream = zmqstream.ZMQStream(self._registration, self.loop) self._regstream.on_recv(self._handle_reg) self._rcvstream = zmqstream.ZMQStream(self._receiver, self.loop) self._rcvstream.on_recv(self._handle_recv) # heartbeat if heartbeat is not None: self.pong = Heartbeat(heartbeat, io_loop=self.loop, ctx=self.context, onregister=self._deregister) else: self.pong = None # status self.started = False self.poll_timeout = None # controller self.ctrl = BrokerController(self, self.loop, db=db, dboptions=dboptions, agent_timeout=agent_timeout) self.web_root = web_root def _handle_recv(self, msg): # publishing all the data received from agents self._publisher.send(msg[0]) data = json.loads(msg[0]) agent_id = str(data.get('agent_id')) # saving the data locally self.ctrl.save_data(agent_id, data) def _deregister(self): self.ctrl.unregister_agents('asked by the heartbeat.') def _handle_reg(self, msg): if msg[0] == 'REGISTER': self.ctrl.register_agent(json.loads(msg[1])) elif msg[0] == 'UNREGISTER': self.ctrl.unregister_agent(msg[1], 'asked via UNREGISTER') def send_json(self, target, data): assert isinstance(target, basestring), target msg = [target, '', json.dumps(data)] try: self._frontstream.send_multipart(msg) except ValueError: logger.error('Could not dump %s' % str(data)) raise def _handle_recv_front(self, msg, tentative=0): """front => back All commands starting with CTRL_ are sent to the controller. """ target = msg[0] try: data = json.loads(msg[-1]) except ValueError: exc = 'Invalid JSON received.' logger.exception(exc) self.send_json(target, {'error': exc}) return cmd = data['command'] # a command handled by the controller if cmd.startswith('CTRL_'): cmd = cmd[len('CTRL_'):] logger.debug('calling %s' % cmd) try: res = self.ctrl.run_command(cmd, msg, data) except Exception, e: logger.debug('Failed') exc_type, exc_value, exc_traceback = sys.exc_info() exc = traceback.format_tb(exc_traceback) exc.insert(0, str(e)) self.send_json(target, {'error': exc}) else: # sending back a synchronous result if needed. 
if res is not None: logger.debug('sync success %s' % str(res)) self.send_json(target, res) else: logger.debug('async success') # misc commands elif cmd == 'PING': res = {'result': {'pid': os.getpid(), 'endpoints': self.endpoints, 'agents': self.ctrl.agents}} self.send_json(target, res) elif cmd == 'LIST': # we return a list of agent ids and their status self.send_json(target, {'result': self.ctrl.agents}) return else: self.send_json(target, {'error': 'unknown command %s' % cmd}) def _handle_recv_back(self, msg): # let's remove the agent id and track the time it took agent_id = msg[0] if len(msg) == 7: client_id = msg[4] else: client_id = None # grabbing the data to update the agents statuses if needed try: data = json.loads(msg[-1]) except ValueError: logger.error("Could not load the received message") logger.error(str(msg)) return if 'error' in data: result = data['error'] else: result = data['result'] command = result.get('command') # results from commands sent by the broker if command in ('_STATUS', 'STOP', 'QUIT'): run_id = self.ctrl.update_status(agent_id, result) if run_id is not None: # if the tests are finished, publish this on the pubsub. self._publisher.send(json.dumps({'data_type': 'run-finished', 'run_id': run_id})) return # other things are pass-through (asked by a client) if client_id is None: return try: self._frontstream.send_multipart([client_id, '', msg[-1]]) except Exception, e: logger.error('Could not send to front') logger.error(msg) # we don't want to die on error. we just log it exc_type, exc_value, exc_traceback = sys.exc_info() exc = traceback.format_tb(exc_traceback) exc.insert(0, str(e)) logger.error('\n'.join(exc)) def start(self): """Starts the broker. """ logger.debug('Starting the loop') if self.started: return # running the heartbeat if self.pong is not None: self.pong.start() # running the cleaner self.cleaner = ioloop.PeriodicCallback(self.ctrl.clean, 2500, self.loop) self.cleaner.start() self.started = True while self.started: try: self.loop.start() except zmq.ZMQError as e: logger.debug(str(e)) if e.errno == errno.EINTR: continue elif e.errno == zmq.ETERM: break else: logger.debug("got an unexpected error %s (%s)", str(e), e.errno) raise else: break def stop(self): """Stops the broker. 
""" if not self.started: return try: self._backstream.flush() except IOError: pass if self.pong is not None: logger.debug('Stopping the heartbeat') self.pong.stop() logger.debug('Stopping the cleaner') self.cleaner.stop() logger.debug('Stopping the loop') self.loop.stop() self.started = False self.context.destroy(0) def main(args=sys.argv): parser = argparse.ArgumentParser(description='Loads broker.') parser.add_argument('--frontend', dest='frontend', default=DEFAULT_FRONTEND, help="ZMQ socket to receive jobs.") parser.add_argument('--backend', dest='backend', default=DEFAULT_BACKEND, help="ZMQ socket for agents.") parser.add_argument('--heartbeat', dest='heartbeat', default=None, help="ZMQ socket for the heartbeat.") parser.add_argument('--register', dest='register', default=DEFAULT_REG, help="ZMQ socket for the registration.") parser.add_argument('--receiver', dest='receiver', default=DEFAULT_BROKER_RECEIVER, help="ZMQ socket to receive events from the runners") parser.add_argument('--publisher', dest='publisher', default=DEFAULT_PUBLISHER, help="ZMQ socket for the registration.") parser.add_argument('--io-threads', type=int, default=DEFAULT_IOTHREADS, help="Number of I/O threads") parser.add_argument('--debug', action='store_true', default=False, help="Debug mode") parser.add_argument('--check', action='store_true', default=False, help=("Use this option to check if there's a running " " broker. Returns the PID if a broker is up.")) parser.add_argument('--logfile', dest='logfile', default='stdout', help="File to log in to.") parser.add_argument('--db', dest='db', default='python', help="Database backend.") parser.add_argument('--web-root', help='Root url of the web dashboard.', type=str, default=None) # add db args for backend, options in get_backends(): for option, default, help, type_ in options: option = 'db_%s_%s' % (backend, option) kargs = {'dest': option, 'default': default} if type_ is bool: kargs['action'] = 'store_true' else: kargs['type'] = type_ option = option.replace('_', '-') parser.add_argument('--%s' % option, **kargs) args = parser.parse_args() set_logger(args.debug, logfile=args.logfile) if args.check: pid = verify_broker(args.frontend) if pid is None: logger.info('There seem to be no broker on this endpoint') else: logger.info('A broker is running. 
PID: %s' % pid) return 0 # grabbing the db options dboptions = {} prefix = 'db_%s_' % args.db for key, value in args._get_kwargs(): if not key.startswith(prefix): continue dboptions[key[len(prefix):]] = value logger.info('Starting the broker') try: broker = Broker(frontend=args.frontend, backend=args.backend, heartbeat=args.heartbeat, register=args.register, receiver=args.receiver, publisher=args.publisher, io_threads=args.io_threads, db=args.db, dboptions=dboptions, web_root=args.web_root) except DuplicateBrokerError, e: logger.info('There is already a broker running on PID %s' % e) logger.info('Exiting') return 1 logger.info('Listening to incoming jobs at %r' % args.frontend) logger.info('Workers may register at %r' % args.backend) if args.heartbeat is not None: logger.info('The heartbeat socket is at %r' % args.heartbeat) try: broker.start() except KeyboardInterrupt: pass finally: broker.stop() return 0 if __name__ == '__main__': sys.exit(main()) <file_sep>/venv/Lib/site-packages/loads/transport/heartbeat.py import time try: import zmq.green as zmq from zmq.green.eventloop import ioloop, zmqstream except ImportError: import zmq from zmq.eventloop import ioloop, zmqstream from loads.util import logger from loads.transport.util import DEFAULT_HEARTBEAT class Stethoscope(object): """Implements a ZMQ heartbeat client. Listens to a given ZMQ endpoint and expect to find there a beat. If no beat is found, it calls the :param onbeatlost: callable. When a beat is found, calls the :param onbeat: callable. Options: - **endpoint** : The ZMQ socket to call. - **warmup_delay** : The delay before starting to Ping. Defaults to 5s. - **delay**: The delay between two pings. Defaults to 3s. - **retries**: The number of attempts to ping. Defaults to 3. - **onbeatlost**: a callable that will be called when a ping failed. If the callable returns **True**, the ping quits. Defaults to None. - **onbeat**: a callable that will be called when a ping succeeds. Defaults to None. - **onregister**: a callable that will be called on a register ping. """ def __init__(self, endpoint=DEFAULT_HEARTBEAT, warmup_delay=.5, delay=30., retries=3, onbeatlost=None, onbeat=None, io_loop=None, ctx=None, onregister=None): self.loop = io_loop or ioloop.IOLoop.instance() self._stop_loop = io_loop is None self.daemon = True self.context = ctx or zmq.Context() self.endpoint = endpoint self.running = False self.delay = delay self.retries = retries self.onbeatlost = onbeatlost self.onbeat = onbeat self.warmup_delay = warmup_delay self._endpoint = None self._stream = None self._timer = None self.tries = 0 self.onregister = onregister self._endpoint = None def _initialize(self): logger.debug('Subscribing to ' + self.endpoint) if self._endpoint is None: self._endpoint = self.context.socket(zmq.SUB) self._endpoint.setsockopt(zmq.SUBSCRIBE, '') self._endpoint.linger = 0 self._stream = zmqstream.ZMQStream(self._endpoint, self.loop) self._endpoint.connect(self.endpoint) self._stream.on_recv(self._handle_recv) self._timer = ioloop.PeriodicCallback(self._delayed, self.delay * 1000, io_loop=self.loop) def _delayed(self): self.tries += 1 if self.tries >= self.retries: logger.debug('Nothing came back') if self.onbeatlost is None or self.onbeatlost(): self.stop() # bye ! 
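# ------------------------------------------------------------------
# Illustrative sketch, not part of the original module: wiring the
# Heartbeat publisher (defined further down in this file) to a
# Stethoscope listener on a shared IOLoop, the same pattern the tests
# in loads/tests/test_heartbeat.py exercise. The endpoint, intervals
# and callbacks below are assumptions chosen for the example.
def _example_heartbeat_pair():
    loop = ioloop.IOLoop()
    hb = Heartbeat('ipc:///tmp/example-beat.ipc', interval=0.1,
                   io_loop=loop)
    stetho = Stethoscope('ipc:///tmp/example-beat.ipc',
                         onbeat=lambda: logger.debug('beat'),
                         onbeatlost=lambda: logger.debug('beat lost'),
                         delay=1., retries=5, io_loop=loop)
    loop.add_callback(hb.start)
    loop.add_callback(stetho.start)

    def _stop():
        hb.stop()
        stetho.stop()
        loop.stop()

    # let the pair exchange a few beats, then shut everything down
    loop.add_timeout(time.time() + 1., _stop)
    loop.start()
# ------------------------------------------------------------------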
def _handle_recv(self, msg): self.tries = 0 msg = msg[0] if msg == 'BEAT' and self.onbeat is not None: self.onbeat() elif self.onregister is not None: self.onregister() def start(self): """Starts the loop""" logger.debug('Starting the loop') if self.running: return self.running = True self._initialize() time.sleep(self.warmup_delay) self._timer.start() def stop(self): """Stops the Pinger""" logger.debug('Stopping the Pinger') self.running = False try: self._stream.flush() except zmq.ZMQError: pass self.tries = 0 self._stream.stop_on_recv() self._timer.stop() self._endpoint.disconnect(self.endpoint) class Heartbeat(object): """Class that implements a ZMQ heartbeat server. This class sends in a ZMQ socket regular beats. Options: - **endpoint** : The ZMQ socket to call. - **interval** : Interval between two beat. - **register** : Number of beats between two register beats - **onregister**: if provided, a callable that will be called prior to the REGISTER call """ def __init__(self, endpoint=DEFAULT_HEARTBEAT, interval=10., io_loop=None, ctx=None, register=5, onregister=None): self.loop = io_loop or ioloop.IOLoop.instance() self.daemon = True self.kill_context = ctx is None self.context = ctx or zmq.Context() self.endpoint = endpoint self.running = False self.interval = interval logger.debug('Publishing to ' + self.endpoint) self._endpoint = self.context.socket(zmq.PUB) self._endpoint.linger = 0 self._endpoint.hwm = 0 self._endpoint.bind(self.endpoint) self._cb = ioloop.PeriodicCallback(self._ping, interval * 1000, io_loop=self.loop) self.register = register self.current_register = 0 self.onregister = onregister def start(self): """Starts the Pong service""" self.running = True self._cb.start() def _ping(self): if self.current_register == 0: if self.onregister is not None: self.onregister() self._endpoint.send('REGISTER') else: self._endpoint.send('BEAT') self.current_register += 1 if self.current_register == self.register: self.current_register = 0 def stop(self): """Stops the Pong service""" self.running = False self._cb.stop() if self.kill_context: self.context.destroy(0) <file_sep>/venv/Lib/site-packages/loads/results/base.py import itertools from collections import defaultdict from datetime import datetime, timedelta from loads.util import get_quantiles, total_seconds, seconds_to_time, unbatch class TestResult(object): """Data TestResult. This is the class receiving all the information about the tests and the requests. Consumes the data passed to it and provide convenient APIs to read this data back. This can be useful if you want to transform this data to create reports, but it doesn't assume any representation for the output. """ __test__ = False # This is not something to run as a test. def __init__(self, config=None, args=None): self.config = config self.hits = [] self.tests = {} self.opened_sockets = self.closed_sockets = 0 self.socket_data_received = 0 self.start_time = None self.stop_time = None self.observers = [] self.args = args def __str__(self): duration = seconds_to_time(self.duration) msg = 'Ran %d tests in %s. %d hits, %.2f RPS.' 
            self.nb_finished_tests, duration, self.nb_hits,
            self.requests_per_second())

        if self.sockets > 0:
            msg += ' %s sockets opened, Bytes thru sockets %d' % (
                self.sockets, self.socket_data_received)

        return msg

    def close(self):
        pass

    @property
    def project_name(self):
        return self.args.get('project_name', 'N/A')

    @property
    def nb_finished_tests(self):
        return len(self._get_tests(finished=True))

    @property
    def nb_hits(self):
        return len(self.hits)

    @property
    def duration(self):
        end = self.stop_time or datetime.utcnow()
        if self.start_time is None:
            return 0
        return total_seconds(end - self.start_time)

    @property
    def nb_failures(self):
        return sum([len(t.failures) for t in self._get_tests()])

    @property
    def nb_errors(self):
        return sum([len(t.errors) for t in self._get_tests()])

    @property
    def nb_success(self):
        return sum([t.success for t in self._get_tests()])

    @property
    def errors(self):
        return itertools.chain((t.errors for t in (self._get_tests())))

    @property
    def failures(self):
        return itertools.chain((t.failures for t in (self._get_tests())))

    @property
    def urls(self):
        """Returns the URLs that had been called."""
        return set([h.url for h in self.hits])

    @property
    def nb_tests(self):
        return len(self.tests)

    @property
    def sockets(self):
        return self.opened_sockets - self.closed_sockets

    def _get_hits(self, url=None, series=None):
        """Filters the hits with the given parameters.

        :param url:
            The url you want to filter with. Only the hits targeting this
            URL will be returned.

        :param series:
            Only the hits done during this series will be returned.
        """
        def _filter(_hit):
            if series is not None and _hit.series != series:
                return False

            if url is not None and _hit.url != url:
                return False

            return True

        return filter(_filter, self.hits)

    def _get_tests(self, name=None, series=None, finished=None, user=None):
        """Filters the tests with the given parameters.

        :param name: The name of the test you want to filter on.
        :param series: The series key you want to filter on.
        :param finished: Return only the finished or unfinished tests
        :param user: The user key to filter on.
        """
        def _filter(test):
            if name is not None and test.name != name:
                return False
            if series is not None and test.series != series:
                return False
            if finished is not None and test.finished != finished:
                return False

            return True

        return filter(_filter, self.tests.values())

    def average_request_time(self, url=None, series=None):
        """Computes the average time a request takes (in seconds)

        :param url:
            The url we want to know the average request time. Could be
            `None` if you want to get the overall average time of a request.
        :param series:
            You can filter by the series, to only know the average request
            time during a particular series.
        """
        elapsed = [total_seconds(h.elapsed)
                   for h in self._get_hits(url, series)]

        if elapsed:
            return float(sum(elapsed)) / len(elapsed)
        else:
            return 0

    def get_request_time_quantiles(self, url=None, series=None):
        elapsed = [total_seconds(h.elapsed)
                   for h in self._get_hits(url=url, series=series)]

        # XXX Cache these results, they might be long to compute.
        return get_quantiles(elapsed, (0, 0.1, 0.5, 0.9, 1))

    def hits_success_rate(self, url=None, series=None):
        """Returns the success rate for the filtered hits.

        (A success is a hit with a status code of 2XX or 3XX).

        :param url: the url to filter on.
        :param series: the series to filter on.
""" hits = list(self._get_hits(url, series)) success = [h for h in hits if 200 <= h.status < 400] if hits: return float(len(success)) / len(hits) else: return 0 def get_url_metrics(self): urls = defaultdict(dict) for url in self.urls: for metric in ('average_request_time', 'hits_success_rate'): urls[url][metric] = getattr(self, metric)(url) return urls def tests_per_second(self): delta = self.stop_time - self.start_time return self.nb_tests / total_seconds(delta) def average_test_duration(self, test=None, series=None): durations = [t.duration for t in self._get_tests(test, series) if t is not None] if durations: return float(sum(durations)) / len(durations) def test_success_rate(self, test=None, series=None): rates = [t.success_rate for t in self._get_tests(test, series)] if rates: return sum(rates) / len(rates) return 1 def requests_per_second(self, url=None, hit=None): if self.duration == 0: return 0 return float(len(self.hits)) / self.duration # batched results def batch(self, **args): for field, message in unbatch(args): if hasattr(self, field): getattr(self, field)(**message) # These are to comply with the APIs of unittest. def startTestRun(self, agent_id=None, when=None): if when is None: when = datetime.utcnow() if agent_id is None: self.start_time = when def stopTestRun(self, agent_id=None): # we don't want to start multiple time the test run if agent_id is None: self.stop_time = datetime.utcnow() def startTest(self, test, loads_status, agent_id=None): hit, user, current_hit, current_user = loads_status key = self._get_key(test, loads_status, agent_id) if key not in self.tests: self.tests[key] = Test(name=test, hit=hit, user=user) def stopTest(self, test, loads_status, agent_id=None): hit, user, current_hit, current_user = loads_status t = self._get_test(test, loads_status, agent_id) t.end = datetime.utcnow() def addError(self, test, exc_info, loads_status, agent_id=None): test = self._get_test(test, loads_status, agent_id) test.errors.append(exc_info) def addFailure(self, test, exc_info, loads_status, agent_id=None): test = self._get_test(test, loads_status, agent_id) test.failures.append(exc_info) def addSuccess(self, test, loads_status, agent_id=None): test = self._get_test(test, loads_status, agent_id) test.success += 1 def incr_counter(self, test, loads_status, name, agent_id=None): test = self._get_test(test, loads_status, agent_id) test.incr_counter(name) def get_counter(self, name, test=None): return sum([t.get_counter(name) for t in self._get_tests(name=test)]) def get_counters(self, test=None): counters = defaultdict(int) for test in self._get_tests(name=test): for name, value in test.get_counters().items(): counters[name] += value return counters def add_hit(self, **data): self.hits.append(Hit(**data)) def socket_open(self, agent_id=None): self.opened_sockets += 1 def socket_close(self, agent_id=None): self.closed_sockets += 1 def socket_message(self, size, agent_id=None): self.socket_data_received += size def __getattribute__(self, name): # call the observer's "push" method after calling the method of the # test_result itself. 
        attr = object.__getattribute__(self, name)
        if name in ('startTestRun', 'stopTestRun', 'startTest', 'stopTest',
                    'addError', 'addFailure', 'addSuccess', 'add_hit',
                    'socket_open', 'socket_message', 'incr_counter'):
            def wrapper(*args, **kwargs):
                ret = attr(*args, **kwargs)
                for obs in self.observers:
                    obs.push(name, *args, **kwargs)
                return ret
            return wrapper
        return attr

    def add_observer(self, observer):
        self.observers.append(observer)

    def _get_key(self, test, loads_status, agent_id):
        return tuple((str(test),) + tuple(loads_status) + (agent_id,))

    def _get_test(self, test, loads_status, agent_id):
        key = self._get_key(test, loads_status, agent_id)
        if key not in self.tests:
            self.startTest(test, loads_status, agent_id)
        return self.tests[key]

    def sync(self, run_id):
        pass


class Hit(object):
    """Represent a hit.

    Used for later computation.
    """
    def __init__(self, url, method, status, started, elapsed, loads_status,
                 agent_id=None):
        self.url = url
        self.method = method
        self.status = status
        self.started = started

        if not isinstance(elapsed, timedelta):
            elapsed = timedelta(seconds=elapsed)

        self.elapsed = elapsed

        loads_status = loads_status or (None, None, None, None)
        (self.series, self.user, self.current_hit,
         self.current_user) = loads_status
        self.agent_id = agent_id


class Test(object):
    """Represent a test that had been run."""

    def __init__(self, start=None, **kwargs):
        self.start = start or datetime.utcnow()
        self.end = None
        self.name = None
        self.hit = None
        self.user = None
        self.failures = []
        self.errors = []
        self.success = 0
        self._counters = defaultdict(int)
        for key, value in kwargs.items():
            setattr(self, key, value)

    def incr_counter(self, name):
        self._counters[name] += 1

    @property
    def finished(self):
        return bool(self.end)

    @property
    def duration(self):
        if self.end is not None:
            return total_seconds(self.end - self.start)
        else:
            return 0

    @property
    def success_rate(self):
        total = self.success + len(self.failures) + len(self.errors)
        if total != 0:
            return float(self.success) / total
        return 1  # All of the zero runs we had were successful

    def __repr__(self):
        return ('<Test %s. errors: %s, failures: %s, success: %s>'
                % (self.name, len(self.errors), len(self.failures),
                   self.success))

    def get_error(self):
        """Returns the first encountered error"""
        if not self.errors:
            return
        return self.errors[0]

    def get_failure(self):
        """Returns the first encountered failure"""
        if not self.failures:
            return
        return self.failures[0]

    def get_counter(self, name):
        return self._counters[name]

    def get_counters(self):
        return self._counters
<file_sep>/venv/Lib/site-packages/loads/tests/test_measure.py
import unittest2
import functools

import mock

from loads.measure import Session
from loads import measure
from loads.tests.support import hush

from requests.adapters import HTTPAdapter


# XXX replace my Mock
class _FakeTest(object):
    pass


class _Headers(object):
    def getheaders(self, name):
        return {}


class _Original(object):
    msg = _Headers()


class _Response(object):
    _original_response = _Original()


class _FakeResponse(object):
    history = False
    elapsed = 1
    cookies = {}
    headers = {}
    status_code = 200
    url = 'http://impossible.place'
    raw = _Response()


class _TestResult(object):
    def __init__(self):
        self.data = []

    def __getattr__(self, name):
        # Relay all the methods to the self.push method if they are part
        # of the protocol.
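        # functools.partial pre-binds the data_type keyword here, so a call
        # like fake_result.add_hit(url='/') goes through
        # self.push(data_type='add_hit', url='/') and is recorded in
        # self.data for later assertions.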
        if name in ('startTest', 'stopTest', 'addFailure', 'addError',
                    'addSuccess', 'add_hit'):  # XXX change to camel_case
            return functools.partial(self.push, data_type=name)

    def push(self, data_type, **data):
        self.data.append(data)


class TestMeasure(unittest2.TestCase):

    def setUp(self):
        self.old_dns = measure.dns_resolve
        self.old_send = HTTPAdapter.send
        HTTPAdapter.send = self._send
        measure.dns_resolve = self._dns

    def tearDown(self):
        measure.dns_resolve = self.old_dns
        HTTPAdapter.send = self.old_send

    def _send(self, *args, **kw):
        return _FakeResponse()

    def _dns(self, url):
        return url, url, 'meh'

    @hush
    def test_session(self):
        test = _FakeTest()
        test_result = _TestResult()
        session = Session(test, test_result)
        session.get('http://impossible.place')
        self.assertEqual(len(test_result.data), 1)

    def test_host_proxy(self):
        uri = 'https://super-server:443/'
        proxy = measure.HostProxy(uri)
        self.assertEquals(proxy.uri, 'https://super-server:443')
        env = {}
        self.assertEquals(proxy.extract_uri(env), 'https://super-server:443')
        self.assertEquals(env['HTTP_HOST'], 'super-server:443')
        self.assertEquals(proxy.scheme, 'https')

        proxy.uri = 'http://somewhere-else'
        self.assertEquals(proxy.extract_uri(env), 'http://somewhere-else')
        self.assertEquals(env['HTTP_HOST'], 'somewhere-else')
        self.assertEquals(proxy.scheme, 'http')

    def test_TestApp(self):
        session = mock.sentinel.session
        test_result = _TestResult()
        app = measure.TestApp('http://super-server', session, test_result)
        self.assertEquals(app.server_url, 'http://super-server')
        app.server_url = 'http://somewhere-else'
        self.assertEquals(app.server_url, 'http://somewhere-else')
<file_sep>/venv/Lib/site-packages/loads/transport/agent.py
""" The agent does several things:

- maintains a connection to a master
- gets load testing orders & performs them
- sends back the results in real time.
"""
import tempfile
import argparse
import errno
import logging
import os
import random
import subprocess
import shlex
import sys
import time
import traceback
from collections import defaultdict
import functools

import zmq
from zmq.eventloop import ioloop, zmqstream

from loads.transport import util
from loads.util import logger, set_logger, json, unpack_include_files
from loads.transport.util import (DEFAULT_FRONTEND, DEFAULT_TIMEOUT_MOVF,
                                  DEFAULT_MAX_AGE, DEFAULT_MAX_AGE_DELTA,
                                  get_hostname)
from loads.transport.message import Message
from loads.transport.util import decode_params, timed
from loads.transport.heartbeat import Stethoscope
from loads.transport.client import Client


class ExecutionError(Exception):
    pass


class Agent(object):
    """Class that links a callable to a broker.

    Options:

    - **broker**: The ZMQ socket to connect to the broker.
    - **ping_delay**: the delay in seconds between two pings.
    - **ping_retries**: the number of attempts to ping the broker before
      quitting.
    - **params**: a dict containing the params to set for this agent.
    - **timeout**: the maximum time allowed before the thread stacks are
      dumped and the message result is not sent back.
    - **max_age**: maximum age for an agent in seconds. After that delay,
      the agent will simply quit. When set to -1, never quits.
      Defaults to -1.
    - **max_age_delta**: maximum value in seconds added to max age.
      The agent will quit after *max_age + random(0, max_age_delta)*.
      This is done to avoid having all agents quit at the same instant.
      Defaults to 0. The value must be an integer.
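
    Example (illustrative sketch only -- the broker endpoint below is an
    assumption, not a documented default)::

        agent = Agent(broker='tcp://127.0.0.1:7780', ping_delay=10.,
                      max_age=3600)
        agent.start()   # blocks in the I/O loop until asked to QUIT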
""" def __init__(self, broker=DEFAULT_FRONTEND, ping_delay=10., ping_retries=3, params=None, timeout=DEFAULT_TIMEOUT_MOVF, max_age=DEFAULT_MAX_AGE, max_age_delta=DEFAULT_MAX_AGE_DELTA): logger.debug('Initializing the agent.') self.debug = logger.isEnabledFor(logging.DEBUG) self.params = params self.pid = os.getpid() self.agent_id = '%s-%s' % (get_hostname(), self.pid) self.timeout = timeout self.max_age = max_age self.max_age_delta = max_age_delta self.env = os.environ.copy() self.running = False self._workers = {} self._max_id = defaultdict(int) # Let's ask the broker its options self.broker = broker client = Client(self.broker) # this will timeout in case the broker is unreachable result = client.ping() self.endpoints = result['endpoints'] # Setup the zmq sockets self.loop = ioloop.IOLoop() self.ctx = zmq.Context() # backend socket - used to receive work from the broker self._backend = self.ctx.socket(zmq.ROUTER) self._backend.identity = self.agent_id self._backend.connect(self.endpoints['backend']) # register socket - used to register into the broker self._reg = self.ctx.socket(zmq.PUSH) self._reg.connect(self.endpoints['register']) # hearbeat socket - used to check if the broker is alive heartbeat = self.endpoints.get('heartbeat') if heartbeat is not None: logger.info("Hearbeat activated") self.ping = Stethoscope(heartbeat, onbeatlost=self.lost, delay=ping_delay, retries=ping_retries, ctx=self.ctx, io_loop=self.loop, onregister=self.register) else: self.ping = None # Setup the zmq streams. self._backstream = zmqstream.ZMQStream(self._backend, self.loop) self._backstream.on_recv(self._handle_recv_back) self._check = ioloop.PeriodicCallback(self._check_proc, ping_delay * 1000, io_loop=self.loop) def _run(self, args, run_id=None): logger.debug('Starting a run.') args['batched'] = True args['slave'] = True args['agent_id'] = self.agent_id args['zmq_receiver'] = self.endpoints['receiver'] args['run_id'] = run_id cmd = 'from loads.main import run;' cmd += 'run(%s)' % str(args) cmd = sys.executable + ' -c "%s"' % cmd cmd = shlex.split(cmd) try: proc = subprocess.Popen(cmd, cwd=args.get('test_dir')) except Exception, e: msg = 'Failed to start process ' + str(e) logger.debug(msg) raise ExecutionError(msg) self._workers[proc.pid] = proc, run_id self._sync_hb() return proc.pid def _sync_hb(self): if self.ping is None: return if len(self._workers) > 0 and self.ping.running: self.ping.stop() elif len(self._workers) == 0 and not self.ping.running: self.ping.start() def _status(self, command, data): status = {} run_id = data.get('run_id') for pid, (proc, _run_id) in self._workers.items(): if run_id is not None and run_id != _run_id: continue if proc.poll() is None: status[pid] = {'status': 'running', 'run_id': _run_id} else: status[pid] = {'status': 'terminated', 'run_id': _run_id} res = {'result': {'status': status, 'command': command}} return res def _handle_commands(self, message): # we get the messages from the broker here data = message.data command = data['command'] logger.debug('Received command %s' % command) if command == 'RUN': test_dir = data['args'].get('test_dir') if test_dir is None: test_dir = tempfile.mkdtemp() else: test_dir += self.agent_id if not os.path.exists(test_dir): os.makedirs(test_dir) data['args']['test_dir'] = test_dir # XXX should be done in _run or at least asynchronously filedata = data.get('filedata') if filedata: unpack_include_files(filedata, test_dir) args = data['args'] run_id = data.get('run_id') pid = self._run(args, run_id) return {'result': {'pids': [pid], 
                               'agent_id': self.agent_id,
                               'command': command}}
        elif command in ('STATUS', '_STATUS'):
            return self._status(command, data)
        elif command == 'STOP':
            logger.debug('asked to STOP all runs')
            return self._stop_runs(command)
        elif command == 'QUIT':
            if len(self._workers) > 0 and not data.get('force', False):
                # if we're busy we won't quit - unless forced !
                logger.info("Broker asked us to quit ! But we're busy...")
                logger.info("Cowardly refusing to die")
                return self._status(command, data)

            logger.debug('asked to QUIT')
            try:
                return self._stop_runs(command)
            finally:
                os._exit(0)

        raise NotImplementedError(command)

    def _kill_worker(self, proc):
        pid = proc.pid
        logger.debug('%d final termination' % proc.pid)

        if proc.poll() is None:
            logger.debug('Calling kill on %d' % proc.pid)
            try:
                proc.kill()
            except OSError:
                logger.exception('Cannot kill %d' % pid)

    def _stop_runs(self, command):
        status = {}

        for pid, (proc, run_id) in self._workers.items():
            logger.debug('terminating proc for run %s' % str(run_id))
            if proc.poll() is None:
                logger.debug('Starting the graceful period for the worker')
                proc.terminate()
                delay = time.time() + 5
                kill = functools.partial(self._kill_worker, proc)
                self.loop.add_timeout(delay, kill)
                if pid in self._workers:
                    del self._workers[pid]
                status[pid] = {'status': 'terminated', 'run_id': run_id}

        self.loop.add_callback(self._sync_hb)
        return {'result': {'status': status, 'command': command}}

    def _check_proc(self):
        for pid, (proc, run_id) in self._workers.items():
            if proc.poll() is not None:
                del self._workers[pid]
        self._sync_hb()

    def _handle_recv_back(self, msg):
        # do the message and send the result
        if self.debug:
            target = timed()(self._handle_commands)
        else:
            target = self._handle_commands

        duration = -1
        broker_id = msg[2]

        if len(msg) == 7:
            client_id = msg[4]
        else:
            client_id = None

        data = msg[-1]
        try:
            res = target(Message.load_from_string(data))
            if self.debug:
                duration, res = res

            res['hostname'] = get_hostname()
            res['agent_id'] = self.agent_id
            res['pid'] = self.pid
            res = json.dumps(res)

            # we're working with strings
            if isinstance(res, unicode):
                res = res.encode('utf8')

        except Exception, e:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            exc = traceback.format_tb(exc_traceback)
            exc.insert(0, str(e))
            res = {'error': {'agent_id': self.agent_id,
                             'error': '\n'.join(exc)}}
            logger.error(res)

        data = [broker_id, '', str(self.agent_id), '']
        if client_id is not None:
            data += [client_id, '']
        data.append(res)

        try:
            self._backend.send_multipart(data)
        except Exception:
            logging.error("Could not send back the result", exc_info=True)

    def lost(self):
        if len(self._workers) > 0:
            # if we're busy we won't quit!
            logger.info("Broker lost ! But we're busy...")
            return False

        logger.info('Broker lost ! Quitting..')
        self.loop.add_callback(self._stop)
        return True

    def stop(self):
        """Stops the agent.
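
        Unregisters from the broker first, then gives the current message
        a grace period of `timeout` seconds to finish before the I/O loop
        is torn down.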
""" if not self.running: return # telling the broker we are stopping try: self._reg.send_multipart(['UNREGISTER', self.agent_id]) except zmq.ZMQError: logger.debug('Could not unregister') # give it a chance to finish a message logger.debug('Starting the graceful period') delay = time.time() + self.timeout self.loop.add_timeout(delay, self._stop) def _stop(self): logger.debug('Stopping the agent') self.running = False try: self._backstream.flush() except zmq.core.error.ZMQError: pass try: self.loop.stop() logger.debug('Agent is stopped') finally: logger.debug('Exiting...') os._exit(0) def register(self): # telling the broker we are ready data = {'pid': self.pid, 'hostname': get_hostname(), 'agent_id': self.agent_id} self._reg.send_multipart(['REGISTER', json.dumps(data)]) def start(self): """Starts the agent """ util.PARAMS = self.params logger.debug('Starting the agent loop') if self.ping is not None: # running the pinger self.ping.start() self._check.start() self.running = True # telling the broker we are ready self.register() # arming the exit callback if self.max_age != -1: if self.max_age_delta > 0: delta = random.randint(0, self.max_age_delta) else: delta = 0 cb_time = self.max_age + delta self.loop.add_timeout(time.time() + cb_time, self.stop) while self.running: try: self.loop.start() except zmq.ZMQError as e: logger.debug(str(e)) if e.errno == errno.EINTR: continue elif e.errno == zmq.ETERM: break else: logger.debug("got an unexpected error %s (%s)", str(e), e.errno) raise else: break logger.debug('Agent loop over') def main(args=sys.argv): parser = argparse.ArgumentParser(description='Run an agent.') parser.add_argument('--broker', dest='broker', default=DEFAULT_FRONTEND, help="ZMQ socket to the broker.") parser.add_argument('--debug', action='store_true', default=False, help="Debug mode") parser.add_argument('--logfile', dest='logfile', default='stdout', help="File to log in to.") parser.add_argument('--params', dest='params', default=None, help='The parameters to be used by the agent.') parser.add_argument('--timeout', dest='timeout', type=float, default=DEFAULT_TIMEOUT_MOVF, help=('The maximum time allowed before the thread ' 'stacks is dump and the message result not sent ' 'back.')) parser.add_argument('--max-age', dest='max_age', type=float, default=DEFAULT_MAX_AGE, help=('The maximum age for a worker in seconds. ' 'After that delay, the worker will simply quit. 
' 'When set to -1, never quits.')) parser.add_argument('--max-age-delta', dest='max_age_delta', type=int, default=DEFAULT_MAX_AGE_DELTA, help='The maximum value in seconds added to max_age') args = parser.parse_args() set_logger(args.debug, logfile=args.logfile) sys.path.insert(0, os.getcwd()) # XXX if args.params is None: params = {} else: params = decode_params(args.params) logger.info('Connecting to %s' % args.broker) agent = Agent(broker=args.broker, params=params, timeout=args.timeout, max_age=args.max_age, max_age_delta=args.max_age_delta) try: agent.start() except KeyboardInterrupt: return 1 finally: agent.stop() return 0 if __name__ == '__main__': main() <file_sep>/venv/Lib/site-packages/loads/output/null.py class NullOutput(object): """A very useless output, silenting everything.""" name = 'null' options = {} def __init__(self, test_result, args): pass def flush(self): pass def push(self, method_called, *args, **data): pass <file_sep>/venv/Lib/site-packages/loads/tests/ssh_server.py # adapted from # https://github.com/paramiko/paramiko/blob/master/demos/demo_server.py import base64 import os import subprocess import paramiko from paramiko import SFTPServer, SFTPAttributes, SFTPHandle, SFTP_OK import gevent from gevent.event import Event from gevent import socket from gevent import monkey from gevent.queue import Queue, Empty paramiko.util.log_to_file('ssh_server.log') _RSA = os.path.join(os.path.dirname(__file__), 'rsa.key') # taken from https://github.com/rspivak/sftpserver class _SFTPHandle(SFTPHandle): def stat(self): try: return SFTPAttributes.from_stat(os.fstat(self.readfile.fileno())) except OSError, e: return SFTPServer.convert_errno(e.errno) def chattr(self, attr): # python doesn't have equivalents to fchown or fchmod, so we have to # use the stored filename try: SFTPServer.set_file_attr(self.filename, attr) return SFTP_OK except OSError, e: return SFTPServer.convert_errno(e.errno) class _SFTPServer(paramiko.SFTPServerInterface): ROOT = os.getcwd() def _realpath(self, path): return path def list_folder(self, path): path = self._realpath(path) try: out = [] flist = os.listdir(path) for fname in flist: _stat = os.stat(os.path.join(path, fname)) attr = SFTPAttributes.from_stat(_stat) attr.filename = fname out.append(attr) return out except OSError, e: return SFTPServer.convert_errno(e.errno) def stat(self, path): path = self._realpath(path) try: return SFTPAttributes.from_stat(os.stat(path)) except OSError, e: return SFTPServer.convert_errno(e.errno) def lstat(self, path): path = self._realpath(path) try: return SFTPAttributes.from_stat(os.lstat(path)) except OSError, e: return SFTPServer.convert_errno(e.errno) def open(self, path, flags, attr): path = self._realpath(path) try: binary_flag = getattr(os, 'O_BINARY', 0) flags |= binary_flag mode = getattr(attr, 'st_mode', None) if mode is not None: fd = os.open(path, flags, mode) else: # os.open() defaults to 0777 which is # an odd default mode for files fd = os.open(path, flags, 0666) except OSError, e: return SFTPServer.convert_errno(e.errno) if (flags & os.O_CREAT) and (attr is not None): attr._flags &= ~attr.FLAG_PERMISSIONS SFTPServer.set_file_attr(path, attr) if flags & os.O_WRONLY: if flags & os.O_APPEND: fstr = 'ab' else: fstr = 'wb' elif flags & os.O_RDWR: if flags & os.O_APPEND: fstr = 'a+b' else: fstr = 'r+b' else: # O_RDONLY (== 0) fstr = 'rb' try: f = os.fdopen(fd, fstr) except OSError, e: return SFTPServer.convert_errno(e.errno) fobj = _SFTPHandle(flags) fobj.filename = path fobj.readfile = f fobj.writefile = f 
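        # At this point fobj wraps the freshly opened file for both reads
        # and writes; the flags-to-fdopen mapping above covers the common
        # cases, e.g. O_WRONLY|O_APPEND becomes 'ab' and plain O_RDWR
        # becomes 'r+b'.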
return fobj def remove(self, path): path = self._realpath(path) try: os.remove(path) except OSError, e: return SFTPServer.convert_errno(e.errno) return SFTP_OK def rename(self, oldpath, newpath): oldpath = self._realpath(oldpath) newpath = self._realpath(newpath) try: os.rename(oldpath, newpath) except OSError, e: return SFTPServer.convert_errno(e.errno) return SFTP_OK def mkdir(self, path, attr): path = self._realpath(path) try: os.mkdir(path) if attr is not None: SFTPServer.set_file_attr(path, attr) except OSError, e: return SFTPServer.convert_errno(e.errno) return SFTP_OK def rmdir(self, path): path = self._realpath(path) try: os.rmdir(path) except OSError, e: return SFTPServer.convert_errno(e.errno) return SFTP_OK def chattr(self, path, attr): path = self._realpath(path) try: SFTPServer.set_file_attr(path, attr) except OSError, e: return SFTPServer.convert_errno(e.errno) return SFTP_OK def symlink(self, target_path, path): path = self._realpath(path) if (len(target_path) > 0) and (target_path[0] == '/'): # absolute symlink target_path = os.path.join(self.ROOT, target_path[1:]) if target_path[:2] == '//': # bug in os.path.join target_path = target_path[1:] else: # compute relative to path abspath = os.path.join(os.path.dirname(path), target_path) if abspath[:len(self.ROOT)] != self.ROOT: target_path = '<error>' try: os.symlink(target_path, path) except OSError, e: return SFTPServer.convert_errno(e.errno) return SFTP_OK def readlink(self, path): path = self._realpath(path) try: symlink = os.readlink(path) except OSError, e: return SFTPServer.convert_errno(e.errno) # if it's absolute, remove the root if os.path.isabs(symlink): if symlink[:len(self.ROOT)] == self.ROOT: symlink = symlink[len(self.ROOT):] if (len(symlink) == 0) or (symlink[0] != '/'): symlink = '/' + symlink else: symlink = '<error>' return symlink class SSHServer(paramiko.ServerInterface): def __init__(self, key=_RSA, port=2200): self.key = paramiko.RSAKey(filename=key) self.data = base64.encodestring(str(self.key)) self.shell = Event() self.pub_key = paramiko.RSAKey(data=base64.decodestring(self.data)) self.port = port self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.sock.bind(('', self.port)) self.sock.listen(100) self.cmds = Queue() def check_channel_exec_request(self, channel, command): self.cmds.put((channel, command)) return True def check_channel_request(self, kind, chanid): if kind == 'session': return paramiko.OPEN_SUCCEEDED print '%r not allowed' % kind return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED def check_auth_password(self, username, password): return paramiko.AUTH_SUCCESSFUL def check_auth_publickey(self, username, key): return paramiko.AUTH_SUCCESSFUL def get_allowed_auths(self, username): return 'none' def check_channel_shell_request(self, channel): self.shell.set() return True def check_channel_pty_request(self, *args): return True def run(self): while True: client, addr = self.sock.accept() gevent.spawn(self.handle_connection, client, addr) def handle_connection(self, client, addr): t = paramiko.Transport(client) try: t.load_server_moduli() except Exception: print '(Failed to load moduli -- gex will be unsupported.)' pass t.add_server_key(self.key) t.set_subsystem_handler("sftp", paramiko.SFTPServer, _SFTPServer) try: t.start_server(server=self) except paramiko.SSHException: print '*** SSH negotiation failed.' 
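            # start_server() raising SSHException means the key exchange
            # with the client failed, so there is no session to serve --
            # we simply drop the transport below.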
t.close() return channel = t.accept() while t.is_active(): try: chan, cmd = self.cmds.get(block=False) except Empty: pass else: print cmd try: if hasattr(subprocess, 'check_output'): result = subprocess.check_output(cmd, shell=True) else: result = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True) result = result.communicate()[0] chan.send(result.replace('\n', '\r\n')) chan.send_exit_status(0) except subprocess.CalledProcessError, e: if e.output: output = e.output else: output = '%r failed' % cmd chan.send_stderr(output) chan.send_exit_status(e.returncode) chan.close() gevent.sleep(0) channel.close() t.close() return # interactive shell. not needed for now XXX # wait for auth chan = t.accept(20) if chan is None: print '*** No channel.' t.close() return self.shell.wait(10) if not self.shell.isSet(): print '*** Client never asked for a shell.' t.close() return print 'yeah shell!' chan.send('\r\n\r\nTesting SSH Server\r\n\r\n') chan.send('Username: ') f = chan.makefile('rU') username = f.readline().strip('\r\n') print 'user is %r' % username chan.send('\r\nWelcome to a Fake world!\r\n') chan.send('ssh:~ $ ') buffer = [] while True: d = chan.recv(1) if d == '': code = 0 else: code = ord(d) if code == 3: break elif code == 13: chan.send('\r\n') cmd = ''.join(buffer) if cmd == 'exit': chan.send('Bye !\r\n') break try: result = subprocess.check_output(cmd, shell=True) chan.send(result.replace('\n', '\r\n')) except subprocess.CalledProcessError: chan.send('That call failed.\r\n') chan.send('ssh:~ $ ') buffer = [] else: buffer.append(d) chan.send(d) chan.close() t.close() def close(self): self.sock.close() def main(): monkey.patch_all() server = SSHServer() print 'Listening on port %d' % server.port try: server.run() finally: server.close() if __name__ == '__main__': try: main() except KeyboardInterrupt: pass <file_sep>/DjangoStock/Stocks/views.py from django.shortcuts import render import requests import xmltodict def base(request): return render(request, 'Stocks/base.html') def news(request): try: isinCdNm = request.GET['isinCdNm'] url = "http://apis.data.go.kr/1160100/service/GetStocIssuInfoService/getItemBasiInfo?pageNo=1&numOfRows=1&resultType=xml&stckIssuCmpyNm=" + isinCdNm + "&fnccmpNm=%EA%B9%80%EC%B2%9C%EC%A0%80%EC%B6%95%EC%9D%80%ED%96%89&serviceKey=<KEY>Gtn9TyZBTume9A%3D%3D" req = requests.get(url).content xmlObject = xmltodict.parse(req) allData = xmlObject['response']['body']['items']['item'] except Exception as e: allData = "" content = {'allData': allData} return render(request, 'Stocks/news.html', content) def money(request): try: crno = request.GET['crno'] url = "http://apis.data.go.kr/1160100/service/GetFinaStatInfoService/getSummFinaStat?pageNo=1&numOfRows=1&resultType=xml&crno=" + crno + "&fnccmpNm=%EA%B9%80%EC%B2%9C%EC%A0%80%EC%B6%95%EC%9D%80%ED%96%89&serviceKey=<KEY>M6FSOW04r2tK0R8jeQ9Gtn9TyZBTume9A%3D%3D" req = requests.get(url).content xmlObject = xmltodict.parse(req) allData = xmlObject['response']['body']['items']['item'] except Exception as e: allData = "" content = {'allData': allData} return render(request, 'Stocks/money.html', content) <file_sep>/venv/Lib/site-packages/loads/case.py import unittest from requests.adapters import HTTPAdapter from loads.measure import Session, TestApp from loads.results import LoadsTestResult, UnitTestTestResult class FakeTestApp(object): def __getattr__(self, arg): def wrapper(*args, **kwargs): raise ValueError(('If you want to use the webtest.TestApp client, ' 'you need to add a "server_url" property to ' 'your TestCase or call 
loads with the ' '--server-url option')) return wrapper MAX_CON = 1000 class TestCase(unittest.TestCase): server_url = None def __init__(self, test_name, test_result=None, config=None): super(TestCase, self).__init__(test_name) if config is None: config = {} self.config = config if config.get('server_url') is not None: self.server_url = config['server_url'] self._test_result = test_result self.session = Session(test=self, test_result=test_result) http_adapter = HTTPAdapter(pool_maxsize=MAX_CON, pool_connections=MAX_CON) self.session.mount('http://', http_adapter) self.session.mount('https://', http_adapter) if self.server_url is not None: self.app = TestApp(self.server_url, self.session, test_result) else: self.app = FakeTestApp() self._ws = [] self._loads_status = None def defaultTestResult(self): return LoadsTestResult() def incr_counter(self, name): self._test_result.incr_counter(self, self._loads_status, name) def create_ws(self, url, callback=None, protocols=None, extensions=None, klass=None): from loads.websockets import create_ws ws = create_ws(url, self._test_result, callback=callback, protocols=protocols, extensions=extensions, klass=klass, test_case=self) self._ws.append(ws) return ws def tearDown(self): for ws in self._ws: if ws._th.dead: ws._th.get() # re-raise any exception swallowed by gevent def run(self, result=None, loads_status=None): if (loads_status is not None and result is None and not isinstance(self._test_result, LoadsTestResult)): result = LoadsTestResult(loads_status, self._test_result) if loads_status is not None: self._loads_status = self.session.loads_status = loads_status return super(TestCase, self).run(result) def _patching(): # patching nose if present try: from nose import core core._oldTextTestResult = core.TextTestResult class _TestResult(core._oldTextTestResult): def startTest(self, test, *args, **kw): super(_TestResult, self).startTest(test) def stopTest(self, test, *args, **kw): super(_TestResult, self).stopTest(test) def addError(self, test, exc_info, *args, **kw): super(_TestResult, self).addError(test, exc_info) def addFailure(self, test, exc_info, *args, **kw): super(_TestResult, self).addFailure(test, exc_info) def addSuccess(self, test, *args, **kw): super(_TestResult, self).addSuccess(test) core.TextTestResult = _TestResult from nose import proxy proxy._ResultProxy = proxy.ResultProxy class _ResultProxy(proxy._ResultProxy): def startTest(self, test, *args, **kw): super(_ResultProxy, self).startTest(test) def stopTest(self, test, *args, **kw): super(_ResultProxy, self).stopTest(test) def addError(self, test, exc_info, *args, **kw): super(_ResultProxy, self).addError(test, exc_info) def addFailure(self, test, exc_info, *args, **kw): super(_ResultProxy, self).addFailure(test, exc_info) def addSuccess(self, test, *args, **kw): super(_ResultProxy, self).addSuccess(test) proxy.ResultProxy = _ResultProxy except ImportError: pass # patch unittest TestResult object try: import unittest2.runner unittest2.runner.TextTestResult = UnitTestTestResult except ImportError: pass _patching() <file_sep>/venv/Lib/site-packages/loads/tests/test_util.py from tempfile import mkstemp, mkdtemp import datetime import mock import os import unittest2 as unittest2 import sys import StringIO import shutil import zmq import gevent import loads from loads.tests.support import hush from loads import util from loads.util import (resolve_name, set_logger, logger, dns_resolve, DateTimeJSONEncoder, try_import, split_endpoint, null_streams, get_quantiles, pack_include_files, 
unpack_include_files, dict_hash) from loads.transport.util import (register_ipc_file, _cleanup_ipc_files, send, TimeoutError, recv, decode_params, dump_stacks) class _BadSocket(object): def __init__(self, error): self.error = error def send(self, msg, *args): err = zmq.ZMQError() err.errno = self.error raise err def recv(self, *args): err = zmq.ZMQError() err.errno = self.error raise err class FakeStdout(object): def fileno(self): return 1 def flush(self): pass def write(self, data): pass class TestUtil(unittest2.TestCase): def setUp(self): util._DNS_CACHE = {} self.stdout = sys.stdout sys.stdout = FakeStdout() def tearDown(self): sys.stdout = self.stdout def test_resolve(self): ob = resolve_name('loads.tests.test_util.TestUtil') self.assertTrue(ob is TestUtil) ob = resolve_name('loads') self.assertTrue(ob is loads) self.assertRaises(ImportError, resolve_name, 'xx.cc') self.assertRaises(ImportError, resolve_name, 'xx') self.assertRaises(ImportError, resolve_name, 'loads.xx') @mock.patch('sys.path', []) def test_resolve_adds_path(self): ob = resolve_name('loads.tests.test_util.TestUtil') self.assertTrue(ob is TestUtil) self.assertTrue('' in sys.path) old_len = len(sys.path) # And checks that it's not added twice ob = resolve_name('loads.tests.test_util.TestUtil') self.assertEquals(len(sys.path), old_len) def test_set_logger(self): before = len(logger.handlers) set_logger() self.assertTrue(len(logger.handlers), before + 1) fd, logfile = mkstemp() os.close(fd) set_logger(debug=True) set_logger(logfile=logfile) os.remove(logfile) def test_ipc_files(self): fd, path = mkstemp() os.close(fd) self.assertTrue(os.path.exists(path)) register_ipc_file('ipc://' + path) _cleanup_ipc_files() self.assertFalse(os.path.exists(path)) def test_send(self): sock = _BadSocket(zmq.EAGAIN) self.assertRaises(TimeoutError, send, sock, 'blabla') sock = _BadSocket(-1) self.assertRaises(zmq.ZMQError, send, sock, 'blabla') def test_recv(self): sock = _BadSocket(zmq.EAGAIN) self.assertRaises(TimeoutError, recv, sock) sock = _BadSocket(-1) self.assertRaises(zmq.ZMQError, recv, sock) def test_decode(self): params = decode_params('one:1|two:2') items = params.items() items.sort() self.assertEqual(items, [('one', '1'), ('two', '2')]) def test_decode_multiple_colons(self): params = decode_params('one:tcp://foo|two:tcp://blah') items = params.items() items.sort() self.assertEqual(items, [('one', 'tcp://foo'), ('two', 'tcp://blah')]) def test_dump(self): dump = dump_stacks() num = len([l for l in dump if l.strip() == 'Greenlet']) def _job(): gevent.sleep(.5) gevent.spawn(_job) gevent.spawn(_job) gevent.sleep(0) dump = dump_stacks() new_num = len([l for l in dump if l.strip() == 'Greenlet']) self.assertTrue(new_num - num in (2, 3)) def test_dns_resolve(self): with mock.patch('loads.util.gevent_socket.gethostbyname_ex') as mocked: mocked.return_value = ('example.com', ['example.com'], ['0.0.0.0', '1.1.1.1']) # Initial query should populate the cache and return # randomly-selected resolved address. url, original, resolved = dns_resolve('http://example.com') self.assertEqual(original, 'example.com') self.assertEqual(url, 'http://' + resolved + ':80') self.assertTrue(resolved in ("0.0.0.0", "1.1.1.1")) self.assertEqual(mocked.call_count, 1) # Subsequent queries should be fulfilled from the cache # and should balance between all resolved addresses. 
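            # Illustrative expectation: with both '0.0.0.0' and '1.1.1.1'
            # cached for example.com, repeated lookups should eventually
            # return each address without hitting gethostbyname_ex again.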
            addrs = set()
            for _ in xrange(10):
                addrs.add(dns_resolve('http://example.com')[2])
            self.assertEqual(addrs, set(('0.0.0.0', '1.1.1.1')))
            self.assertEqual(mocked.call_count, 1)

    @hush
    @mock.patch('loads.util.gevent_socket.gethostbyname_ex')
    @mock.patch('loads.util.gevent_socket.gethostbyname')
    def test_dns_resolve_fallbacks_on_gethostbyname(self, hostbyname,
                                                    hostbyname_ex):
        # Older versions of gevent don't have the gethostbyname_ex method. Be
        # sure we fallback on the right version if the method doesn't exist.
        hostbyname_ex.side_effect = AttributeError()
        hostbyname.return_value = '0.0.0.0'

        self.assertEquals(dns_resolve(
            'http://mozilla.org'),
            ('http://0.0.0.0:80', 'mozilla.org', '0.0.0.0'))
        self.assertTrue(hostbyname.called)

    def test_split_endpoint(self):
        res = split_endpoint('tcp://12.22.33.45:12334')
        self.assertEqual(res['scheme'], 'tcp')
        self.assertEqual(res['ip'], '12.22.33.45')
        self.assertEqual(res['port'], 12334)

        res = split_endpoint('ipc:///here/it/is')
        self.assertEqual(res['scheme'], 'ipc')
        self.assertEqual(res['path'], '/here/it/is')

        self.assertRaises(NotImplementedError, split_endpoint,
                          'wat://ddf:ff:f')

    def test_datetime_json_encoder(self):
        encoder = DateTimeJSONEncoder()
        date = datetime.datetime(2013, 5, 30, 18, 35, 11, 550482)
        delta = datetime.timedelta(0, 12, 126509)
        self.assertEquals(encoder.encode(date), '"2013-05-30T18:35:11.550482"')
        self.assertEquals(encoder.encode(delta), '12.126509')
        self.assertRaises(TypeError, encoder.encode, gevent.socket)

    def test_try_import(self):
        try_import("loads")
        try_import("loads.case", "loads.tests")
        with self.assertRaises(ImportError):
            try_import("loads.nonexistent1", "loads.nonexistent2")

    def test_get_quantiles(self):
        data = range(100)
        quantiles = 0, 0.1, 0.5, 0.9, 1
        res = get_quantiles(data, quantiles)
        self.assertEqual(len(res), 5)

    def test_nullstreams(self):
        stream = StringIO.StringIO()
        null_streams([stream, sys.stdout])
        stream.write('ok')
        sys.stdout.write('ok')


class TestIncludeFileHandling(unittest2.TestCase):

    def setUp(self):
        self.workdir = mkdtemp()
        self.orig_cwd = os.getcwd()
        os.chdir(self.workdir)

    def tearDown(self):
        os.chdir(self.orig_cwd)
        shutil.rmtree(self.workdir)

    def test_include_of_single_file(self):
        with open("test1.txt", "w") as f:
            f.write("hello world")

        filedata = pack_include_files(["test1.txt"])

        os.makedirs("outdir")
        os.chdir("outdir")
        unpack_include_files(filedata)

        self.assertEquals(os.listdir("."), ["test1.txt"])

    def test_include_of_single_file_with_explicit_location(self):
        os.makedirs("indir")
        os.makedirs("outdir")

        with open("indir/test1.txt", "w") as f:
            f.write("hello world")

        filedata = pack_include_files(["*.txt"], "./indir")
        unpack_include_files(filedata, "./outdir")

        self.assertEquals(os.listdir("outdir"), ["test1.txt"])

    def test_preservation_of_file_mode(self):
        with open("test1.sh", "w") as f:
            f.write("#!/bin/sh\necho 'hello world'\n")
        os.chmod("test1.sh", 0755)

        with open("private.txt", "w") as f:
            f.write("TOP SECRET DATA\n")
        os.chmod("private.txt", 0600)

        filedata = pack_include_files(["*.*"])

        os.unlink("test1.sh")
        os.unlink("private.txt")

        unpack_include_files(filedata)

        self.assertEquals(os.stat("test1.sh").st_mode & 0777, 0755)
        self.assertEquals(os.stat("private.txt").st_mode & 0777, 0600)

    def test_relative_globbing_and_directory_includes(self):
        os.makedirs("indir")
        os.makedirs("outdir")
        os.chdir("indir")

        with open("test1.txt", "w") as f:
            f.write("hello world")
        with open("test2.txt", "w") as f:
            f.write("hello world")

        os.makedirs("subdir/subsubdir")
        os.chdir("subdir/subsubdir")
        with open("test3.txt", "w") as f:
f.write("hello world") os.chdir("../../../outdir") filedata = pack_include_files(["../indir/*.txt", "../indir/*dir"]) unpack_include_files(filedata) self.assertEquals(sorted(os.listdir(".")), ["subdir", "test1.txt", "test2.txt"]) self.assertEquals(os.listdir("./subdir"), ["subsubdir"]) self.assertEquals(os.listdir("./subdir/subsubdir"), ["test3.txt"]) def test_unicode_unpack(self): # make sure we pass string data = (u'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00' '\x00\x00\x00\x00\x00\x00\x00\x00\x00') unpack_include_files(data.encode('base64')) def test_dict_hash(self): data1 = {1: 2, 3: 4} data2 = {1: 2, 3: 4} self.assertEqual(dict_hash(data1), dict_hash(data2)) data1['count'] = 'b' self.assertNotEqual(dict_hash(data1), dict_hash(data2)) self.assertEqual(dict_hash(data1, omit_keys=['count']), dict_hash(data2)) <file_sep>/venv/Lib/site-packages/loads/output/__init__.py _OUTPUTS = {} def create_output(kind, test_result, args): if kind not in _OUTPUTS: raise NotImplementedError(kind) return _OUTPUTS[kind](test_result, args) def register_output(klass): _OUTPUTS[klass.name] = klass def output_list(): return _OUTPUTS.values() # register our own plugins from loads.output.null import NullOutput from loads.output._file import FileOutput from loads.output.std import StdOutput from loads.output._funkload import FunkloadOutput for output in (NullOutput, FileOutput, StdOutput, FunkloadOutput): register_output(output) <file_sep>/venv/Lib/site-packages/loads/examples/echo_server.py # -*- coding: utf-8 -*- # -*- flake8: noqa -*- from gevent import monkey; monkey.patch_all() import argparse import random import os import base64 import time import gevent import gevent.pywsgi from ws4py.server.wsgiutils import WebSocketWSGIApplication from ws4py.server.geventserver import WebSocketWSGIHandler, WSGIServer from ws4py.websocket import EchoWebSocket PAGE = """<html> <head> <script type='application/javascript' src='https://ajax.googleapis.com/ajax/libs/jquery/1.8.3/jquery.min.js'></script> <script type='application/javascript'> $(document).ready(function() { websocket = 'ws://%(host)s:%(port)s/ws'; if (window.WebSocket) { ws = new WebSocket(websocket); } else if (window.MozWebSocket) { ws = MozWebSocket(websocket); } else { console.log('WebSocket Not Supported'); return; } window.onbeforeunload = function(e) { $('#chat').val($('#chat').val() + 'Bye bye...\\n'); ws.close(1000, '%(username)s left the room'); if(!e) e = window.event; e.stopPropagation(); e.preventDefault(); }; ws.onmessage = function (evt) { $('#chat').val($('#chat').val() + evt.data + '\\n'); }; ws.onopen = function() { ws.send("%(username)s entered the room"); }; ws.onclose = function(evt) { $('#chat').val($('#chat').val() + 'Connection closed by server: ' + evt.code + ' \"' + evt.reason + '\"\\n'); }; $('#send').click(function() { console.log($('#message').val()); ws.send('%(username)s: ' + $('#message').val()); $('#message').val(""); return false; }); }); </script> </head> <body> <form action='#' id='chatform' method='get'> <textarea id='chat' cols='35' rows='10'></textarea> <br /> <label for='message'>%(username)s: </label><input type='text' id='message' /> <input id='send' type='submit' value='Send' /> </form> </body> </html> """ class PingWebSocket(EchoWebSocket): active = 0 max = 0 def opened(self): PingWebSocket.active += 1 if PingWebSocket.max < PingWebSocket.active: PingWebSocket.max = PingWebSocket.active def closed(self, *args, **kw): PingWebSocket.active -= 1 def received_message(self, m): self.send(m) gevent.sleep(0) class 
EchoWebSocketApplication(object): def __init__(self, host, port): self.host = host self.port = port self.ws = WebSocketWSGIApplication(handler_cls=PingWebSocket) def active(self, environ, start_response): status = '200 OK' headers = [('Content-type', 'text/plain')] start_response(status, headers) return 'max: %d, current: %d' % (PingWebSocket.max, PingWebSocket.active) def __call__(self, environ, start_response): if environ['PATH_INFO'] == '/active': return self.active(environ, start_response) if environ['PATH_INFO'] == '/favicon.ico': return self.favicon(environ, start_response) if environ['PATH_INFO'] == '/ws': environ['ws4py.app'] = self return self.ws(environ, start_response) if environ['PATH_INFO'] == '/auth': return self.auth(environ, start_response) return self.webapp(environ, start_response) def auth(self, environ, start_response): headers = [('Content-type', 'text/plain')] if 'HTTP_AUTHORIZATION' not in environ: start_response('401 Unauthorized', headers) return ['Unauthorized'] status = '200 OK' start_response(status, headers) _auth = environ['HTTP_AUTHORIZATION'][6:] user, pwd = base64.b64decode(_auth).split(':') return user def webapp(self, environ, start_response): """ Our main webapp that'll display the chat form """ status = '200 OK' headers = [('Content-type', 'text/html')] start_response(status, headers) return PAGE % {'username': "User%d" % random.randint(0, 100), 'host': self.host, 'port': self.port} class NoLog(object): def write(*args, **kw): pass if __name__ == '__main__': parser = argparse.ArgumentParser(description='Echo gevent Server') parser.add_argument('--host', default='127.0.0.1') parser.add_argument('-p', '--port', default=9000, type=int) args = parser.parse_args() server = WSGIServer((args.host, args.port), EchoWebSocketApplication(args.host, args.port), log=NoLog(), backlog=100000) try: server.serve_forever() except KeyboardInterrupt: pass <file_sep>/venv/Lib/site-packages/loads/results/zmqrelay.py from cStringIO import StringIO import traceback import errno from collections import defaultdict try: import zmq.green as zmq except ImportError: import zmq try: import gevent from gevent.queue import Queue except ImportError: from Queue import Queue from loads.util import DateTimeJSONEncoder from loads.transport.util import get_hostname class ZMQTestResult(object): """Relays all the method calls to a zmq endpoint""" def __init__(self, args): self.args = args self.context = args.get('zmq_context', zmq.Context()) self._init_socket() self.encoder = DateTimeJSONEncoder() self.agent_id = self.args.get('agent_id') self.run_id = self.args.get('run_id') def _init_socket(self): receive = self.args['zmq_receiver'] self._push = self.context.socket(zmq.PUSH) self._push.set_hwm(8096 * 10) self._push.setsockopt(zmq.LINGER, -1) self._push.connect(receive) def startTest(self, test, loads_status): self.push('startTest', test=str(test), loads_status=loads_status) def startTestRun(self, agent_id=None): self.push('startTestRun') def stopTestRun(self, agent_id=None): self.push('stopTestRun') def stopTest(self, test, loads_status): self.push('stopTest', test=str(test), loads_status=loads_status) def _transform_exc_info(self, exc): string_tb = StringIO() exc, exc_class, tb = exc traceback.print_tb(tb, file=string_tb) string_tb.seek(0) return str(exc), str(exc_class), string_tb.read() def addFailure(self, test, exc, loads_status): # Because the information to trace the exception is a python object, it # may not be JSON-serialisable, so we just pass its string # representation. 
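        # _transform_exc_info() flattens the exc_info triple into three
        # JSON-friendly strings: str(exception), str(exception class) and
        # the formatted traceback text.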
        self.push('addFailure',
                  test=str(test),
                  exc_info=self._transform_exc_info(exc),
                  loads_status=loads_status)

    def addError(self, test, exc, loads_status):
        self.push('addError', test=str(test),
                  exc_info=self._transform_exc_info(exc),
                  loads_status=loads_status)

    def addSuccess(self, test, loads_status):
        self.push('addSuccess', test=str(test), loads_status=loads_status)

    def add_hit(self, **data):
        self.push('add_hit', **data)

    def socket_open(self):
        self.push('socket_open')

    def socket_close(self):
        self.push('socket_close')

    def socket_message(self, size):
        self.push('socket_message', size=size)

    def incr_counter(self, test, loads_status, name, agent_id=None):
        self.push(name, test=str(test), loads_status=loads_status,
                  agent_id=str(agent_id))

    def push(self, data_type, **data):
        data.update({'data_type': data_type,
                     'agent_id': self.agent_id,
                     'hostname': get_hostname(),
                     'run_id': self.run_id})
        while True:
            try:
                self._push.send(self.encoder.encode(data), zmq.NOBLOCK)
                return
            except zmq.ZMQError as e:
                if e.errno in (errno.EAGAIN, errno.EWOULDBLOCK):
                    continue
                else:
                    raise

    def add_observer(self, *args, **kwargs):
        pass  # NOOP

    def close(self):
        self.context.destroy()


class ZMQSummarizedTestResult(ZMQTestResult):
    def __init__(self, args):
        super(ZMQSummarizedTestResult, self).__init__(args)
        self.interval = 1.
        self._data = Queue()
        gevent.spawn_later(self.interval, self._dump_data)

    def push(self, data_type, **data):
        self._data.put_nowait((data_type, data))

    def close(self):
        while not self._data.empty():
            self._dump_data(loop=False)
        self.context.destroy()

    def _dump_data(self, loop=True):
        if self._data.empty() and loop:
            gevent.spawn_later(self.interval, self._dump_data)
            return

        data = {'data_type': 'batch',
                'agent_id': self.agent_id,
                'hostname': get_hostname(),
                'run_id': self.run_id,
                'counts': defaultdict(list)}

        # grabbing what we have
        for _ in range(self._data.qsize()):
            data_type, message = self._data.get()
            data['counts'][data_type].append(message)

        while True:
            try:
                self._push.send(self.encoder.encode(data), zmq.NOBLOCK)
                break
            except zmq.ZMQError as e:
                if e.errno in (errno.EAGAIN, errno.EWOULDBLOCK):
                    continue
                else:
                    raise

        if loop:
            gevent.spawn_later(self.interval, self._dump_data)
<file_sep>/venv/Lib/site-packages/loads/measure.py
import datetime
import urlparse

from requests.sessions import Session as _Session
from webtest.app import TestApp as _TestApp
from wsgiproxy.proxies import HostProxy as _HostProxy
from wsgiproxy.requests_client import HttpClient

from loads.util import dns_resolve


class TestApp(_TestApp):
    """A subclass of webtest.TestApp which uses the requests backend by
    default.
    """
    def __init__(self, app, session, test_result, *args, **kwargs):
        self.session = session
        self.test_result = test_result
        client = HttpClient(session=self.session)
        self.proxy = HostProxy(app, client=client)
        super(TestApp, self).__init__(self.proxy, *args, **kwargs)

    @property
    def server_url(self):
        return self.proxy.uri

    @server_url.setter
    def server_url(self, value):
        self.proxy.uri = value

    # XXX redefine here the _do_request, check_status and check_errors
    # methods so we can actually use them to send information to the
    # test_result


class HostProxy(_HostProxy):
    """A proxy to redirect all requests to a specific uri"""

    def __init__(self, uri, *args, **kwargs):
        super(HostProxy, self).__init__(uri, *args, **kwargs)
        self._uri = None
        self.scheme = None
        self.net_loc = None
        self.uri = uri

    @property
    def uri(self):
        return self._uri

    @uri.setter
    def uri(self, value):
        self._uri = value.rstrip('/')
        self.scheme, self.net_loc = urlparse.urlparse(self.uri)[0:2]

    def extract_uri(self, environ):
        environ['HTTP_HOST'] = self.net_loc
        return self.uri


class Session(_Session):
    """Extends Requests' Session object in order to send information to the
    test_result.
    """
    def __init__(self, test, test_result):
        _Session.__init__(self)
        self.test = test
        self.test_result = test_result
        self.loads_status = None, None, None, None

    def request(self, method, url, headers=None, **kwargs):
        if not url.startswith('https://'):
            url, original, resolved = dns_resolve(url)
            if headers is None:
                headers = {}
            headers['Host'] = original
        return super(Session, self).request(
            method, url, headers=headers, **kwargs)

    def send(self, request, **kwargs):
        """Do the actual request from within the session, doing some
        measures at the same time about the request (duration, status,
        etc).
        """
        # attach some information to the request object for later use.
        start = datetime.datetime.utcnow()
        res = _Session.send(self, request, **kwargs)
        res.started = start
        res.method = request.method
        self._analyse_request(res)
        return res

    def _analyse_request(self, req):
        """Analyse some information about the request and send the
        information to the test_result.

        :param req: the request to analyse.
        """
        if self.test_result is not None:
            self.test_result.add_hit(elapsed=req.elapsed,
                                     started=req.started,
                                     status=req.status_code,
                                     url=req.url,
                                     method=req.method,
                                     loads_status=self.loads_status)
<file_sep>/venv/Lib/site-packages/loads/results/adapter.py
import functools


class LoadsTestResult(object):
    """Used to make unittest calls compatible with Loads.

    This class adds the loads_status option Loads uses to the API calls.
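
    Example (illustrative sketch only; `real_result` and `test` stand in
    for a concrete result object and test case)::

        wrapped = LoadsTestResult((1, 1, 1, 1), real_result)
        wrapped.addSuccess(test)  # loads_status is injected automatically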
""" def __init__(self, loads_status, result): self.result = result self.loads_status = loads_status def __getattribute__(self, name): klass = super(LoadsTestResult, self) result = klass.__getattribute__('result') attr = getattr(result, name) if name in ('startTest', 'stopTest', 'addSuccess', 'addException', 'addError', 'addFailure', 'incr_counter'): status = klass.__getattribute__('loads_status') return functools.partial(attr, loads_status=status) return attr <file_sep>/venv/Lib/site-packages/loads/output/std.py import array import sys import traceback from collections import defaultdict from loads.results import ZMQTestResult def get_terminal_width(fd=1): """Get the width for the given pty fd (default is TTY1).""" if sys.platform == 'win32': return 100 import termios import fcntl sizebuf = array.array('h', [0, 0]) try: fcntl.ioctl(fd, termios.TIOCGWINSZ, sizebuf, True) except IOError: return 100 return sizebuf[1] def get_screen_relative_value(percent, terminal_width): """Convert a percentage into a value relative to the width of the screen""" return int(round(percent * (terminal_width / 100.))) - 8 class StdOutput(object): name = 'stdout' options = {'total': ('Total Number of items', int, None, False), 'duration': ('Duration', int, None, False)} def __init__(self, test_result, args): self.results = test_result self.args = args self.pos = self.current = 0 self.starting = None self._terminal_width = get_terminal_width() def flush(self): write = sys.stdout.write self._duration_progress() write("\nDuration: %.2f seconds" % self.results.duration) write("\nHits: %d" % self.results.nb_hits) write("\nStarted: %s" % self.results.start_time) write("\nApproximate Average RPS: %d" % self.results.requests_per_second()) write("\nAverage request time: %.2fs" % self.results.average_request_time()) write("\nOpened web sockets: %d" % self.results.opened_sockets) write("\nBytes received via web sockets : %d\n" % self.results.socket_data_received) write("\nSuccess: %d" % self.results.nb_success) write("\nErrors: %d" % self.results.nb_errors) write("\nFailures: %d" % self.results.nb_failures) write("\n\n") if self.results.nb_errors: self._print_tb(self.results.errors) write('\n') if self.results.nb_failures: self._print_tb(self.results.failures) write('\n') avt = 'average_request_time' def _metric(item1, item2): return - cmp(item1[-1][avt], item2[-1][avt]) metrics = [(url, metric) for url, metric in self.results.get_url_metrics().items()] metrics.sort(_metric) if len(metrics) > 0: slowest = metrics[0] write("\nSlowest URL: %s \tAverage Request Time: %s" % (slowest[0], slowest[1][avt])) if len(metrics) > 10: write("\n\nStats by URLs (10 slowests):") metrics = metrics[:10] else: write("\n\nStats by URLs:") longer_url = max([len(url) for url, metric in metrics]) for url, metric in metrics: spacing = (longer_url - len(url)) * ' ' write("\n- %s%s\t" % (url, spacing)) res = [] for name, value in metric.items(): res.append("%s: %s" % (name.replace('_', ' ').capitalize(), value)) write('%s' % '\t'.join(res)) write('\n') counters = self.results.get_counters() if len(counters) > 0: write("\nCustom metrics:") for name, value in counters.items(): write("\n- %s : %s" % (name, value)) write('\n') sys.stdout.flush() sys.stderr.flush() def _print_tb(self, data): # 3 most commons errors = defaultdict(int) for line in data: if len(line) == 0: continue exc_class, exc_, tb_ = line[0] if isinstance(exc_class, basestring): name_ = exc_class else: name_ = exc_class.__name__ errors[name_, exc_, tb_] += 1 errors = [(count, name, exc, 
tb) for (name, exc, tb), count in errors.items()] errors.sort() for count, name, exc, tb in errors[:3]: sys.stderr.write("%d occurrences of: \n" % count) sys.stderr.write(" %s: %s" % (name, exc)) if tb in (None, ''): # XXX fix this sys.stderr.write('\n') else: if isinstance(tb, basestring): sys.stderr.write(tb.replace('\n', ' \n')) else: sys.stderr.write(" Traceback: \n") traceback.print_tb(tb, file=sys.stderr) def refresh(self, run_id=None): if isinstance(self.results, ZMQTestResult): return self._duration_progress(run_id) def _duration_progress(self, run_id=None): if run_id is not None: self.results.sync(run_id) duration = self.args.get('duration') if duration is not None: percent = int(float(self.results.duration) / float(duration) * 100.) else: percent = int(float(self.results.nb_finished_tests) / float(self.args['total']) * 100.) if percent > 100: percent = 100 rel_percent = get_screen_relative_value(percent, self._terminal_width) bar = '[' + ('=' * rel_percent).ljust(self._terminal_width - 8) + ']' out = "\r%s %s%%" % (bar, str(percent).rjust(3)) sys.stdout.write(out) sys.stdout.flush() def push(self, method_called, *args, **data): pass <file_sep>/venv/Lib/site-packages/loads/examples/test_demo.py from loads.case import TestCase class TestWebSite(TestCase): def test_mozilla_org(self): res = self.session.get('http://mozilla.org/en-US/') self.assertTrue('Work with us' in res.content) <file_sep>/venv/Lib/site-packages/loads/tests/test_brokerctrl.py import unittest2 import tempfile import shutil from collections import defaultdict import time import psutil from zmq.green.eventloop import ioloop from loads.util import json from loads.transport.brokerctrl import (BrokerController, NotEnoughWorkersError, _compute_observers) class Stream(object): msgs = [] def send_multipart(self, msg): self.msgs.append(msg) send = send_multipart class FakeBroker(object): _backend = _backstream = Stream() _publisher = Stream() pid = '123456' msgs = defaultdict(list) endpoints = {'receiver': 'xxx'} def send_json(self, target, msg): self.msgs[str(target)].append(msg) class TestBrokerController(unittest2.TestCase): def setUp(self): self.dbdir = tempfile.mkdtemp() loop = ioloop.IOLoop() self.broker = FakeBroker() dboptions = {'directory': self.dbdir} self.ctrl = BrokerController(self.broker, loop, dboptions=dboptions) self.old_exists = psutil.pid_exists psutil.pid_exists = lambda pid: True def tearDown(self): psutil.pid_exists = self.old_exists Stream.msgs[:] = [] shutil.rmtree(self.dbdir) def test_registration(self): self.ctrl.register_agent({'pid': '1', 'agent_id': '1'}) self.assertTrue('1' in self.ctrl.agents) # make the agent busy before we unregister it self.ctrl.send_to_agent('1', ['something']) self.ctrl.reserve_agents(1, 'run') self.ctrl.unregister_agent('1') self.assertFalse('1' in self.ctrl.agents) def test_reserve_agents(self): self.ctrl.register_agent({'pid': '1', 'agent_id': '1'}) self.ctrl.register_agent({'pid': '2', 'agent_id': '2'}) self.assertRaises(NotEnoughWorkersError, self.ctrl.reserve_agents, 10, 'run') agents = self.ctrl.reserve_agents(2, 'run') agents.sort() self.assertEqual(agents, ['1', '2']) def test_run_and_stop(self): self.ctrl.register_agent({'pid': '1', 'agent_id': '1'}) self.ctrl.register_agent({'pid': '2', 'agent_id': '2'}) self.ctrl.register_agent({'pid': '3', 'agent_id': '3'}) self.ctrl.reserve_agents(1, 'run') self.ctrl.reserve_agents(2, 'run2') runs = self.ctrl.list_runs(None, None).keys() runs.sort() self.assertEqual(['run', 'run2'], runs) self.ctrl.stop_run(['somemsg'], 
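
# Usage sketch (added for illustration, not part of the original
# example): a case like the one above can also be driven outside the
# loads runner. The runner normally injects the result object and the
# loads_status tuple (hits, users, current_hit, current_user) shown
# here.
if __name__ == '__main__':
    from loads.results import UnitTestTestResult

    result = UnitTestTestResult()
    case = TestWebSite('test_mozilla_org', test_result=result)
    case(loads_status=(1, 1, 1, 1))
    print(result.wasSuccessful())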
<file_sep>/venv/Lib/site-packages/loads/tests/test_brokerctrl.py
import unittest2
import tempfile
import shutil
from collections import defaultdict
import time

import psutil
from zmq.green.eventloop import ioloop

from loads.util import json
from loads.transport.brokerctrl import (BrokerController,
                                        NotEnoughWorkersError,
                                        _compute_observers)


class Stream(object):
    msgs = []

    def send_multipart(self, msg):
        self.msgs.append(msg)

    send = send_multipart


class FakeBroker(object):
    _backend = _backstream = Stream()
    _publisher = Stream()
    pid = '123456'
    msgs = defaultdict(list)
    endpoints = {'receiver': 'xxx'}

    def send_json(self, target, msg):
        self.msgs[str(target)].append(msg)


class TestBrokerController(unittest2.TestCase):

    def setUp(self):
        self.dbdir = tempfile.mkdtemp()
        loop = ioloop.IOLoop()
        self.broker = FakeBroker()
        dboptions = {'directory': self.dbdir}
        self.ctrl = BrokerController(self.broker, loop,
                                     dboptions=dboptions)
        self.old_exists = psutil.pid_exists
        psutil.pid_exists = lambda pid: True

    def tearDown(self):
        psutil.pid_exists = self.old_exists
        Stream.msgs[:] = []
        shutil.rmtree(self.dbdir)

    def test_registration(self):
        self.ctrl.register_agent({'pid': '1', 'agent_id': '1'})
        self.assertTrue('1' in self.ctrl.agents)

        # make the agent busy before we unregister it
        self.ctrl.send_to_agent('1', ['something'])
        self.ctrl.reserve_agents(1, 'run')

        self.ctrl.unregister_agent('1')
        self.assertFalse('1' in self.ctrl.agents)

    def test_reserve_agents(self):
        self.ctrl.register_agent({'pid': '1', 'agent_id': '1'})
        self.ctrl.register_agent({'pid': '2', 'agent_id': '2'})

        self.assertRaises(NotEnoughWorkersError, self.ctrl.reserve_agents,
                          10, 'run')

        agents = self.ctrl.reserve_agents(2, 'run')
        agents.sort()
        self.assertEqual(agents, ['1', '2'])

    def test_run_and_stop(self):
        self.ctrl.register_agent({'pid': '1', 'agent_id': '1'})
        self.ctrl.register_agent({'pid': '2', 'agent_id': '2'})
        self.ctrl.register_agent({'pid': '3', 'agent_id': '3'})

        self.ctrl.reserve_agents(1, 'run')
        self.ctrl.reserve_agents(2, 'run2')

        runs = self.ctrl.list_runs(None, None).keys()
        runs.sort()
        self.assertEqual(['run', 'run2'], runs)
        self.ctrl.stop_run(['somemsg'], {'run_id': 'run'})

        # make sure the STOP cmd made it through
        msgs = [msg for msg in Stream.msgs if '_STATUS' not in msg[-1]]
        self.assertEqual(msgs[0][-1], '{"command":"STOP"}')
        self.assertEqual(len(msgs), 1)

    def test_db_access(self):
        self.ctrl.register_agent({'pid': '1', 'agent_id': '1'})
        self.ctrl.reserve_agents(1, 'run')

        # metadata
        data = {'some': 'data'}
        self.ctrl.save_metadata('run', data)
        self.assertEqual(self.ctrl.get_metadata(None, {'run_id': 'run'}),
                         data)

        # save data by agent
        self.ctrl.save_data('1', data)
        self.ctrl.flush_db()

        # we get extra run_id key, set for us
        self.assertEqual(data['run_id'], 'run')

        back = self.ctrl.get_data(None, {'run_id': 'run'})
        self.assertTrue(back[0]['some'], 'data')

        back2 = self.ctrl.get_data(None, {'run_id': 'run'})
        self.assertEqual(back, back2)

    def test_compute_observers(self):
        obs = ['irc', 'loads.observers.irc']
        observers = _compute_observers(obs)
        self.assertEqual(len(observers), 2)
        self.assertRaises(ImportError, _compute_observers, ['blah'])

    def test_run(self):
        msg = ['somedata', '', 'target']
        data = {'agents': 1, 'args': {}}

        # not enough agents
        self.ctrl.run(msg, data)
        res = self.broker.msgs.values()[0]
        self.assertEqual(res, [{'error': 'Not enough agents'}])

        # one agent, we're good
        self.ctrl._agents['agent1'] = {'pid': '1234'}
        self.ctrl.run(msg, data)
        runs = self.broker.msgs.values()[0][-1]
        self.assertEqual(runs['result']['agents'], ['agent1'])

    def test_run_command(self):
        msg = ['somedata', '', 'target']
        data = {'agents': 1, 'args': {}, 'agent_id': '1'}

        self.ctrl.run_command('RUN', msg, data)
        self.ctrl.run_command('AGENT_STATUS', msg, data)
        runs = self.broker.msgs.values()[0][-1]
        self.assertEqual(runs['result']['agents'], ['agent1'])

        msg = {"command": "_STATUS", "args": {}, "agents": 1,
               "agent_id": "1"}
        msg = msg.items()
        msg.sort()

        self.assertTrue(len(self.broker._backstream.msgs), 1)
        self.assertTrue(len(self.broker._backstream.msgs[0]), 1)
        got = self.broker._backstream.msgs[0][-1]
        got = json.loads(got)
        got = got.items()
        got.sort()
        self.assertEqual(msg, got)

    def test_clean(self):
        self.ctrl.agent_timeout = 0.1
        self.ctrl._associate('run', ['1', '2'])
        self.ctrl.clean()
        self.assertTrue('1' in self.ctrl._agent_times)
        self.assertTrue('2' in self.ctrl._agent_times)
        time.sleep(.2)
        self.ctrl.clean()
        self.assertEqual(self.ctrl._agent_times, {})
        self.ctrl.test_ended('run')
<file_sep>/venv/Lib/site-packages/konfig/tests/test_config.py
# coding: utf8

# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
import argparse
import unittest
import tempfile
import os

from six import StringIO

from konfig import Config, SettingsDict


_FILE_ONE = """\
[DEFAULT]
extends = ${TEMPFILE}

[one]
foo = bar
num = -12
not_a_num = 12abc
st = "o=k"
lines = 1
        two
        3
env = some ${__STUFF__}
location = ${HERE}

[two]
a = b
"""

_FILE_TWO = """\
[one]
foo = baz
bee = 1
two = "a"

[three]
more = stuff
location = ${HERE}
"""

_FILE_THREE = """\
[DEFAULT]
extends = no-no,no-no-no-no,no-no-no-no,theresnolimit

[one]
foo = bar
"""

_FILE_FOUR = """\
[global]
foo = bar
baz = bawlp

[auth]
a = b
c = d

[storage]
e = f
g = h

[multi:once]
storage.i = j
storage.k = l

[multi:thrice]
storage.i = jjj
storage.k = lll
"""

_EXTRA = """\
[some]
stuff = True

[other]
thing = ok
"""

_FILE_OVERRIDE = """\
[DEFAULT]
overrides = ${TEMPFILE}

[one]
foo = bar
"""

_FILE_ARGS = """\
[circus]
httpd = True
zmq_endpoint = http://ok

[other]
stuff = 10.3
thing = bleh

[floats]
stuff = 10.3
float = 9.
again = .3
digits = 10.34
digits2 = .34

[bleh]
mew = 10

[mi]
log_level = DEBUG
log_output = stdout
daemon = True
pidfile = pid
multi = one
        two
        three
"""


class ConfigTestCase(unittest.TestCase):

    def setUp(self):
        os.environ['__STUFF__'] = 'stuff'
        fp, filename = tempfile.mkstemp()
        f = os.fdopen(fp, 'w')
        f.write(_FILE_TWO)
        f.close()
        os.environ['TEMPFILE'] = filename
        self.file_one = StringIO(_FILE_ONE)
        self.file_two = filename
        self.file_three = StringIO(_FILE_THREE)
        self.file_override = StringIO(_FILE_OVERRIDE)
        self.file_args = StringIO(_FILE_ARGS)

        fp, filename = tempfile.mkstemp()
        f = os.fdopen(fp, 'w')
        f.write(_FILE_FOUR)
        f.close()
        self.file_four = filename

    def tearDown(self):
        if '__STUFF__' in os.environ:
            del os.environ['__STUFF__']
        os.remove(self.file_two)

    def test_reader(self):
        config = Config(self.file_one)

        # values conversion
        self.assertEquals(config.get('one', 'foo'), 'bar')
        self.assertEquals(config.get('one', 'num'), -12)
        self.assertEquals(config.get('one', 'not_a_num'), "12abc")
        self.assertEquals(config.get('one', 'st'), 'o=k')
        self.assertEquals(config.get('one', 'lines'), [1, 'two', 3])
        self.assertEquals(config.get('one', 'env'), 'some stuff')

        # getting a map
        map = config.get_map()
        self.assertEquals(map['one.foo'], 'bar')
        map = config.get_map('one')
        self.assertEquals(map['foo'], 'bar')

        del os.environ['__STUFF__']
        self.assertEquals(config.get('one', 'env'), 'some stuff')

        # extends
        self.assertEquals(config.get('three', 'more'), 'stuff')
        self.assertEquals(config.get('one', 'two'), 'a')

    def test_nofile(self):
        # if a user tries to use an inexistant file in extensions,
        # pops an error
        self.assertRaises(IOError, Config, self.file_three)

    def test_settings_dict_copy(self):
        settings = SettingsDict({"a.one": 1,
                                 "a.two": 2,
                                 "b.three": 3,
                                 "four": 4})
        new_settings = settings.copy()
        self.assertEqual(settings, new_settings)
        self.assertTrue(isinstance(new_settings, SettingsDict))

    def test_settings_dict_getsection(self):
        settings = SettingsDict({"a.one": 1,
                                 "a.two": 2,
                                 "b.three": 3,
                                 "four": 4})
        self.assertEquals(settings.getsection("a"), {"one": 1, "two": 2})
        self.assertEquals(settings.getsection("b"), {"three": 3})
        self.assertEquals(settings.getsection("c"), {})
        self.assertEquals(settings.getsection(""), {"four": 4})

    def test_settings_dict_setdefaults(self):
        settings = SettingsDict({"a.one": 1,
                                 "a.two": 2,
                                 "b.three": 3,
                                 "four": 4})
        settings.setdefaults({"a.two": "TWO",
                              "a.five": 5,
                              "new": "key"})
        self.assertEquals(settings.getsection("a"),
                          {"one": 1, "two": 2, "five": 5})
        self.assertEquals(settings.getsection("b"), {"three": 3})
        self.assertEquals(settings.getsection("c"), {})
        self.assertEquals(settings.getsection(""),
                          {"four": 4, "new": "key"})

    def test_location_interpolation(self):
        config = Config(self.file_one)
        # file_one is a StringIO, so it has no location.
        self.assertEquals(config.get('one', 'location'), '${HERE}')
        # file_two is a real file, so it has a location.
        file_two_loc = os.path.dirname(self.file_two)
        self.assertEquals(config.get('three', 'location'), file_two_loc)

    def test_override_mode(self):
        config = Config(self.file_override)
        self.assertEquals(config.get('one', 'foo'), 'baz')
        self.assertEquals(config.get('three', 'more'), 'stuff')

    def test_convert_float(self):
        config = Config(self.file_args)
        self.assertEqual(config['floats']['stuff'], 10.3)
        self.assertEqual(config['floats']['float'], 9.0)
        self.assertEqual(config['floats']['again'], .3)
        self.assertEqual(config['floats']['digits'], 10.34)
        self.assertEqual(config['floats']['digits2'], .34)

    def test_as_args(self):
        config = Config(self.file_args)
        args = config.as_args(strip_prefixes=['circus'],
                              omit_sections=['bleh', 'mi', 'floats'],
                              omit_options=[('other', 'thing')])
        wanted = ['--other-stuff', '10.3', '--httpd',
                  '--zmq-endpoint', 'http://ok']
        wanted.sort()
        args.sort()
        self.assertEqual(args, wanted)

        args = config.as_args(omit_sections=['bleh', 'mi', 'floats'])
        wanted = ['--circus-zmq-endpoint', 'http://ok', '--other-thing',
                  'bleh', '--other-stuff', '10.3', '--circus-httpd']
        wanted.sort()
        args.sort()
        self.assertEqual(args, wanted)

        # it also works with an argparse parser
        parser = argparse.ArgumentParser(description='Run some watchers.')
        parser.add_argument('config', help='configuration file', nargs='?')
        parser.add_argument('-L', '--log-level', dest='loglevel')
        parser.add_argument('--log-output', dest='logoutput')
        parser.add_argument('--daemon', dest='daemonize',
                            action='store_true')
        parser.add_argument('--pidfile', dest='pidfile')
        parser.add_argument('--multi', action='append')

        args = config.scan_args(parser, strip_prefixes=['mi'])
        args.sort()

        wanted = ['--log-level', u'DEBUG', '--log-output', u'stdout',
                  '--daemon', '--pidfile', u'pid', '--multi', 'one',
                  '--multi', 'two', '--multi', 'three']
        wanted.sort()

        self.assertEqual(wanted, args)

    def test_utf8(self):
        utf8 = os.path.join(os.path.dirname(__file__), 'utf8.ini')
        config = Config(utf8)
        self.assertEqual(config.get('ok', 'yeah'), u'é')
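
# Added illustration (not part of the original suite): outside of the
# tests, Config is typically driven as below. The 'settings.ini' file
# name and the 'loads' section are hypothetical.
def _example_config_usage():
    config = Config('settings.ini')
    # a single converted value, as exercised in test_reader above
    broker = config.get('loads', 'broker')
    # a whole section flattened into a mapping
    section = config.get_map('loads')
    return broker, section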
<file_sep>/venv/Lib/site-packages/src/loadtest.py
RADIO_TYPE = {
    '': -1,
    'gsm': 0,
    'cdma': 1,
    'umts': 2,
    'lte': 3,
}

from loads.case import TestCase
import binascii
import json
import os
import os.path
import pickle
import random

from ConfigParser import SafeConfigParser

cfg = SafeConfigParser()
cfg.read(os.path.join('src', 'ichnaea.ini'))

TOWER_FILE = os.path.join('src', 'tower.pickle')
AP_FILE = os.path.join('src', 'ap.pickle')

HOST = 'https://' + cfg.get('loadtest', 'WEBAPP_HOST')

TESTING_AP_SUBSET = TESTING_CELL_SUBSET = 10000


def random_ap():
    for channel in range(1, 12):
        for frequency in range(1, 5000):
            for signal in range(-50, 0):
                key = binascii.b2a_hex(os.urandom(15))[:12]
                key = ':'.join(key[i:i + 2]
                               for i in range(0, len(key), 2))
                yield {"key": key,
                       "channel": random.randint(1, 12),
                       "frequency": random.randint(0, 5000),
                       "signal": random.randint(-50, 0)}


def random_cell():
    for radio in range(0, 3):
        for mcc in range(1, 5):
            for mnc in range(1, 5):
                for cid in range(1, 20000):
                    for lac in range(1, 5):
                        yield {'cid': cid,
                               'mnc': mnc,
                               'lac': lac,
                               'mcc': mcc,
                               'radio': radio}


def generate_data():
    if not os.path.isfile(TOWER_FILE) or not os.path.isfile(AP_FILE):
        tower_data = {}
        ap_data = {}
        cell_gen = random_cell()
        wifi_gen = random_ap()
        for i in range(TESTING_CELL_SUBSET):
            lat = random.randint(-900000000, 900000000) / ((10 ** 7) * 1.0)
            lon = random.randint(-900000000, 900000000) / ((10 ** 7) * 1.0)
            tower_data[(lat, lon)] = []
            ap_data[(lat, lon)] = []

            for x in range(random.randint(1, 20)):
                rcell = cell_gen.next()
                data = {"radio": rcell['radio'],
                        "mcc": rcell['mcc'],
                        "mnc": rcell['mnc'],
                        "lac": rcell['lac'],
                        "cid": rcell['cid']}
                if data not in tower_data[(lat, lon)]:
                    tower_data[(lat, lon)].append(data)

            for x in range(random.randint(1, 20)):
                rapp = wifi_gen.next()
                ap_data[(lat, lon)].append({"key": rapp['key']})

        open(TOWER_FILE, 'w').write(pickle.dumps(tower_data))
        open(AP_FILE, 'w').write(pickle.dumps(ap_data))
    else:
        ap_data = pickle.load(open(AP_FILE))
        tower_data = pickle.load(open(TOWER_FILE))
    return tower_data, ap_data


class TestIchnaea(TestCase):
    TOWER_DATA = None
    AP_DATA = None

    def setUp(self):
        if self.TOWER_DATA is None:
            self.TOWER_DATA, self.AP_DATA = generate_data()
            self.TOWER_DATA, self.AP_DATA = (self.TOWER_DATA.items(),
                                             self.AP_DATA.items())

    def test_submit_cell_data(self):
        """
        This iterates over all generated cell data and submits it in
        batches
        """
        (lat, lon), all_cell_data = random.choice(self.TOWER_DATA)

        cells = []
        for cell_data in all_cell_data:
            cells.append({"radio": "umts",
                          "mcc": cell_data['mcc'],
                          "mnc": cell_data['mnc'],
                          "lac": cell_data['lac'],
                          "cid": cell_data['cid']})

        json_data = {"items": [{"lat": lat,
                                "lon": lon,
                                "accuracy": 10,
                                "altitude": 1,
                                "altitude_accuracy": 7,
                                "radio": "gsm",
                                "cell": cells}]}
        blob = json.dumps(json_data)
        res = self.session.post(HOST + '/v1/submit', blob)
        self.assertEqual(res.status_code, 204)

    def test_submit_ap_data(self):
        """
        This iterates over all generated AP data and submits it in
        batches
        """
        (lat, lon), ap_data = random.choice(self.AP_DATA)
        jdata = {"items": [{"lat": lat,
                            "lon": lon,
                            "accuracy": 17,
                            "wifi": ap_data}]}
        blob = json.dumps(jdata)
        res = self.session.post(HOST + '/v1/submit', blob)
        self.assertEqual(res.status_code, 204)

    def test_submit_mixed_data(self):
        # cells come from the tower data, wifi keys from the AP data
        (lat, lon), all_cell_data = random.choice(self.TOWER_DATA)
        cells = []
        for cell_data in all_cell_data:
            cells.append({"radio": "umts",
                          "mcc": cell_data['mcc'],
                          "mnc": cell_data['mnc'],
                          "lac": cell_data['lac'],
                          "cid": cell_data['cid']})

        jdata = {"items": [{"lat": lat,
                            "lon": lon,
                            "accuracy": 10,
                            "altitude": 1,
                            "altitude_accuracy": 7,
                            "radio": "gsm",
                            "cell": cells}]}

        (lat, lon), ap_data = random.choice(self.AP_DATA)
        jdata['items'].append({"lat": lat,
                               "lon": lon,
                               "accuracy": 17,
                               "wifi": ap_data})
        blob = json.dumps(jdata)
        res = self.session.post(HOST + '/v1/submit', blob)
        self.assertEqual(res.status_code, 204)

    def test_search_wifi(self):
        """ Grab 3 keys for a lat lon """
        (lat, lon), ap_data = random.choice(self.AP_DATA)
        expected_lat = int(lat * 1000)
        expected_lon = int(lon * 1000)
        if len(ap_data) >= 3:
            wifi_data = ap_data[:3]
            if random.random() >= 0.5:
                # Throw in some garbage
                wifi_data.append({'key': 'aa:aa:aa:aa:aa:aa'})
            jdata = json.dumps({'wifi': wifi_data})
            res = self.session.post(HOST + '/v1/search?key=test', jdata)
            self.assertEqual(res.status_code, 200)
            jdata = json.loads(res.content)
            if jdata['status'] != 'not_found':
                actual_lat = int(jdata['lat'] * 1000)
                actual_lon = int(jdata['lon'] * 1000)
                self.assertEquals(actual_lat, expected_lat)
                self.assertEquals(actual_lon, expected_lon)

    def test_search_cell(self):
        RADIO_MAP = dict([(v, k) for k, v in RADIO_TYPE.items()
                          if k != ''])
        (lat, lon), all_cells = random.choice(self.TOWER_DATA)
        expected_lat = int(lat * 1000)
        expected_lon = int(lon * 1000)

        query_data = {"radio": '', "cell": []}
        for cell_data in all_cells:
            radio_name = RADIO_MAP[cell_data['radio']]
            if query_data['radio'] == '':
                query_data['radio'] = radio_name
            query_data['cell'].append(dict(radio=radio_name,
                                           cid=cell_data['cid'],
                                           mcc=cell_data['mcc'],
                                           mnc=cell_data['mnc'],
                                           lac=cell_data['lac']))
        jdata = json.dumps(query_data)
        res = self.session.post(HOST + '/v1/search?key=test', jdata)
        self.assertEqual(res.status_code, 200)

        jdata = json.loads(res.content)
        if jdata['status'] != 'not_found':
            actual_lat = int(jdata['lat'] * 1000)
            actual_lon = int(jdata['lon'] * 1000)
            self.assertEquals(actual_lat, expected_lat)
            self.assertEquals(actual_lon, expected_lon)
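
# Added illustration (not part of the original script): the shape of a
# minimal /v1/submit payload, built the same way as in the tests above.
# The coordinates and the wifi key are made-up values.
def _example_submit_payload():
    return json.dumps({"items": [{"lat": 51.5,
                                  "lon": -0.1,
                                  "accuracy": 10,
                                  "wifi": [{"key": "00:11:22:33:44:55"}]}]})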
<file_sep>/venv/Lib/site-packages/loads/tests/test_external_runner.py
from unittest import TestCase
import time

import mock
from zmq.eventloop import ioloop

from loads.runners import ExternalRunner as ExternalRunner_
from loads.util import json


class ExternalRunner(ExternalRunner_):
    """Subclass the ExternalRunner to be sure we don't use the std output
    in the tests unless asked especially to do so."""

    def register_output(self, output_name):
        pass


class FakeProcess(object):
    """Mimics the API of subprocess.Popen"""

    def __init__(self, running=True, options=None):
        self._running = running
        self.terminated = False
        self.options = options
        self.returncode = 0

    def poll(self):
        if self._running:
            return None
        else:
            return 1

    def terminate(self):
        self.terminated = True


class TestExternalRunner(TestCase):

    def test_step_hits(self):
        runner = ExternalRunner({'hits': [1, 2, 10]})
        self.assertEquals(runner.step_hits, 1)

        runner._current_step += 1
        self.assertEquals(runner.step_hits, 2)

        runner._current_step += 1
        self.assertEquals(runner.step_hits, 10)

        runner._current_step += 1
        self.assertEquals(runner.step_hits, 10)

    def test_step_users(self):
        runner = ExternalRunner({'users': [1, 2, 10]})
        self.assertEquals(runner.step_users, 1)

        runner._current_step += 1
        self.assertEquals(runner.step_users, 2)

        runner._current_step += 1
        self.assertEquals(runner.step_users, 10)

        runner._current_step += 1
        self.assertEquals(runner.step_users, 10)

    def test_nb_steps(self):
        runner = ExternalRunner({'users': [1, 2, 10]})
        self.assertEquals(runner._nb_steps, 3)

        runner = ExternalRunner({'hits': [1, 2, 10]})
        self.assertEquals(runner._nb_steps, 3)

        runner = ExternalRunner({'users': [1, 2, 10],
                                 'hits': [1, 2, 3, 4]})
        self.assertEquals(runner._nb_steps, 4)

    def test_check_processes_waits_for_step_to_complete(self):
        runner = ExternalRunner()
        runner._start_next_step = mock.MagicMock()
        runner._step_started_at = time.time()

        runner._processes = [FakeProcess(running=False),
                             FakeProcess(running=True)]
        runner._check_processes()
        self.assertFalse(runner._start_next_step.called)

        # once the second process is done too, the step can complete
        runner._processes[1]._running = False
        runner._check_processes()
        self.assertTrue(runner._start_next_step.called)

    def test_check_processes_ends_step_if_procs_time_out(self):
        runner = ExternalRunner({'process_timeout': 2})
        runner._start_next_step = mock.MagicMock()
        runner._step_started_at = time.time() - 5

        runner._processes = [FakeProcess(running=False),
                             FakeProcess(running=True)]
        runner._check_processes()
        self.assertTrue(runner._start_next_step.called)

    def test_check_processes_reaps_pending_processes(self):
        runner = ExternalRunner()
        runner._start_next_step = mock.MagicMock()
        runner._step_started_at = time.time()

        runner._processes_pending_cleanup = [FakeProcess(running=True),
                                             FakeProcess(running=False)]
        runner._check_processes()
        self.assertEquals(len(runner._processes_pending_cleanup), 1)

    def test_processes_are_reaped_at_end_of_step(self):
        runner = ExternalRunner()
        runner.stop_run = mock.MagicMock()
        runner._current_step = 1
        runner._nb_steps = 1

        procs = [FakeProcess(running=True), FakeProcess(running=False)]
        runner._processes = procs

        runner._start_next_step()
        self.assertTrue(procs[0].terminated)
        self.assertFalse(procs[1].terminated)
        self.assertEquals(len(runner._processes), 0)
        self.assertTrue(procs[0] in runner._processes_pending_cleanup)

    def test_runner_is_reinitialized_on_each_step(self):
        runner = ExternalRunner()
        runner.stop_run = mock.MagicMock()
        runner.spawn_external_runner = mock.MagicMock()
        runner._current_step = 0
        runner._nb_steps = 2

        self.assertTrue(runner._step_started_at is None)
        runner._start_next_step()
        self.assertFalse(runner.stop_run.called)
        self.assertEqual(runner._current_step, 1)
        self.assertTrue(runner._step_started_at is not None)

        runner._step_started_at = None
        runner._start_next_step()
        self.assertFalse(runner.stop_run.called)
        self.assertEqual(runner._current_step, 2)
        self.assertTrue(runner._step_started_at is not None)

        runner._start_next_step()
        self.assertTrue(runner.stop_run.called)

    def test_messages_are_relayed(self):
        runner = ExternalRunner()
        runner._test_result = mock.MagicMock()
        data = json.dumps({'data_type': 'foo', 'bar': 'barbaz',
                           'run_id': 1})
        runner._process_result([data, ])
        runner.test_result.foo.assertCalledWith(bar='barbaz')

    def test_execute(self):
        loop = ioloop.IOLoop()
        loop.start = mock.Mock()
        runner = ExternalRunner({'hits': [2], 'users': [2]}, loop)
        runner._prepare_filesystem = mock.Mock()
        runner.spawn_external_runner = mock.Mock()
        runner._execute()
        self.assertTrue(loop.start.called)
        self.assertTrue(runner._prepare_filesystem.called)
        self.assertEquals(runner.spawn_external_runner.call_count, 2)

    def test_execute_step_users(self):
        loop = ioloop.IOLoop()
        loop.start = mock.Mock()
        runner = ExternalRunner({'hits': [1], 'users': [1, 3, 5]}, loop)
        runner._prepare_filesystem = mock.Mock()
        runner.spawn_external_runner = mock.Mock()
        runner._execute()
        self.assertTrue(loop.start.called)
        self.assertTrue(runner._prepare_filesystem.called)
        self.assertEquals(runner.spawn_external_runner.call_count, 1)

        runner._start_next_step()
        self.assertEquals(runner.spawn_external_runner.call_count, 4)

        runner._start_next_step()
        self.assertEquals(runner.spawn_external_runner.call_count, 9)

    @mock.patch('loads.runners.external.subprocess.Popen',
                lambda *args, **kwargs: FakeProcess(options=(args, kwargs)))
    def test_spawn_external_runner(self):
        runner = ExternalRunner({'test_runner': 'foobar',
                                 'hits': [2, 3],
                                 'users': [2, 4],
                                 'fqn': 'baz'})

        runner.spawn_external_runner(1)

        self.assertEquals(len(runner._processes), 1)
        args, kwargs = runner._processes[0].options
        self.assertTrue(['foobar'] in args)
        loads_options = [e for e in kwargs['env']
                         if e.startswith('LOADS_')]
        loads_options.sort()
        self.assertEquals(loads_options,
                          ["LOADS_AGENT_ID", "LOADS_CURRENT_USER",
                           "LOADS_RUN_ID", "LOADS_TOTAL_HITS",
                           "LOADS_TOTAL_USERS", "LOADS_ZMQ_RECEIVER"])

    @mock.patch('loads.runners.external.subprocess.Popen',
                lambda *args, **kwargs: FakeProcess(options=(args, kwargs)))
    def test_spawn_external_runner_with_duration(self):
        runner = ExternalRunner({'test_runner': 'foobar',
                                 'duration': 5,
                                 'users': [2, 4],
                                 'fqn': 'baz'})

        runner.spawn_external_runner(1)

        self.assertEquals(len(runner._processes), 1)
        args, kwargs = runner._processes[0].options
        self.assertTrue(['foobar'] in args)
        loads_options = [e for e in kwargs['env']
                         if e.startswith('LOADS_')]
        loads_options.sort()
        self.assertEquals(loads_options,
                          ["LOADS_AGENT_ID", "LOADS_CURRENT_USER",
                           "LOADS_DURATION", "LOADS_RUN_ID",
                           "LOADS_TOTAL_USERS", "LOADS_ZMQ_RECEIVER"])
<file_sep>/venv/Lib/site-packages/loads/results/__init__.py
from loads.results.adapter import LoadsTestResult  # NOQA
from loads.results._unittest import UnitTestTestResult  # NOQA
from loads.results.zmqrelay import ZMQTestResult  # NOQA
from loads.results.zmqrelay import ZMQSummarizedTestResult  # NOQA
from loads.results.base import TestResult  # NOQA
from loads.results.remote import RemoteTestResult  # NOQA
<file_sep>/venv/Lib/site-packages/loads/tests/test_redis_db.py
import unittest2
import time

from zmq.green.eventloop import ioloop

try:
    from loads.db._redis import RedisDB
    import redis
    redis.StrictRedis().ping()
    NO_TEST = False
except Exception:
    NO_TEST = True

from loads.tests.test_python_db import ONE_RUN
from loads.util import json


_KEYS = ['errors:1', 'errors:2', 'data:1', 'data:2', 'counters:1',
         'counters:2', 'bcounters:1', 'bcounters:2', 'metadata:1',
         'metadata:2', 'urls:1', 'urls:2']

for type_ in ('addSuccess', 'stopTestRun', 'stopTest', 'startTest',
              'startTestRun', 'add_hit'):
    _KEYS.append('count:1:%s' % type_)
    _KEYS.append('count:2:%s' % type_)


@unittest2.skipIf(NO_TEST, 'No redis')
class TestRedisDB(unittest2.TestCase):

    def setUp(self):
        self.loop = ioloop.IOLoop()
        self.db = RedisDB(self.loop)
        self._redis = redis.StrictRedis()

    def tearDown(self):
        self.loop.close()

        for md5 in self._redis.smembers('bcounters:1'):
            self._redis.delete('bcount:1:%s' % md5)

        for md5 in self._redis.smembers('bcounters:2'):
            self._redis.delete('bcount:2:%s' % md5)

        for url in self._redis.smembers('urls:2'):
            self._redis.delete('url:2:%s' % url)

        for url in self._redis.smembers('urls:1'):
            self._redis.delete('url:1:%s' % url)

        for key in _KEYS:
            self._redis.delete(key)

        self.db.flush()
        self.db.close()

    def test_brokerdb(self):
        self.assertEqual(list(self.db.get_data('swwqqsw')), [])
        self.assertTrue(self.db.ping())

        def add_data():
            for line in ONE_RUN:
                data = dict(line)
                data['run_id'] = '1'
                self.db.add(data)
                data['run_id'] = '2'
                self.db.add(data)

        self.loop.add_callback(add_data)
        self.loop.add_callback(add_data)
        self.loop.add_timeout(time.time() + .5, self.loop.stop)
        self.loop.start()

        # let's check if we got the data in the file
        data = [json.loads(self._redis.lindex('data:1', i))
                for i in range(self._redis.llen('data:1'))]
        data.sort()

        data2 = [json.loads(self._redis.lindex('data:2', i))
                 for i in range(self._redis.llen('data:2'))]
        data2.sort()

        self.assertEqual(len(data), 14)
        self.assertEqual(len(data2), 14)

        counts = self.db.get_counts('1')

        for type_ in ('addSuccess', 'stopTestRun', 'stopTest', 'startTest',
                      'startTestRun', 'add_hit'):
            self.assertEqual(dict(counts)[type_], 2)

        # we got 12 lines, let's try batching
        batch = list(self.db.get_data('1', size=2))
        self.assertEqual(len(batch), 2)

        batch = list(self.db.get_data('1', start=2))
        self.assertEqual(len(batch), 12)

        batch = list(self.db.get_data('1', start=2, size=5))
        self.assertEqual(len(batch), 5)

        data3 = list(self.db.get_data('1'))
        data3.sort()
        self.assertEqual(data3, data)

        # filtered
        data3 = list(self.db.get_data('1', data_type='add_hit'))
        self.assertEqual(len(data3), 2)

        # group by
        res = list(self.db.get_data('1', groupby=True))
        self.assertEqual(len(res), 7)
        self.assertEqual(res[0]['count'], 2)

        res = list(self.db.get_data('1', data_type='add_hit',
                                    groupby=True))
        self.assertEqual(res[0]['count'], 2)

        self.assertTrue('1' in self.db.get_runs())
        self.assertTrue('2' in self.db.get_runs())

        # len(data) < asked size
        batch = list(self.db.get_data('1', start=2, size=5000))
        self.assertEqual(len(batch), 12)

    def test_metadata(self):
        self.assertEqual(self.db.get_metadata('1'), {})
        self.db.save_metadata('1', {'hey': 'ho'})
        self.assertEqual(self.db.get_metadata('1'), {'hey': 'ho'})

        self.db.update_metadata('1', one=2)
        meta = self.db.get_metadata('1').items()
        meta.sort()
        self.assertEqual(meta, [('hey', 'ho'), ('one', 2)])

    def test_get_urls(self):
        def add_data():
            for line in ONE_RUN:
                data = dict(line)
                data['run_id'] = '1'
                self.db.add(data)
                data['run_id'] = '2'
                self.db.add(data)

        self.loop.add_callback(add_data)
        self.loop.add_callback(add_data)
        self.loop.add_timeout(time.time() + .5, self.loop.stop)
        self.loop.start()

        urls = self.db.get_urls('1')
        self.assertEqual(urls, {'http://127.0.0.1:9200/': 2})

    def test_get_errors(self):
        def add_data():
            for line in ONE_RUN:
                data = dict(line)
                data['run_id'] = '1'
                self.db.add(data)
                data['run_id'] = '2'
                self.db.add(data)

        self.loop.add_callback(add_data)
        self.loop.add_callback(add_data)
        self.loop.add_timeout(time.time() + .5, self.loop.stop)
        self.loop.start()

        self.assertTrue(self.db.ping())

        errors = list(self.db.get_errors('2'))
        self.assertEqual(len(errors), 2, errors)

        errors = list(self.db.get_errors('1'))
        self.assertEqual(len(errors), 2, errors)
<file_sep>/venv/Lib/site-packages/loads/tests/test_test_result.py
from unittest2 import TestCase
from datetime import datetime, timedelta

from mock import Mock

from loads.results.base import TestResult, Hit, Test


TIME1 = datetime(2013, 5, 14, 0, 51, 8)
TIME2 = datetime(2013, 5, 14, 0, 53, 8)
_1 = timedelta(seconds=1)
_2 = timedelta(seconds=2)
_3 = timedelta(seconds=3)


class TestTestResult(TestCase):

    def _get_data(self, url='http://notmyidea.org', method='GET',
                  status=200, started=None, elapsed=None, series=1,
                  user=1, current_hit=1, current_user=1):
        started = started or TIME1
        loads_status = series, user, current_hit, current_user
        return {'elapsed': elapsed or 0.2000,
                'started': started,
                'status': status,
                'url': url,
                'method': method,
                'loads_status': loads_status}

    def test_add_hits(self):
        test_result = TestResult()
        test_result.add_hit(**self._get_data())
        self.assertEquals(len(test_result.hits), 1)

    def test_nb_hits(self):
        test_result = TestResult()
        test_result.add_hit(**self._get_data())
        test_result.add_hit(**self._get_data())
        test_result.add_hit(**self._get_data())
        self.assertEquals(test_result.nb_hits, 3)
        self.assertEquals(len(test_result.hits), 3)

    def test_average_request_time_without_filter(self):
        test_result = TestResult()
        test_result.add_hit(**self._get_data(elapsed=_1))
        test_result.add_hit(**self._get_data(elapsed=_3))
        test_result.add_hit(**self._get_data(elapsed=_2))
        test_result.add_hit(**self._get_data(url='http://another-one',
                                             elapsed=_3))

        self.assertEquals(test_result.average_request_time(), 2.25)

    def test_average_request_time_with_url_filtering(self):
        test_result = TestResult()
        test_result.add_hit(**self._get_data(elapsed=_1))
        test_result.add_hit(**self._get_data(elapsed=_3))
        test_result.add_hit(**self._get_data(elapsed=_2))
        test_result.add_hit(**self._get_data(url='http://another-one',
                                             elapsed=_3))

        # We want to filter out some URLs
        avg = test_result.average_request_time('http://notmyidea.org')
        self.assertEquals(avg, 2.0)

        avg = test_result.average_request_time('http://another-one')
        self.assertEquals(avg, 3.0)

    def test_average_request_time_with_series_filtering(self):
        test_result = TestResult()
        test_result.add_hit(**self._get_data(elapsed=_1, series=1))
        test_result.add_hit(**self._get_data(elapsed=_3, series=2))
        test_result.add_hit(**self._get_data(elapsed=_2, series=3))
        test_result.add_hit(**self._get_data(elapsed=_3, series=3))

        avg = test_result.average_request_time(series=3)
        self.assertEquals(avg, 2.5)

        # try adding another filter on the URL
        test_result.add_hit(**self._get_data(elapsed=_3, series=3,
                                             url='http://another-one'))
        avg = test_result.average_request_time(series=3,
                                               url='http://notmyidea.org')
        self.assertEquals(avg, 2.5)

        self.assertEquals(test_result.average_request_time(series=3),
                          2.6666666666666665)

    def test_average_request_time_when_no_data(self):
        test_result = TestResult()
        self.assertEquals(test_result.average_request_time(), 0)

    def test_urls(self):
        test_result = TestResult()
        test_result.add_hit(**self._get_data())
        test_result.add_hit(**self._get_data(url='http://another-one'))

        urls = set(['http://notmyidea.org', 'http://another-one'])
        self.assertEquals(test_result.urls, urls)

    def test_hits_success_rate(self):
        test_result = TestResult()
        for x in range(4):
            test_result.add_hit(**self._get_data(status=200))
        test_result.add_hit(**self._get_data(status=400, series=2))

        self.assertEquals(test_result.hits_success_rate(), 0.8)
        self.assertEquals(test_result.hits_success_rate(series=1), 1)

    def test_requests_per_second(self):
        test_result = TestResult()
        for x in range(20):
            test_result.add_hit(**self._get_data(status=200))
        test_result.start_time = TIME1
        test_result.stop_time = TIME2
        self.assertTrue(0.16 < test_result.requests_per_second() < 0.17)

    def test_average_test_duration(self):
        t = Test(TIME1)
        t.end = TIME2
        test_result = TestResult()
        test_result.tests['toto', 1] = t
        test_result.tests['tutu', 1] = t
        self.assertEquals(test_result.average_test_duration(), 120)

    def test_tests_per_second(self):
        test_result = TestResult()
        for x in range(20):
            test_result.startTest('rainbow', (1, 1, x, 1))
        test_result.start_time = TIME1
        test_result.stop_time = TIME2
        self.assertTrue(0.16 < test_result.tests_per_second() < 0.17)

    def test_get_tests_filters_series(self):
        test_result = TestResult()
        test_result.tests['bacon', 1] = Test(name='bacon', series=1)
        test_result.tests['egg', 1] = Test(name='egg', series=1)
        test_result.tests['spam', 2] = Test(name='spam', series=2)
        self.assertEquals(len(test_result._get_tests(series=1)), 2)

    def test_get_tests_filters_names(self):
        test_result = TestResult()
        test_result.tests['bacon', 1] = Test(name='bacon', series=1)
        test_result.tests['bacon', 2] = Test(name='bacon', series=2)
        test_result.tests['spam', 2] = Test(name='spam', series=2)
        self.assertEquals(len(test_result._get_tests(name='bacon')), 2)

    def test_get_tests_filters_by_both_fields(self):
        test_result = TestResult()
        test_result.tests['bacon', 1] = Test(name='bacon', series=1)
        test_result.tests['bacon', 2] = Test(name='bacon', series=2)
        test_result.tests['spam', 2] = Test(name='spam', series=2)
        self.assertEquals(len(test_result._get_tests(name='bacon',
                                                     series=2)), 1)

    def test_test_success_rate_when_not_started(self):
        # the rate defaults to 1 if no tests have been collected yet.
        test_result = TestResult()
        self.assertEquals(1, test_result.test_success_rate())

    def test_test_success_rate_is_correct(self):
        test_result = TestResult()
        loads_status = (1, 1, 1, 1)
        test_result.startTest('bacon', loads_status)
        test_result.addSuccess('bacon', loads_status)
        test_result.addFailure('bacon', 'A failure', loads_status)

        self.assertEquals(0.5, test_result.test_success_rate())

    def test_duration_is_zero_if_not_started(self):
        test_result = TestResult()
        self.assertEquals(test_result.duration, 0)

    def test_requests_per_second_if_not_started(self):
        test_result = TestResult()
        self.assertEquals(test_result.requests_per_second(), 0)

    def test_get_url_metrics(self):
        test_result = TestResult()
        test_result.average_request_time = Mock(return_value=0.5)
        test_result.hits_success_rate = Mock(return_value=0.9)
        test_result.add_hit(**self._get_data('http://notmyidea.org'))
        test_result.add_hit(**self._get_data('http://lolnet.org'))

        metrics = test_result.get_url_metrics()
        self.assertEquals(metrics['http://notmyidea.org'],
                          {'average_request_time': 0.5,
                           'hits_success_rate': 0.9})
        self.assertEquals(metrics['http://lolnet.org'],
                          {'average_request_time': 0.5,
                           'hits_success_rate': 0.9})

    def test_counters(self):
        test_result = TestResult()
        loads_status = (1, 1, 1, 1)
        test_result.incr_counter('bacon', loads_status, 'sent')
        test_result.incr_counter('bacon', loads_status, 'sent')
        test_result.incr_counter('bacon', loads_status, 'received')

        self.assertEqual(test_result.get_counter('sent'), 2)
        self.assertEqual(test_result.get_counter('received',
                                                 test='bacon'), 1)
        self.assertEqual(test_result.get_counter('bacon', 'xxxx'), 0)
        self.assertEqual(test_result.get_counter('xxx', 'xxxx'), 0)

    def test_socket_count(self):
        test_result = TestResult()

        # Open 5 sockets
        for _ in range(5):
            test_result.socket_open()

        self.assertEquals(test_result.sockets, 5)
        self.assertEquals(test_result.opened_sockets, 5)
        self.assertEquals(test_result.closed_sockets, 0)

        for _ in range(4):
            test_result.socket_close()

        self.assertEquals(test_result.sockets, 1)
        self.assertEquals(test_result.opened_sockets, 5)
        self.assertEquals(test_result.closed_sockets, 4)


class TestHits(TestCase):

    def test_loads_status_default_to_None(self):
        started = None
        h = Hit(url='http://notmyidea.org',
                method='GET',
                status=200,
                started=started,
                elapsed=0.0,
                loads_status=None)

        self.assertEquals(h.series, None)
        self.assertEquals(h.user, None)
        self.assertEquals(h.current_hit, None)

    def test_loads_status_extract_values(self):
        started = None
        h = Hit(url='http://notmyidea.org',
                method='GET',
                status=200,
                started=started,
                elapsed=0.0,
                loads_status=(1, 2, 3, 4))

        self.assertEquals(h.series, 1)
        self.assertEquals(h.user, 2)
        self.assertEquals(h.current_hit, 3)


class TestTest(TestCase):

    def test_duration_is_zero_if_not_finished(self):
        test = Test()
        # no end value is provided
        self.assertEquals(test.duration, 0)

    def test_duration_is_valid(self):
        test = Test(TIME1)
        test.end = TIME2
        self.assertEquals(test.duration, 120)

    def test_success_rate_when_none(self):
        test = Test()
        self.assertEquals(test.success_rate, 1)

    def test_success_rate_when_failures_and_success(self):
        test = Test()
        test.success = 2
        test.failures.append(0)  # Acts as a failure.
        test.failures.append(0)
        self.assertEquals(test.success_rate, 0.5)
<file_sep>/venv/Lib/site-packages/loads/db/__init__.py
from loads.util import logger


class BaseDB(object):

    name = ''
    options = {}

    def __init__(self, loop, **kw):
        if self.name == '':
            raise ValueError('You need to set a name')
        self.loop = loop
        self.params = {}
        for key, (default, help, type) in self.options.items():
            self.params[key] = type(kw.get(key, default))
        self._initialize()

    def _initialize(self):
        raise NotImplementedError()

    #
    # APIs
    #
    def save_metadata(self, run_id, metadata):
        raise NotImplementedError()

    def get_metadata(self, run_id):
        raise NotImplementedError()

    def add(self, data):
        raise NotImplementedError()

    def flush(self):
        raise NotImplementedError()

    def close(self):
        raise NotImplementedError()

    def get_counts(self, run_id):
        raise NotImplementedError()

    def get_data(self, run_id):
        raise NotImplementedError()

    def get_urls(self, run_id):
        raise NotImplementedError()


def get_database(name='python', loop=None, **options):
    if name == 'python':
        from loads.db._python import BrokerDB
        klass = BrokerDB
    elif name == 'redis':
        from loads.db._redis import RedisDB
        klass = RedisDB
    else:
        raise NotImplementedError(name)

    db = klass(loop, **options)
    logger.info('Created a %r database connection' % name)
    return db


def get_backends():
    backends = []

    def _options(backend):
        return [(name, default, help, type_) for name,
                (default, help, type_) in backend.options.items()]

    # pure python
    from loads.db._python import BrokerDB
    backends.append((BrokerDB.name, _options(BrokerDB)))

    try:
        from loads.db._redis import RedisDB
    except ImportError:
        return backends

    backends.append((RedisDB.name, _options(RedisDB)))
    return backends
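
# Added illustration (not part of the original module): selecting a
# backend through get_database(). The 'directory' option is the pure
# python backend's storage location (see the broker tests); the path
# used here is an assumption.
def _example_get_database():
    from zmq.green.eventloop import ioloop

    loop = ioloop.IOLoop()
    return get_database(name='python', loop=loop,
                        directory='/tmp/loads-db')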
<file_sep>/venv/Lib/site-packages/loads/tests/test_case.py
import unittest2
import mock

from loads.case import TestCase
from loads.results import UnitTestTestResult


class _MyTestCase(TestCase):

    def test_one(self):
        self.incr_counter('meh')

    def test_two(self):
        raise AttributeError()

    def test_three(self):
        self.assertTrue(False)


class TestTestCase(unittest2.TestCase):

    def test_fake(self):
        results = UnitTestTestResult()
        loads_status = 1, 1, 1, 1

        case = _MyTestCase('test_one', test_result=results)
        case(loads_status=loads_status)
        self.assertEqual(results.testsRun, 1)
        self.assertEqual(results.wasSuccessful(), True)
        self.assertEqual(len(results.errors), 0)

        case = _MyTestCase('test_two', test_result=results)
        case(loads_status=loads_status)
        self.assertEqual(results.testsRun, 2)
        self.assertEqual(results.wasSuccessful(), False)
        self.assertEqual(len(results.errors), 1)

        case = _MyTestCase('test_three', test_result=results)
        case(loads_status=loads_status)
        self.assertEqual(results.testsRun, 3)
        self.assertEqual(results.wasSuccessful(), False)
        self.assertEqual(len(results.errors), 1)

        self.assertRaises(ValueError, case.app.get, 'boh')

    def test_config_is_passed(self):
        test = _MyTestCase('test_one', test_result=mock.sentinel.results,
                           config={})
        self.assertEquals(test.config, {})

    def test_serverurl_is_overwrited(self):
        _MyTestCase.server_url = 'http://example.org'
        try:
            test = _MyTestCase('test_one',
                               test_result=mock.sentinel.results,
                               config={'server_url':
                                       'http://notmyidea.org'})
            self.assertEquals(test.server_url, 'http://notmyidea.org')
        finally:
            del _MyTestCase.server_url

    def test_serverurl_is_not_overwrited_by_none(self):
        _MyTestCase.server_url = 'http://example.org'
        try:
            test = _MyTestCase('test_one',
                               test_result=mock.sentinel.results,
                               config={'server_url': None})
            self.assertEquals(test.server_url, 'http://example.org')
        finally:
            del _MyTestCase.server_url
<file_sep>/venv/Lib/site-packages/loads/users.py
import argparse
import logging
import sys
import traceback
from datetime import datetime

from konfig import Config

from loads import __version__
from loads.output import output_list
from loads.runners import (LocalRunner, DistributedRunner, ExternalRunner,
                           RUNNERS)
from loads.transport.client import Client, TimeoutError
from loads.transport.util import (DEFAULT_FRONTEND, DEFAULT_PUBLISHER,
                                  DEFAULT_SSH_FRONTEND)
from loads.util import logger, set_logger
from loads.observers import observers


def _detach_question(runner):
    res = ''
    while res not in ('s', 'd'):
        res = raw_input('Do you want to (s)top the test or (d)etach ? ')
        res = res.lower().strip()
        if len(res) > 1:
            res = res[0]
    if res == 's':
        runner.cancel()


def add_options(items, parser, fmt):
    """Read the list of items and add options to the parser using the
    given format.

    :param items: A list of class objects to iterate over. They should
                  contain at least a name and an options argument.
    :param parser: The parser object from argparse.
    :param fmt: The format to use for the option to add to the parser.
                It should contain {name} and {option}, for instance
                '--output-{name}-{option}' is a valid format.
    """
    for item in items:
        for option, value in item.options.items():
            help_, type_, default, cli = value
            if not cli:
                continue

            kw = {'help': help_, 'type': type_}
            if default is not None:
                kw['default'] = default

            parser.add_argument(fmt.format(name=item.name, option=option),
                                **kw)


def run(args):
    is_slave = args.get('slave', False)
    has_agents = args.get('agents', None)
    attach = args.get('attach', False)

    if not attach and (is_slave or not has_agents):
        if args.get('test_runner', None) is not None:
            runner = ExternalRunner
        else:
            runner = LocalRunner
        try:
            return runner(args).execute()
        except Exception:
            print traceback.format_exc()
            raise
    else:
        if attach:
            # find out what's running
            client = Client(args['broker'])
            try:
                runs = client.list_runs()
            except TimeoutError:
                logger.info("Can't reach the broker at %r" %
                            args['broker'])
                client.close()
                return 1

            if len(runs) == 0:
                logger.info("Nothing seems to be running on that broker.")
                client.close()
                return 1
            elif len(runs) == 1:
                run_id, run_data = runs.items()[0]
                __, started = run_data[-1]
            else:
                # we need to pick one
                raise NotImplementedError()

            counts = client.get_counts(run_id)
            events = [event for event, hits in counts]

            if 'stopTestRun' in events:
                logger.info("This test has just stopped.")
                client.close()
                return 1

            metadata = client.get_metadata(run_id)
            logger.debug('Reattaching run %r' % run_id)
            started = datetime.utcfromtimestamp(started)
            runner = DistributedRunner(args)
            try:
                return runner.attach(run_id, started, counts, metadata)
            except KeyboardInterrupt:
                _detach_question(runner)
        else:
            logger.debug('Summoning %d agents' % args['agents'])
            runner = DistributedRunner(args)
            try:
                return runner.execute()
            except KeyboardInterrupt:
                _detach_question(runner)


def _parse(sysargs=None):
    if sysargs is None:
        sysargs = sys.argv[1:]

    parser = argparse.ArgumentParser(description='Runs a load test.')
    parser.add_argument('fqn', help='Fully Qualified Name of the test',
                        nargs='?')

    parser.add_argument('--config', help='Configuration file to read',
                        type=str, default=None)

    parser.add_argument('-u', '--users', help='Number of virtual users',
                        type=str, default='1')

    parser.add_argument('--test-dir', help='Directory to run the test from',
                        type=str, default=None)

    parser.add_argument('--python-dep', help='Python (PyPI) dependencies '
                                             'to install',
                        action='append', default=[])

    parser.add_argument('--include-file',
                        help='File(s) to include (needed for the test) '
                             '- glob-style',
                        action='append', default=[])

    parser.add_argument('--ssh', help='SSH tunnel - e.g. user@server:port',
                        type=str, default=None)

    # loads works with hits or duration
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--hits', help='Number of hits per user',
                       type=str, default=None)
    group.add_argument('-d', '--duration', help='Duration of the test (s)',
                       type=int, default=None)

    parser.add_argument('--version', action='store_true', default=False,
                        help='Displays Loads version and exits.')

    parser.add_argument('--test-runner', default=None,
                        help='The path to binary to use as the test runner '
                             'when in distributed mode. The default is '
                             'this (python) runner')

    parser.add_argument('--server-url', default=None,
                        help='The URL of the server you want to test. It '
                             'will override any value you provided in '
                             'the tests for the WebTest client.')

    parser.add_argument('--observer', action='append',
                        choices=[observer.name for observer in observers],
                        help='Callable that will receive the final results. '
                             'Only in distributed mode (runs on the broker)')

    #
    # Loading observers options
    #
    for observer in observers:
        prefix = '--observer-%s-' % observer.name
        for option in observer.options:
            name = prefix + option['name']
            parser.add_argument(name, help=option.get('help'),
                                default=option.get('default'),
                                type=option.get('type'),
                                action=option.get('action'))

    parser.add_argument('--no-patching',
                        help='Deactivate Gevent monkey patching.',
                        action='store_true', default=False)

    parser.add_argument('--project-name', help='Project name.',
                        default='N/A')

    #
    # distributed options
    #
    parser.add_argument('-a', '--agents', help='Number of agents to use.',
                        type=int)

    parser.add_argument('--zmq-receiver', default=None,
                        help='ZMQ socket where the runners send the events to'
                             ' (opened on the agent side).')

    parser.add_argument('--zmq-publisher', default=DEFAULT_PUBLISHER,
                        help='ZMQ socket where the test results messages '
                             'are published.')

    parser.add_argument('--ping-broker', action='store_true', default=False,
                        help='Pings the broker to get info, display it and '
                             'exits.')

    parser.add_argument('--check-cluster', action='store_true',
                        default=False,
                        help='Runs a test on all agents then exits.')

    parser.add_argument('--purge-broker', action='store_true',
                        default=False,
                        help='Stops all runs on the broker and exits.')

    parser.add_argument('-b', '--broker', help='Broker endpoint',
                        default=DEFAULT_FRONTEND)

    parser.add_argument('--user-id',
                        help='Name of the user who runs the test',
                        type=str, default='undefined')

    outputs = [st.name for st in output_list()]
    outputs.sort()

    parser.add_argument('--quiet', action='store_true', default=False,
                        help='Do not print any log messages.')
    parser.add_argument('--output', action='append', default=['stdout'],
                        help='The output which will get the results',
                        choices=outputs)

    parser.add_argument('--attach', help='Reattach to a distributed run',
                        action='store_true', default=False)

    parser.add_argument('--detach', help='Detach immediately the current '
                                         'distributed run',
                        action='store_true', default=False)

    # Adds the per-output and per-runner options.
    add_options(RUNNERS, parser, fmt='--{name}-{option}')
    add_options(output_list(), parser, fmt='--output-{name}-{option}')

    args = parser.parse_args(sysargs)

    if args.config is not None:
        # second pass !
        config = Config(args.config)
        config_args = config.scan_args(parser, strip_prefixes=['loads'])
        if 'fqn' in config['loads']:
            config_args += [config['loads']['fqn']]
        args = parser.parse_args(args=sysargs + config_args)

    if args.quiet and 'stdout' in args.output:
        args.output.remove('stdout')

    return args, parser


def main(sysargs=None):
    # parsing the command line
    args, parser = _parse(sysargs)

    # loggers setting
    wslogger = logging.getLogger('ws4py')
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    wslogger.addHandler(ch)
    set_logger()

    if args.version:
        print(__version__)
        sys.exit(0)

    if args.ssh:
        if args.broker == DEFAULT_FRONTEND:
            args.broker = DEFAULT_SSH_FRONTEND

        # control that we have pexpect
        try:
            import pexpect  # NOQA
        except ImportError:
            print("To use --ssh you need pexpect")
            print("Try: pip install pexpect")
            sys.exit(0)

    if args.ping_broker or args.purge_broker or args.check_cluster:
        client = Client(args.broker, ssh=args.ssh)
        ping = client.ping()

        if args.purge_broker:
            runs = client.purge_broker()
            if len(runs) == 0:
                print('Nothing to purge.')
            else:
                print('We have %d run(s) right now:' % len(runs))
                print('Purged.')
            sys.exit(0)
        elif args.ping_broker:
            print('Broker running on pid %d' % ping['pid'])
            print('%d agents registered' % len(ping['agents']))
            print('endpoints:')
            for name, location in ping['endpoints'].items():
                print(' - %s: %s' % (name, location))

            runs = client.list_runs()
            if len(runs) == 0:
                print('Nothing is running right now.')
            else:
                print('We have %d run(s) right now:' % len(runs))
                for run_id, agents in runs.items():
                    print(' - %s with %d agent(s)' % (run_id, len(agents)))
            sys.exit(0)
        elif args.check_cluster:
            total_agents = len(ping['agents'])
            runs = client.list_runs().items()
            busy_agents = sum([len(agents) for run_id, agents in runs])
            avail = total_agents - busy_agents
            if avail == 0:
                # no agents are available.
                print('All agents are busy.')
                sys.exit(0)
            args.fqn = 'loads.examples.test_blog.TestWebSite.test_health'
            args.agents = avail
            args.hits = '1'
            print('Running a health check on all %d agents' % args.agents)

    # if we don't have an fqn or we're not attached, something's wrong
    if args.fqn is None and not args.attach:
        parser.print_usage()
        sys.exit(0)

    args = dict(args._get_kwargs())
    res = run(args)
    return res
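
# Added illustration (not part of the original module): main() can be
# driven programmatically with the same arguments as the command line.
# The fqn below points at one of the bundled examples; the option
# values are arbitrary.
def _example_programmatic_run():
    return main(['loads.examples.test_blog.TestWebSite.test_health',
                 '--hits', '1', '--quiet'])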
<file_sep>/venv/Lib/site-packages/loads/examples/test_blog.py
import gevent
import random
import os
import time

from loads.case import TestCase


class TestWebSite(TestCase):

    server_url = 'http://blog.ziade.org'

    def test_health(self):
        self.incr_counter('health-check')

    def test_volume(self):
        self.incr_counter('volume-check')
        # eat the CPU a bit, and do a little bit of fd
        for i in range(1000):
            os.urandom(2048)
            i * (i * i * i)
        for i in range(10):
            self.incr_counter('volume-check-%d' % i)
        gevent.sleep(.2)

    def test_hold_health(self):
        self.incr_counter('health-check')
        gevent.sleep(1.)
        raise Exception('BOUH')

    def test_wontdie(self):
        def _spin(*args):
            while True:
                print('WWWWWwwwwwwoooo')

        import signal
        signal.signal(signal.SIGTERM, _spin)
        gevent.sleep(1.)

    def test_public(self):
        self.session.get('http://google.com')

    def test_gonuts(self):
        root = 'http://ec2-54-244-173-6.us-west-2.compute.amazonaws.com:8282'
        res = self.session.get(root + '/%s' % random.choice(['here',
                                                             'there',
                                                             'foo']))
        self.assertTrue('nuts' in res.content.lower())

    def test_es(self):
        self.incr_counter('es')
        self.session.get('http://localhost:9200')

    def test_hold_ws(self):
        results = []

        def callback(m):
            self.incr_counter('socket-callback')
            results.append(m.data)

        self.incr_counter('socket-created')
        ws = self.create_ws('ws://localhost:9000/ws', callback=callback)

        start = time.time()
        while time.time() - start < 240:
            gevent.sleep(1)
            ws.send('x')
            gevent.sleep(2)
            ws.receive()
            gevent.sleep(7)

        ws.close()

    def test_from_doc(self):
        results = []

        def callback(m):
            results.append(m.data)

        ws = self.create_ws('ws://localhost:9000/ws',
                            protocols=['chat', 'http-only'],
                            callback=callback)
        ws.send('something')
        ws.receive()
        ws.send('happened')
        ws.receive()

        while len(results) < 2:
            time.sleep(.1)

        self.assertEqual(results, ['something', 'happened'])

    def test_something(self):
        res = self.session.get('http://localhost:9000')
        self.assertTrue('chatform' in res.content)
        results = []

        def callback(m):
            self.incr_counter('socket-callback')
            results.append(m.data)

        self.incr_counter('socket-created')
        ws = self.create_ws('ws://localhost:9000/ws',
                            protocols=['chat', 'http-only'],
                            callback=callback)

        one = 'something' + os.urandom(10).encode('hex')
        two = 'happened' + os.urandom(10).encode('hex')

        ws.send(one)
        ws.receive()
        ws.send(two)
        ws.receive()

        self.incr_counter('socket-sent')

        start = time.time()
        while one not in results and two not in results:
            gevent.sleep(0)
            if time.time() - start > 1:
                raise AssertionError('Too slow')

    def _test_will_fail(self):
        res = self.session.get('http://localhost:9200')
        self.assertTrue('xFsj' in res.content)

    def test_will_error(self):
        raise ValueError("boom")

    def test_concurrency(self):
        self.incr_counter('beau')
        user = 'user%s' % random.randint(1, 200)
        self.session.auth = (user, 'X' * 10)
        self.app.server_url = 'http://localhost:9000'
        res = self.app.get('/auth')
        # don't use assertIn so this works with 2.6
        self.assertTrue(user in res.body)
        res = self.app.get('/auth')
        self.assertTrue(user in res.body)
        self.incr_counter('lavabo')
""" def __init__(self, frontend=DEFAULT_FRONTEND, timeout=DEFAULT_TIMEOUT, timeout_max_overflow=DEFAULT_TIMEOUT_MOVF, timeout_overflows=DEFAULT_TIMEOUT_OVF, debug=False, ctx=None, ssh=None): self.ssh = ssh self.kill_ctx = ctx is None self.ctx = ctx or zmq.Context() self.frontend = frontend self.master = self.ctx.socket(zmq.REQ) if ssh: from zmq import ssh ssh.tunnel_connection(self.master, frontend, self.ssh) else: self.master.connect(frontend) self.poller = zmq.Poller() self.poller.register(self.master, zmq.POLLIN) self.timeout = timeout * 1000 self.lock = threading.Lock() self.timeout_max_overflow = timeout_max_overflow * 1000 self.timeout_overflows = timeout_overflows self.debug = debug def execute(self, job, timeout=None, log_exceptions=True): """Runs the job Options: - **job**: Job to be performed. Can be a :class:`Job` instance or a string. If it's a string a :class:`Job` instance will be automatically created out of it. - **timeout**: maximum allowed time for a job to run. If not provided, uses the one defined in the constructor. If the job fails after the timeout, raises a :class:`TimeoutError`. This method is thread-safe and uses a lock. If you need to execute a lot of jobs simultaneously on a broker, use the :class:`Pool` class. """ if timeout is None: timeout = self.timeout_max_overflow try: duration, res = timed(self.debug)(self._execute)(job, timeout) except Exception: # logged, connector replaced. if log_exceptions: logger.exception('Failed to execute the job.') logger.debug(str(job)) raise if 'error' in res: raise ValueError(res['error']) return res['result'] def close(self): self.master.setsockopt(zmq.LINGER, 0) self.master.close() if self.kill_ctx: self.ctx.destroy(0) def _execute(self, job, timeout=None): if not isinstance(job, Message): job = Message(**job) if timeout is None: timeout = self.timeout_max_overflow with self.lock: send(self.master, job.serialize()) while True: try: socks = dict(self.poller.poll(timeout)) break except zmq.ZMQError as e: if e.errno != errno.EINTR: raise if socks.get(self.master) == zmq.POLLIN: data = recv(self.master) return json.loads(data) raise TimeoutError(timeout) def run(self, args, async=True): # let's ask the broker how many agents it has res = self.execute({'command': 'LIST'}) # do we have enough ? agents = len(res) agents_needed = args.get('agents', 1) if len(res) < agents_needed: msg = 'Not enough agents running on that broker. ' msg += 'Asked: %d, Got: %d' % (agents_needed, agents) raise ExecutionError(msg) # let's copy over some files if we need includes = args.get('include_file', []) cmd = {'command': 'CTRL_RUN', 'async': async, 'agents': agents_needed, 'args': args} cmd['filedata'] = pack_include_files(includes) res = self.execute(cmd) logger.debug('Run on its way') logger.debug(res) return res def ping(self, timeout=None, log_exceptions=True): return self.execute({'command': 'PING'}, timeout=timeout, log_exceptions=log_exceptions) def list(self): return self.execute({'command': 'LIST'}) # # commands handled by the broker controller. # def list_runs(self): return self.execute({'command': 'CTRL_LIST_RUNS'}) def get_urls(self, run_id): return self.execute({'command': 'CTRL_GET_URLS', 'run_id': run_id}) def stop_run(self, run_id): return self.execute({'command': 'CTRL_STOP_RUN', 'run_id': run_id}) def get_counts(self, run_id): res = self.execute({'command': 'CTRL_GET_COUNTS', 'run_id': run_id}) # XXX why ? 
if isinstance(res, dict): return res.items() return res def get_metadata(self, run_id): return self.execute({'command': 'CTRL_GET_METADATA', 'run_id': run_id}) def get_data(self, run_id, **kw): cmd = {'command': 'CTRL_GET_DATA', 'run_id': run_id} cmd.update(kw) return self.execute(cmd) def status(self, agent_id): return self.execute({'command': 'CTRL_AGENT_STATUS', 'agent_id': agent_id}) def stop(self, agent_id): return self.execute({'command': 'CTRL_AGENT_STOP', 'agent_id': agent_id}) def purge_broker(self): runs = self.list_runs() if len(runs) == 0: return runs for run_id, workers in runs.items(): self.stop_run(run_id) return runs class Pool(object): """The pool class manage several :class:`Client` instances and publish the same interface, Options: - **size**: size of the pool. Defaults to 10. - **frontend**: ZMQ socket to call. - **timeout**: maximum allowed time for a job to run. Defaults to 5s. - **timeout_max_overflow**: maximum timeout overflow allowed - **timeout_overflows**: number of times in a row the timeout value can be overflowed per agent. The client keeps a counter of executions that were longer than the regular timeout but shorter than **timeout_max_overflow**. When the number goes over **timeout_overflows**, the usual TimeoutError is raised. When a agent returns on time, the counter is reset. """ def __init__(self, size=10, frontend=DEFAULT_FRONTEND, timeout=DEFAULT_TIMEOUT, timeout_max_overflow=DEFAULT_TIMEOUT_MOVF, timeout_overflows=DEFAULT_TIMEOUT_OVF, debug=False, ctx=None): self._connectors = Queue() self.frontend = frontend self.timeout = timeout self.timeout_overflows = timeout_overflows self.timeout_max_overflow = timeout_max_overflow self.debug = debug self.ctx = ctx or zmq.Context() for i in range(size): self._connectors.put(self._create_client()) def _create_client(self): return Client(self.frontend, self.timeout, self.timeout_max_overflow, self.timeout_overflows, debug=self.debug, ctx=self.ctx) @contextlib.contextmanager def _connector(self, timeout): connector = self._connectors.get(timeout=timeout) try: yield connector except Exception: # connector replaced try: connector.close() finally: self._connectors.put(self._create_client()) raise else: self._connectors.put(connector) def __getattribute__(self, name): if not hasattr(Client, name): return object.__getattribute__(self, name) return functools.partial(self._runner, name) def _runner(self, name, *args, **kw): timeout = kw.get('timeout', self.timeout) with self._connector(timeout) as connector: meth = getattr(connector, name) return meth(*args, **kw) def close(self): self.ctx.destroy(0) <file_sep>/venv/Lib/site-packages/loads/websockets.py import gevent from collections import defaultdict from socket import error from ws4py.client.geventclient import WebSocketClient as _WS _SOCKETS = defaultdict(list) class WebSocketClient(_WS): def __init__(self, url, test_result, protocols=None, extensions=None, callback=None, test_case=None): super(WebSocketClient, self).__init__(url, protocols, extensions) self.callback = callback self._test_result = test_result self.test_case = test_case def received_message(self, m): if self.callback is not None: self.callback(m) if self._test_result is not None: self._test_result.socket_message(len(m.data)) super(WebSocketClient, self).received_message(m) def opened(self): if self._test_result is not None: self._test_result.socket_open() super(WebSocketClient, self).opened() def close(self, code=1000, reason=''): if self.client_terminated: return if self._test_result is not None: 
self._test_result.socket_close() super(WebSocketClient, self).close(code, reason) def cleanup(greenlet): for sock in _SOCKETS[id(greenlet)]: sock.close() # XXX we get [Errno 48] Address already in use errors o/wise # on very high load (>10k sockets per agent) # # XXX I don't know why yet _TENTATIVE = 200 def create_ws(url, test_result, callback=None, protocols=None, extensions=None, klass=None, test_case=None): for i in range(_TENTATIVE): try: return _create_ws(url, test_result, callback, protocols, extensions, klass, test_case) except error, e: gevent.sleep(0) raise e def _create_ws(url, test_result, callback=None, protocols=None, extensions=None, klass=None, test_case=None): custom_klass = klass is not None if klass is None: klass = WebSocketClient socket = klass(url=url, test_result=test_result, protocols=protocols, extensions=extensions, callback=callback, test_case=test_case) socket.daemon = True if not custom_klass: current = gevent.getcurrent() # XXX sometimes I get greenlets objects, sometime Greenlets... ???? if hasattr(current, 'link'): current.link(cleanup) current_id = id(current) socket.connect() _SOCKETS[current_id].append(socket) return socket <file_sep>/venv/Lib/site-packages/loads/__init__.py import pkg_resources from loads import _patch # NOQA from loads.case import TestCase # NOQA __version__ = pkg_resources.get_distribution('loads').version <file_sep>/venv/Lib/site-packages/loads/runners/local.py import os import subprocess import sys import gevent from loads.util import (resolve_name, logger, pack_include_files, unpack_include_files, set_logger) from loads.results import ZMQTestResult, TestResult, ZMQSummarizedTestResult from loads.output import create_output DEFAULT_LOGFILE = os.path.join('/tmp', 'loads-worker.log') def _compute_arguments(args): """ Read the given :param args: and builds up the total number of runs, the number of hits, duration, users and agents to use. Returns a tuple of (total, hits, duration, users, agents). """ users = args.get('users', '1') if isinstance(users, str): users = users.split(':') users = [int(user) for user in users] hits = args.get('hits') duration = args.get('duration') if duration is None and hits is None: hits = '1' if hits is not None: if not isinstance(hits, list): hits = [int(hit) for hit in hits.split(':')] agents = args.get('agents', 1) # XXX duration based == no total total = 0 if duration is None: for user in users: total += sum([hit * user for hit in hits]) if agents is not None: total *= agents return total, hits, duration, users, agents class LocalRunner(object): """Local tests runner. Runs the tests for the given number of users. This runner can be used in two different modes: - The "classical" mode where the results are collected and passed to the outputs. - The "slave" mode where the results are sent to a ZMQ endpoint and no output is called. 
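
    Example of the "classical" mode (a hedged sketch -- the FQN and the
    option values here are assumptions, not part of the library)::

        runner = LocalRunner({'fqn': 'mypkg.tests.TestSite.test_index',
                              'users': '2', 'hits': '5',
                              'output': ['stdout']})
        exit_code = runner.execute()  # 0 on success, 1 on failures/errors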
""" name = 'local' options = {} def __init__(self, args): self.args = args self.fqn = args.get('fqn') self.test = None self.slave = args.get('slave', False) if self.slave: set_logger(True, logfile=args.get('logfile', DEFAULT_LOGFILE)) self.run_id = None self.project_name = args.get('project_name', 'N/A') self._test_result = None self.outputs = [] self.stop = False (self.total, self.hits, self.duration, self.users, self.agents) = _compute_arguments(args) self.args['hits'] = self.hits self.args['users'] = self.users self.args['agents'] = self.agents self.args['total'] = self.total def _resolve_name(self): if self.fqn is not None: self.test = resolve_name(self.fqn) @property def test_result(self): if self._test_result is None: # If we are in slave mode, set the test_result to a 0mq relay if self.slave: if self.args.get('batched', False): self._test_result = ZMQSummarizedTestResult(self.args) else: self._test_result = ZMQTestResult(self.args) # The normal behavior is to collect the results locally. else: self._test_result = TestResult(args=self.args) return self._test_result def register_output(self, output_name): output = create_output(output_name, self.test_result, self.args) self.outputs.append(output) self.test_result.add_observer(output) def _deploy_python_deps(self, deps=None): # XXX pip hack to avoid uninstall # deploy python deps if asked deps = deps or self.args.get('python_dep', []) if deps == []: return # accepting lists and list of comma-separated values pydeps = [] for dep in deps: dep = [d.strip() for d in dep.split(',')] for d in dep: if d == '': continue pydeps.append(d) build_dir = os.path.join(self.args['test_dir'], 'build-', str(os.getpid())) nil = "lambda *args, **kw: None" code = ["from pip.req import InstallRequirement", "InstallRequirement.uninstall = %s" % nil, "InstallRequirement.commit_uninstall = %s" % nil, "import pip", "pip.main()"] cmd = [sys.executable, '-c', '"%s"' % ';'.join(code), 'install', '-t', 'deps', '-I', '-b', build_dir] for dep in pydeps: logger.debug('Deploying %r in %r' % (dep, os.getcwd())) process = subprocess.Popen(' '.join(cmd + [dep]), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = process.communicate() # XXX see https://github.com/mozilla-services/loads/issues/253 if 'Successfully installed' not in stdout: logger.debug('Failed to deploy %r' % dep) logger.debug('Error: %s' % str(stderr)) logger.debug('Stdout: %s' % str(stdout)) logger.debug("Command used: %s" % str(' '.join(cmd + [dep]))) raise Exception(stderr) else: logger.debug('Successfully deployed %r' % dep) sys.path.insert(0, 'deps') def execute(self): """The method to start the load runner.""" if not self.slave: for output in self.args.get('output', ['stdout']): self.register_output(output) old_location = os.getcwd() self.running = True try: self._execute() if (not self.slave and self.test_result.nb_errors + self.test_result.nb_failures): return 1 return 0 except Exception: self.test_result.addError('XXX', sys.exc_info(), (0, 0, 0, 0)) raise finally: self.test_result.close() self.running = False os.chdir(old_location) def _run(self, num, user): """This method is actually spawned by gevent so there is more than one actual test suite running in parallel. 
""" # creating the test case instance test = self.test.im_class(test_name=self.test.__name__, test_result=self.test_result, config=self.args) if self.stop: return if self.duration is None: for hit in self.hits: gevent.sleep(0) loads_status = list(self.args.get('loads_status', (hit, user, 0, num))) for current_hit in range(hit): loads_status[2] = current_hit + 1 test(loads_status=list(loads_status)) gevent.sleep(0) else: def spawn_test(): loads_status = list(self.args.get('loads_status', (0, user, 0, num))) while True: loads_status[2] += 1 test(loads_status=loads_status) gevent.sleep(0) spawned_test = gevent.spawn(spawn_test) timer = gevent.Timeout(self.duration).start() try: spawned_test.join(timeout=timer) except (gevent.Timeout, KeyboardInterrupt): pass def _prepare_filesystem(self): test_dir = self.args.get('test_dir') # in standalone mode we take care of creating # the files if test_dir is not None: if not self.slave: test_dir = test_dir + '-%d' % os.getpid() if not os.path.exists(test_dir): os.makedirs(test_dir) # Copy over the include files, if any. # It's inefficient to package them up and then immediately # unpackage them, but this has the advantage of ensuring # consistency with how it's done in the distributed case. includes = self.args.get('include_file', []) logger.debug("unpacking %s" % str(includes)) filedata = pack_include_files(includes) unpack_include_files(filedata, test_dir) # change to execution directory if asked logger.debug('chdir %r' % test_dir) os.chdir(test_dir) def _execute(self): """Spawn all the tests needed and wait for them to finish. """ self._prepare_filesystem() self._deploy_python_deps() self._run_python_tests() def _run_python_tests(self): # resolve the name now logger.debug('Resolving the test fqn') self._resolve_name() logger.debug('Ready to spawn greenlets for testing.') agent_id = self.args.get('agent_id') exception = None try: if not self.args.get('no_patching', False): logger.debug('Gevent monkey patches the stdlib') from gevent import monkey monkey.patch_all() if not hasattr(self.test, 'im_class'): raise ValueError("The FQN of the test doesn't point to a test " "class (%s)." % self.test) gevent.spawn(self._grefresh) if not self.args.get('externally_managed'): self.test_result.startTestRun(agent_id) for user in self.users: if self.stop: break group = [] for i in range(user): group.append(gevent.spawn(self._run, i, user)) gevent.sleep(0) gevent.joinall(group) gevent.sleep(0) if not self.args.get('externally_managed'): self.test_result.stopTestRun(agent_id) except KeyboardInterrupt: pass except Exception as e: exception = e finally: logger.debug('Test over - cleaning up') # be sure we flush the outputs that need it. # but do it only if we are in "normal" mode try: if not self.slave: self.flush() else: # in slave mode, be sure to close the zmq relay. 
self.test_result.close() finally: if exception: logger.debug('We had an exception, re-raising it') raise exception def flush(self): for output in self.outputs: if hasattr(output, 'flush'): output.flush() def refresh(self): if not self.stop: for output in self.outputs: if hasattr(output, 'refresh'): output.refresh(self.run_id) def _grefresh(self): self.refresh() if not self.stop: gevent.spawn_later(.1, self._grefresh) <file_sep>/venv/Lib/site-packages/loads/runners/distributed.py import json import zmq.green as zmq from zmq.green.eventloop import ioloop, zmqstream from loads.runners.local import LocalRunner from loads.transport.util import DEFAULT_PUBLISHER, DEFAULT_SSH_PUBLISHER from loads.util import logger, split_endpoint from loads.results import TestResult, RemoteTestResult from loads.transport.client import Client class DistributedRunner(LocalRunner): """Test runner distributing the load on a cluster of agents, collecting the results via a ZMQ stream. The runner need to have agents already up and running. It will send them commands trough the ZMQ pipeline and get back their results, which will be in turn sent to the local test_result object. """ name = 'distributed' options = {} def __init__(self, args): super(DistributedRunner, self).__init__(args) self.ssh = args.get('ssh') self.run_id = None self._stopped_agents = 0 self._nb_agents = args.get('agents') # socket where the results are published self.context = zmq.Context() self.sub = self.context.socket(zmq.SUB) self.sub.setsockopt(zmq.SUBSCRIBE, '') self.sub.set_hwm(8096 * 10) self.sub.setsockopt(zmq.LINGER, -1) self.zmq_publisher = None self.zstream = None # io loop self.loop = ioloop.IOLoop() self.zstream = zmqstream.ZMQStream(self.sub, self.loop) self.zstream.on_recv(self._recv_result) self.agents = [] self._client = None self.refresh_rate = 100 @property def client(self): if self._client is None: self._client = Client(self.args['broker'], ssh=self.args.get('ssh')) return self._client @property def test_result(self): if self._test_result is None: if self.args.get('attach', False): self._test_result = RemoteTestResult(args=self.args) self.refresh_rate = 500 else: self._test_result = TestResult(args=self.args) # we want to reattach the outputs from Local for output in self.outputs: self._test_result.add_observer(output) return self._test_result def _recv_result(self, msg): """When we receive some data from zeromq, send it to the test_result for later use.""" self.loop.add_callback(self._process_result, msg) def _process_result(self, msg): try: data = json.loads(msg[0]) data_type = data.pop('data_type') run_id = data.pop('run_id', None) if hasattr(self.test_result, data_type): method = getattr(self.test_result, data_type) method(**data) agent_stopped = (data_type == 'batch' and 'stopTestRun' in data['counts']) agent_stopped = agent_stopped or data_type == 'stopTestRun' if agent_stopped: # Make sure all the agents are finished before stopping the # loop. 
self._stopped_agents += 1 if self._stopped_agents == self._nb_agents: self.test_result.sync(self.run_id) self.loop.stop() elif data_type == 'run-finished': if run_id == self.run_id: self.test_result.sync(self.run_id) self.loop.stop() except Exception: self.loop.stop() raise def _attach_publisher(self): zmq_publisher = self.args.get('zmq_publisher') if zmq_publisher in (None, DEFAULT_PUBLISHER): # if this option is not provided by the command line, # we ask the broker about it res = self.client.ping() endpoint = res['endpoints']['publisher'] if endpoint.startswith('ipc'): # IPC - lets hope we're on the same box zmq_publisher = endpoint elif endpoint.startswith('tcp'): # TCP, let's see what IP & port we have splitted = split_endpoint(endpoint) if splitted['ip'] == '0.0.0.0': # let's use the broker ip broker = self.args['broker'] broker_ip = split_endpoint(broker)['ip'] zmq_publisher = 'tcp://%s:%d' % (broker_ip, splitted['port']) else: # let's use the original ip zmq_publisher = endpoint else: zmq_publisher = DEFAULT_PUBLISHER if not self.ssh: self.sub.connect(zmq_publisher) else: if zmq_publisher == DEFAULT_PUBLISHER: zmq_publisher = DEFAULT_SSH_PUBLISHER from zmq import ssh ssh.tunnel_connection(self.sub, zmq_publisher, self.ssh) self.zstream = zmqstream.ZMQStream(self.sub, self.loop) self.zstream.on_recv(self._recv_result) self.zmq_publisher = zmq_publisher def _execute(self): # calling the clients now self.test_result.startTestRun() detached = self.args.get('detach') if not detached: cb = ioloop.PeriodicCallback(self.refresh, self.refresh_rate, self.loop) cb.start() try: self._attach_publisher() logger.debug('Calling the broker...') res = self.client.run(self.args) self.run_id = res['run_id'] self.agents = res['agents'] if not detached: logger.debug('Waiting for results') self.loop.start() else: logger.info('Detached. run --attach to reattach') finally: if not detached: # end.. 
cb.stop() self.test_result.stopTestRun() self.context.destroy() self.flush() def cancel(self): self.client.stop_run(self.run_id) def attach(self, run_id, started, counts, args): self._attach_publisher() self.test_result.args = args self.test_result.startTestRun(when=started) self.test_result.set_counts(counts) for output in self.outputs: output.args = args cb = ioloop.PeriodicCallback(self.refresh, self.refresh_rate, self.loop) cb.start() self.run_id = run_id try: self.loop.start() finally: # end cb.stop() self.test_result.stopTestRun() self.context.destroy() self.flush() <file_sep>/venv/Lib/site-packages/loads/tests/test_db.py import unittest2 from loads.db import get_backends, get_database, BaseDB try: import redis NO_REDIS_LIB = False try: redis.StrictRedis().ping() NO_REDIS_RUNNING = False except Exception: NO_REDIS_RUNNING = True except ImportError: NO_REDIS_RUNNING = NO_REDIS_LIB = True class TestDB(unittest2.TestCase): def test_get_backends(self): backends = get_backends() if NO_REDIS_LIB: self.assertEqual(len(backends), 1) else: self.assertEqual(len(backends), 2) def test_get_database(self): db = get_database('python') self.assertTrue(db.ping()) if not NO_REDIS_RUNNING: db = get_database('redis') self.assertTrue(db.ping()) self.assertRaises(NotImplementedError, get_database, 'cobol') def test_basedb(self): self.assertRaises(ValueError, BaseDB, None) class MyDB(BaseDB): name = 'my' self.assertRaises(NotImplementedError, MyDB, None) class MyDB2(BaseDB): name = 'my' def _initialize(self): pass db2 = MyDB2(None) self.assertRaises(NotImplementedError, db2.save_metadata, None, None) self.assertRaises(NotImplementedError, db2.get_metadata, None) self.assertRaises(NotImplementedError, db2.add, None) self.assertRaises(NotImplementedError, db2.flush) self.assertRaises(NotImplementedError, db2.close) self.assertRaises(NotImplementedError, db2.get_counts, None) self.assertRaises(NotImplementedError, db2.get_data, None) self.assertRaises(NotImplementedError, db2.get_urls, None) self.assertRaises(NotImplementedError, db2.flush) <file_sep>/venv/Lib/site-packages/loads/output/_funkload.py from calendar import timegm from datetime import datetime from itertools import chain import platform import os.path from traceback import format_tb from xml.sax.saxutils import quoteattr from loads import __version__ from loads.util import total_seconds from loads.results.base import Test # This may be compatible with earlier versions of funkload, but that's the # one that had been used when writing this output class. FUNKLOAD_VERSION = "1.17.0" LOADS_EXPORT_VERSION = "{0}-0.2".format(__version__) # XML templates _HEADER = '<funkload version="{version}" time="{time}">' _FOOTER = '</funkload>' _CONFIG = '<config key="{key}" value="{value}"/>' _RESPONSE = '''\ <response cycle="{cycle:03}" cvus="{cvus:03}" thread="{thread:03}" suite="" name="" step="001" number="001" type="{method}" result="Successful" url="{url}" code="{status}" description="" time="{started}" duration="{elapsed}" />''' _RESULT = '''\ <testResult cycle="{cycle:03}" cvus="{cvus:03}" thread="{thread:03}" suite="{suite}" name="{name}" time="{time}" result="{result}" steps="1" duration="{duration}" connection_duration="0" requests="{requests}" pages="{requests}" xmlrpc="0" redirects="0" images="0" links="0" {traceback}/>''' class FunkloadOutput(object): """Generates outputs in the (undocumented) Funkload XML format. These reports can then be used with the the `fl-build-report <filename>` command-line tool to generate reports about the load. 
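
    Example (a hedged sketch -- assumes the loads CLI entry point and a
    funkload installation providing fl-build-report)::

        $ loads-runner mypkg.tests.TestSite.test_index --output funkload
        $ fl-build-report --html funkload-report.xml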
""" name = 'funkload' options = {'filename': ('Full path where to output funkload XML files', str, 'funkload-report.xml', True)} def __init__(self, test_results, args): self.filename = args['output_funkload_filename'] self.args = args self._test_results = test_results self.tests = {} self.current_tests = {} self.nodes = [] self.test_url = args.get('server_url', '') self.entries = [] def _get_key(self, test, loads_status, agent_id): return tuple((str(test),) + tuple(loads_status) + (agent_id,)) def _get_test(self, test, loads_status, agent_id): key = self._get_key(test, loads_status, agent_id) if key not in self.tests: self.startTest(test, loads_status, agent_id) return self.tests[key] def _get_current_test(self, loads_status, agent_id): # The current 'active' test for this status and agent key = self._get_key(None, loads_status, agent_id) return self.current_tests.get(key) # # Observer API # def push(self, called_method, *args, **kwargs): # Delegate to per-called-method handlers m = getattr(self, called_method, None) if m is not None: m(*args, **kwargs) def flush(self, _FOOTER=_FOOTER): self.nodes.append(_FOOTER) with open(self.filename, 'w') as f: for node in self.nodes: f.write(node + '\n') # # Push handlers # def startTestRun(self, agent_id=None, when=None, _HEADER=_HEADER, _CONFIG=_CONFIG): self.start_time = when or datetime.utcnow() cycles = self.args['users'] or ['1'] if isinstance(cycles, str): cycles = cycles.split(':') self.cycle_ids = dict((c, i) for i, c in enumerate(cycles)) module, class_, method = self.args['fqn'].rsplit('.', 2) config = { 'id': method, 'class': class_, 'class_description': 'Loads Funkload export {0}'.format( LOADS_EXPORT_VERSION), 'cycle_time': '0', # until issue #99 is resolved 'cycles': cycles, 'description': 'No test description', 'duration': self.args['duration'] or '1', 'log_xml': os.path.abspath(self.filename), 'method': method, 'module': module, 'node': platform.node(), 'python_version': platform.python_version(), 'server_url': self.test_url, # Maybe we can drop the following ones; depending if # funkload complains when they're not present # (but they don't really mean anything to loads.) 'class_title': '', 'configuration_file': '', 'sleep_time': '0.0', 'sleep_time_max': '0.0', 'sleep_time_min': '0.0', 'startup_delay': '0.0', } self.nodes.append(_HEADER.format( version=FUNKLOAD_VERSION, time=self.start_time.isoformat())) for key, value in config.items(): if value is not None: self.nodes.append(_CONFIG.format(key=key, value=value)) def add_hit(self, loads_status=None, started=0, elapsed=0, url='', method="GET", status=200, agent_id=None, _RESPONSE=_RESPONSE): """Generates a funkload XML item with the data coming from the request. Adds the new XML node to the list of nodes for this output. 
""" hit, user, current_hit, current_user = loads_status self.nodes.append(_RESPONSE.format( cycle=self.cycle_ids[user], cvus=user, method=method.lower(), url=url, status=status, thread=current_user, started=timegm(started.timetuple()), elapsed=total_seconds(elapsed))) test = self._get_current_test(loads_status, agent_id) if test: test.incr_counter('__funkload_requests') def addSuccess(self, test, loads_status, agent_id=None): test = self._get_test(test, loads_status, agent_id) test.success += 1 def addError(self, test, exc_info, loads_status, agent_id=None): test = self._get_test(test, loads_status, agent_id) test.errors.append(exc_info[2]) def addFailure(self, test, exc_info, loads_status, agent_id=None): test = self._get_test(test, loads_status, agent_id) test.failures.append(exc_info[2]) def startTest(self, test, loads_status=None, agent_id=None): hit, user = loads_status[:2] key = self._get_key(test, loads_status, agent_id) current = self._get_key(None, loads_status, agent_id) t = Test(name=test, hit=hit, user=user) # also record the *current* test for the given loads_status self.current_tests[current] = self.tests[key] = t def stopTest(self, test, loads_status=None, agent_id=None, _RESULT=_RESULT): """Generates funkload XML items with the data concerning test results. Adds new XML nodes to the list of nodes for this output. """ hit, user, current_hit, current_user = loads_status t = self._get_test(test, loads_status, agent_id) t.end = datetime.utcnow() try: requests = t.get_counter('__funkload_requests') except KeyError: requests = 0 per_test = { 'cycle': self.cycle_ids[user], 'cvus': user, 'duration': t.duration, 'name': test._testMethodName, 'suite': test.__class__.__name__, 'thread': current_user, 'time': timegm(t.start.timetuple()), 'requests': requests, } for traceback in chain(t.errors, t.failures): traceback = 'traceback={0}'.format( quoteattr('\n'.join(format_tb(traceback)))) self.nodes.append(_RESULT.format( result='Failure', traceback=traceback, **per_test)) for _ in xrange(t.success): self.nodes.append(_RESULT.format( result='Successful', traceback='', **per_test)) <file_sep>/venv/Lib/site-packages/loads/tests/test_main.py import unittest2 import os from StringIO import StringIO import sys import contextlib import re import mock from unittest2 import skipIf from loads.main import main, add_options from loads.tests.test_functional import start_servers, stop_servers from loads.tests.support import hush from loads import __version__ config = os.path.join(os.path.dirname(__file__), 'config.ini') _WANTED = """\ Broker running on pid [0-9]+ 10 agents registered - [0-9]+ on .* - [0-9]+ on .* - [0-9]+ on .* - [0-9]+ on .* - [0-9]+ on .* - [0-9]+ on .* - [0-9]+ on .* - [0-9]+ on .* - [0-9]+ on .* - [0-9]+ on .* endpoints: - backend: ipc:///tmp/loads-back.ipc - frontend: ipc:///tmp/loads-front.ipc - heartbeat: tcp://0.0.0.0:9876 - publisher: ipc:///tmp/loads-publisher.ipc - receiver: ipc:///tmp/loads-broker-receiver.ipc - register: ipc:///tmp/loads-reg.ipc We have 1 run\(s\) right now: - .* with 10 agent\(s\)""" @skipIf('TRAVIS' in os.environ, 'not running this on Travis') class TestRunner(unittest2.TestCase): @classmethod def setUpClass(cls): if 'TRAVIS' in os.environ: return start_servers() @classmethod def tearDownClass(cls): if 'TRAVIS' in os.environ: return stop_servers() @hush def test_config(self): args = ['--config', config, 'loads.examples.test_blog.TestWebSite.test_something', '--quiet'] main(args) @contextlib.contextmanager def capture_stdout(self): output = StringIO() old 
= sys.stdout sys.stdout = output try: yield output except SystemExit: pass finally: sys.stdout = old output.seek(0) @hush def test_check_cluster(self): args = ['--check-cluster'] with self.capture_stdout() as output: main(args) output = output.read().strip() self.assertTrue('Success: 10' in output, output) def test_help(self): args = [] with self.capture_stdout() as output: main(args) self.assertTrue(output.read().strip().startswith('usage')) def test_version(self): args = ['--version'] with self.capture_stdout() as output: main(args) self.assertEqual(output.read().strip(), __version__) def test_purge_broker(self): args = ['--purge-broker'] with self.capture_stdout() as output: main(args) wanted = ['Nothing to purge.', 'We have 1 run(s) right now:\nPurged.'] self.assertTrue(output.read().strip() in wanted) def test_ping_broker(self): args = ['--ping-broker'] with self.capture_stdout() as output: main(args) output = output.read().strip() self.assertTrue(re.search(_WANTED, output) is not None, output) def test_add_options(self): class ClassA(object): name = 'classa' options = {'foo': ('helptext', int, 2, True)} class ClassB(object): name = 'classb' options = {'bar': ('helptext', str, 'bar', True)} parser = mock.MagicMock() items = [ClassA, ClassB] add_options(items, parser, fmt='--test-{name}-{option}') self.assertEquals(parser.add_argument.mock_calls[0], mock.call('--test-classa-foo', default=2, type=int, help='helptext')) self.assertEquals(parser.add_argument.mock_calls[1], mock.call('--test-classb-bar', default='bar', type=str, help='helptext')) <file_sep>/venv/Lib/site-packages/loads/tests/test_websockets.py import unittest2 from loads.websockets import WebSocketClient, create_ws class TestWebSockets(unittest2.TestCase): def test_custom_klass(self): class WS(WebSocketClient): data = [] def received_message(self, m): super(WS, self).received_message(m) self.data.append(m) ws = create_ws('ws://example.com', None, None, klass=WS) self.assertTrue(isinstance(ws, WS)) <file_sep>/venv/Lib/site-packages/loads/tests/test_message.py import unittest2 from loads.transport.message import Message from loads.util import json class TestMessage(unittest2.TestCase): def test_message(self): data = {'1': 2} msg = Message(**data) self.assertEquals(msg.serialize(), json.dumps(data)) msg = Message.load_from_string(json.dumps(data)) self.assertEquals(msg.serialize(), json.dumps(data)) <file_sep>/venv/Lib/site-packages/loads/db/_python.py import zlib import os from collections import defaultdict from gevent.queue import Queue from zmq.green.eventloop import ioloop from loads.db import BaseDB from loads.util import json, dict_hash DEFAULT_DBDIR = os.path.join('/tmp', 'loads') ZLIB_START = 'x\x9c' ZLIB_END = 'x\x8c' GIGA = 1024. * 1024. * 1024. def read_zfile(filename): remaining = '' with open(filename, 'rb') as f: while True: data = remaining + f.read(1024) if not data: raise StopIteration() size = len(data) pos = 0 while pos < size: # grabbing a record rstart = data.find(ZLIB_START, pos) rend = data.find(ZLIB_END, rstart+1) if rend == -1 or rstart == rend: # not a full record break line = data[rstart:rend] if not line: break try: line = zlib.decompress(line) except zlib.error: raise ValueError(line) record = json.loads(line) yield record, line pos = rend + len(ZLIB_END) if pos < size: remaining = data[pos:] else: remaining = '' def get_dir_size(path): """Returns directory size in gigabytes """ total_size = 0. 
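    # walk the whole tree, accumulating file sizes in bytes; the sum is
    # converted to gigabytes on the way out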
for dirpath, dirnames, filenames in os.walk(path): for file_ in filenames: fp = os.path.join(dirpath, file_) total_size += os.path.getsize(fp) return total_size / GIGA class BrokerDB(BaseDB): """A simple DB that's synced on disc eventually """ name = 'python' options = {'directory': (DEFAULT_DBDIR, 'DB path.', str), 'sync_delay': (2000, 'Sync delay', int), 'max_size': (-1, 'Max Size in Gigabytes', float)} def _initialize(self): self.directory = self.params['directory'] self.sync_delay = self.params['sync_delay'] self.max_size = self.params['max_size'] if not os.path.exists(self.directory): os.makedirs(self.directory) self._buffer = defaultdict(Queue) self._errors = defaultdict(Queue) self._callback = ioloop.PeriodicCallback(self.flush, self.sync_delay, self.loop) self._callback.start() self._counts = defaultdict(lambda: defaultdict(int)) self._dirty = False self._metadata = defaultdict(dict) self._urls = defaultdict(lambda: defaultdict(int)) self._headers = defaultdict(dict) self._key_headers = defaultdict(dict) def ping(self): return True def _update_headers(self, run_id): filename = os.path.join(self.directory, run_id + '-headers.json') if os.path.exists(filename): with open(filename) as f: headers = json.load(f) # int keys are saved as strings in json # so we need to convert them back headers = dict([(int(key), value) for key, value in headers.items()]) self._headers[run_id].update(headers) for key, value in self._headers[run_id].items(): self._key_headers[run_id][value] = key def _compress_headers(self, run_id, data): result = {} headers = self._headers[run_id] for key, value in data.items(): if key not in self._key_headers[run_id]: self._dirty = True compressed_keys = headers.keys() if len(compressed_keys) == 0: next_compressed_key = 0 else: compressed_keys.sort() next_compressed_key = compressed_keys[-1] + 1 self._headers[run_id][next_compressed_key] = key self._key_headers[run_id][key] = next_compressed_key key = next_compressed_key else: key = self._key_headers[run_id][key] result[key] = value return result def _uncompress_headers(self, run_id, data): result = {} for key, value in data.items(): result[self._headers[run_id][int(key)]] = value return result def update_metadata(self, run_id, **metadata): existing = self._metadata.get(run_id, {}) # reload existing file if any if existing == {}: filename = os.path.join(self.directory, run_id + '-metadata.json') if os.path.exists(filename): with open(filename) as f: existing = json.load(f) existing.update(metadata) self._dirty = True self._metadata[run_id] = existing def save_metadata(self, run_id, metadata): self._metadata[run_id] = metadata self._dirty = True def get_metadata(self, run_id): self.flush() filename = os.path.join(self.directory, run_id + '-metadata.json') if not os.path.exists(filename): return {} with open(filename) as f: return json.load(f) def add(self, data): run_id = data['run_id'] self.update_metadata(run_id, has_data=1) data_type = data.get('data_type', 'unknown') self._counts[run_id][data_type] += data.get('size', 1) self._buffer[run_id].put(dict(data)) if 'url' in data: self._urls[run_id][data['url']] += 1 if data_type == 'addError': self._errors[run_id].put(dict(data)) self._dirty = True def _dump_queue(self, run_id, queue, filename, compress=True): # lines qsize = queue.qsize() if qsize == 0: return if run_id is None: run_id = 'unknown' with open(filename, 'ab+') as f: for i in range(qsize): line = queue.get() if 'run_id' not in line: line['run_id'] = run_id line = self._compress_headers(run_id, line) if 
compress: f.write(zlib.compress(json.dumps(line)) + ZLIB_END) else: f.write(json.dumps(line) + '\n') def prepare_run(self): if self.max_size == -1: return current_size = get_dir_size(self.directory) runs = self.get_runs() if current_size >= self.max_size: # we need to wipe up older runs until we have enough space for run_id in runs: self.delete_run(run_id) if get_dir_size(self.directory) < self.max_size: return def delete_run(self, run_id): for suffix in ('metadata', 'errors', 'db', 'counts', 'urls', 'headers'): filename = os.path.join(self.directory, '%s-%s.json' % (run_id, suffix)) if os.path.exists(filename): os.remove(filename) for mapping in (self._counts, self._metadata, self._urls, self._headers, self._key_headers): if run_id in mapping: del mapping[run_id] def is_summarized(self, run_id): db = os.path.join(self.directory, '%s-db.json' % run_id) meta = os.path.join(self.directory, '%s-metadata.json' % run_id) return os.path.exists(meta) and not os.path.exists(db) def summarize_run(self, run_id): # we just remove the -db file # XXX in the future we'll want to move it to another # storage so we keep the details. filename = os.path.join(self.directory, '%s-db.json' % run_id) if os.path.exists(filename): os.remove(filename) def flush(self): if not self._dirty: return # saving metadata files for run_id in self._metadata: # metadata filename = os.path.join(self.directory, run_id + '-metadata.json') with open(filename, 'w') as f: json.dump(self._metadata[run_id], f) if len(self._buffer) == 0: return for run_id, queue in self._errors.items(): # error lines filename = os.path.join(self.directory, run_id + '-errors.json') self._dump_queue(run_id, queue, filename, compress=False) for run_id, queue in self._buffer.items(): # all lines filename = os.path.join(self.directory, run_id + '-db.json') self._dump_queue(run_id, queue, filename) # counts filename = os.path.join(self.directory, run_id + '-counts.json') counts = dict(self._counts[run_id]) with open(filename, 'w') as f: json.dump(counts, f) # urls filename = os.path.join(self.directory, run_id + '-urls.json') with open(filename, 'w') as f: json.dump(self._urls[run_id], f) # headers filename = os.path.join(self.directory, run_id + '-headers.json') with open(filename, 'w') as f: json.dump(self._headers[run_id], f) self._dirty = False def close(self): self._callback.stop() def get_urls(self, run_id): self.flush() filename = os.path.join(self.directory, run_id + '-urls.json') if not os.path.exists(filename): return {} with open(filename) as f: return json.load(f) def get_counts(self, run_id): self.flush() filename = os.path.join(self.directory, run_id + '-counts.json') if not os.path.exists(filename): return {} with open(filename) as f: return json.load(f) def get_runs(self): runs = [] for path in os.listdir(self.directory): if path.endswith('-metadata.json'): creat_ = os.stat(os.path.join(self.directory, path)).st_mtime runs.append((creat_, path)) # from older to newer... runs.sort() return [path[:-len('-metadata.json')] for created, path in runs] def _batch(self, filename, start=None, size=None, filter=None, run_id=None, decompress=True): if start is not None and size is not None: end = start + size else: end = None # XXX suboptimal iterates until start is reached. 
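        # records before `start` are read and discarded rather than
        # seeked past, so the cost of pagination grows with the offset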
sent = 0 current = 0 def _reader(): if decompress: for record, line in read_zfile(filename): yield record, line else: with open(filename, 'rb') as f: for line in f: yield json.loads(line), line for current, (record, line) in enumerate(_reader()): record = self._uncompress_headers(run_id, record) # filtering if filter is not None and filter(record): continue if start is not None and current < start: continue elif end is not None and current > end or sent == size: raise StopIteration() yield record sent += 1 def get_errors(self, run_id, start=None, size=None): if size is not None and start is None: start = 0 self.flush() filename = os.path.join(self.directory, run_id + '-errors.json') if not os.path.exists(filename): raise StopIteration() self._update_headers(run_id) for data in self._batch(filename, start, size, run_id=run_id, decompress=False): yield data def get_data(self, run_id, data_type=None, groupby=False, start=None, size=None): if size is not None and start is None: start = 0 self.flush() filename = os.path.join(self.directory, run_id + '-db.json') if not os.path.exists(filename): raise StopIteration() self._update_headers(run_id) def _filtered(data): return (data_type is not None and data_type != data.get('data_type')) if not groupby: for data in self._batch(filename, start, size, _filtered, run_id=run_id): yield data else: result = {} for data in self._batch(filename, start, size, _filtered, run_id=run_id): data_hash = dict_hash(data, ['count']) if data_hash in result: result[data_hash]['count'] += 1 else: data['count'] = 1 result[data_hash] = data for data in result.values(): yield data <file_sep>/DjangoStock/Stocks/templates/Stocks/news.html {% extends 'Stocks/first.html' %} {% block content %} <head> <!-- Required meta tags --> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no"> <!-- Bootstrap CSS --> <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous"> <title>Searching KorStock</title> </head> <body> {% csrf_token %} <div class="container-fluid"> <!-- Page Heading --> <br> <h1 class="h3 mb-2 text-gray-800">국내 주식 발행정보 조회</h1> <br> <ul> <li><p class="mb-4">국내 회사명을 조회하면 주식발행정보를 확인 할 수 있습니다.</p></li> <li><p class="mb-4">법인등록번호를 복사하여 Financial statement에 검색하시면 해당 기업의 재무제표를 확인 할 수 있습니다.</p></li> </ul> <!-- DataTales Example --> <div class="card shadow mb-4"> <div class="card-header py-3"> <h6 class="m-0 font-weight-bold text-primary">Stock statement Tables</h6> </div> <div class="card-body"> <div class="table-responsive"> <table class="table table-bordered" id="dataTable" width="100%" cellspacing="0"> <thead> <tr class = "table-primary" align = center> <th scope="col">#</th> <th scope="col">항목명</th> <th scope="col">데이터</th> <th scope="col">항목설명</th> </tr> </thead> <tbody> <tr align = center> <th scope="row">1</th> <td>주식회사발행회사명</td> <td>{{ allData.stckIssuCmpyNm }}</td> <td>주식발행사의 명칭</td> </tr> <tr align = center> <th scope="row">2</th> <td>기준일자</td> <td>{{ allData.basDt }}</td> <td>작업 또는 거래의 기준이 되는 일자(년월일)</td> </tr> <tr align = center> <th scope="row">3</th> <td>법인등록번호</td> <td><a href = "">{{ allData.crno }}</a></td> <td>법인등록번호</td> </tr> <tr align = center> <th scope="row">4</th> <td>예탁취소일자</td> <td>{{ allData.dpsgCanDt }}</td> <td>예탁을 취소한 일자</td> </tr> <tr align = center> <th scope="row">5</th> <td>예탁등록일자</td> <td>{{ allData.dpsgRegDt }}</td> <td>종목상장 적용일자(상장일)</td> </tr> <tr align = center> <th scope="row">6</th> 
<td>ISIN코드</td> <td>{{ allData.isinCd }}</td> <td>국제 채권 식별 번호</td> </tr> <tr align = center> <th scope="row">7</th> <td>ISIN코드명</td> <td>{{ allData.isinCdNm }}</td> <td>국제인증 고유번호 코드 이름</td> </tr> <tr align = center> <th scope="row">8</th> <td>발행형태 구분명</td> <td>{{ allData.issuFrmtClsfNm }}</td> <td>발행형태를 구분하는 명칭</td> </tr> <tr align = center> <th scope="row">9</th> <td>발행 주식 수</td> <td>{{ allData.issuStckCnt }}</td> <td>발행 주식의 총 수량</td> </tr> <tr align = center> <th scope="row">10</th> <td>상장폐지일자</td> <td>{{ allData.lstgAbolDt }}</td> <td>종목상장 폐지일자</td> </tr> <tr align = center> <th scope="row">11</th> <td>상장일자</td> <td>{{ allData.lstgDt }}</td> <td>종목상장 적용일자(상장일)</td> </tr> <tr align = center> <th scope="row">12</th> <td>유가증권종목종류코드</td> <td>{{ allData.scrsItmsKcd }}</td> <td>해당 유가증권의 종목종류</td> </tr> <tr align = center> <th scope="row">13</th> <td>유가증권종목종류코드명</td> <td>{{ allData.scrsItmsKcdNm }}</td> <td>해당 유가증권의 종목종류를 관리하는 코드명</td> </tr> <tr align = center> <th scope="row">14</th> <td>주식액면가</td> <td>{{ allData.stckParPrc }}</td> <td>주식의 권면에 기재된 금액</td> </tr> </tbody> </table> </div> </div> </div> </div> <!-- /.container-fluid --> </div> <script src="https://code.jquery.com/jquery-3.5.1.slim.min.js" integrity="<KEY>" crossorigin="anonymous"></script> <script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script> <script src="https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script> {% endblock %} </body> <file_sep>/venv/Lib/site-packages/loads/transport/message.py """ Message class. """ from loads.util import json class Message(object): def __init__(self, **data): self.data = data def __str__(self): return 'Message(%s)' % self.serialize() def serialize(self): return json.dumps(self.data) @classmethod def load_from_string(cls, data): return cls(**json.loads(data)) <file_sep>/venv/Lib/site-packages/loads/observers/__init__.py from loads.observers._irc import IRCObserver as irc from loads.observers._email import EMailObserver as email observers = (irc, email) <file_sep>/venv/Lib/site-packages/loads/tests/jobs.py # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. import time import sys from loads.util import logger, set_logger from loads.case import TestCase set_logger(True, logfile='stdout') def _p(msg): sys.stdout.write(msg + '\n') logger.debug(msg) sys.stdout.flush() def fail(job): _p('Starting loads.tests.jobs.fail') try: raise ValueError(job.data) finally: _p('Ending loads.tests.jobs.fail') def timeout(job): _p('Starting loads.tests.jobs.timeout') time.sleep(2.) 
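    # sleep long enough to outlive a short client timeout (this job is
    # presumably used by the suite to exercise TimeoutError handling)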
try: return job.data finally: _p('Ending loads.tests.jobs.timeout') def timeout_overflow(job): _p('Starting loads.tests.jobs.timeout_overflow') time.sleep(job.data['age']) try: return str(job.data['age']) finally: _p('Ending loads.tests.jobs.timeout_overflow') class SomeTests(TestCase): def test_one(self): pass <file_sep>/venv/Lib/site-packages/loads/tests/test_full.py import time import os import gevent import subprocess import sys import requests import webtest from loads.case import TestCase from loads.tests.support import hush, patch_socket, unpatch_socket _HERE = os.path.dirname(__file__) _SERVER = [sys.executable, os.path.join(_HERE, '..', 'examples', 'echo_server.py')] class TestWebSite(TestCase): server_url = 'http://localhost:9000' @classmethod def setUpClass(cls): cls.old_attrs = patch_socket() devnull = open('/dev/null', 'w') cls._server = subprocess.Popen(_SERVER, stdout=devnull, stderr=devnull) # wait for the echo server to be started tries = 0 while True: try: requests.get(cls.server_url) break except requests.ConnectionError: time.sleep(.1) tries += 1 if tries > 20: raise @classmethod def tearDownClass(cls): cls._server.terminate() cls._server.wait() unpatch_socket(cls.old_attrs) @hush def test_something(self): res = self.app.get('/') self.assertTrue('chatform' in res.body) results = [] def callback(m): results.append(m.data) ws = self.create_ws('ws://localhost:9000/ws', protocols=['chat', 'http-only'], callback=callback) one = 'something' + os.urandom(10).encode('hex') two = 'happened' + os.urandom(10).encode('hex') ws.send(one) ws.receive() ws.send(two) ws.receive() start = time.time() while one not in results and two not in results: gevent.sleep(0) if time.time() - start > 1: raise AssertionError('Too slow') def test_will_fail(self): res = self.app.get('/') self.assertFalse('xFsj' in res.body) def test_webtest_integration(self): self.assertRaises(webtest.AppError, self.app.get, '/', status=400) <file_sep>/venv/Lib/site-packages/loads/transport/brokerctrl.py import functools import random import time import sys import traceback from collections import defaultdict import datetime from uuid import uuid4 from loads.db import get_database from loads.transport.util import DEFAULT_AGENT_TIMEOUT from loads.util import logger, resolve_name, json, unbatch from loads.results import RemoteTestResult class NotEnoughWorkersError(Exception): pass class NoDetailedDataError(Exception): pass def _compute_observers(observers): """Reads the arguments and returns an observers list""" def _resolver(name): try: return resolve_name('loads.observers.%s' % name) except ImportError: return resolve_name(name) if observers is None: return [] return [_resolver(observer) for observer in observers] class BrokerController(object): def __init__(self, broker, loop, db='python', dboptions=None, agent_timeout=DEFAULT_AGENT_TIMEOUT): self.broker = broker self.loop = loop # agents registration and timers self._agents = {} self._agent_times = {} self.agent_timeout = agent_timeout self._runs = {} # local DB if dboptions is None: dboptions = {} self._db = get_database(db, self.loop, **dboptions) # cached agents results def default_status(): return {"result": {"status": {}, "command": "STATUS"}} self._cached_status = defaultdict(default_status) @property def agents(self): return self._agents def _remove_agent(self, agent_id, reason='unspecified'): logger.debug('%r removed. 
%s' % (agent_id, reason))
        if agent_id in self._agents:
            del self._agents[agent_id]
        if agent_id in self._agent_times:
            del self._agent_times[agent_id]
        if agent_id in self._runs:
            del self._runs[agent_id]
        if agent_id in self._cached_status:
            del self._cached_status[agent_id]

    def register_agent(self, agent_info):
        agent_id = agent_info['agent_id']
        if agent_id not in self._agents:
            logger.debug('registering agent %s' % str(agent_info))
            self._agents[agent_id] = agent_info

    def unregister_agents(self, reason='unspecified', keep_fresh=True):
        logger.debug('unregistering some agents')
        for agent_id in self._agents.keys():
            self.unregister_agent(agent_id, reason)

    def unregister_agent(self, agent_id, reason='unspecified'):
        if agent_id in self._agents:
            self._remove_agent(agent_id, reason)

    def _associate(self, run_id, agents):
        when = time.time()
        for agent_id in agents:
            self._runs[agent_id] = run_id, when

    def reserve_agents(self, num, run_id):
        # we want to run the same command on several agents,
        # provisioning them
        agents = []
        available = [wid for wid in self._agents.keys()
                     if wid not in self._runs]

        if num > len(available):
            raise NotEnoughWorkersError('Not enough agents')

        while len(agents) < num:
            agent_id = random.choice(available)
            if self._check_agent(agent_id):
                agents.append(agent_id)
                available.remove(agent_id)

        self._associate(run_id, agents)
        return agents

    def send_to_agent(self, agent_id, msg, target=None):
        # now we can send to the right guy
        data = [str(agent_id), '', self.broker.pid, '']
        if target is not None:
            data += [target, '']

        data.append(msg)
        try:
            self.broker._backend.send_multipart(data)
        except Exception, e:
            logger.debug('Failed to send %s' % str(msg))
            # we don't want to die on error; we just log it
            exc_type, exc_value, exc_traceback = sys.exc_info()
            exc = traceback.format_tb(exc_traceback)
            exc.insert(0, str(e))
            logger.error('\n'.join(exc))
            logger.debug('Removing agent')
            self._remove_agent(agent_id, '\n'.join(exc))

    def clean(self):
        """This is called periodically to:

        - send a _STATUS command to all active agents to refresh
          their status

        - detect agents that have not responded for a while and discard
          them from the run and from the agents list
        """
        now = time.time()

        for agent_id, (run_id, when) in self._runs.items():
            # when did we last get a response from this agent?
            last_contact = self._agent_times.get(agent_id)

            # has the agent been unresponsive for too long?
            if (last_contact is not None and
                    now - last_contact > self.agent_timeout):
                # let's kill the agent...
                lag = now - last_contact
                logger.debug('No response from agent since %d s.' % lag)
                logger.debug('Killing agent %s' % str(agent_id))
                quit = json.dumps({'command': 'QUIT'})
                self.send_to_agent(agent_id, quit)

                # and remove it from the run
                run_id = self._terminate_run(agent_id)

                if run_id is not None:
                    logger.debug('publishing end of run')
                    # if the tests are finished, publish this on the pubsub.
                    msg = json.dumps({'data_type': 'run-finished',
                                      'run_id': run_id})
                    self.broker._publisher.send(msg)
            else:
                # initialize the timer
                if last_contact is None:
                    self._agent_times[agent_id] = now

                # send a _STATUS call to each active agent
                status_msg = json.dumps({'command': '_STATUS',
                                         'run_id': run_id})
                self.send_to_agent(agent_id, status_msg)

    def update_status(self, agent_id, result):
        """Checks the status of the processes. If all the processes are done,
        call self.test_ended() and return the run_id. Returns None otherwise.
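
        Responses to the indirect '_STATUS' command are also cached per
        agent, so later CTRL_AGENT_STATUS calls can be answered without
        another round-trip to the agent.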
""" if result.get('command') == '_STATUS': self._cached_status[agent_id] = {'result': result} def _extract_status(st): if isinstance(st, basestring): return st return st['status'] statuses = [_extract_status(st) for st in result['status'].values()] if 'running' not in statuses: logger.debug('agent %s not running anything' % agent_id) return self._terminate_run(agent_id) self._agent_times[agent_id] = time.time() def _terminate_run(self, agent_id): # ended if agent_id in self._agent_times: del self._agent_times[agent_id] if agent_id not in self._runs: return run_id, when = self._runs[agent_id] logger.debug('removing %s from run %s' % (agent_id, run_id)) del self._runs[agent_id] # is the whole run over ? running = [run_id_ for (run_id_, when_) in self._runs.values()] # we want to tell the world if the run has ended if run_id not in running: logger.debug('the whole run %s is over, removing it' % run_id) self.test_ended(run_id) return run_id # # DB APIs # def save_metadata(self, run_id, data): self._db.save_metadata(run_id, data) def update_metadata(self, run_id, **metadata): self._db.update_metadata(run_id, **metadata) def get_metadata(self, msg, data): return self._db.get_metadata(data['run_id']) def save_data(self, agent_id, data): # registering the agent as alive hostname = data.get('hostname', '?') agent_pid = agent_id.split('-')[-1] self.register_agent({'pid': agent_pid, 'hostname': hostname, 'agent_id': agent_id}) if agent_id in self._runs: data['run_id'], data['started'] = self._runs[agent_id] else: # this means we are receiving data from an agent that's # no longer associated with the run, so # we want to associate it back self._associate(data.get('run_id'), [agent_id]) if data.get('data_type') == 'batch': for data_type, message in unbatch(data): message['data_type'] = data_type callback = functools.partial(self._db.add, message) self.loop.add_callback(callback) else: self._db.add(data) def get_urls(self, msg, data): run_id = data['run_id'] return self._db.get_urls(run_id) def get_data(self, msg, data): # XXX stream ? run_id = data['run_id'] if self._db.is_summarized(run_id): raise NoDetailedDataError(run_id) start = data.get('start') if start is not None: start = int(start) size = data.get('size') if size is not size: size = int(size) options = {'data_type': data.get('data_type'), 'groupby': data.get('groupby', False), 'start': start, 'size': size} return list(self._db.get_data(run_id, **options)) def get_counts(self, msg, data): run_id = data['run_id'] return self._db.get_counts(run_id) def flush_db(self): return self._db.flush() def _check_agent(self, agent_id): # XXX we'll want agents to register themselves # again after each heartbeat # # The broker will removing idling agents # just before sending a hearbeat. 
# # That will let us make sure a dead agent on # a distant box is removed if agent_id in self._agent_times: last_contact = self._agent_times.get(agent_id) if last_contact is not None: duration = time.time() - last_contact if duration > self.agent_timeout: logger.debug('The agent %r is slow (%.2f)' % (agent_id, duration)) return False return True def run_command(self, cmd, msg, data): cmd = cmd.lower() target = msg[0] # command for agents if cmd.startswith('agent_'): command = cmd[len('agent_'):].upper() # when a STATUS call is made, we make it # an indirect call if command == 'STATUS': command = '_STATUS' data['command'] = command agent_id = str(data['agent_id']) self.send_to_agent(agent_id, json.dumps(data), target=target) if command == '_STATUS': logger.debug('current cache %s' % str(self._cached_status)) return self._cached_status[agent_id] return if not hasattr(self, cmd): raise AttributeError(cmd) # calling the command asynchronously def _call(): try: res = getattr(self, cmd)(msg, data) res = {'result': res} self.broker.send_json(target, res) except Exception, e: logger.debug('Failed') exc_type, exc_value, exc_traceback = sys.exc_info() exc = traceback.format_tb(exc_traceback) exc.insert(0, str(e)) self.broker.send_json(target, {'error': exc}) self.loop.add_callback(_call) return def list_runs(self, msg, data): runs = defaultdict(list) for agent_id, (run_id, when) in self._runs.items(): runs[run_id].append((agent_id, when)) return runs def stop_run(self, msg, data): run_id = data['run_id'] agents = [] for agent_id, (_run_id, when) in self._runs.items(): if run_id != _run_id: continue agents.append(agent_id) if len(agents) == 0: # we don't have any agents running that test, let's # force the flags in the DB self.update_metadata(run_id, stopped=True, active=False, ended=time.time()) return [] # now we have a list of agents to stop stop_msg = json.dumps({'command': 'STOP'}) for agent_id in agents: self.send_to_agent(agent_id, stop_msg) return agents # # Observers # def test_ended(self, run_id): # first of all, we want to mark it done in the DB logger.debug('test %s ended marking the metadata' % run_id) self.update_metadata(run_id, stopped=True, active=False, ended=time.time()) # we want to ping all observers that things are done # for a given test. # get the list of observers args = self._db.get_metadata(run_id) observers = _compute_observers(args.get('observer')) if observers == []: self._db.summarize_run(run_id) return logger.debug('test %s ended calling the observers' % run_id) # if we are using the web dashboard - we're just providing a link if self.broker.web_root is not None: test_result = '%s/run/%s' % (self.broker.web_root, run_id) else: # rebuild the test result instance test_result = RemoteTestResult(args=args) test_result.args = args if 'started' in args: started = args['started'] started = datetime.datetime.utcfromtimestamp(started) test_result.startTestRun(when=started) test_result.set_counts(self._db.get_counts(run_id)) # for each observer we call it with the test results for observer in observers: options = {} prefix = 'observer_%s_' % observer.name for name, value in args.items(): if name.startswith(prefix): options[name[len(prefix):]] = value # get the options try: observer(args=args, **options)(test_result) except Exception: # the observer code failed. 
We want to log it logger.error('%r failed' % observer) self._db.summarize_run(run_id) # # The run apis # def run(self, msg, data): target = msg[0] # create a unique id for this run run_id = str(uuid4()) # get some agents try: agents = self.reserve_agents(data['agents'], run_id) except NotEnoughWorkersError: self.broker.send_json(target, {'error': 'Not enough agents'}) return # make sure the DB is prepared self._db.prepare_run() # send to every agent with the run_id and the receiver endpoint data['run_id'] = run_id data['args']['zmq_receiver'] = self.broker.endpoints['receiver'] # replace CTRL_RUN by RUN data['command'] = 'RUN' # rebuild the ZMQ message to pass to agents msg = json.dumps(data) # notice when the test was started data['args']['started'] = time.time() data['args']['active'] = True # save the tests metadata in the db self.save_metadata(run_id, data['args']) self.flush_db() for agent_id in agents: self.send_to_agent(agent_id, msg) # tell the client which agents where selected. res = {'result': {'agents': agents, 'run_id': run_id}} self.broker.send_json(target, res) <file_sep>/venv/Lib/site-packages/loads/runners/external.py import time import os import subprocess import sys import zmq from zmq.eventloop import ioloop, zmqstream from loads.runners.local import LocalRunner from loads.util import null_streams, json, logger DEFAULT_EXTERNAL_RUNNER_RECEIVER = "ipc:///tmp/loads-external-receiver.ipc" class ExternalRunner(LocalRunner): """Test runner which uses a subprocess to do the actual job. When ran locally, this runner makes the spawned processes report to this instance, otherwise it makes them report to the broker if the run is using a cluster. This runner watches the state of the underlying processes to determine if the runs are finished or not. Once all the runs are done, it exits. """ name = 'external' options = { 'process-timeout': ('Time to wait until we consider the run is over', int, 2, True), } def __init__(self, args=None, loop=None): if args is None: args = {} super(ExternalRunner, self).__init__(args) self._current_step = 0 self._step_started_at = None self._duration = self.args.get('duration') self._timeout = args.get('external_process_timeout', 2) self._processes = [] self._processes_pending_cleanup = [] # hits and users are lists that can be None. hits, users = [1], [1] if self.args.get('hits') is not None: hits = self.args['hits'] if self.args.get('users') is not None: users = self.args['users'] self.args['hits'] = hits self.args['users'] = users self._nb_steps = max(len(hits), len(users)) self._loop = loop or ioloop.IOLoop() # Check the status of the processes every so-often.(500ms) cb = ioloop.PeriodicCallback(self._check_processes, 500, self._loop) cb.start() self._receiver_socket = (self.args.get('zmq_receiver') or DEFAULT_EXTERNAL_RUNNER_RECEIVER) @property def step_hits(self): """How many hits to perform in the current step.""" # Take the last value or fallback on the last one. if len(self.args['hits']) >= self._current_step + 1: step = self._current_step else: step = -1 return self.args['hits'][step] @property def step_users(self): """How many users to spawn for the current step.""" # Take the last value or fallback on the last one. if len(self.args['users']) >= self._current_step + 1: step = self._current_step else: step = -1 return self.args['users'][step] def _check_processes(self): """When all the processes are finished or the duration of the test is more than the wanted duration, stop the loop and exit. 
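
        Terminated subprocesses are also reaped here so they do not linger
        as zombies, and each step is bounded by a time limit derived from
        the configured duration (or from step_hits * the process timeout).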
""" # Poll procs that are pending cleanup, so we don't leave zombies. pending = [] for proc in self._processes_pending_cleanup: if proc.poll() is None: pending.append(proc) self._processes_pending_cleanup = pending # Find which processes have terminated, which are still active. active = [] terminated = [] for proc in self._processes: if proc.poll() is None: active.append(proc) else: if proc.returncode != 0: logger.warning('Process terminated with code %r' % proc.returncode) terminated.append(proc) self._processes = active # Force step to be over if the processes have run for too long. if self._duration is not None: time_limit = self._duration + self._timeout else: time_limit = self.step_hits * self._timeout time_limit = self._step_started_at + time_limit # If we've reached the end of the step, start the next one. now = time.time() if len(self._processes) == 0 or now > time_limit: self._start_next_step() # Refresh the outputs every time we check the processes status, # but do it only if we're not in slave mode. if not self.slave: self.refresh() def _start_next_step(self): # Reap any outstanding procs from the previous step. # We will poll them for successful termination at next proc check. for proc in self._processes: if proc.poll() is None: proc.terminate() self._processes_pending_cleanup.append(proc) self._processes = [] # Reinitialize some variables and start a new run, or exit. if self._current_step >= self._nb_steps: self.stop_run() else: self._step_started_at = time.time() for cur_user in range(self.step_users): self.spawn_external_runner(cur_user + 1) self._current_step += 1 def _recv_result(self, msg): """Called each time the underlying processes send a message via ZMQ. This is used only if we are *not* in slave mode (in slave mode, the messages are sent directly to the broker). """ # Actually add a callback to process the results to avoid blocking the # receival of messages. self._loop.add_callback(self._process_result, msg) def _process_result(self, msg): data = json.loads(msg[0]) data_type = data.pop('data_type') # run_id is only used when in distributed mode, which isn't the # case here, so we get rid of it. data.pop('run_id') if hasattr(self.test_result, data_type): method = getattr(self.test_result, data_type) method(**data) def _execute(self): """Spawn all the tests needed and wait for them to finish. """ # If we're not in slave mode, we need to receive the data ourself # and build up a TestResult object. In slave mode the spawned procs # will report directly to the broker. if not self.slave: self.context = zmq.Context() self._receiver = self.context.socket(zmq.PULL) self._receiver.bind(self._receiver_socket) self._rcvstream = zmqstream.ZMQStream(self._receiver, self._loop) self._rcvstream.on_recv(self._recv_result) self._prepare_filesystem() self.test_result.startTestRun(self.args.get('agent_id')) self._start_next_step() self._loop.start() if not self.slave: self._receiver.close() self.context.destroy() def spawn_external_runner(self, cur_user): """Spawns an external runner with the given arguments. The loads options are passed via environment variables, that is: - LOADS_AGENT_ID for the id of the agent. - LOADS_ZMQ_RECEIVER for the address of the ZMQ socket to send the results to. - LOADS_RUN_ID for the id of the run (shared among workers of the same run). 
- LOADS_TOTAL_USERS for the total number of users in this step - LOADS_CURRENT_USER for the current user number - LOADS_TOTAL_HITS for the total number of hits in this step - LOADS_DURATION for the total duration of this step, if any We use environment variables because that's the easiest way to pass parameters to non-python executables. """ cmd = self.args['test_runner'].format(test=self.args['fqn']) env = os.environ.copy() env['LOADS_AGENT_ID'] = str(self.args.get('agent_id')) env['LOADS_ZMQ_RECEIVER'] = self._receiver_socket env['LOADS_RUN_ID'] = self.args.get('run_id', '') env['LOADS_TOTAL_USERS'] = str(self.step_users) env['LOADS_CURRENT_USER'] = str(cur_user) if self._duration is None: env['LOADS_TOTAL_HITS'] = str(self.step_hits) else: env['LOADS_DURATION'] = str(self._duration) def silent_output(): null_streams([sys.stdout, sys.stderr, sys.stdin]) os.setsid() # Run the subprocess in a new session. cmd_args = { 'env': env, 'preexec_fn': silent_output, 'cwd': self.args.get('test_dir'), } process = subprocess.Popen(cmd.split(' '), **cmd_args) self._processes.append(process) def stop_run(self): self.test_result.stopTestRun(self.args.get('agent_id')) self._loop.stop() self.flush() <file_sep>/venv/Lib/site-packages/loads/tests/test_results_zmqrelay.py from unittest2 import TestCase import traceback from StringIO import StringIO import zmq.green as zmq from loads.results import ZMQTestResult from loads.tests.support import get_tb, hush from loads.util import json import mock class TestZmqRelay(TestCase): def setUp(self): self.context = zmq.Context() self._pull = self.context.socket(zmq.PULL) self._pull.bind('inproc://ok') self.relay = ZMQTestResult(args={'zmq_receiver': 'inproc://ok', 'zmq_context': self.context}) def tearDown(self): self.context.destroy() self.relay.close() def test_add_success(self): self.relay.addSuccess(mock.sentinel.test, str(mock.sentinel.loads_status)) recv = json.loads(self._pull.recv()) self.assertEqual(recv['loads_status'], str(mock.sentinel.loads_status)) self.assertEqual(recv['test'], str(mock.sentinel.test)) @hush def test_add_failure(self): exc = get_tb() __, __, tb = exc string_tb = StringIO() traceback.print_tb(tb, file=string_tb) string_tb.seek(0) self.relay.addFailure(mock.sentinel.test, exc, str(mock.sentinel.loads_status)) recv = json.loads(self._pull.recv()) self.assertEqual(recv['loads_status'], str(mock.sentinel.loads_status)) self.assertEqual(recv['test'], str(mock.sentinel.test)) exc_info = ["<type 'exceptions.Exception'>", 'Error message', string_tb.read()] self.assertEqual(recv['exc_info'], exc_info) @hush def test_add_error(self): exc = get_tb() __, __, tb = exc string_tb = StringIO() traceback.print_tb(tb, file=string_tb) string_tb.seek(0) self.relay.addError(mock.sentinel.test, exc, str(mock.sentinel.loads_status)) recv = json.loads(self._pull.recv()) self.assertEqual(recv['loads_status'], str(mock.sentinel.loads_status)) self.assertEqual(recv['test'], str(mock.sentinel.test)) exc_info = ["<type 'exceptions.Exception'>", 'Error message', string_tb.read()] self.assertEqual(recv['exc_info'], exc_info) def test_start_test(self): self.relay.startTest(mock.sentinel.test, str(mock.sentinel.loads_status)) recv = json.loads(self._pull.recv()) self.assertEqual(recv['loads_status'], str(mock.sentinel.loads_status)) self.assertEqual(recv['test'], str(mock.sentinel.test)) def test_stop_test(self): self.relay.stopTest(mock.sentinel.test, str(mock.sentinel.loads_status)) recv = json.loads(self._pull.recv()) self.assertEqual(recv['loads_status'], 
str(mock.sentinel.loads_status)) self.assertEqual(recv['test'], str(mock.sentinel.test)) def test_start_testrun(self): self.relay.startTestRun() recv = json.loads(self._pull.recv()) self.assertEqual(recv['data_type'], 'startTestRun') def test_stop_testrun(self): self.relay.stopTestRun() recv = json.loads(self._pull.recv()) self.assertEqual(recv['data_type'], 'stopTestRun') def test_socket_open_close(self): for action in ('open', 'close'): action = 'socket_%s' % action meth = getattr(self.relay, action) meth() recv = json.loads(self._pull.recv()) self.assertEqual(recv['data_type'], action) def test_socket_message_received(self): self.relay.socket_message(123) recv = self._pull.recv() self.assertEqual(json.loads(recv)['size'], 123) def test_add_hit(self): args = {'foo': 'bar', 'baz': 'foobar'} self.relay.add_hit(**args) recv = self._pull.recv() self.assertDictContainsSubset(args, json.loads(recv)) def test_incr_counter(self): args = 'test', (1, 1, 1, 1), 'metric' self.relay.incr_counter(*args) wanted = {'test': 'test', 'loads_status': [1, 1, 1, 1], 'agent_id': None} recv = self._pull.recv() self.assertDictContainsSubset(wanted, json.loads(recv)) def test_add_observer(self): # The observer API should silently accept the observers we pass to it, # and be future proof self.relay.add_observer('foo', bar='baz') def test_error(self): self.context.destroy() args = {'foo': 'bar', 'baz': 'foobar'} self.assertRaises(zmq.ZMQError, self.relay.add_hit, **args) <file_sep>/venv/Lib/site-packages/loads/tests/test_observer_irc.py import unittest2 import socket import select import os from loads.observers import irc from loads.tests.support import hush _SOCKETS = [] def _select(*args): return _SOCKETS, [], [] class FakeSocket(object): _file = os.path.join(os.path.dirname(__file__), 'ircdata.txt') def __init__(self, *args, **kw): self.sent = [] with open(self._file) as f: self.data = list(reversed(f.readlines())) _SOCKETS.append(self) def bind(self, *args): pass close = shutdown = connect = bind def send(self, data): self.sent.append(data) def recv(self, *args): if self.data == []: return '' return self.data.pop() class TestIRC(unittest2.TestCase): def setUp(self): self.old = socket.socket socket.socket = FakeSocket self.old_select = select.select select.select = _select def tearDown(self): socket.socket = self.old select.select = self.old_select @hush def test_send(self): results = 'yeah' client = irc(ssl=False) client(results) # what did we send on IRC wanted = ['NICK loads', 'USER loads 0 * :loads', 'JOIN #services-dev', 'PRIVMSG #services-dev :[loads] Test Over. 
\x1fyeah', 'QUIT :Bye !', 'QUIT :Connection reset by peer'] data = [line.strip('\r\n') for line in _SOCKETS[0].sent] self.assertEqual(data, wanted) <file_sep>/venv/Lib/site-packages/loads/util.py import datetime import ujson as json # NOQA import json as _json import logging import logging.handlers import os import sys import urlparse import math import fnmatch import random import zipfile from cStringIO import StringIO import hashlib try: from gevent import socket as gevent_socket except ImportError: gevent_socket = None logger = logging.getLogger('loads') def set_logger(debug=False, name='loads', logfile='stdout'): # setting up the logger logger_ = logging.getLogger(name) logger_.setLevel(logging.DEBUG) if logfile == 'stdout': ch = logging.StreamHandler() else: ch = logging.handlers.RotatingFileHandler(logfile, mode='a+') if debug: ch.setLevel(logging.DEBUG) else: ch.setLevel(logging.INFO) formatter = logging.Formatter('[%(asctime)s][%(process)d] %(message)s') ch.setFormatter(formatter) logger_.addHandler(ch) # for the tests if 'TESTING' in os.environ: fh = logging.FileHandler('/tmp/loads.log') fh.setLevel(logging.DEBUG) fh.setFormatter(formatter) logger.addHandler(fh) def total_seconds(td): # works for 2.7 and 2.6 diff = (td.seconds + td.days * 24 * 3600) * 10 ** 6 return (td.microseconds + diff) / float(10 ** 6) class DateTimeJSONEncoder(_json.JSONEncoder): def default(self, obj): if isinstance(obj, datetime.datetime): return obj.isoformat() elif isinstance(obj, datetime.timedelta): return total_seconds(obj) else: return super(DateTimeJSONEncoder, self).default(obj) def split_endpoint(endpoint): """Returns the scheme, the location, and maybe the port. """ res = {} parts = urlparse.urlparse(endpoint) res['scheme'] = parts.scheme if parts.scheme == 'tcp': netloc = parts.netloc.rsplit(':') if len(netloc) == 1: netloc.append('80') res['ip'] = netloc[0] res['port'] = int(netloc[1]) elif parts.scheme == 'ipc': res['path'] = parts.path else: raise NotImplementedError() return res _DNS_CACHE = {} def dns_resolve(url): """Resolve hostname in the given url, using cached results where possible. Given a url, this function does DNS resolution on the contained hostname and returns a 3-tuple giving: the URL with hostname replace by IP addr, the original hostname string, and the resolved IP addr string. The results of DNS resolution are cached to make sure this doesn't become a bottleneck for the loadtest. If the hostname resolves to multiple addresses then a random address is chosen. """ parts = urlparse.urlparse(url) netloc = parts.netloc.rsplit(':') if len(netloc) == 1: netloc.append('80') original = netloc[0] addrs = _DNS_CACHE.get(original) if addrs is None: try: addrs = gevent_socket.gethostbyname_ex(original)[2] except AttributeError: # gethostbyname_ex was introduced by gevent 1.0, # fallback on gethostbyname instead. logger.info('gevent.socket.gethostbyname_ex is not present, ' 'Falling-back on gevent.socket.gethostbyname') addrs = [gevent_socket.gethostbyname(original)] _DNS_CACHE[original] = addrs resolved = random.choice(addrs) netloc = resolved + ':' + netloc[1] parts = (parts.scheme, netloc) + parts[2:] return urlparse.urlunparse(parts), original, resolved # taken from distutils2 def resolve_name(name): """Resolve a name like ``module.object`` to an object and return it. This functions supports packages and attributes without depth limitation: ``package.package.module.class.class.function.attr`` is valid input. 
However, looking up builtins is not directly supported: use ``__builtin__.name``. Raises ImportError if importing the module fails or if one requested attribute is not found. """ # Depending how loads is ran, "" can or cannot be present in the path. This # adds it if it's missing. if len(sys.path) < 1 or sys.path[0] not in ('', os.getcwd()): sys.path.insert(0, '') if '.' not in name: # shortcut __import__(name) return sys.modules[name] # FIXME clean up this code! parts = name.split('.') cursor = len(parts) module_name = parts[:cursor] ret = '' while cursor > 0: try: ret = __import__('.'.join(module_name)) break except ImportError: cursor -= 1 module_name = parts[:cursor] if ret == '': raise ImportError(parts[0]) for part in parts[1:]: try: ret = getattr(ret, part) except AttributeError, exc: raise ImportError(exc) return ret def get_quantiles(data, quantiles): """Computes the quantiles for the data array you pass along. :param data: the input array :param quantiles: a list of quantiles you want to compute. This is an adapted version of an implementation by <NAME> Ph.D. UP Extension Program in Pampanga, Clark Field. Warning: this implentation is probably slow. We are using this atm to avoid depending on scipy, who have a much better and faster version, see scipy.stats.mstats.mquantiles References: http://reference.wolfram.com/mathematica/ref/Quantile.html http://wiki.r-project.org/rwiki/doku.php?id=rdoc:stats:quantile http://adorio-research.org/wordpress/?p=125 """ def _get_quantile(q, data_len): a, b, c, d = (1.0 / 3, 1.0 / 3, 0, 1) g, j = math.modf(a + (data_len + b) * q - 1) if j < 0: return data[0] elif j >= data_len: return data[data_len - 1] j = int(math.floor(j)) if g == 0 or j == len(data) - 1: return data[j] else: return data[j] + (data[j + 1] - data[j]) * (c + d * g) data = sorted(data) data_len = len(data) return [_get_quantile(q, data_len) for q in quantiles] def try_import(*packages): failed_packages = [] for package in packages: try: __import__(package) except ImportError: failed_packages.append(package) if failed_packages: failed_packages = " ".join(failed_packages) raise ImportError('You need to run "pip install %s"' % failed_packages) def glob(patterns, location='.'): for pattern in patterns: basedir, pattern = os.path.split(pattern) basedir = os.path.abspath(os.path.join(location, basedir)) for file_ in os.listdir(basedir): if fnmatch.fnmatch(file_, pattern): yield os.path.join(basedir, file_) def pack_include_files(include_files, location='.'): """Package up the specified include_files into a zipfile data bundle. This is a convenience function for packaging up data files into a binary blob, that can then be shipped to the different agents. Unpack the files using unpack_include_files(). 
""" file_data = StringIO() zf = zipfile.ZipFile(file_data, "w", compression=zipfile.ZIP_DEFLATED) def store_file(name, filepath): info = zipfile.ZipInfo(name) info.external_attr = os.stat(filepath).st_mode << 16L with open(filepath) as f: zf.writestr(info, f.read()) for basepath in glob(include_files, location): basedir, basename = os.path.split(basepath) if not os.path.isdir(basepath): store_file(basename, basepath) else: for root, dirnames, filenames in os.walk(basepath): for filename in filenames: filepath = os.path.join(root, filename) store_file(filepath[len(basedir):], filepath) zf.close() return file_data.getvalue().encode('base64') def maybe_makedirs(dirpath): """Like os.makedirs, but not an error if the final directory exists.""" if not os.path.isdir(dirpath): os.makedirs(dirpath) def unpack_include_files(file_data, location='.'): """Unpackage a blob of include_files data into the specified directory. This is a convenience function for unpackaging data files from a binary blob, that can be used on the different agents. It accepts data in the format produced by pack_include_files(). """ file_data = str(file_data).decode('base64') zf = zipfile.ZipFile(StringIO(file_data)) for itemname in zf.namelist(): itempath = os.path.join(location, itemname.lstrip("/")) if itemname.endswith("/"): maybe_makedirs(itempath) else: maybe_makedirs(os.path.dirname(itempath)) with open(itempath, "w") as f: f.write(zf.read(itemname)) mode = zf.getinfo(itemname).external_attr >> 16L if mode: os.chmod(itempath, mode) zf.close() def null_streams(streams): """Set the given outputs to /dev/null to be sure we don't store their content in memory. This is useful when you want to spawn new processes and don't care about their outputs. The other approach, using subprocess.PIPE can slow down things and uses memory without any rationale. """ devnull = os.open(os.devnull, os.O_RDWR) try: for stream in streams: if not hasattr(stream, 'fileno'): # we're probably dealing with a file-like continue try: stream.flush() os.dup2(devnull, stream.fileno()) except IOError: # some streams, like stdin - might be already closed. pass finally: os.close(devnull) def dict_hash(data, omit_keys=None): """Useful to identify a data mapping. """ if omit_keys is None: omit_keys = [] hash = hashlib.md5() for key, value in data.items(): if key in omit_keys: continue hash.update(str(key)) hash.update(str(value)) hash.update('ENDMARKER') return hash.hexdigest() def seconds_to_time(seconds, loose=False): if seconds == 0: return 'Now.' minutes, seconds = divmod(seconds, 60) hours, minutes = divmod(minutes, 60) days, hours = divmod(hours, 24) res = [] def _join(): if len(res) == 1: return res[0] else: return '%s and %s.' 
% (' '.join(res[:-1]), res[-1]) if days > 0: res.append('%d d' % days) if loose: return _join() if hours > 0: res.append('%d h' % hours) if loose: return _join() if minutes > 0: res.append('%d min' % minutes) if loose: return _join() if seconds > 0: res.append('%d sec' % seconds) return _join() def unbatch(data): for field, messages in data['counts'].items(): for message in messages: message['agent_id'] = data['agent_id'] if 'run_id' in data: message['run_id'] = data['run_id'] yield field, message <file_sep>/venv/Lib/site-packages/loads/output/_file.py from loads.util import DateTimeJSONEncoder class FileOutput(object): """A output writing to a file.""" name = 'file' options = {'filename': ('Filename', str, None, True)} def __init__(self, test_result, args): self.test_result = test_result self.current = 0 self.filename = args['output_file_filename'] self.encoder = DateTimeJSONEncoder() self.fd = open(self.filename, 'a+') def push(self, called_method, *args, **data): self.fd.write(' - '.join((called_method, self.encoder.encode(data)))) def flush(self): self.fd.close()
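The `spawn_external_runner` docstring in `external.py` above defines a small environment-variable protocol for non-Python runners. As a sketch (not part of this repo), a minimal external runner would read the `LOADS_*` variables and push JSON messages to the ZMQ receiver; `data_type` and `run_id` are required by `_process_result`, but the remaining `add_hit` field names here are illustrative — the exact keyword arguments are whatever the matching `TestResult` method accepts. Assumes pyzmq is installed.

```python
import json
import os
import time

import zmq


def main():
    agent_id = os.environ.get('LOADS_AGENT_ID')
    run_id = os.environ.get('LOADS_RUN_ID', '')
    hits = int(os.environ.get('LOADS_TOTAL_HITS', '1'))

    ctx = zmq.Context()
    push = ctx.socket(zmq.PUSH)
    # Report to the socket the parent ExternalRunner is listening on.
    push.connect(os.environ['LOADS_ZMQ_RECEIVER'])

    for _ in range(hits):
        started = time.time()
        # ... perform one request/operation of the load test here ...
        elapsed = time.time() - started
        push.send(json.dumps({
            'data_type': 'add_hit',   # dispatched to TestResult.add_hit
            'run_id': run_id,         # popped (and required) by _process_result
            'agent_id': agent_id,     # illustrative extra fields from here on
            'started': started,
            'elapsed': elapsed,
        }).encode('utf-8'))

    push.close()
    ctx.destroy()


if __name__ == '__main__':
    main()
```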
f537732e75dab9a0524983510007ab5371e338e5
[ "Python", "HTML" ]
67
Python
kayoung1222/KorStock
29283d3b65465d0bd1ff39add95eb83423e2871c
6d1ef96b3802c7a999fd38ed37226055ccea36bf
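A quick usage sketch for `get_quantiles` from the `util.py` above — it is pure Python, so no scipy is needed, though importing it does assume the `loads` package and its own dependencies (e.g. `ujson`) are installed:

```python
from loads.util import get_quantiles

data = range(1, 11)                    # 1 .. 10
print(get_quantiles(data, [0.5]))      # [5.5] -- the usual median
print(get_quantiles(data, [0.95]))     # [10]  -- clamps to the top sample here
```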
refs/heads/main
<file_sep>#ifndef SAFE_MAP_HPP
#define SAFE_MAP_HPP

#include <cstdint>
#include <map>
#include <mutex>

template<typename Key, typename Value>
class SafeMap {
private:
    std::map<Key, Value> _map;
    std::mutex _mutex;

public:
    SafeMap() = default;

    // std::mutex is neither copyable nor movable, so the copy constructor
    // copies only the guarded map; the new object gets its own mutex.
    SafeMap(const SafeMap &sm) : _map(sm._map) {}

    bool empty() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _map.empty();
    }

    uint64_t size() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _map.size();
    }

    void emplace(Key &&k, Value &&v) {
        std::unique_lock<std::mutex> lock(_mutex);
        _map.emplace(k, v);
    }

    void clear() {
        std::unique_lock<std::mutex> lock(_mutex);
        _map.clear();
    }

    void erase(const Key &k) {
        std::unique_lock<std::mutex> lock(_mutex);
        _map.erase(k);
    }

    auto find(const Key &k) {
        std::unique_lock<std::mutex> lock(_mutex);
        return _map.find(k);
    }

    auto end() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _map.end();
    }
};

#endif
<file_sep>#ifndef SAFE_VECTOR_HPP
#define SAFE_VECTOR_HPP

#include <cstdint>
#include <mutex>
#include <vector>

template<typename T>
class SafeVector {
private:
    std::vector<T> _vector;
    std::mutex _mutex;

public:
    SafeVector() = default;

    // Copy only the elements; each SafeVector owns its own (non-copyable) mutex.
    SafeVector(const SafeVector &sv) : _vector(sv._vector) {}

    T &at(size_t n) {
        std::unique_lock<std::mutex> lock(_mutex);
        return _vector.at(n);
    }

    auto begin() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _vector.begin();
    }

    auto end() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _vector.end();
    }

    const T &back() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _vector.back();
    }

    const T &front() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _vector.front();
    }

    bool empty() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _vector.empty();
    }

    uint64_t size() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _vector.size();
    }

    template<typename SafeVectorIterator>
    void emplace(SafeVectorIterator it, T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _vector.emplace(it, t);
    }

    void emplace_back(T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _vector.emplace_back(t);
    }

    // std::vector has no emplace_front/push_front/pop_front; emulate them
    // with insert/erase at begin() so the original interface still compiles.
    void emplace_front(T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _vector.emplace(_vector.begin(), t);
    }

    template<typename SafeVectorIterator>
    auto erase(SafeVectorIterator it) {
        std::unique_lock<std::mutex> lock(_mutex);
        return _vector.erase(it);
    }

    void pop_back() {
        std::unique_lock<std::mutex> lock(_mutex);
        _vector.pop_back();
    }

    void pop_front() {
        std::unique_lock<std::mutex> lock(_mutex);
        _vector.erase(_vector.begin());
    }

    void push_back(T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _vector.push_back(t);
    }

    void push_front(T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _vector.insert(_vector.begin(), t);
    }

    void clear() {
        std::unique_lock<std::mutex> lock(_mutex);
        _vector.clear();
    }

    void reserve(unsigned long n) {
        std::unique_lock<std::mutex> lock(_mutex);
        _vector.reserve(n);
    }
};

#endif
<file_sep>#ifndef SAFE_LIST_HPP
#define SAFE_LIST_HPP

#include <cstdint>
#include <list>
#include <mutex>

template<typename T>
class SafeList {
private:
    std::list<T> _list;
    std::mutex _mutex;

public:
    SafeList() = default;

    // Copy only the elements; the mutex cannot be copied or moved.
    SafeList(const SafeList &sl) : _list(sl._list) {}

    auto begin() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _list.begin();
    }

    auto end() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _list.end();
    }

    const T &back() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _list.back();
    }

    const T &front() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _list.front();
    }

    bool empty() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _list.empty();
    }

    uint64_t size() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _list.size();
    }

    // std::list::emplace needs a position; append at the end, like emplace_back.
    void emplace(T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _list.emplace(_list.end(), t);
    }

    void emplace_back(T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _list.emplace_back(t);
    }

    void emplace_front(T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _list.emplace_front(t);
    }

    template<typename SafeListIterator>
    auto erase(SafeListIterator it) {
        std::unique_lock<std::mutex> lock(_mutex);
        return _list.erase(it);
    }

    void pop_back() {
        std::unique_lock<std::mutex> lock(_mutex);
        _list.pop_back();
    }

    void pop_front() {
        std::unique_lock<std::mutex> lock(_mutex);
        _list.pop_front();
    }

    void push_back(T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _list.push_back(t);
    }

    void push_front(T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _list.push_front(t);
    }

    void clear() {
        std::unique_lock<std::mutex> lock(_mutex);
        _list.clear();
    }
};

#endif
<file_sep>cmake_minimum_required(VERSION 3.14)
project(SafeSTL)

set(CMAKE_CXX_STANDARD 17)

add_library(SafeSTL STATIC Includes/SafeVector.hpp Includes/SafeMap.hpp Includes/SafeList.hpp Includes/SafeQueue.hpp main.cpp)<file_sep># SafeSTL

SafeSTL is a thread-safe implementation of STL data structures.
<file_sep>#ifndef SAFE_QUEUE_HPP
#define SAFE_QUEUE_HPP

#include <cstdint>
#include <mutex>
#include <queue>

template<typename T>
class SafeQueue {
private:
    std::queue<T> _queue;
    std::mutex _mutex;

public:
    SafeQueue() = default;

    // Copy only the elements; the mutex cannot be copied or moved.
    SafeQueue(const SafeQueue &sq) : _queue(sq._queue) {}

    bool empty() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _queue.empty();
    }

    uint64_t size() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _queue.size();
    }

    void push(T &t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _queue.push(t);
    }

    void emplace(T &&t) {
        std::unique_lock<std::mutex> lock(_mutex);
        _queue.emplace(t);
    }

    void top(T &t) {
        std::unique_lock<std::mutex> lock(_mutex);
        t = _queue.front();
    }

    T &top() {
        std::unique_lock<std::mutex> lock(_mutex);
        return _queue.front();
    }

    // Returns false instead of blocking when the queue is empty, so callers
    // never need a racy empty()/front()/pop() sequence.
    bool pop(T &t) {
        std::unique_lock<std::mutex> lock(_mutex);
        if (_queue.empty()) {
            return false;
        }
        t = std::move(_queue.front());
        _queue.pop();
        return true;
    }

    // Drain every element into another queue. The two queues use different
    // mutexes, so taking both locks here is deadlock-free.
    void popAll(SafeQueue<T> &t) {
        std::unique_lock<std::mutex> lock(_mutex);
        while (!_queue.empty()) {
            t.push(_queue.front());
            _queue.pop();
        }
    }

    void clear() {
        std::unique_lock<std::mutex> lock(_mutex);
        while (!_queue.empty()) {
            _queue.pop();
        }
    }
};

#endif
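The SafeSTL headers all repeat one pattern: each container owns a private mutex, every member function takes a `unique_lock` before touching the data, and `pop` reports emptiness through its return value instead of forcing a check-then-act sequence. The same idea, sketched in Python for comparison (these names are mine, not part of SafeSTL):

```python
import threading
from collections import deque


class SafeQueue(object):
    """A deque guarded by one lock, mirroring the C++ SafeQueue above."""

    def __init__(self):
        self._items = deque()
        self._lock = threading.Lock()

    def push(self, item):
        with self._lock:
            self._items.append(item)

    def pop(self):
        """Return (True, item), or (False, None) when empty -- never racy."""
        with self._lock:
            if not self._items:
                return False, None
            return True, self._items.popleft()

    def __len__(self):
        with self._lock:
            return len(self._items)
```

The bool-returning `pop` matters because a separate `empty()` check followed by a `pop()` can interleave with another thread between the two calls.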
8b023bfb6a43a9de7b17ebbb05430b1d1cdefafd
[ "Markdown", "CMake", "C++" ]
6
C++
NadavLevi/SafeSTL
2d6aef64122e0cc057f4fd0e1e16a29587703f6d
b993f1cb7ad8b4fefd4601c76cfea477724e64a0
refs/heads/master
<repo_name>zhangtqx/zhangtqx.com<file_sep>/Public/Home/js/index.js /** * Created by kain on 2015/8/6. */ $(function(){ var aside = $('.aside'), slide = $('.slide'); var style = function(){ if(aside.height() <= $(window).height() && $(window).width() > 768){ aside.css({ 'height':'100%' }); } if(slide.height() <= $(window).height()){ slide.css({ 'height':'100%' }); } }; var setScroll = function(){ var slide_article = $('.slide-article-content'); slide_article.slimScroll({ height: slide_article.height(), alwaysVisible: true }); var slim = $('.slim'); slim.slimScroll({ height: slim.height(), alwaysVisible: false }); }; style(); setScroll(); $(window).on("resize",setScroll); }(jQuery));<file_sep>/Runtime/Cache/Home/225445aa7f2ece568583e4ec766924ed.php <?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE html> <html> <head> <!--seo--> <title>Kain's website</title> <meta name="author" content="kain"> <meta name="keywords" content="kain Website"> <!----> <!-- webiste meta --> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no"> <!-- Set render engine for 360 browser --> <meta name="renderer" content="webkit"> <!-- No Baidu Siteapp--> <meta http-equiv="Cache-Control" content="no-siteapp"/> <!-- website ico --> <link rel="shortcut icon" href="/zhangtqx.com/Public/static/favicon.ico"> <link rel="apple-touch-icon" href="/zhangtqx.com/Public/static/favicon.ico"> <!----> <link href="//cdn.bootcss.com/bootstrap/3.3.5/css/bootstrap.min.css" rel="stylesheet"> <link href="//cdn.bootcss.com/bootstrap/3.3.5/css/bootstrap-theme.min.css" rel="stylesheet"> <link href="//cdn.bootcss.com/font-awesome/4.3.0/css/font-awesome.min.css" rel="stylesheet"> <link href="//cdn.bootcss.com/flat-ui/2.2.2/css/flat-ui.min.css" rel="stylesheet"> <link href="/zhangtqx.com/Public/Home/css/style.css" rel="stylesheet"/> <script src="//cdn.bootcss.com/jquery/2.1.4/jquery.min.js"></script> <!--[if lte IE 9]> <script src="//cdn.bootcss.com/jquery/1.11.3/jquery.min.js"></script> <![endif]--> <script src="//cdn.bootcss.com/bootstrap/3.3.5/js/bootstrap.min.js"></script> <script src="//cdn.bootcss.com/flat-ui/2.2.2/js/flat-ui.min.js"></script> <script src="//cdn.bootcss.com/flat-ui/2.2.2/js/vendor/respond.min.js"></script> <script src="//cdn.bootcss.com/flat-ui/2.2.2/js/vendor/video.min.js"></script> <!-- Le HTML5 shim, for IE6-8 support of HTML5 elements --> <!--[if lt IE 9]> <script src="//cdn.bootcss.com/ink/3.1.9/js/html5shiv.js"></script> <![endif]--> <script src="/zhangtqx.com/Public/Home/js/jquery.slimscroll.min.js"></script> </head> <body> <div class="aside col-lg-3 col-md-3 col-sm-3 col-xs-12"> <h3 class="col-lg-10 col-lg-offset-1"> <a href="http://localhost/zhangtqx.com">kain's website</a> </h3> <div class="nav"> <ul> <?php $__NAV__ = M('Channel')->field(true)->where("status=1")->order("sort")->select(); if(is_array($__NAV__)): $i = 0; $__LIST__ = $__NAV__;if( count($__LIST__)==0 ) : echo "" ;else: foreach($__LIST__ as $key=>$nav): $mod = ($i % 2 );++$i; if(($nav["pid"]) == "0"): ?><li class="col-lg-12 col-md-12 col-sm-12 col-xs-12"> <a href="<?php echo (get_nav_url($nav["url"])); ?>" target="<?php if(($nav["target"]) == "1"): ?>_blank<?php else: ?>_self<?php endif; ?>"> <?php echo ($nav["title"]); ?> </a> <div> </div> </li><?php endif; endforeach; endif; else: echo "" ;endif; ?> </ul> </div> <div class="footer"> </div> </div> <div class="slide col-lg-9 col-md-9 col-sm-9 col-xs-12"> <div 
class="slide-container col-lg-12 col-md-12 col-sm-12 col-xs-12"> <div class="slim"> <?php if(is_array($lists)): $i = 0; $__LIST__ = $lists;if( count($__LIST__)==0 ) : echo "" ;else: foreach($__LIST__ as $key=>$vo): $mod = ($i % 2 );++$i;?><div class="slide-child"> <div> <h6><a href="<?php echo U('Article/detail?id='.$vo['id']);?>"><?php echo ($vo["title"]); ?></a></h6> </div> <div> <p class="lead"><?php echo ($vo["description"]); ?></p> </div> <div> <span class="more"> <a href="<?php echo U('Article/detail?id='.$vo['id']);?>">查看全文</a> </span> <span class="pull-right"> <span class="author"><?php echo (get_username($vo["uid"])); ?></span> <span>于 <?php echo (date('Y-m-d H:i',$vo["create_time"])); ?></span> <span class="category"> 发表在 <a href="<?php echo U('Article/lists?category='.get_category_name($vo['category_id']));?>"> <?php echo (get_category_title($vo["category_id"])); ?> </a> </span> <span>阅读( <?php echo ($vo["view"]); ?> )</span> </span> </div> </div><?php endforeach; endif; else: echo "" ;endif; ?> </div> </div> </div> <script src="/zhangtqx.com/Public/Home/js/index.js"></script> </body> </html>
38243572e6b084a5fd500a41a66ec9be1e041cd5
[ "JavaScript", "PHP" ]
2
JavaScript
zhangtqx/zhangtqx.com
0483c7b2225c45fbdfd787ce0e3f8cc9ead3769c
459dd49be121237cc303afd91332f3a7ff6d6933
refs/heads/master
<repo_name>580107030011/PreProject<file_sep>/Pre_project/Pre_project/Controllers/RegisterController.cs using System; using System.Collections.Generic; using System.Data; using System.Data.Entity; using System.Linq; using System.Net; using System.Web; using System.Web.Mvc; using Pre_project.Models; using System.Data.Entity.Validation; using System.IO; namespace PreProject.Controllers { public class RegisterController : Controller { private PreprojectEntities db = new PreprojectEntities(); // GET: Register public ActionResult Register() { return View(); } public ActionResult Tutor_Register() { return View(); } [HttpPost] [ValidateAntiForgeryToken] public ActionResult Tutor_Register(HttpPostedFileBase file) { Tutor tutor = new Tutor (); string username = Request.Form["Username"]; tutor.Username = username; tutor.Password = Request.Form["<PASSWORD>"]; tutor.Tutor_name = Request.Form["Tutor_name"]; tutor.Tutor_lastname = Request.Form["Tutor_lastname"]; tutor.Sex = Request.Form["Sex"]; tutor.Tel = Request.Form["Tel"]; tutor.Email = Request.Form["Email"]; tutor.Education = Request.Form["Education"]; tutor.Price = Request.Form["Price"]; tutor.Image = Request.Form["Image"]; if (ModelState.IsValid) { if (username != null) { var check2_Tutor = db.Tutors.Where(a => a.Username.Equals(username)).FirstOrDefault<Tutor>(); if (check2_Tutor != null) { ViewBag.Message = "Sorry, this account name has already been used. Please try again."; return View(); } else { try { if (file != null) { if(file.ContentType != "image/jpeg" && file.ContentType != "image/png") { ViewBag.Message1 = "XXXXX"; return View(); } string ImageName = Path.GetFileName(file.FileName); if (file.ContentType == "image/jpeg") { ImageName = username + ".jpg"; } else if(file.ContentType == "image/png") { ImageName = username + ".png"; } string path = Server.MapPath("~/Uploadfiles/" + ImageName); file.SaveAs(path); tutor.Image = "~/Uploadfiles/" + ImageName; } db.Tutors.Add(tutor); db.SaveChanges(); return RedirectToAction("Login", "Login"); } catch (DbEntityValidationException ex) { var errorMessages = ex.EntityValidationErrors.SelectMany(x => x.ValidationErrors).Select(x => x.ErrorMessage); var fullErrorMessage = string.Join("; ", errorMessages); var exceptionMessage = string.Concat(ex.Message, " The validation errors are: ", fullErrorMessage); throw new DbEntityValidationException(exceptionMessage, ex.EntityValidationErrors); } } } } return RedirectToAction("Index", "Index"); } public ActionResult Student_Register() { return View(); } [HttpPost] [ValidateAntiForgeryToken] public ActionResult Student_Register(Student student) { string username = Request.Form["Username"]; student.Username = username; student.Password = Request.Form["<PASSWORD>"]; student.Student_name = Request.Form["Student_name"]; student.Student_lastname = Request.Form["Student_lastname"]; student.Sex = Request.Form["Sex"]; student.School = Request.Form["School"]; student.Grade = Request.Form["Grade"]; student.Email = Request.Form["Email"]; if (ModelState.IsValid) { if (username != null) { var check_student = db.Students.Where(a => a.Username.Equals(username)).FirstOrDefault<Student>(); if (check_student != null) { ViewBag.Message = "Sorry, this account name has already been used. 
Please try again."; return View(); } else { try { db.Students.Add(student); db.SaveChanges(); return RedirectToAction("Login", "Login"); } catch (DbEntityValidationException ex) { var errorMessages = ex.EntityValidationErrors.SelectMany(x => x.ValidationErrors).Select(x => x.ErrorMessage); var fullErrorMessage = string.Join("; ", errorMessages); var exceptionMessage = string.Concat(ex.Message, " The validation errors are: ", fullErrorMessage); throw new DbEntityValidationException(exceptionMessage, ex.EntityValidationErrors); } } } } return RedirectToAction("Index", "Index"); } } } <file_sep>/Pre_project/Pre_project/Controllers/FindFacultyController.cs using System; using System.Collections.Generic; using System.Data; using System.Data.Entity; using System.Linq; using System.Net; using System.Web; using System.Web.Mvc; using Pre_project.Models; using System.Data.Entity.Validation; namespace Pre_project.Controllers { public class FindFacultyController : Controller { private PreprojectEntities db = new PreprojectEntities(); public ActionResult Faculty() { return View(); } public ActionResult Social() { int id = 1; string query = "SELECT * FROM Type_fac WHERE Type_no ="+id; var type = db.Database.SqlQuery<Type_fac>(query).SingleOrDefault(); string query2 = "SELECT * FROM Faculty WHERE Type_no =" + id; var fac = db.Database.SqlQuery<Faculty>(query2).ToList(); return View(Tuple.Create(type,fac)); } public ActionResult Art() { int id = 2; string query = "SELECT * FROM Type_fac WHERE Type_no =" + id; var type = db.Database.SqlQuery<Type_fac>(query).SingleOrDefault(); string query2 = "SELECT * FROM Faculty WHERE Type_no =" + id; var fac = db.Database.SqlQuery<Faculty>(query2).ToList(); return View(Tuple.Create(type, fac)); } public ActionResult Tech() { int id = 3; string query = "SELECT * FROM Type_fac WHERE Type_no =" + id; var type = db.Database.SqlQuery<Type_fac>(query).SingleOrDefault(); string query2 = "SELECT * FROM Faculty WHERE Type_no =" + id; var fac = db.Database.SqlQuery<Faculty>(query2).ToList(); return View(Tuple.Create(type, fac)); } public ActionResult Doc() { int id = 4; string query = "SELECT * FROM Type_fac WHERE Type_no =" + id; var type = db.Database.SqlQuery<Type_fac>(query).SingleOrDefault(); string query2 = "SELECT * FROM Faculty WHERE Type_no =" + id; var fac = db.Database.SqlQuery<Faculty>(query2).ToList(); return View(Tuple.Create(type, fac)); } public ActionResult Nature() { int id = 5; string query = "SELECT * FROM Type_fac WHERE Type_no =" + id; var type = db.Database.SqlQuery<Type_fac>(query).SingleOrDefault(); string query2 = "SELECT * FROM Faculty WHERE Type_no =" + id; var fac = db.Database.SqlQuery<Faculty>(query2).ToList(); return View(Tuple.Create(type, fac)); } } }<file_sep>/Pre_project/Pre_project/Controllers/StudentController.cs using System; using System.Collections.Generic; using System.Data; using System.Data.Entity; using System.Linq; using System.Net; using System.Web; using System.Web.Mvc; using Pre_project.Models; using System.Data.Entity.Validation; namespace PreProject.Controllers { public class StudentController : Controller { private PreprojectEntities db = new PreprojectEntities(); // GET: Student public ActionResult FindYourself() { return View(); } public ActionResult Test() { string query = "SELECT * FROM Quiz"; var quiz = db.Database.SqlQuery<Quiz>(query).ToList(); string query2 = "SELECT * FROM Answer"; var answer = db.Database.SqlQuery<Answer>(query2).ToList(); return View(Tuple.Create(quiz, answer)); } public ActionResult CalculateTest() 
{ var result = Request.Form; int type1 = 0; int type2 = 0; int type3 = 0; int type4 = 0; int type5 = 0; for (int i = 0; i < result.Count; i++) { var value = result[i]; if (value == "1") { type1++; } else if (value == "2") { type2++; } else if (value == "3") { type3++; } else if (value == "4") { type4++; } else if (value == "5") { type5++; } } int[] arr = new[] { type1, type2, type3, type4, type5 }; int max = arr.Max(); int index = Array.LastIndexOf(arr, max); return RedirectToAction("Result","Student",new { type_no = index+1 }); } public ActionResult Result(int type_no) { Result result = new Result(); result.Student = Request.Cookies["C_Username"].Value; result.R_date = DateTime.Now; result.Type_no = type_no; db.Results.Add(result); db.SaveChanges(); string query = "SELECT * FROM Type_Fac WHERE Type_no = " + type_no; var type = db.Database.SqlQuery<Type_fac>(query).SingleOrDefault(); string query2 = "SELECT * FROM Faculty WHERE Type_no = " + type_no; var fac = db.Database.SqlQuery<Faculty>(query2).ToList(); return View(Tuple.Create(type, fac)); } public ActionResult FindTutor() { return View(); } [HttpPost] [ValidateAntiForgeryToken] public ActionResult FindTutor(string subject, string price, string sex) { if (subject == " " && price == " " && sex == " ") { string query = "SELECT * FROM Post_Tutor"; return RedirectToAction("SeeTutor", "Student", new { query = query }); } else if (subject == " " && price == " ") { sex = "'%" + sex + "%'"; string query = "SELECT * FROM Post_Tutor WHERE Sex LIKE N"+sex; return RedirectToAction("SeeTutor", "Student", new { query = query }); } else if(subject == " " && sex == " ") { price = "'%" + price + "%'"; string query = "SELECT * FROM Post_Tutor WHERE Price LIKE N" + price; return RedirectToAction("SeeTutor", "Student", new { query = query }); } else if (price == " " && sex == " ") { subject = "'" + subject + "'"; string query = "SELECT * FROM Post_Tutor WHERE Subject LIKE N" + subject; return RedirectToAction("SeeTutor", "Student", new { query = query }); } else if (subject == " ") { price = "'%" + price + "%'"; sex = "'%" + sex + "%'"; string query = "SELECT * FROM Post_Tutor WHERE Price LIKE N" + price+ " AND Sex LIKE N" + sex; return RedirectToAction("SeeTutor", "Student", new { query = query }); } else if (price == " ") { subject = "'" + subject + "'"; sex = "'%" + sex + "%'"; string query = "SELECT * FROM Post_Tutor WHERE Subject LIKE N" + subject + " AND Sex LIKE N" + sex; return RedirectToAction("SeeTutor", "Student", new { query = query }); } else if (sex == " ") { subject = "'" + subject + "'"; price = "'%" + price + "%'"; string query = "SELECT * FROM Post_Tutor WHERE Subject LIKE N" + subject + " AND Price LIKE N" + price; return RedirectToAction("SeeTutor", "Student", new { query = query }); } else { subject = "'" + subject + "'"; price = "'%" + price + "%'"; sex = "'%" + sex + "%'"; string query = "SELECT * FROM Post_Tutor WHERE Subject LIKE N" + subject + " AND Price LIKE N" + price + " AND Sex LIKE N" + sex; return RedirectToAction("SeeTutor", "Student", new { query = query }); } return View(); } public ActionResult SeeTutor(string query) { var data = db.Database.SqlQuery<Post_Tutor>(query).ToList(); return View(data); } public ActionResult StudentReadPost(int id, string tutor) { string query = "SELECT * FROM Post_Tutor WHERE Post_no = " + id; var post = db.Database.SqlQuery<Post_Tutor>(query).SingleOrDefault(); tutor = "'" + tutor + "'"; string query2 = "SELECT * FROM Tutor WHERE Username = " + tutor; var dataTutor = 
db.Database.SqlQuery<Tutor>(query2).SingleOrDefault(); return View(Tuple.Create(post, dataTutor)); } public ActionResult Evaluation() { return View(); } public ActionResult ProfileStudent(string username) { string text = "'"+ username +"'"; string query = "SELECT * FROM Student WHERE Username = " + text; var profile = db.Database.SqlQuery<Student>(query).SingleOrDefault(); return View(profile); } public ActionResult EditProfile(string username) { string text = "'" + username + "'"; string query = "SELECT * FROM Student WHERE Username = " + text; var profile = db.Database.SqlQuery<Student>(query).SingleOrDefault(); return View(profile); } [HttpPost] [ValidateAntiForgeryToken] public ActionResult EditProfile(string new_firstname, string new_lastname, string new_sex, string new_school, string new_grade, string new_email) { string new_username = Request.Cookies["C_Username"].Value; var update = db.Students.Where(a => a.Username.Equals(new_username)).FirstOrDefault<Student>(); update.Student_name = new_firstname; update.Student_lastname = new_lastname; update.Sex = new_sex; update.School = new_school; update.Grade = new_grade; update.Email = new_email; try { db.SaveChanges(); } catch (DbEntityValidationException ex) { var errorMessages = ex.EntityValidationErrors.SelectMany(x => x.ValidationErrors).Select(x => x.ErrorMessage); var fullErrorMessage = string.Join("; ", errorMessages); var exceptionMessage = string.Concat(ex.Message, " The validation errors are: ", fullErrorMessage); throw new DbEntityValidationException(exceptionMessage, ex.EntityValidationErrors); } return RedirectToAction("ProfileStudent", "Student", new { username = new_username }); } public ActionResult ChangePasswordS() { return View(); } // GET: ChangePassword [HttpPost] public ActionResult ChangePasswordS(string username_old, string password_old, string password_new, string Comfirm_new) { if (password_old != null) { var check_username = db.Students.Where(a => a.Username.Equals(username_old)).FirstOrDefault<Student>(); if (check_username.Password == password_old) { if (password_new == Comfirm_new) { check_username.Password = <PASSWORD>; try { db.SaveChanges(); } catch (DbEntityValidationException ex) { var errorMessages = ex.EntityValidationErrors.SelectMany(x => x.ValidationErrors).Select(x => x.ErrorMessage); var fullErrorMessage = string.Join("; ", errorMessages); var exceptionMessage = string.Concat(ex.Message, " The validation errors are: ", fullErrorMessage); throw new DbEntityValidationException(exceptionMessage, ex.EntityValidationErrors); } } else { ViewBag.Error1 = "Password don't match"; return View(); } } else { ViewBag.Error2 = "Password is wrong !!!"; return View(); } } return RedirectToAction("ProfileStudent", "Student", new { username = username_old }); } } }<file_sep>/Pre_project/Pre_project/Controllers/LoginController.cs using System; using System.Collections.Generic; using System.Data; using System.Data.Entity; using System.Linq; using System.Net; using System.Web; using System.Web.Mvc; using Pre_project.Models; using System.Data.Entity.Validation; namespace PreProject.Controllers { public class LoginController : Controller { private PreprojectEntities db = new PreprojectEntities(); // GET: Login public ActionResult Login() { string username = Request.Form["username"]; string password = Request.Form["password"]; if (ModelState.IsValid) { if (username != null & password != null) { var check = db.Students.Where(a => a.Username.Equals(username) && a.Password.Equals(password)).FirstOrDefault(); var check2 = 
db.Tutors.Where(a => a.Username.Equals(username) && a.Password.Equals(<PASSWORD>)).FirstOrDefault(); if (check != null) { var Cookie_Username = new HttpCookie("C_Username"); Cookie_Username.Value = check.Username; Response.Cookies.Add(Cookie_Username); var Cookie_Password = new HttpCookie("C_Password"); Cookie_Password.Value = <PASSWORD>; Response.Cookies.Add(Cookie_Password); var Cookie_Firstname = new HttpCookie("C_Firstname"); Cookie_Firstname.Value = check.Student_name; Response.Cookies.Add(Cookie_Firstname); var Cookie_Lastname = new HttpCookie("C_Lastname"); Cookie_Lastname.Value = check.Student_lastname; Response.Cookies.Add(Cookie_Lastname); var Cookie_Sex = new HttpCookie("C_Sex"); Cookie_Sex.Value = check.Sex; Response.Cookies.Add(Cookie_Sex); var Cookie_School = new HttpCookie("C_School"); Cookie_School.Value = check.School; Response.Cookies.Add(Cookie_School); var Cookie_Grade = new HttpCookie("C_Grade"); Cookie_Grade.Value = check.Grade; Response.Cookies.Add(Cookie_Grade); var Cookie_Email = new HttpCookie("C_Email"); Cookie_Email.Value = check.Email; Response.Cookies.Add(Cookie_Email); return RedirectToAction("FindYourself", "Student"); } if (check2 != null) { var Cookie_Username = new HttpCookie("C_Username"); Cookie_Username.Value = check2.Username; Response.Cookies.Add(Cookie_Username); var Cookie_Password = new HttpCookie("C_Password"); Cookie_Password.Value = <PASSWORD>; Response.Cookies.Add(Cookie_Password); var Cookie_Firstname = new HttpCookie("C_Firstname"); Cookie_Firstname.Value = check2.Tutor_name; Response.Cookies.Add(Cookie_Firstname); var Cookie_Lastname = new HttpCookie("C_Lastname"); Cookie_Lastname.Value = check2.Tutor_lastname; Response.Cookies.Add(Cookie_Lastname); var Cookie_Sex = new HttpCookie("C_Sex"); Cookie_Sex.Value = check2.Sex; Response.Cookies.Add(Cookie_Sex); var Cookie_Tel = new HttpCookie("C_Tel"); Cookie_Tel.Value = check2.Tel; Response.Cookies.Add(Cookie_Tel); var Cookie_Email = new HttpCookie("C_Email"); Cookie_Email.Value = check2.Email; Response.Cookies.Add(Cookie_Email); var Cookie_Education = new HttpCookie("C_Education"); Cookie_Education.Value = check2.Education; Response.Cookies.Add(Cookie_Education); var Cookie_Price = new HttpCookie("C_Price"); Cookie_Price.Value = check2.Price; Response.Cookies.Add(Cookie_Price); return RedirectToAction("MyPost", "Tutor"); } else { ViewBag.Message = "username or password is wrong"; } } } return View(); } } }<file_sep>/Pre_project/Pre_project/Controllers/IndexController.cs using System; using System.Collections.Generic; using System.Data; using System.Data.Entity; using System.Linq; using System.Net; using System.Web; using System.Web.Mvc; using Pre_project.Models; using System.Data.Entity.Validation; namespace PreProject.Controllers { public class IndexController : Controller { private PreprojectEntities db = new PreprojectEntities(); // GET: Index public ActionResult Index() { return View(); } } }<file_sep>/Pre_project/Pre_project/Controllers/TutorController.cs using System; using System.Collections.Generic; using System.Data; using System.Data.Entity; using System.Linq; using System.Net; using System.Web; using System.Web.Mvc; using Pre_project.Models; using System.Data.Entity.Validation; using System.IO; namespace Pre_project.Controllers { public class TutorController : Controller { private PreprojectEntities db = new PreprojectEntities(); public ActionResult CreatPost() { return View(); } [HttpPost] [ValidateInput(false)] public ActionResult CreatPost(string note, string description, string 
subject, string post_name, string username_post, string price, string sex)
        {
            Post_Tutor post = new Post_Tutor();
            if (description == "")
            {
                ViewBag.alertPost = "Please enter a message.";
                return View();
            }
            if (username_post != null)
            {
                post.Tutor = username_post;
                post.Post_name = post_name;
                post.Description = description;
                post.Note = note;
                post.Subject = subject;
                post.Price = price;
                post.Sex = Request.Cookies["C_Sex"].Value;
                try
                {
                    db.Post_Tutor.Add(post);
                    db.SaveChanges();
                    return RedirectToAction("MyPost", "Tutor");
                }
                catch (DbEntityValidationException ex)
                {
                    var errorMessages = ex.EntityValidationErrors.SelectMany(x => x.ValidationErrors).Select(x => x.ErrorMessage);
                    var fullErrorMessage = string.Join("; ", errorMessages);
                    var exceptionMessage = string.Concat(ex.Message, " The validation errors are: ", fullErrorMessage);
                    throw new DbEntityValidationException(exceptionMessage, ex.EntityValidationErrors);
                }
            }
            return View();
        }

        public ActionResult MyPost()
        {
            string username = Request.Cookies["C_Username"].Value;
            string text = "'" + username + "'";
            string query = "SELECT * FROM Post_Tutor WHERE Tutor = " + text;
            var mypost = db.Database.SqlQuery<Post_Tutor>(query).ToList();
            return View(mypost);
        }

        public ActionResult ReadPost(int id)
        {
            string query = "SELECT * FROM Post_Tutor WHERE Post_no = " + id;
            var post = db.Database.SqlQuery<Post_Tutor>(query).SingleOrDefault();
            return View(post);
        }

        public ActionResult ProfileTutor(string username)
        {
            string text = "'" + username + "'";
            string query = "SELECT * FROM Tutor WHERE Username = " + text;
            var profile = db.Database.SqlQuery<Tutor>(query).SingleOrDefault();
            return View(profile);
        }

        public ActionResult EditProfile(string username)
        {
            string text = "'" + username + "'";
            string query = "SELECT * FROM Tutor WHERE Username = " + text;
            var profile = db.Database.SqlQuery<Tutor>(query).SingleOrDefault();
            return View(profile);
        }

        [HttpPost]
        [ValidateAntiForgeryToken]
        public ActionResult EditProfile(HttpPostedFileBase file, string new_firstname, string new_lastname, string new_sex, string new_tel, string new_email, string new_education, string new_price, string new_image)
        {
            string new_username = Request.Cookies["C_Username"].Value;
            Tutor update = db.Tutors.Where(a => a.Username.Equals(new_username)).FirstOrDefault();
            // Guard against a missing record *before* touching its fields
            // (the original dereferenced update first and null-checked later).
            if (update != null)
            {
                update.Username = new_username;
                update.Tutor_name = new_firstname;
                update.Tutor_lastname = new_lastname;
                update.Sex = new_sex;
                update.Tel = new_tel;
                update.Email = new_email;
                update.Education = new_education;
                update.Price = new_price;
                if (file != null)
                {
                    if (file.ContentType != "image/jpeg" && file.ContentType != "image/png")
                    {
                        ViewBag.Message1 = "XXXXX";
                        return View();
                    }
                    string ImageName = Path.GetFileName(file.FileName);
                    if (file.ContentType == "image/jpeg")
                    {
                        ImageName = new_username + ".jpg";
                    }
                    else if (file.ContentType == "image/png")
                    {
                        ImageName = new_username + ".png";
                    }
                    string path = Server.MapPath("~/Uploadfiles/" + ImageName);
                    file.SaveAs(path);
                    update.Image = "~/Uploadfiles/" + ImageName;
                }
                db.SaveChanges();
            }
            // A tutor edit should land back on the tutor profile; the original
            // redirected to "ProfileStudent"/"Student" here by mistake.
            return RedirectToAction("ProfileTutor", "Tutor", new { username = new_username });
        }

        public ActionResult ChangePasswordT()
        {
            return View();
        }

        // POST: ChangePassword
        [HttpPost]
        public ActionResult ChangePasswordT(string username_old, string password_old, string password_new, string Comfirm_new)
        {
            if (password_old != null)
            {
                var check_username = db.Tutors.Where(a => a.Username.Equals(username_old)).FirstOrDefault<Tutor>();
                if (check_username.Password == password_old)
                {
                    if (password_new == Comfirm_new)
                    {
                        check_username.Password = <PASSWORD>;
                        try
                        {
                            db.SaveChanges();
                        }
                        catch (DbEntityValidationException ex)
                        {
                            var errorMessages = ex.EntityValidationErrors.SelectMany(x => x.ValidationErrors).Select(x => x.ErrorMessage);
                            var fullErrorMessage = string.Join("; ", errorMessages);
                            var exceptionMessage = string.Concat(ex.Message, " The validation errors are: ", fullErrorMessage);
                            throw new DbEntityValidationException(exceptionMessage, ex.EntityValidationErrors);
                        }
                    }
                    else
                    {
                        ViewBag.Error1 = "Passwords don't match";
                        return View();
                    }
                }
                else
                {
                    ViewBag.Error2 = "Password is wrong !!!";
                    return View();
                }
            }
            return RedirectToAction("ProfileTutor", "Tutor", new { username = username_old });
        }
    }
}
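`FindTutor` in the StudentController above enumerates every combination of its three optional filters and splices user input straight into the SQL string. The same search can be built compositionally with placeholders instead; a sketch in Python (sqlite3 stands in for SQL Server, and the `Post_Tutor` table and column names are taken from the repo):

```python
import sqlite3


def find_tutors(conn, subject=None, price=None, sex=None):
    """Build one WHERE clause from whichever filters are set."""
    clauses, params = [], []
    if subject:
        clauses.append("Subject = ?")
        params.append(subject)
    if price:
        clauses.append("Price LIKE ?")
        params.append('%' + price + '%')
    if sex:
        clauses.append("Sex LIKE ?")
        params.append('%' + sex + '%')
    sql = "SELECT * FROM Post_Tutor"
    if clauses:
        sql += " WHERE " + " AND ".join(clauses)
    # Placeholders keep user input out of the SQL text entirely.
    return conn.execute(sql, params).fetchall()
```

This collapses the eight-branch if/else ladder into one code path, and EF's `SqlQuery` accepts parameters in a similar way on the C# side.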
5ba3f87f98779ceef7d026004a760de637ec1c23
[ "C#" ]
6
C#
580107030011/PreProject
0391df4be0ffdeeb327d9ae89daa944e7816aa65
a2df7dc5bd815c6addffcc50ea4b10c56cb94249
refs/heads/master
<file_sep>package PathPlanning;

import java.util.*;

/**
 * @author <NAME>
 * Student number: 14820340
 */
public abstract class AStarAlgorithm<Node> {

    private class Path implements Comparable<Path> {

        public Node point;
        public Double f;
        public Double g;
        public Path parent;

        public Path() {
            parent = null;
            point = null;
            g = 0.0;
            f = 0.0;
        }

        public Path(Path p) {
            parent = p;
            g = p.g;
            f = p.f;
        }

        public Node getPoint() {
            return point;
        }

        public void setPoint(Node point) {
            this.point = point;
        }

        public int compareTo(Path p) {
            return Double.compare(f, p.f);
        }
    }

    private Double lastCost = 0.0;
    private PriorityQueue<Path> paths = new PriorityQueue<Path>();
    private HashMap<Node, Double> mindists = new HashMap<Node, Double>();

    protected abstract boolean goal(Node node);

    protected abstract Double g(Node from, Node to);

    protected abstract Double heuristic(Node from, Node to);

    protected abstract List<Node> grabNodes(Node node);

    /* Calculates the cost f = g + h of reaching 'to' through path p. */
    protected Double f(Path p, Node from, Node to) {
        Double g;
        if (p.parent != null) {
            g = g(from, to) + p.parent.g;
        } else {
            g = g(from, to) + 0.0;
        }
        Double h = heuristic(from, to);
        p.g = g;
        p.f = g + h;
        return p.f;
    }

    private void expandPath(Path path) {
        Node p = path.getPoint();
        Double min = mindists.get(p);
        /*
         * If a better path passing through this point already exists then
         * don't expand it.
         */
        if (min == null || min > path.f)
            mindists.put(path.getPoint(), path.f);
        else
            return;
        List<Node> successes = grabNodes(p);
        for (Node to : successes) {
            Path newPath = new Path(path);
            newPath.setPoint(to);
            f(newPath, path.getPoint(), to);
            paths.offer(newPath);
        }
    }

    public Double getCost() {
        return lastCost;
    }

    public List<Node> calculate(Node start) {
        try {
            Path root = new Path();
            root.setPoint(start);
            // Initialise the root's cost; g(start, start) is normally zero.
            f(root, start, start);
            // Queue the root instead of expanding it immediately, so the
            // start node itself is goal-tested on the first iteration.
            paths.offer(root);
            for (;;) {
                Path p = paths.poll();
                if (p == null) {
                    lastCost = Double.MAX_VALUE;
                    return null;
                }
                Node lastPoint = p.getPoint();
                lastCost = p.g;
                /* At the goal: walk the parent chain to rebuild the path. */
                if (goal(lastPoint)) {
                    LinkedList<Node> finalPath = new LinkedList<Node>();
                    for (Path i = p; i != null; i = i.parent) {
                        finalPath.addFirst(i.getPoint());
                    }
                    return finalPath;
                }
                expandPath(p);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}
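The skeleton above leaves `goal`, `g`, `heuristic`, and `grabNodes` abstract. A concrete use, transliterated to Python for a 4-connected grid (this grid example is my illustration, not part of the repo): the step cost is 1, the heuristic is Manhattan distance, and the `parents` map plays the role of the linked `Path` chain when the route is rebuilt.

```python
import heapq
import itertools


def astar_grid(start, goal, walls, width, height):
    def h(p):  # Manhattan distance: admissible on a 4-connected grid
        return abs(p[0] - goal[0]) + abs(p[1] - goal[1])

    tie = itertools.count()          # tie-breaker so heap tuples always compare
    frontier = [(h(start), 0, next(tie), start, None)]
    parents, best_g = {}, {start: 0}
    while frontier:
        _, g, _, node, parent = heapq.heappop(frontier)
        if node in parents:          # already expanded via a cheaper path
            continue
        parents[node] = parent
        if node == goal:             # rebuild the route from the parent chain
            path = []
            while node is not None:
                path.append(node)
                node = parents[node]
            return list(reversed(path))
        x, y = node
        for nxt in ((x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)):
            if 0 <= nxt[0] < width and 0 <= nxt[1] < height and nxt not in walls:
                ng = g + 1
                if ng < best_g.get(nxt, float('inf')):
                    best_g[nxt] = ng
                    heapq.heappush(frontier, (ng + h(nxt), ng, next(tie), nxt, node))
    return None


print(astar_grid((0, 0), (2, 2), {(1, 0), (1, 1)}, 3, 3))
# [(0, 0), (0, 1), (0, 2), (1, 2), (2, 2)]  -- the only shortest path here
```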
7b195fa3701de423bafa25f26af9c4db173c5875
[ "Java" ]
1
Java
KaiReptilon/MComp-Project-1
792f8e2b59cab4c00b482e719c7d6c4e4cabfa8b
e2efe85ceeac06ef3916e9beb93aa7599c5d7de8
refs/heads/master
<file_sep>/* Author: <NAME> */

var answers = document.getElementsByName("choices");

function refresh() {
  var styleElem = document.head.appendChild(document.createElement("style"));
  document.getElementById('questions').innerHTML = questions_arr[question_number - 1];
  for (var i = 1; i <= 4; i++) {
    // Answer slot i shows the choice from answer row i - 1 for the current question.
    document.getElementById('answer' + i).innerHTML = getAnswerChoice(i - 1);
    document.getElementById('ans' + i).checked = false;
  }
  styleElem.innerHTML = "h3 {text-align: center;}";
}

var questions_arr = [
  "Which age range best describes you?",
  "What is your preferred method of listening to music?",
  "What is your favorite flow style?",
  "What's your favorite movie genre?",
  "What do you like to do on the weekends?",
  "How do you feel about the presence of social issues in music?",
  "What's your take on autotune?",
  "Who's one of your favorite mainstream artists?",
  "Pick a region that best describes your taste:",
  "Which do you prefer: hooks or choruses?"
];

var answers_arr = [
  ["0-17", "Earbuds", "Fast", "Action", "Go out and party", "I'm with it", "T-Pain is my idol", "J. Cole", "East Coast", "Hooks make the song"],
  ["18-24", "Over-the-ear headphones", "Medium paced", "Horror", "Chill with some friends", "Keep it out of music", "I think it's great", "Drake", "West Coast", "Little to no hooks. I like it old school."],
  ["25-39", "Small speakers", "Smooth and slow", "Comedy", "Stay home alone", "As long as it isn't overbearing, it's good", "Not a huge fan", "<NAME>", "Southern Rap", "Both"],
  ["40+", "Huge speakers", "Other/Unique", "Thriller", "Play video games", "Not a huge fan, but I'll listen sometimes", "I hate autotune. I'm old", "I don't listen to mainstream artists", "Area doesn't matter", "Neither. I like production."]
];

var question_number = 1;

// Returns the answer text for answer row `row` (0-3) of the current question.
function getAnswerChoice(row) {
  return answers_arr[row][question_number - 1];
}

// Score counters for all possible artist results.
var amir_obe = 0;
var sir = 0;
var jalen_santoy = 0;
var the88glam = 0;
var derek_pope = 0;
var cunninlynguists = 0;
var joyner_lucas = 0;
var royce_da_59 = 0;
var flatbush_zombies = 0;
var kirk_knight = 0;
var nyck_caution = 0;

function submitAnswer() {
  var i = 0, options = answers.length;
  var checked = false;
  var user_choice;
  for (; i < options; i++) {
    if (answers[i].checked) {
      user_choice = answers[i].value;
      checked = true;
    }
  }
  if (!checked) {
    // No answer selected yet; ignore the click.
    return;
  }
  switch (user_choice) {
    case 'a': incrAArtists(); break;
    case 'b': incrBArtists(); break;
    case 'c': incrCArtists(); break;
    case 'd': incrDArtists(); break;
    default: return;
  }
  question_number++;
  if (question_number <= 10) {
    refresh();
  }
  if (question_number == 10) {
    document.getElementById('button').innerHTML = "Submit Quiz";
  }
  if (question_number > 10) {
    getResults();
  }
}

function getResults() {
  removeElements();
  document.getElementById('reset').style.display = 'block';
  document.getElementById('wrapper').style.display = 'block';
  var styleElem = document.head.appendChild(document.createElement("style"));
  styleElem.innerHTML = "#reset {margin: 0 auto;}";
  var artist_picks = {
    "<NAME>": amir_obe,
    "SiR": sir,
    "<NAME>": jalen_santoy,
    "<NAME>": derek_pope,
    "88GLAM": the88glam,
    "Cunninlynguists": cunninlynguists,
    "<NAME>": joyner_lucas,
    "Royce da 5'9\"": royce_da_59,
    "Flatbush Zombies": flatbush_zombies,
    "Kirk Knight": kirk_knight,
    "Nyck Caution": nyck_caution
  };
  // Sort the artist names by score, then derive the score list from the same
  // sorted keys so names and percentages stay paired. Sorting two parallel
  // arrays independently would break the correspondence (and the old score
  // array also left out Royce da 5'9").
  var keysSorted = Object.keys(artist_picks).sort(function (a, b) { return artist_picks[b] - artist_picks[a]; });
  var percSorted = keysSorted.map(function (key) { return artist_picks[key]; });
  displayCustomResults(keysSorted, percSorted);
}

function displayCustomResults(sortedArtists, sortedPercentage) {
  for (var i = 0; i < 4; i++) {
    var styleElem = document.head.appendChild(document.createElement("style"));
    // Each score point is rendered as ten percent by appending "0%" to the number.
    styleElem.innerHTML = ".line::after {content: '';} .line" + (i + 1) + "::after {max-width: " + sortedPercentage[i] + "0%;}";
    document.getElementById('perc' + (i + 1)).style.display = 'block';
    document.getElementById('perc' + (i + 1)).innerHTML = sortedPercentage[i] + "0%";
    document.getElementById('result' + (i + 1)).innerHTML = sortedArtists[i];
  }
  document.getElementById('' + sortedArtists[0] + '').style.display = 'block';
}

function removeElements() {
  document.getElementById('button').style.display = "none";
  for (var i = 1; i <= 4; i++) {
    document.getElementById('answer' + i).style.display = "none";
    document.getElementById('ans' + i).style.display = "none";
  }
  document.getElementById('questions').style.display = "none";
  document.getElementById('form').style.display = "none";
}

function incrAArtists() {
  switch (question_number) {
    case 1: derek_pope++, the88glam++, nyck_caution++; break;
    case 2: sir++, derek_pope++, cunninlynguists++; break;
    case 3: joyner_lucas++, nyck_caution++, flatbush_zombies++; break;
    case 4: nyck_caution++, royce_da_59++, jalen_santoy++; break;
    case 5: flatbush_zombies++, the88glam++, jalen_santoy++; break;
    case 6: cunninlynguists++, joyner_lucas++, royce_da_59++; break;
    case 7: amir_obe++, derek_pope++, the88glam++; break;
    case 8: jalen_santoy++, sir++, royce_da_59++; break;
    case 9: joyner_lucas++, flatbush_zombies++, kirk_knight++, nyck_caution++; break;
    case 10: sir++, kirk_knight++; break;
    default: return;
  }
}

function incrBArtists() {
  switch (question_number) {
    case 1: flatbush_zombies++, kirk_knight++, nyck_caution++; break;
    case 2: the88glam++, royce_da_59++, nyck_caution++; break;
    case 3: kirk_knight++, cunninlynguists++, royce_da_59++; break;
    case 4: amir_obe++, derek_pope++, joyner_lucas++; break;
    case 5: amir_obe++, sir++; break;
    case 6: the88glam++, sir++, amir_obe++; break;
    case 7: flatbush_zombies++, kirk_knight++; break;
    case 8: amir_obe++, derek_pope++, the88glam++; break;
    case 9: derek_pope++, amir_obe++, sir++; break;
    case 10: cunninlynguists++, royce_da_59++, nyck_caution++; break;
    default: return;
  }
}

function incrCArtists() {
  switch (question_number) {
    case 1: jalen_santoy++, sir++; break;
    case 2: jalen_santoy++, joyner_lucas++, kirk_knight++; break;
    case 3: sir++, the88glam++; break;
    case 4: cunninlynguists++, flatbush_zombies++; break;
    case 5: amir_obe++, sir++; break;
    case 6: jalen_santoy++, flatbush_zombies++, kirk_knight++; break;
    case 7: sir++, nyck_caution++, jalen_santoy++, joyner_lucas++; break;
    case 8: flatbush_zombies++, joyner_lucas++; break;
    case 9: jalen_santoy++, cunninlynguists++; break;
    case 10: flatbush_zombies++, joyner_lucas++, jalen_santoy++; break;
    default: return;
  }
}

function incrDArtists() {
  switch (question_number) {
    case 1: cunninlynguists++, joyner_lucas++, royce_da_59++; break;
    case 2: amir_obe++, flatbush_zombies++; break;
    case 3: sir++, the88glam++; break;
    case 4: amir_obe++, derek_pope++, jalen_santoy++; break;
    case 5: derek_pope++, joyner_lucas++, nyck_caution++; break;
    case 6: derek_pope++, nyck_caution++; break;
    case 7: cunninlynguists++, royce_da_59++; break;
    case 8: cunninlynguists++, kirk_knight++, nyck_caution++; break;
    case 9: the88glam++, royce_da_59++; break;
    case 10: amir_obe++, derek_pope++, the88glam++; break;
    default: return;
  }
}<file_sep># Underrated Artists Quiz

Take this quiz and find an underrated hip hop/r&b artist that you've (possibly) never heard of before!

## A Quick Biography

Today it's easy to judge hip hop by its top artists alone. Ranging from seriously emotional verses to eerie autotuned pitches, take this quiz to discover lesser-known, quality music: <a href="https://austinbhale.github.io/Underrated-Artists-Quiz/" target="_blank">Underrated Artists Quiz</a>.
e692fce649a7d4511cccc41d31712a5e2bd98675
[ "JavaScript", "Markdown" ]
2
JavaScript
z-bergeron/Underrated-Artists-Quiz
82ed42af2d28c09160d41b223af89556f5f94d4b
66934f028f498bde178e9cfc4f79c5a09613fcb2
refs/heads/master
<file_sep>""" Create a list of patients with medical parameters given by the class Sensor. """ import zlib import base64 import json import pandas as pd from simulate_sensor import Sensor def initiate_patient(ID): """Starts thread of patient with sensor Id ID in the specified time interval. """ patient = Sensor(ID) patient.start() return patient def stop_patient(patient): """Stops thread of patient with sensor Id ID in the specified time interval. """ # TODO: get it by id? patient.stop() def initiate_patient_list(listID): """Starts thread of patients with sensor Id ID in the list. Returns dictionary of patients. """ patient_dict = {} for ID in listID: patient_dict[ID] = initiate_patient(ID) return patient_dict def stop_patient_list(patient_dict): """Stops thread of patient with sensor Id ID in the list. """ for ID in patient_dict: stop_patient(patient_dict[ID]) def get_one_patient(ID): """Get the parameter of the patient with sensor Id ID in the specified time interval. If no time interval is given it returns the patient's parameters that istant. """ # Create istance of type Sensor in the given time interval # (if specified) try: readable_sensor = pd.read_csv(ID+'.csv') readable_sensor.set_index('ID') # os.remove(ID+'.csv') except FileNotFoundError: print('No file found for patient no. '+ID) return readable_sensor def get_patient_list(IDs): """ Returns the parameters for a list of patients in base64. """ # TODO: read only data in a specified time interval # TODO: remove csv file once it is read readable_sensors = pd.DataFrame() # Create some test data for our catalog in the form of a pandas df for i, ID in enumerate(IDs): sensor = get_one_patient(ID) readable_sensors = pd.concat([readable_sensors, sensor]) # Set the ID of the sensor as index of the DataFrame readable_sensors.reset_index(drop=True, inplace=True) data_json = pd.DataFrame.to_json(readable_sensors) # Encode it as bytes object enc = data_json.encode() # utf-8 by default # Compress comp = zlib.compress(enc) # Shrink to base64 return base64.b64encode(comp) def check_patient(patient_list, ID): """Check the parameters of a patient in patient_list with sensor ID""" for patient in patient_list: if patient.ID == ID: return patient else: return 'ID {} not found'.format(ID) <file_sep>""" Class to simulate a patient's medical parameters in a given timne interval. """ # import bluetooth import numpy as np import pandas as pd import random import threading import time import datetime import json import os # TODO: Add details of the hospitals in which the patient using the # sensor can be found. class Sensor(threading.Thread): """Class to simulate a sensor""" def __init__(self, ID, s=1): """The constructor returns the istant patient's parameter. Randomly draw the patient parameters from a normal distribution in the time interval of length s seconds. Input: ------ s: a time interval in seconds, default = 1 Output: ------- object of type Sensors with parameters ID (sensor Id), HB (heart beat [bpm]), O2 (Oxigen concentration [%]), Pd (diastolic pressure [mmHg]), Ps (systolic pressure [mmHg]), T (temperature [C]) """ # TODO: I guess these will be read via BLE? 
threading.Thread.__init__(self) self.ID = ID self.HB = list(np.random.normal(80, 0.5, s)) self.O2 = list(np.random.normal(97, 0.1, s)) self.Pd = list(np.random.normal(80, 0.3, s)) self.Ps = list(np.random.normal(120, 0.1, s)) self.T = list(np.random.normal(36.5, 0.1, s)) self.readable = pd.DataFrame() self._is_running = True # self.start() def get_patient(self, T, interval=False): """Get the parameter of the patient with sensor Id ID in the specified time interval. If no time interval is given it returns the patient's parameters that istant. """ dict_sensor = dict([('ID', self.ID), ('Time', T), ('HB [bpm]', self.HB), ('O2 levels [%]', self.O2), ('P systolic', self.Ps), ('P diastolic', self.Pd), ('T [C]', self.T)]) db_sensor = pd.DataFrame(dict_sensor) db_sensor = db_sensor.set_index('Time') self.readable = pd.concat([self.readable, db_sensor]) def save_data(self): """Save the dictionary of data to csv. Every time the data is saved clear self.readable """ # TODO: send to API instead of saving it to csv if os.path.isfile(self.ID+'.csv'): self.readable.to_csv(self.ID+'.csv', mode='a', header=False) else: self.readable.to_csv(self.ID+'.csv', mode='a') self.readable = pd.DataFrame() def run(self, save=True): """Return a new set of data every 10 second, dump them in a .csv file every minute if save = True """ countdown = 0 while(self._is_running): time.sleep(5.) countdown += 1 self.get_patient(time.strftime("%H:%M:%S", time.localtime())) if countdown%12 == 0 and save: self.save_data() def stop(self): """Stops the thread by changing _is_running value to False """ self._is_running = False print('Patent '+self.ID+' thread stopped') <file_sep> # Simulation of a medical sensor __Step 1: simulate the medical sensor__ In the first step we can simulate a medical sensor reading a patient's parameter using the class Sensor in simulate_sensor.py. Every Sensor object can be started as a thread and once started will produce data every 5 seconds and save it to .csv every minute. The script patients.py has functions to start threads given a patient ID and to save all the data from the IDs in a pandas dataframe. TODO: Write a method in Sensor that pushes the data to the API instead of saving it in .csv __Step 3: simulate the API__ Simulate a API with simulate_API.py to obtain the data from Step 1 and start/stop different patient threads. TODO: Make sure that the data is deleted every time it is sent to the API. __Step 4: simulate a request for the API__ Using simulate_request.py we can retrieve the data from the API and save them in a csv file. __To test the sensor class:__ Check the jupyter notebook DEMO __To test the API:__ Download the mock data and in the same directory run: $ python3 simulte_API.py 1 2 3 and in another terminal: $ python3 simulate_request.py <file_sep>""" Simulates an API to get medical data of a list of patients in a time interval. 
""" import flask import json import sys import pandas as pd from datetime import datetime, timedelta from flask import request, jsonify from simulate_sensor import Sensor from patients import get_patient_list # Read in the IDs list IDs = sys.argv[1:] app = flask.Flask(__name__) app.config["DEBUG"] = True # TODO: Get the data for the patients in IDs in # a given time frame interval readable_sensor = get_patient_list(IDs) data_json = pd.DataFrame.to_json(readable_sensor) @app.route('/', methods=['GET']) def home(): return '''<h1>Cosinuss Sensor</h1> <p>A prototype API to recieve and downloar Cosinuss sensor data.</p>''' # A route to return all of the available entries in our catalog. @app.route('/api/data', methods=['GET']) def api_all(): return jsonify(data_json) app.run() <file_sep>""" Simulates an API request. The user needs to run simulate_API.py before to simulate the webpage. """ import base64 import zlib import pandas as pd import requests from flask import jsonify import json # Read byte content coded = requests.get('http://127.0.0.1:5000/api/data') # Decode from base64 res = base64.b64decode(coded.text) # Decompress bytes res_exp = zlib.decompress(res) # Decode to utf-8 res_str = res_exp.decode() # Convert to json r = json.loads(res_str) df = pd.DataFrame(r) # Save as csv df = df.set_index('ID') df = df.sort_index() df.to_csv("Patient.csv")
f2428c768ef7a8ab520ebbd268fa809ded13d252
[ "Markdown", "Python" ]
5
Python
blinda/medical-sensor
2be545335e0eca9db4256be6a4aae3d5079e7e2d
3a3d3982588b975e9a8d475460f1cdedfc2a6638
refs/heads/master
<file_sep>import { Formatter } from '@ribajs/core'; /** * Formats the price based on the shop's HTML with currency setting (if the format is not overwritten by passing a format parameter). * @see https://help.shopify.com/en/themes/liquid/filters/money-filters */ export declare const moneyWithCurrencyFormatter: Formatter; <file_sep>import { Pjax, Prefetch } from '../services'; import { Binding, Binder, EventDispatcher, Utils } from '@ribajs/core'; export interface RouteOptions { url: string; viewId: string; removeAfterActivation: boolean; newTab: boolean; } export interface CustomData { prefetch: Prefetch; dispatcher?: EventDispatcher; options: RouteOptions; checkURL(this: Binding, urlToCheck?: string): boolean; onClick(this: Binding, event: Event): void; onNewPageReady(this: Binding): void; onLinkEnter(this: Binding, event: Event): void; } /** * Open link with pajax if the route is not the active route */ export const routeBinder: Binder<string> = { name: 'route', bind(this: Binding, el: HTMLUnknownElement) { this.customData = <CustomData> { prefetch: new Prefetch(), dispatcher: undefined, options: { removeAfterActivation: false, newTab: false, } as RouteOptions, checkURL(this: Binding, urlToCheck?: string) { if (urlToCheck && Utils.onRoute(urlToCheck)) { return true; } return false; }, onClick(this: Binding, event: Event) { // Do not go to ref without pajax event.preventDefault(); if (Utils.onRoute(this.customData.options.url)) { console.debug('already on this site'); } else { if (this.customData.options.url) { const pjax = Pjax.getInstance(this.customData.options.viewId); pjax.goTo(this.customData.options.url, this.customData.options.newTab); } } if (this.customData.options.removeAfterActivation && this.el && this.el.parentNode) { // this.unbind(); TODO? this.el.parentNode.removeChild(this.el); } }, onNewPageReady(this: Binding) { this.customData.checkURL.call(this, this.customData.options.url); }, onLinkEnter(this: Binding, event: Event) { (this.customData as CustomData).prefetch.onLinkEnter(event, this.customData.options.url); }, }; }, routine(this: Binding, el: HTMLElement, optionsOrUrl?: string | RouteOptions) { if (Utils.isString(optionsOrUrl)) { this.customData.options.url = optionsOrUrl as string; } else if (Utils.isObject(optionsOrUrl as RouteOptions)) { this.customData.options = optionsOrUrl as RouteOptions; } this.customData.options.viewId = this.customData.options.viewId || 'main'; this.customData.options.removeAfterActivation = Utils.isBoolean(this.customData.options.removeAfterActivation) ? 
this.customData.options.removeAfterActivation : false; this.customData.dispatcher = new EventDispatcher(this.customData.options.viewId); this.customData.options.newTab = false; const isAnkerHTMLElement = el.tagName === 'A'; if (!this.customData.options.url && isAnkerHTMLElement) { const url = el.getAttribute('href'); if (url) { this.customData.options.url = url; } } if (el.getAttribute('target') === '_blank') { this.customData.options.newTab = true; } const location = Utils.getLocation(); const host = location.protocol + '//' + location.hostname; // normalize url if (this.customData.options.url && Utils.isAbsoluteUrl(this.customData.options.url)) { // if is an internal link if (Utils.isInternalUrl(this.customData.options.url)) { // get relative url this.customData.options.url = this.customData.options.url.replace(host, ''); } else { this.customData.options.newTab = true; } } // set href if not set if (isAnkerHTMLElement && !el.getAttribute('href') && this.customData.options.url) { el.setAttribute('href', this.customData.options.url); } this.customData.dispatcher.on('newPageReady', this.customData.onNewPageReady.bind(this)); el.addEventListener('click', this.customData.onClick.bind(this)); if (!this.customData.options.newTab && !Utils.onRoute(this.customData.options.url)) { el.addEventListener('mouseover', this.customData.onLinkEnter.bind(this)); el.addEventListener('touchstart', this.customData.onLinkEnter.bind(this)); } this.customData.checkURL.call(this, this.customData.options.url); }, unbind(this: Binding, el: HTMLUnknownElement) { el.removeEventListener('mouseover', this.customData.onLinkEnter); el.removeEventListener('touchstart', this.customData.onLinkEnter); el.removeEventListener('click', this.customData.onClick); }, }; <file_sep>import { Utils } from '../../services/utils'; export const pluralizeFormatter = { name: 'pluralize', read(input: any, singular: string, plural: string) { if (plural === null) { plural = singular + 's'; } if (Utils.isArray(input)) { input = input.length; } if (input === 1) { return singular; } else { return plural; } }, }; <file_sep>import { Component, Binder } from '@ribajs/core'; import { JQuery as $ } from '@ribajs/jquery'; import { DropdownService } from '../../services/dropdown.service'; export class DropdownComponent extends Component { public static tagName: string = 'bs4-dropdown'; protected scope: any = { toggle: this.toggle, }; protected dropdownService: DropdownService; static get observedAttributes() { return []; } constructor(element?: HTMLElement) { super(element); const $el = $(this.el); this.dropdownService = new DropdownService($el.find('.dropdown-toggle')[0] as HTMLButtonElement); this.init(DropdownComponent.observedAttributes); } public toggle(context: Binder<any>, event: Event) { event.preventDefault(); event.stopPropagation(); return this.dropdownService.toggle(); } protected template() { return null; } } <file_sep>"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); const core_1 = require("@ribajs/core"); class ShopifySectionComponent extends core_1.Component { constructor(element) { super(element); this.initEventListeners(); } initEventListeners() { this.el.addEventListener('shopify:section:load', this.onSectionLoad); this.el.addEventListener('shopify:section:unload', this.onSectionUnload); this.el.addEventListener('shopify:section:select', this.onSectionSelect); this.el.addEventListener('shopify:section:deselect', this.onSectionDeselect); this.el.addEventListener('shopify:section:reorder', 
this.onSectionReorder); this.el.addEventListener('shopify:block:select', this.onBlockSelect); this.el.addEventListener('shopify:block:deselect', this.onBlockDeselect); } /** * A section has been added or re-rendered. * Re-execute any JavaScript needed for the section to work and display properly (as if the page had just been loaded). */ onSectionLoad(event) { console.debug('onSectionLoad', event); } onSectionUnload(event) { console.debug('onSectionUnload', event); } onSectionSelect(event) { console.debug('onSectionSelect', event); } onSectionDeselect(event) { console.debug('onSectionDeselect', event); } onSectionReorder(event) { console.debug('onSectionReorder', event); } onBlockSelect(event) { console.debug('onBlockSelect', event); } onBlockDeselect(event) { console.debug('onBlockDeselect', event); } } exports.ShopifySectionComponent = ShopifySectionComponent; ShopifySectionComponent.tagName = 'shopify-section'; <file_sep>import { Component } from '../component'; import { ComponentWrapper } from './component.wrapper'; import { Type } from '../interfaces/type'; export interface Components { [name: string]: Type<Component> | ComponentWrapper; } <file_sep>import { Utils } from '../../services/utils'; /** * Check if value is a string */ export const isStringFormatter = { name: 'isString', read(str: string) { return Utils.isString(str); }, }; <file_sep>import { RibaModule } from '@ribajs/core'; // export * from './binders'; // export * from './interfaces'; export * from './services'; // import { binders } from './binders'; import * as services from './services'; // import * as components from './components'; export const shopifyTDAModule = <RibaModule> { binders: {}, services, formatters: {}, components: {}, }; export default shopifyTDAModule; <file_sep>import { Formatter } from '../../interfaces/formatter'; import { Utils } from '../../services/utils'; /** * Check if value is undefined */ export const isUndefinedFormatter: Formatter = { name: 'isUndefined', read: Utils.isUndefined, }; <file_sep>import Popper from 'popper.js'; // /dist/umd/popper import { JQuery as $ } from '@ribajs/jquery'; import { Utils } from './utils.service'; /** * -------------------------------------------------------------------------- * Bootstrap (v4.1.3): dropdown.js * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * @see https://raw.githubusercontent.com/twbs/bootstrap/v4-dev/js/src/dropdown.js * -------------------------------------------------------------------------- */ /** * ------------------------------------------------------------------------ * Constants * ------------------------------------------------------------------------ */ export const NAME = 'dropdown'; export const VERSION = '4.1.3'; export const DATA_KEY = 'bs.dropdown'; export const EVENT_KEY = `.${DATA_KEY}`; export const DATA_API_KEY = '.data-api'; export const ESCAPE_KEYCODE = 27; // KeyboardEvent.which value for Escape (Esc) key export const SPACE_KEYCODE = 32; // KeyboardEvent.which value for space key export const TAB_KEYCODE = 9; // KeyboardEvent.which value for tab key export const ARROW_UP_KEYCODE = 38; // KeyboardEvent.which value for up arrow key export const ARROW_DOWN_KEYCODE = 40; // KeyboardEvent.which value for down arrow key export const RIGHT_MOUSE_BUTTON_WHICH = 3; // MouseEvent.which value for the right button (assuming a right-handed mouse) export const REGEXP_KEYDOWN = new RegExp(`${ARROW_UP_KEYCODE}|${ARROW_DOWN_KEYCODE}|${ESCAPE_KEYCODE}`); export const EVENT = { HIDE : `hide${EVENT_KEY}`, 
HIDDEN : `hidden${EVENT_KEY}`, SHOW : `show${EVENT_KEY}`, SHOWN : `shown${EVENT_KEY}`, CLICK : `click${EVENT_KEY}`, CLICK_DATA_API : `click${EVENT_KEY}${DATA_API_KEY}`, KEYDOWN_DATA_API : `keydown${EVENT_KEY}${DATA_API_KEY}`, KEYUP_DATA_API : `keyup${EVENT_KEY}${DATA_API_KEY}`, }; export const CLASSNAME = { DISABLED : 'disabled', SHOW : 'show', DROPUP : 'dropup', DROPRIGHT : 'dropright', DROPLEFT : 'dropleft', MENURIGHT : 'dropdown-menu-right', MENULEFT : 'dropdown-menu-left', POSITION_STATIC : 'position-static', }; export const SELECTOR = { DATA_TOGGLE : 'bs4-dropdown .dropdown-toggle', FORM_CHILD : '.dropdown form', MENU : '.dropdown-menu', NAVBAR_NAV : '.navbar-nav', VISIBLE_ITEMS : '.dropdown-menu .dropdown-item:not(.disabled):not(:disabled)', }; export const ATTACHMENTMAP = { TOP : 'top-start', TOPEND : 'top-end', BOTTOM : 'bottom-start', BOTTOMEND : 'bottom-end', RIGHT : 'right-start', RIGHTEND : 'right-end', LEFT : 'left-start', LEFTEND : 'left-end', }; export const DEFAULT = { offset : 0, flip : true, boundary : 'scrollParent', reference : 'toggle', display : 'dynamic', }; export const DEFAULTTYPE = { offset : '(number|string|function)', flip : 'boolean', boundary : '(string|element)', reference : '(string|element)', display : 'string', }; /** * ------------------------------------------------------------------------ * Class Definition * ------------------------------------------------------------------------ */ export class DropdownService { // Getters static get VERSION() { return VERSION; } static get Default() { return DEFAULT; } static get DefaultType() { return DEFAULTTYPE; } // Static public static closeAll() { const $menus = $('.dropdown-menu.show'); $menus.each((index, menu) => { const $menu = $(menu); const $dropdown = $menu.closest('dropdown-menu.show'); this.close($menu[0], $menu, $dropdown); }); } public static close(triggerCloseElement: Element, $menu: JQuery<Element>, $dropdown?: JQuery<Element>) { const relatedTarget = { relatedTarget: triggerCloseElement, }; const $parent = DropdownService._getParentFromElement(triggerCloseElement); if ($menu && $menu.hasClass(CLASSNAME.SHOW)) { $menu.removeClass(CLASSNAME.SHOW); } if ($dropdown && $dropdown.hasClass(CLASSNAME.SHOW)) { $dropdown.removeClass(CLASSNAME.SHOW) .removeClass(CLASSNAME.SHOW) .trigger($.Event(EVENT.HIDDEN, relatedTarget)); } if ($parent.hasClass(CLASSNAME.SHOW)) { $parent .removeClass(CLASSNAME.SHOW) .trigger($.Event(EVENT.HIDDEN, relatedTarget)); } } public static _clearMenus(event?: JQuery.Event) { if (event && (event.which === RIGHT_MOUSE_BUTTON_WHICH || event.type === 'keyup' && event.which !== TAB_KEYCODE)) { return; } const toggles = [].slice.call($(SELECTOR.DATA_TOGGLE).get()) as Element[]; $(SELECTOR.DATA_TOGGLE).each((i, element) => { // for (let i = 0, len = toggles.length; i < len; i++) { const parent = DropdownService._getParentFromElement(element); const context = $(toggles[i]).data(DATA_KEY); // console.warn('_clearMenus parent', parent, context); const relatedTarget: any = { relatedTarget: toggles[i], }; if (event && event.type === 'click') { relatedTarget.clickEvent = event; } if (!context) { // continue; return; } const dropdownMenu = parent.find(SELECTOR.MENU); if (!$(parent).hasClass(CLASSNAME.SHOW)) { // continue; return; } if (event && (event.type === 'click' && /input|textarea/i.test(((event as unknown as Event).target as Element).tagName) || event.type === 'keyup' && event.which === TAB_KEYCODE) && $.contains(parent.get(0), (event as unknown as Event).target as Element)) { // 
continue; return; } const hideEvent = $.Event(EVENT.HIDE, relatedTarget); $(parent).trigger(hideEvent); if (hideEvent.isDefaultPrevented()) { // continue; return; } // If this is a touch-enabled device we remove the extra // empty mouseover listeners we added for iOS support if (document.documentElement && 'ontouchstart' in document.documentElement) { $(document.body).children().off('mouseover', 'null', $.noop); } toggles[i].setAttribute('aria-expanded', 'false'); dropdownMenu.removeClass(CLASSNAME.SHOW); parent .removeClass(CLASSNAME.SHOW) .trigger($.Event(EVENT.HIDDEN, relatedTarget)); }); } public static _getParentFromElement(element: Element) { return $(element).parent(); // let parent; // const selector = Utils.getSelectorFromElement(element); // if (selector) { // parent = document.querySelector(selector); // } // return parent || element.parentNode; } private _element: HTMLButtonElement | HTMLAnchorElement; private _popper: any /* Popper */ | null; // TODO Popper namcespace error private _config: any; // TODO private _menu: Element; private _inNavbar: boolean; constructor(element: HTMLButtonElement | HTMLAnchorElement, config?: any) { this._element = element; this._popper = null; this._config = this._getConfig(config); this._menu = this._getMenuElement(); this._inNavbar = this._detectNavbar(); $(this._element).data(DATA_KEY, this._config); this.clouseOnClickOutsite(DropdownService._getParentFromElement(this._element)); } // Public public close() { return DropdownService.close(this._element, $(this._menu)); } public show() { const relatedTarget = { relatedTarget: this._element, }; const $parent = DropdownService._getParentFromElement(this._element); if (!$(this._menu).hasClass(CLASSNAME.SHOW)) { $(this._menu).addClass(CLASSNAME.SHOW); } if (!$parent.hasClass(CLASSNAME.SHOW)) { $parent .addClass(CLASSNAME.SHOW) .trigger($.Event(EVENT.SHOWN, relatedTarget)); } } public toggle() { if ((this._element as HTMLButtonElement).disabled || $(this._element).hasClass(CLASSNAME.DISABLED)) { return; } const parent = DropdownService._getParentFromElement(this._element); const isActive = $(this._menu).hasClass(CLASSNAME.SHOW); DropdownService._clearMenus(); if (isActive) { this.close(); return; } const relatedTarget = { relatedTarget: this._element, }; const showEvent = $.Event(EVENT.SHOW, relatedTarget); $(parent).trigger(showEvent); if (showEvent.isDefaultPrevented()) { return; } this.clouseOnClickOutsite(DropdownService._getParentFromElement(this._element)); // Disable totally Popper.js for Dropdown in Navbar if (!this._inNavbar) { /** * Check for Popper dependency * Popper - https://popper.js.org */ if (typeof Popper === 'undefined') { throw new TypeError('Bootstrap dropdown require Popper.js (https://popper.js.org)'); } let referenceElement = this._element as HTMLElement; if (this._config.reference === 'parent') { referenceElement = parent.get(0) as HTMLElement; } else if (Utils.isElement(this._config.reference)) { referenceElement = this._config.reference; // Check if it's jQuery element if (typeof this._config.reference.jquery !== 'undefined') { referenceElement = this._config.reference[0]; } } // If boundary is not `scrollParent`, then set position to `static` // to allow the menu to "escape" the scroll parent's boundaries // https://github.com/twbs/bootstrap/issues/24251 if (this._config.boundary !== 'scrollParent') { $(parent).addClass(CLASSNAME.POSITION_STATIC); } this._popper = new Popper(referenceElement, this._menu as HTMLElement, this._getPopperConfig()); } // If this is a 
touch-enabled device we add extra // empty mouseover listeners to the body's immediate children; // only needed because of broken event delegation on iOS // https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html if (document.documentElement && 'ontouchstart' in document.documentElement && $(parent).closest(SELECTOR.NAVBAR_NAV).length === 0) { $(document.body).children().on('mouseover', null, $.noop); } this.clouseOnClickOutsite(DropdownService._getParentFromElement(this._element)); this._element.focus(); this._element.setAttribute('aria-expanded', 'true'); $(this._menu).toggleClass(CLASSNAME.SHOW); $(parent) .toggleClass(CLASSNAME.SHOW) .trigger($.Event(EVENT.SHOWN, relatedTarget)); } public dispose() { $.removeData(this._element, DATA_KEY); $(this._element).off(EVENT_KEY); delete this._element; // = null; delete this._menu; // = null; if (this._popper !== null) { this._popper.destroy(); this._popper = null; } } public update() { this._inNavbar = this._detectNavbar(); if (this._popper !== null) { this._popper.scheduleUpdate(); } } // Private /** * @see https://stackoverflow.com/questions/152975/how-do-i-detect-a-click-outside-an-element * @param selector */ private clouseOnClickOutsite($element: JQuery<Element>) { const outsideClickListener = (event: Event) => { if (!$(event.target as any).closest($element.get(0)).length) { this.close(); removeClickListener(); } }; const removeClickListener = () => { document.removeEventListener('click', outsideClickListener); }; document.addEventListener('click', outsideClickListener); } private _getConfig(config?: any) { config = { ...DropdownService.Default, ...$(this._element).data(), ...config, }; Utils.typeCheckConfig( NAME, config, DropdownService.DefaultType, ); return config; } private _getMenuElement() { if (!this._menu) { const parent = DropdownService._getParentFromElement(this._element); if (parent) { this._menu = parent.find(SELECTOR.MENU).get(0); } } return this._menu; } private _getPlacement() { const $parentDropdown = $(this._element.parentNode as any); let placement = ATTACHMENTMAP.BOTTOM; // Handle dropup if ($parentDropdown.hasClass(CLASSNAME.DROPUP)) { placement = ATTACHMENTMAP.TOP; if ($(this._menu).hasClass(CLASSNAME.MENURIGHT)) { placement = ATTACHMENTMAP.TOPEND; } } else if ($parentDropdown.hasClass(CLASSNAME.DROPRIGHT)) { placement = ATTACHMENTMAP.RIGHT; } else if ($parentDropdown.hasClass(CLASSNAME.DROPLEFT)) { placement = ATTACHMENTMAP.LEFT; } else if ($(this._menu).hasClass(CLASSNAME.MENURIGHT)) { placement = ATTACHMENTMAP.BOTTOMEND; } return placement; } private _detectNavbar() { return $(this._element).closest('.navbar').length > 0; } private _getPopperConfig() { const offsetConf: any = {}; if (typeof this._config.offset === 'function') { offsetConf.fn = (data: any) => { data.offsets = { ...data.offsets, ...this._config.offset(data.offsets) || {}, }; return data; }; } else { offsetConf.offset = this._config.offset; } const popperConfig = { placement: this._getPlacement() as any, modifiers: { offset: offsetConf, flip: { enabled: this._config.flip, }, preventOverflow: { boundariesElement: this._config.boundary, }, } as any, }; // Disable Popper.js if we have a static display if (this._config.display === 'static') { popperConfig.modifiers.applyStyle = { enabled: false, }; } return popperConfig; } } <file_sep>"use strict"; function __export(m) { for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; } var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && 
mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); __export(require("./services")); __export(require("./binders")); __export(require("./formatters")); __export(require("./components")); __export(require("./interfaces")); const shopify_module_1 = __importDefault(require("./shopify.module")); exports.shopifyModule = shopify_module_1.default; exports.default = shopify_module_1.default; <file_sep>"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); const core_1 = require("@ribajs/core"); class ShopifyProductService { /** * Get product object by handle * @param handle product handle */ static get(handle) { if (this.cache.hasOwnProperty(handle)) { return new Promise((resolve) => { resolve(this.cache[handle]); }); } else { return core_1.HttpService.getJSON(`/products/${handle}.js`) .then((product) => { this.cache[handle] = product; return this.cache[handle]; }); } } /** * Check if the option values fits to the current variant. * @param variant * @param optionValues * @return Returns true if the option values fitting to the variant */ static fitsVariantOptions(variant, optionValues) { let fit = true; // position0 is the option index starting on 0 for (const position0 in optionValues) { if (optionValues[position0]) { const optionValue = optionValues[position0]; fit = fit && variant.options.indexOf(optionValue.toString()) > -1; } } return fit; } /** * Get product variant of (selected) option values * @param optionValues (selected) option values */ static getVariantOfOptions(product, optionValues) { let result = null; if (product) { for (const i in product.variants) { if (product.variants[i]) { result = null; const variant = product.variants[i]; const fits = this.fitsVariantOptions(variant, optionValues); if (fits) { result = variant; break; } } } } return result; } /** * Get variant object by variant id * @param id Variant id */ static getVariant(product, id) { let result = null; if (product) { product.variants.forEach((variant) => { if (variant.id === id) { result = variant; } }); } return result; } /** * Get product option by name * @param product product wich holds the options * @param name option name */ static getOption(product, name) { let result = null; product.options.forEach((option) => { if (option.name.toLowerCase() === name.toLowerCase()) { result = option; } }); return result; } /** * Prepair product, remove protocol from featured_image, lovercase the option names * @param product product object */ static prepair(product) { // remove protocol product.featured_image .replace(/(^\w+:|^)\/\//, '//'); // all option names to lower case for (const option of product.options) { option.name = option.name.toString().toLocaleLowerCase(); } return product; } } exports.ShopifyProductService = ShopifyProductService; ShopifyProductService.cache = {}; <file_sep>import { Component, Binding, handleizeFormatter } from '@ribajs/core'; import template from './tabs.component.html'; export interface Tab { title: string; content: string; handle: string; active: boolean; type?: string; } export interface Scope { tabs: Tab[]; activate: TabsComponent['activate']; optionTabsAutoHeight: boolean; } export class TabsComponent extends Component { public static tagName: string = 'bs4-tabs'; protected scope: Scope = { tabs: new Array<Tab>(), activate: this.activate, optionTabsAutoHeight: false, }; protected tabs?: NodeListOf<Element>; protected tabPanes?: NodeListOf<Element>; protected scrollable?: Element | null; static get 
observedAttributes() { return [ 'option-tabs-auto-height', 'tab-0-title', 'tab-0-content', 'tab-0-handle', 'tab-1-title', 'tab-1-content', 'tab-1-handle', 'tab-2-title', 'tab-2-content', 'tab-2-handle', 'tab-3-title', 'tab-3-content', 'tab-3-handle', 'tab-4-title', 'tab-4-content', 'tab-4-handle', 'tab-5-title', 'tab-5-content', 'tab-5-handle', 'tab-6-title', 'tab-6-content', 'tab-6-handle', 'tab-7-title', 'tab-7-content', 'tab-7-handle', 'tab-8-title', 'tab-8-content', 'tab-8-handle', 'tab-9-title', 'tab-9-content', 'tab-9-handle', 'tab-10-title', 'tab-10-content', 'tab-10-handle', 'tab-11-title', 'tab-11-content', 'tab-11-handle', 'tab-12-title', 'tab-12-content', 'tab-12-handle', 'tab-13-title', 'tab-13-content', 'tab-13-handle', 'tab-14-title', 'tab-14-content', 'tab-14-handle', 'tab-15-title', 'tab-15-content', 'tab-15-handle', 'tab-16-title', 'tab-16-content', 'tab-16-handle', 'tab-17-title', 'tab-17-content', 'tab-17-handle', 'tab-18-title', 'tab-18-content', 'tab-18-handle', 'tab-19-title', 'tab-19-content', 'tab-19-handle', ]; } constructor(element?: HTMLElement) { super(element); this.addTabsByTemplate(); this.initTabs(); this.activateFirstTab(); this.init(TabsComponent.observedAttributes); } /** * Make all tabs panes as height as the heighest tab pane */ public setHeight() { if (this.scope.optionTabsAutoHeight) { return; } // Bind static template this.setElements(); let heigest = 0; if (!this.tabPanes) { return; } this.tabPanes.forEach((tabPane) => { if (!(tabPane as unknown as HTMLElement).style) { return; } (tabPane as unknown as HTMLElement).style.height = 'auto'; (tabPane as unknown as HTMLElement).style.display = 'block'; const height = (tabPane as unknown as HTMLElement).offsetHeight || 0; if (height > heigest) { heigest = height; } }); this.tabPanes.forEach((tabPane) => { if (!(tabPane as unknown as HTMLElement).style) { return; } // Reset display style property (tabPane as unknown as HTMLElement).style.display = ''; if (heigest > 0) { (tabPane as unknown as HTMLElement).style.height = heigest + 'px'; } }); } public deactivateAll() { for (const tab of this.scope.tabs) { tab.active = false; } } public activate(tab: Tab, binding?: Binding, event?: Event) { this.deactivateAll(); tab.active = true; if (event) { event.preventDefault(); } } public activateFirstTab() { if (this.scope.tabs.length > 0) { this.activate(this.scope.tabs[0]); } } protected setElements() { this.tabs = this.el.querySelectorAll('[role="tab"]'); this.tabPanes = this.el.querySelectorAll('[role="tabpanel"]'); this.scrollable = this.el.querySelector('[scrollable]'); } protected resizeTabsArray(newSize: number) { while (newSize > this.scope.tabs.length) { this.scope.tabs.push({handle: '', title: '', content: '', active: false}); } this.scope.tabs.length = newSize; } protected onTabShownEventHandler(event: Event) { const curTab = (event.target || event.srcElement) as Element | null; if (!curTab) { return; } if (this.scrollable) { const tabScrollPosition = curTab.getBoundingClientRect(); const scrollLeftTo = this.scrollable.scrollLeft || 0 + tabScrollPosition.left; // TODO animate // this.scrollable.animate({ scrollLeft: scrollLeftTo}, 'slow'); this.scrollable.scrollLeft = scrollLeftTo; } } protected onResizeEventHandler(event: Event) { this.setHeight(); } protected initTabs() { // Bind static template this.setElements(); if (this.tabs) { this.tabs.forEach(((tab) => { tab.removeEventListener('shown.bs.tab', this.onTabShownEventHandler); tab.addEventListener('shown.bs.tab', this.onTabShownEventHandler); })); } 
if (this.scope.optionTabsAutoHeight) { window.removeEventListener('resize', this.onResizeEventHandler.bind(this)); window.addEventListener('resize', this.onResizeEventHandler.bind(this)); this.setHeight(); } } protected addTabByAttribute(attributeName: string, newValue: string) { const index = Number(attributeName.replace(/[^0-9]/g, '')); if (index >= this.scope.tabs.length) { this.resizeTabsArray(index + 1); } if (attributeName.endsWith('Content')) { this.scope.tabs[index].content = newValue; } if (attributeName.endsWith('Title')) { this.scope.tabs[index].title = newValue; this.scope.tabs[index].handle = this.scope.tabs[index].handle || handleizeFormatter.read(this.scope.tabs[index].title); } if (attributeName.endsWith('Handle')) { this.scope.tabs[index].handle = newValue; } // if is first tab if ( this.scope.tabs.length > 0 && this.scope.tabs[0] && this.scope.tabs[0].content.length > 0 && this.scope.tabs[0].title.length > 0 && this.scope.tabs[0].handle.length > 0 ) { this.activateFirstTab(); } } protected addTabByTemplate(tpl: HTMLTemplateElement) { const title = tpl.getAttribute('title'); if (!title) { console.error(new Error('template "title" attribute is required"')); return; } const handle = tpl.getAttribute('handle') || handleizeFormatter.read(title); if (!handle) { console.error(new Error('template "handle" attribute is required"')); return; } const type = tpl.getAttribute('type') || undefined; const content = tpl.innerHTML; this.scope.tabs.push({title, handle, content, active: false, type }); } protected addTabsByTemplate() { const templates = this.el.querySelectorAll<HTMLTemplateElement>('template'); templates.forEach((tpl) => { this.addTabByTemplate(tpl); }); } protected parsedAttributeChangedCallback(attributeName: string, oldValue: any, newValue: any, namespace: string | null) { super.parsedAttributeChangedCallback(attributeName, oldValue, newValue, namespace); if (attributeName.startsWith('tab')) { this.addTabByAttribute(attributeName, newValue); this.initTabs(); } } protected async afterBind(): Promise<any> { // Workaround setTimeout(() => { if (this.scope.optionTabsAutoHeight) { this.setHeight(); } }, 500); } protected onlyTemplateChilds() { let allAreTemplates: boolean = true; this.el.childNodes.forEach((child) => { allAreTemplates = allAreTemplates && (child.nodeName === 'TEMPLATE' || child.nodeName === '#text'); }); return allAreTemplates; } protected template() { // Only set the component template if there no childs or the childs are templates if (!this.el.hasChildNodes() || this.onlyTemplateChilds()) { return template; } else { return null; } } } <file_sep>import { Utils } from '../../services/utils'; /** * Sets a default value if the first value is not set * @see https://gist.github.com/der-On/cdafe908847e2b882691 */ export const defaultFormatter = { name: 'default', read(value: any, defaultValue: any) { if (Utils.isDefined(value)) { if (Utils.isString(value)) { if (value.length > 0) { return value; } else { return defaultValue; } } return value; } return defaultValue; }, }; <file_sep>import Popper from 'popper.js'; // /dist/umd/popper /** * * @see https://getbootstrap.com/docs/4.1/components/tooltips/ * @see https://github.com/twbs/bootstrap/blob/v4-dev/js/src/tooltip.js */ import { Binder } from '@ribajs/core'; import { JQuery as $ } from '@ribajs/jquery'; const template = '<div class="tooltip" role="tooltip">' + '<div class="arrow"></div>' + '<div class="tooltip-inner"></div></div>'; /** * */ export const tooltipBinder: Binder<string> = { name: 'bs4-tooltip', 
block: false, bind(el: HTMLUnknownElement) { this.customData.$tip = $(template); this.customData.show = () => { const attachment: 'auto' | 'top' | 'right' | 'bottom' | 'left' = 'top'; const offset = 0; this.customData.popper = new Popper(el, this.customData.$tip[0], { placement: attachment, modifiers: { offset: { offset, }, flip: { behavior: 'flip', }, arrow: { element: '.arrow', }, preventOverflow: { boundariesElement: 'scrollParent', }, }, }); this.customData.$tip.appendTo(document.body); this.customData.$tip.addClass('show'); this.customData.$tip.addClass('bs-tooltip-' + attachment); }; this.customData.hide = () => { this.customData.$tip.removeClass('show'); if (this.customData.popper) { this.customData.popper.destroy(); } }; el.addEventListener('mouseenter', this.customData.show); el.addEventListener('mouseleave', this.customData.hide); }, routine(el: HTMLElement, text: string) { this.customData.$tip.find('.tooltip-inner').html(text); }, unbind() { this.customData.hide(); this.el.removeEventListener('mouseenter', this.customData.show); this.el.removeEventListener('mouseleave', this.customData.hide); }, }; <file_sep>import { Binder } from '../interfaces'; import { Utils } from '../services/utils'; export interface Assign { key: string; value: any; } /** * assign-* * Assign a value in your model. * Sets or overwrites a value by his property name (named whatever value is in place of [property]) in your model. * @example * <div rv-assign-new='"hello"'>{new}</div> */ export const assignPropertyBinder: Binder<Assign> = { name: 'assign-*', routine(el: HTMLElement, value: any) { const propertyName = Utils.camelCase((this.args[0] as string).trim()); const obj: any = {}; obj[propertyName] = value; return Utils.extend(false, this.view.models, obj); }, }; <file_sep>import { Formatters, Binders, Adapters, Root, Components, Options, } from './interfaces'; import { Utils } from './services/utils'; import { parseTemplate, parseType } from './parsers'; import { Binding } from './binding'; import { starBinder } from './binders/attribute.binder'; import { View } from './view'; import { Observer } from './observer'; import { ModulesService } from './services/module.service'; export class Riba { /** * Sets the attribute on the element. If no binder above is matched it will fall * back to using this binder. */ public static fallbackBinder = starBinder; /** * Default event handler, calles the function defined in his binder * @see Binding.eventHandler * @param el The element the event was triggered from */ public static handler(this: any, context: any, ev: Event, binding: Binding, el: HTMLElement) { this.call(context, ev, binding.view.models, el); } /** singleton instance */ private static instance: Riba; public module: ModulesService; /** Global binders */ public binders: Binders<any> = {}; /** Global components. */ public components: Components = {}; /** Global formatters. */ public formatters: Formatters = {}; /** Global (sightglass) adapters. */ public adapters: Adapters = {}; public parseTemplate = parseTemplate; public parseType = parseType; /** Default template delimiters. */ public templateDelimiters = ['{', '}']; /** Default sightglass root interface. */ public rootInterface = '.'; /** Preload data by default. 
*/ public preloadData = true; /** Remove binder attributes after binding */ public removeBinderAttributes = true; // TODO fixme on false: Maximum call stack size exceeded /** Stop binding on this node types */ public blockNodeNames = ['SCRIPT', 'STYLE', 'TEMPLATE', 'CODE']; /** Default attribute prefix. */ private _prefix = 'rv'; /** Default attribute full prefix. */ private _fullPrefix = 'rv-'; set prefix(value) { this._prefix = value; this._fullPrefix = value + '-'; } get prefix() { return this._prefix; } get fullPrefix() { return this._fullPrefix; } /** * Creates an singleton instance of Riba. */ constructor() { this.module = new ModulesService(this.binders, this.components, this.formatters, this.adapters); if (Riba.instance) { return Riba.instance; } Riba.instance = this; } /** * Merges an object literal into the corresponding global options. * @param options */ public configure(options: Partial<Options>) { if (!options) { return; } Object.keys(options).forEach((option) => { const value = (options as any)[option]; switch (option) { case 'binders': this.binders = Utils.concat(false, this.binders, value); break; case 'formatters': this.formatters = Utils.concat(false, this.formatters, value); break; case 'components': this.components = Utils.concat(false, this.components, value); break; case 'adapters': this.adapters = Utils.concat(false, this.adapters, value); break; case 'prefix': this.prefix = value; break; case 'parseTemplate': this.parseTemplate = value; break; case 'parseType': this.parseType = value; break; case 'templateDelimiters': this.templateDelimiters = value; break; case 'rootInterface': this.rootInterface = value; break; case 'preloadData': this.preloadData = value; break; case 'blockNodeNames': this.blockNodeNames = value; break; default: console.warn('Option not supported', option, value); break; } }); } public getViewOptions(options?: Partial<Options>) { const viewOptions: Partial<Options> = { // EXTENSIONS adapters: <Adapters> {}, binders: <Binders<any>> {}, components: <Components> {}, formatters: <Formatters> {}, // other starBinders: {}, // sightglass rootInterface: <Root> {}, }; if (options) { viewOptions.binders = Utils.concat(false, viewOptions.binders, options.binders); viewOptions.formatters = Utils.concat(false, viewOptions.formatters, options.formatters); viewOptions.components = Utils.concat(false, viewOptions.components, options.components); viewOptions.adapters = Utils.concat(false, viewOptions.adapters, options.adapters); } viewOptions.prefix = options && options.prefix ? options.prefix : this.prefix; viewOptions.fullPrefix = viewOptions.prefix ? viewOptions.prefix + '-' : this.fullPrefix; viewOptions.templateDelimiters = options && options.templateDelimiters ? options.templateDelimiters : this.templateDelimiters; viewOptions.rootInterface = options && options.rootInterface ? options.rootInterface : this.rootInterface; viewOptions.removeBinderAttributes = options && typeof(options.removeBinderAttributes) === 'boolean' ? options.removeBinderAttributes : this.removeBinderAttributes; viewOptions.blockNodeNames = options && options.blockNodeNames ? options.blockNodeNames : this.blockNodeNames; viewOptions.preloadData = options && typeof(options.preloadData) === 'boolean' ? options.preloadData : this.preloadData; viewOptions.handler = options && options.handler ? 
options.handler : Riba.handler; // merge extensions viewOptions.binders = Utils.concat(false, this.binders, viewOptions.binders); viewOptions.formatters = Utils.concat(false, this.formatters, viewOptions.formatters); viewOptions.components = Utils.concat(false, this.components, viewOptions.components); viewOptions.adapters = Utils.concat(false, this.adapters, viewOptions.adapters); // get all starBinders from available binders if (viewOptions.binders) { viewOptions.starBinders = Object.keys(viewOptions.binders).filter((key) => { return key.indexOf('*') >= 1; // Should start with * }); } return (viewOptions as Options); } /** * Binds some data to a template / element. Returns a riba.View instance. */ public bind(el: HTMLElement | DocumentFragment | HTMLUnknownElement[], models: any, options?: Options) { const viewOptions: Options = this.getViewOptions(options); models = models || new Object(null); Observer.updateOptions(viewOptions); const view = new View(el, models, viewOptions); view.bind(); return view; } } <file_sep>"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); /** * Return a resized shopify image URL * @see https://help.shopify.com/en/themes/liquid/filters/url-filters#img_url * * @param url * @param size * @param scale TODO * @param crop TODO * @param extension */ exports.imgUrlFormatter = { name: 'img_url', read(url, size, scale, crop, extension, element) { try { if (size === 'original' || size === 'master') { return url; } if (scale && scale !== 1) { size += '@' + scale + 'x'; } const result = url.match(/(.*\/[\w\-\_\.]+)\.(\w{2,4})/); if (!result || !result[1] || !result[2]) { throw new Error(`Can't match url ${url}`); } const path = result[1]; extension = extension || result[2]; return path + '_' + size + '.' + extension; } catch (error) { console.error(error); return url; } }, }; <file_sep>module.exports = { "presets": [ "@babel/typescript", [ "@babel/preset-env", { "targets": { "ie": "11", "safari": "10", "chrome": "52", "edge": "16", "firefox": "59" } } ] ], "plugins": [ [ "@babel/plugin-transform-runtime", { "corejs": 2 } ], "@babel/plugin-proposal-class-properties", "@babel/plugin-proposal-object-rest-spread", "@babel/plugin-proposal-export-default-from", "@babel/plugin-syntax-dynamic-import", "array-includes" ] }; <file_sep>import { Binder } from '../interfaces'; import { Utils } from '../services/utils'; export interface Assign { key: string; value: any; } /** * assign * Assign a value in your model. * The value you want to assign must be an object and will be concatenate with your model. * @example * <div rv-assign='{"newValue": "hello", "anotherNewValue": "world"}'>{newValue} {anotherNewValue}!</div> */ export const assignBinder: Binder<Assign> = { name: 'assign', routine(el: HTMLElement, value: object) { if (typeof(value) === 'object') { return Utils.extend(false, this.view.models, value); } console.warn('Value must be an object or propertyName is required'); }, }; <file_sep>import { Formatter } from '@ribajs/core'; /** * Formats the price based on the shop's HTML with currency setting (if the format is not overwritten by passing a format parameter). 
* @param cents * @param format * * @see https://github.com/NathanPJF/deploybot-shopify/blob/master/assets/ajaxify.js.liquid * @see https://github.com/discolabs/cartjs/blob/master/src/utils.coffee * @see https://github.com/JumpLinkNetwork/shopify-productjs/blob/master/src/utilities.js * @see https://help.shopify.com/en/themes/liquid/filters/money-filters */ export declare const moneyFormatter: Formatter; <file_sep>import { Binder, EventDispatcher, Utils, View as RivetsView } from '@ribajs/core'; import { Pjax, Prefetch, HideShowTransition } from '../services'; import { State } from '../interfaces'; /** * The main wrapper for the riba router * TODO convert this to a component * * ``` * <div rv-view='{"listenAllLinks": true}'> * <div class="rv-view-container" {% include 'jumplink-utils-barba-container-attributes', parseCollection: true %}> * {{ content_for_layout }} * </div> * </div> * ``` */ export const viewBinder: Binder<string> = { name: 'view', block: true, bind(el: Element) { const self = this; if (!this.customData) { this.customData = {}; } this.customData.nested = this.customData.nested || null, this.customData.wrapper = this.customData.wrapper || el, this.customData.onPageReady = (viewId: string, currentStatus: State, prevStatus: State, container: HTMLElement, newPageRawHTML: string, dataset: any, isInit: boolean) => { // Only to anything if the viewID is eqal (in this way it is possible to have multiple views) if (viewId !== self.customData.options.viewId) { console.warn('not the right view', self.customData.options.viewId, viewId); return; } // unbind the old rivets view if (self.customData.nested) { if (self.customData.options.action === 'replace') { self.customData.nested.unbind(); } } // add the dateset to the model if (!Utils.isObject(self.view.models)) { self.view.models = {}; } if (self.customData.options.datasetToModel === true && Utils.isObject(dataset)) { self.view.models.dataset = dataset; // = container.data(); } // TODO append on action append self.customData.nested = new RivetsView(container, self.view.models, self.view.options); self.customData.nested.bind(); }; this.customData.onTransitionCompleted = (viewId: string) => { // Only to anything if the viewID is eqal (in this way it is possible to have multiple views) if (viewId !== self.customData.options.viewId) { return; } // scroll to Anchor of hash if (this.customData.options.scrollToAnchorHash && window.location.hash) { const scrollToMe = document.getElementById(window.location.hash.substr(1)); if (scrollToMe) { return new Promise((resolve, reject) => { resolve(Utils.scrollTo(scrollToMe, 0, window)); }); } } return Promise.resolve(); }; /* * Make the dispatcher available in the model to register event handlers. * * I.e., if we have initialized rivets/riba with: * * `rivets.bind(document.body, model)`, * * then we can register event handlers for the Barba router dispatcher like this: * * `model.routerDispatcher.on('newPageReady', ...);` * `model.routerDispatcher.on('transitionCompleted', ...);` * ...etc. 
* */ // this.view.models.routerDispatcher = dispatcher; }, routine(el: HTMLUnknownElement, options: any) { // Set default options this.customData.options = options || {}; this.customData.options.viewId = this.customData.options.viewId || el.getAttribute('id') || 'main'; this.customData.options.action = this.customData.options.action || 'replace'; // replace / append if (this.customData.options.viewId === 'main') { this.customData.options.containerSelector = this.customData.options.containerSelector || '[data-namespace]'; this.customData.options.scrollToTop = Utils.isBoolean(this.customData.options.scrollToTop) ? this.customData.options.scrollToTop : true; this.customData.options.listenAllLinks = Utils.isBoolean(this.customData.options.listenAllLinks) ? this.customData.options.listenAllLinks : true; this.customData.options.listenPopstate = Utils.isBoolean(this.customData.options.listenPopstate) ? this.customData.options.listenPopstate : true; this.customData.options.scrollToAnchorHash = Utils.isBoolean(this.customData.options.scrollToAnchorHash) ? this.customData.options.scrollToAnchorHash : true; this.customData.options.datasetToModel = Utils.isBoolean(this.customData.options.datasetToModel) ? this.customData.options.datasetToModel : true; this.customData.options.parseTitle = Utils.isBoolean(this.customData.options.parseTitle) ? this.customData.options.parseTitle : true; this.customData.options.changeBrowserUrl = Utils.isBoolean(this.customData.options.changeBrowserUrl) ? this.customData.options.changeBrowserUrl : true; } else { this.customData.options.containerSelector = this.customData.options.containerSelector || `#${this.customData.options.viewId} > *:first-child`; this.customData.options.scrollToTop = Utils.isBoolean(this.customData.options.scrollToTop) ? this.customData.options.scrollToTop : false; this.customData.options.listenAllLinks = Utils.isBoolean(this.customData.options.listenAllLinks) ? this.customData.options.listenAllLinks : false; this.customData.options.listenPopstate = Utils.isBoolean(this.customData.options.listenPopstate) ? this.customData.options.listenPopstate : false; this.customData.options.scrollToAnchorHash = Utils.isBoolean(this.customData.options.scrollToAnchorHash) ? this.customData.options.scrollToAnchorHash : false; this.customData.options.datasetToModel = Utils.isBoolean(this.customData.options.datasetToModel) ? this.customData.options.datasetToModel : false; this.customData.options.parseTitle = Utils.isBoolean(this.customData.options.parseTitle) ? this.customData.options.parseTitle : false; this.customData.options.changeBrowserUrl = Utils.isBoolean(this.customData.options.changeBrowserUrl) ? this.customData.options.changeBrowserUrl : false; } this.customData.options.autoprefetchLinks = Utils.isBoolean(this.customData.options.autoprefetchLinks) ? 
this.customData.options.autoprefetchLinks : this.customData.options.listenAllLinks; this.customData.options.transition = this.customData.options.transition || new HideShowTransition(this.customData.options.action, this.customData.options.scrollToTop); this.customData.dispatcher = new EventDispatcher(this.customData.options.viewId); this.customData.prefetch = new Prefetch(); this.customData.wrapper.setAttribute('id', this.customData.options.viewId); this.customData.dispatcher.on('newPageReady', this.customData.onPageReady); this.customData.dispatcher.on('transitionCompleted', this.customData.onTransitionCompleted); const pjax = new Pjax(this.customData.options.viewId, this.customData.wrapper, this.customData.options.containerSelector, this.customData.options.listenAllLinks, this.customData.options.listenPopstate, this.customData.options.transition, this.customData.options.parseTitle, this.customData.options.changeBrowserUrl); this.customData.prefetch.init(this.customData.options.autoprefetchLinks); pjax.start(); }, unbind(el: HTMLUnknownElement) { if (this.customData.dispatcher) { this.customData.dispatcher.off('newPageReady', this.customData.onPageReady); this.customData.dispatcher.off('transitionCompleted', this.customData.onTransitionCompleted); } if (this.customData && this.customData.nested !== null) { this.customData.nested.unbind(); } delete this.customData; }, }; <file_sep>import { Binder, EventDispatcher, Utils } from '@ribajs/core'; export const routeClassStarBinder: Binder<string> = { name: 'route-class-*', bind(el: HTMLUnknownElement) { this.customData = { dispatcher: new EventDispatcher('main'), }; }, /** * Tests the url with the current location, if the url is equal to the current location this element is active * @param el Binder HTML Element * @param url Url to compare with the current location */ routine(el: HTMLElement, url: string) { const className = this.args[0].toString() || 'active'; const isAnkerHTMLElement = el.tagName === 'A'; if (!url && isAnkerHTMLElement) { const href = el.getAttribute('href'); if (href) { url = href; } } const onUrlChange = (urlToCheck?: string) => { if (urlToCheck) { if (Utils.onRoute(urlToCheck)) { el.classList.add(className); // check if element is radio input if (el.getAttribute('type') === 'radio') { (el as HTMLInputElement).checked = true; } return true; } else { el.classList.remove(className); // uncheck if element is radio input if (el.getAttribute('type') === 'radio') { (el as HTMLInputElement).checked = false; } } } return false; }; this.customData.dispatcher.on('newPageReady', () => onUrlChange(url)); onUrlChange(url); }, unbind(el: HTMLUnknownElement) { // console.warn('routeClassStarBinder routine', el); }, }; <file_sep>export { collapseOnUrlBinder } from './collapse-on-url.binder'; export { collapseBinder } from './collapse.binder'; export { dropdownBinder } from './dropdown.binder'; export { expanOnUrlBinder } from './expan-on-url.binder'; export { scrollspyClassBinder } from './scrollspy-class.binder'; export { tooltipBinder } from './tooltip.binder'; export { scrollToOnEventBinder } from './scroll-to-on-event.binder'; <file_sep>import { Utils } from '../../services/utils'; /** * parse json string to object * @example <div rv-add-class='"["col-2", "col-3", "col-4", "col-5", "col-6"]" | parse | random'> */ export const parseFormatter = { name: 'parse', read(jsonString: string) { if (Utils.isString(jsonString)) { return Utils.parseJsonString(jsonString); } else if (Utils.isObject(jsonString as any) || Utils.isArray(jsonString as 
any)) {
      console.warn('[parseFormatter] You do not need to parse the value because it has already been parsed');
      return jsonString;
    }
    return null;
  },
};
<file_sep>import jquery from 'jquery';

/**
 * JQuery Extension for mobile events: https://github.com/benmajor/jQuery-Touch-Events
 */
import touchEvents from './_jquery-touch-events';

// tslint:disable-next-line:variable-name
const JQuery: JQueryStatic = touchEvents(jquery);

export default JQuery;
export { JQuery };
<file_sep>import { EventDispatcher } from '@ribajs/core';
import { PQueue } from './p-queue.service';
import { ShopifyCartLineItem, ShopifyCartUpdateProperty, ShopifyCartAddError, ShopifyCartObject, ShopifyCustomerAddress, ShopifyShippingRates, ShopifyShippingRatesNormalized } from '../interfaces';
export interface ShopifyCartRequestOptions {
    triggerOnStart: boolean;
    triggerOnComplete: boolean;
    triggerOnChange: boolean;
}
export declare class ShopifyCartService {
    static queue: PQueue;
    static cart: ShopifyCartObject | null;
    static shopifyCartEventDispatcher: EventDispatcher;
    /**
     * Use this to add a variant to the cart.
     * @param id Variant id
     * @param quantity Quantity
     * @param properties Additional properties
     * @return Response if successful, the JSON of the line item associated with the added variant.
     * @see https://help.shopify.com/en/themes/development/getting-started/using-ajax-api#add-to-cart
     */
    static add(id: number, quantity?: number, properties?: {}, options?: ShopifyCartRequestOptions): Promise<ShopifyCartLineItem | ShopifyCartAddError>;
    static _get(): Promise<ShopifyCartObject>;
    /**
     * Use this to get the cart as JSON.
     * @param options Request options
     * @return The JSON of the cart.
     * @see https://help.shopify.com/en/themes/development/getting-started/using-ajax-api#get-cart
     */
    static get(options?: ShopifyCartRequestOptions): Promise<ShopifyCartObject>;
    /**
     * Use this to change cart attributes, the cart note, and quantities of line items in the cart.
     * @param id Variant ID
     * @param quantity Quantity
     * @param properties Additional properties
     * @return Response The JSON of the cart.
     * @see https://help.shopify.com/en/themes/development/getting-started/using-ajax-api#update-cart
     */
    static update(id: number, quantity: number, properties?: {}, options?: ShopifyCartRequestOptions): Promise<ShopifyCartObject>;
    /**
     * Use this to change cart attributes, the cart note, and quantities of line items in the cart.
     * @param updates The updates to apply, either keyed by variant id or as an array of quantities
     * @return Response The JSON of the cart.
     * @see https://help.shopify.com/en/themes/development/getting-started/using-ajax-api#update-cart
     */
    static updates(updates: ShopifyCartUpdateProperty | Array<number>, options?: ShopifyCartRequestOptions): Promise<ShopifyCartObject>;
    /**
     * This call sets the quantity of an item already in the cart.
     *
     * Although /cart/update.js and /cart/change.js may seem like they accomplish the same function,
     * they truly are quite different. The /cart/update.js controller allows updates to several items
     * at once, including items that may not yet be in the cart (it will add them), and it also allows
     * updates of cart attributes and the cart note. The /cart/change.js controller is only able to
     * update the quantity of one item at a time, and that item must be in the cart already. If the
     * item is not in the cart, /cart/change.js will not add it and it will then return a 404 error.
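     *
     * A hedged example of the difference (the variant id 1234567 is made up):
     *
     *   await ShopifyCartService.update(1234567, 2); // may add the item if it is not in the cart yet
     *   await ShopifyCartService.change(1234567, 2); // only adjusts an existing line item, 404s otherwise
     *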
     * Whereas the /cart/update.js controller updates no quantity when any of the requested updates
     * cannot be met, the /cart/change.js controller, on the other hand, will adjust the quantity to
     * add all items in stock if what is requested is greater than what's available. Use your browser's
     * JavaScript console to test things out if you're not sure about the behavior of the different request URLs.
     *
     * @param id Variant ID
     * @param quantity Quantity
     * @param properties Additional properties
     * @return Response The JSON of the cart.
     * @see https://help.shopify.com/en/themes/development/getting-started/using-ajax-api#change-cart
     */
    static change(id: number, quantity: number, properties?: {}, options?: ShopifyCartRequestOptions): Promise<ShopifyCartObject>;
    /**
     * If you use Line Item Properties you may end up with several items in the cart that share the same variant ID. How do you update the quantity of an item in the cart that has specific line item properties? Once you have identified the 1-based index of the item in the cart, you can use the line property instead of id like so:
     * @param line 1-based index of the item in the cart
     * @param quantity Quantity
     * @param properties Additional properties
     * @return Response The JSON of the cart.
     */
    static changeLine(line: string | number, quantity: number, properties?: {}, options?: ShopifyCartRequestOptions): Promise<ShopifyCartObject>;
    /**
     * This call sets all quantities of all line items in the cart to zero.
     * @return Response The JSON of an empty cart. This does not remove cart attributes nor the cart note.
     * @see https://help.shopify.com/en/themes/development/getting-started/using-ajax-api#clear-cart
     */
    static clear(options?: ShopifyCartRequestOptions): Promise<ShopifyCartObject>;
    static _getShippingRates(shippingAddress: ShopifyCustomerAddress, normalize?: boolean): Promise<ShopifyShippingRates | ShopifyShippingRatesNormalized>;
    /**
     * Get estimated shipping rates.
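     *
     * A hedged usage example (the address values mirror the query string below):
     *
     *   const rates = await ShopifyCartService.getShippingRates({
     *     zip: 'K1N 5T2', country: 'Canada', province: 'Ontario',
     *   } as ShopifyCustomerAddress);
     *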
     * @param shippingAddress TODO: /cart/shipping_rates.json?shipping_address[zip]=K1N 5T2&shipping_address[country]=Canada&shipping_address[province]=Ontario
     * @see https://help.shopify.com/en/themes/development/getting-started/using-ajax-api#get-shipping-rates
     */
    static getShippingRates(shippingAddress: ShopifyCustomerAddress, normalize?: boolean, options?: ShopifyCartRequestOptions): Promise<ShopifyShippingRates | ShopifyShippingRatesNormalized>;
    protected static CART_POST_ADD_URL: string;
    protected static CART_GET_URL: string;
    protected static CART_POST_UPDATE_URL: string;
    protected static CART_POST_CHANGE_URL: string;
    protected static CART_POST_CLEAR_URL: string;
    protected static CART_GET_SHIPPING_RATES_URL: string;
    protected static requestOptionDefaults: {
        triggerOnStart: boolean;
        triggerOnComplete: boolean;
        triggerOnChange: boolean;
    };
    protected static waitForComplete: boolean;
    /**
     * Trigger `ShopifyCart:request:complete`; if the queue is still pending, do nothing (in this case we are already waiting for onIdle)
     */
    protected static triggerOnComplete(): Promise<void> | undefined;
    /**
     * TODO check if cart values are changed
     * @param cart The cart object
     */
    protected static triggerOnChange(cart: ShopifyCartObject): void;
    /**
     * Trigger `ShopifyCart:request:start`, if not already triggered
     */
    protected static triggerOnStart(): void;
    /**
     * Trigger `ShopifyCart:add`
     */
    protected static triggerAdd(id: number, quantity: number, properties: any): void;
    protected static normalizeShippingRates(shippingRates: ShopifyShippingRates): ShopifyShippingRatesNormalized;
}
<file_sep>export { Bs4ContentsComponent } from './bs4-contents/bs4-contents.component';
export { Bs4IconComponent } from './bs4-icon/bs4-icon.component';
export { Bs4ScrollspyComponent } from './bs4-scrollspy/bs4-scrollspy.component';
export { Bs4SidebarComponent } from './bs4-sidebar/bs4-sidebar.component';
export { Bs4ToggleButtonComponent } from './bs4-toggle-button/bs4-toggle-button.component';
export { DropdownComponent } from './dropdown/dropdown.component';
export { Bs4NavbarComponent } from './bs4-navbar/bs4-navbar.component';
export { TabsComponent } from './tabs/tabs.component';
<file_sep>export * from './interfaces';
import * as components from './components';
// import * as formatters from './formatters/shopify.formatters';
import * as services from './services';

export const shopifyEasdkModule = {
  // formatters,
  services,
  components,
};

export default shopifyEasdkModule;
<file_sep>import { Formatter } from '../../interfaces/formatter';
import { Utils } from '../../services/utils';

/**
 * Checks if value is a boolean
 */
export const isBooleanFormatter: Formatter = {
  name: 'isBoolean',
  read: Utils.isBoolean,
};
<file_sep>export * from './vendors';
export * from './binders';

import { jqueryModule } from './jquery.module';

export { jqueryModule };
export default jqueryModule;
<file_sep>import { Binder } from '../interfaces';

/**
 * Sets the element's HTML content.
 */
export const htmlBinder: Binder<string> = {
  name: 'html',
  routine(el: HTMLElement, value: string) {
    el.innerHTML = value != null ?
value : ''; }, }; <file_sep>import { Binder, Type } from '@ribajs/core'; import { AI18nSwitcherComponent } from '../abstract-switcher/switcher.abstract.component'; import { Langcode } from '../../interfaces'; import { ALocalesService } from '../../services/locales-base.service'; export const i18nSwitcherComponentWrapper = (localesService: ALocalesService): Type<AI18nSwitcherComponent> => { return class I18nSwitcherComponent extends AI18nSwitcherComponent { public static tagName: string = 'i18n-switcher'; static get observedAttributes() { return []; } protected localesService = localesService; protected scope = { langcodes: <Langcode[]> [], switch: this.switch, toggle: this.toggle, ready: <boolean> false, }; constructor(element?: HTMLElement) { super(element); this.init(I18nSwitcherComponent.observedAttributes); } /** * Switch to language by langcode * @param langcode * @param event */ public switch(langcode: Langcode, context?: Binder<any>, event?: Event) { return super.switch(langcode, context, event); } /** * Toggle language, makes only sense if you have only two languages * @param langcode * @param event */ public toggle(context?: Binder<any>, event?: Event) { return super.toggle(context, event); } protected setLangcode(langcode: string) { return super.setLangcode(langcode); } protected requiredAttributes() { return []; } protected disconnectedCallback() { super.disconnectedCallback(); } protected template() { return null; } }; }; <file_sep>"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); const core_1 = require("@ribajs/core"); /** * Custom version of shopify tools like api.jquery.js / option-selection.js * @see https://mayert-douglas4935.myshopify.com/pages/api */ class ShopifyService { constructor(shopSettings) { if (ShopifyService.instance) { return ShopifyService.instance; } ShopifyService.instance = this; } static formatMoneyWithDelimiters(num, precision = 2, thousands = ',', decimal = '.') { if (!core_1.Utils.isNumber(num) || num === null) { return '0'; } const numStr = (num / 100.0).toFixed(precision); const parts = numStr.split('.'); const dollars = parts[0].replace(/(\d)(?=(\d\d\d)+(?!\d))/g, '$1' + thousands); const cents = parts[1] ? (decimal + parts[1]) : ''; return dollars + cents; } static get moneyWithCurrencyFormat() { if (window.model && window.model.system && window.model.system.shopSettings) { return window.model.system.shopSettings.moneyWithCurrencyFormat; } } static get moneyFormat() { if (window.model && window.model.system && window.model.system.shopSettings) { return window.model.system.shopSettings.moneyFormat; } } } exports.ShopifyService = ShopifyService; <file_sep>import { Formatter } from '@ribajs/core'; /** * Return a resized shopify image URL * @see https://help.shopify.com/en/themes/liquid/filters/url-filters#img_url * * @param url * @param size * @param scale TODO * @param crop TODO * @param extension */ export declare const imgUrlFormatter: Formatter; <file_sep>import { Utils as RibaUtils, } from '@ribajs/core'; /** * * @see https://github.com/twbs/bootstrap/blob/v4-dev/js/src/util.js#L124 */ export class Utils extends RibaUtils { /** * Shoutout AngusCroll (https://goo.gl/pxwQGp) * @param obj */ public static toType(obj: any) { const matches = {}.toString.call(obj).match(/\s([a-z]+)/i); return matches ? 
matches[1].toLowerCase() : null; } /** * * @see https://github.com/twbs/bootstrap/blob/v4-dev/js/src/util.js#L124 */ public static isElement(obj: Element | Element[]) { return ((obj as Element[])[0] || obj as Element).nodeType; } /** * * @param componentName * @param config * @param configTypes */ public static typeCheckConfig(componentName: string, config: any, configTypes: any) { for (const property in configTypes) { if (Object.prototype.hasOwnProperty.call(configTypes, property)) { const expectedTypes = configTypes[property]; const value = config[property]; const valueType = value && Utils.isElement(value) ? 'element' : Utils.toType(value); if (!valueType || !new RegExp(expectedTypes).test(valueType)) { throw new Error( `${componentName.toUpperCase()}: ` + `Option "${property}" provided type "${valueType}" ` + `but expected type "${expectedTypes}".`); } } } } } <file_sep>import { Utils } from '../../services/utils'; import { emptyFormatter } from '../array/empty.formatter'; /** * Check if value is a string and not empty */ export const filledFormatter = { name: 'filled', read(str: string) { return Utils.isString(str) && !emptyFormatter.read(str.replace(/\s/g, '')); }, }; <file_sep>/** * * @see https://github.com/twbs/bootstrap/blob/v4-dev/js/src/collapse.js */ export class CollapseService { public static DATA_KEY = 'bs.collapse'; public static EVENT_KEY = `.${CollapseService.DATA_KEY}`; public static DATA_API_KEY = '.data-api'; public static EVENT = { SHOW : `show${CollapseService.EVENT_KEY}`, SHOWN : `shown${CollapseService.EVENT_KEY}`, HIDE : `hide${CollapseService.EVENT_KEY}`, HIDDEN : `hidden${CollapseService.EVENT_KEY}`, CLICK_DATA_API : `click${CollapseService.EVENT_KEY}${CollapseService.DATA_API_KEY}`, }; public static CLASSNAME = { SHOW : 'show', COLLAPSE : 'collapse', COLLAPSING : 'collapsing', COLLAPSED : 'collapsed', }; private targets: NodeListOf<Element> | Array<Element>; constructor(targets: NodeListOf<Element> | Array<Element>) { this.targets = targets; } public show() { this.targets.forEach((target: Element) => { target.classList.remove(CollapseService.CLASSNAME.COLLAPSE); target.classList.add(CollapseService.CLASSNAME.SHOW); target.dispatchEvent(new Event(CollapseService.EVENT.SHOWN)); }); } public hide() { this.targets.forEach((target: Element) => { target.classList.remove(CollapseService.CLASSNAME.SHOW); target.classList.add(CollapseService.CLASSNAME.COLLAPSE); target.dispatchEvent(new Event(CollapseService.EVENT.HIDDEN)); }); } public isExpanded() { if (this.targets.length > 0 && this.targets[0]) { return this.targets[0].classList.contains(CollapseService.CLASSNAME.SHOW); } return false; } public isCollapsed() { return !this.isExpanded(); } public toggle() { if (this.isCollapsed()) { this.show(); } else { this.hide(); } } } <file_sep>import { Utils } from '../../services/utils'; /** * Array formatter to get a subarray from array */ export const rangeFormatter = { name: 'range', read(arr: any[], start: number, end: number) { start = Number(Utils.isNumber(start) ? start : 0); end = Number(Utils.isNumber(end) ? 
end : arr.length - 1);
    if (end > arr.length - 1) {
      end = arr.length - 1;
    }
    if (start > end) {
      return [];
    }
    return arr.slice(Number(start || 0), 1 + end);
  },
};
<file_sep>import { Binder, Utils, View as RivetsView, handleizeFormatter } from '@ribajs/core';
import { Pjax, HideShowTransition } from '../services';

/**
 * Loads a url with pjax and shows it inside the element this binder is used on
 */
export const viewStaticBinder: Binder<string> = {
  name: 'view-static',
  block: true,
  bind(el: HTMLElement) {
    if (!this.customData) {
      this.customData = {
        nested: null,
      };
    }
  },
  routine(el: HTMLElement, options: any) {
    const wrapper = el;
    const self = this;
    // Set default options
    options = options || {};
    options.listenAllLinks = false;
    options.listenPopstate = false;
    options.parseTitle = false;
    options.transition = options.transition || new HideShowTransition();
    options.viewId = options.viewId || el.getAttribute('id') || handleizeFormatter.read(options.url);
    options.containerSelector = options.containerSelector || '[data-namespace]';
    const pjax = new Pjax(options.viewId, wrapper, options.containerSelector, options.listenAllLinks, options.listenPopstate, options.transition, options.parseTitle);
    const $newContainer = pjax.load(options.url);
    $newContainer.then((container: HTMLElement) => {
      wrapper.replaceWith(container);
      container.style.visibility = 'visible';
      // add the dataset to the model
      if (!Utils.isObject(self.view.models)) {
        self.view.models = {};
      }
      // self.view.models.dataset = container.data();
      if (self.customData.nested) {
        self.customData.nested.unbind();
      }
      self.customData.nested = new RivetsView(container, self.view.models, self.view.options);
      self.customData.nested.bind();
    });
  },
  unbind(el: HTMLUnknownElement) {
    if (this.customData.nested) {
      this.customData.nested.unbind();
    }
    delete this.customData;
  },
};
<file_sep>import {
  Component,
  EventDispatcher,
  Utils,
} from '@ribajs/core';

type State = 'overlay-left' | 'overlay-right' | 'side-left' | 'side-right' | 'hidden';

interface Scope {
  /**
   * Selector string to get the container element from the DOM
   */
  containerSelector?: string;
  /**
   * The current state of the sidebar, can be `'hidden'`, `'side-left'`, `'side-right'`, `'overlay-left'` or `'overlay-right'`
   */
  state: State;
  /**
   * The 'id' is required to react to events of the `bs4-toggle-button`, the `target-id` attribute of the `bs4-toggle-button` must be identical to this `id`
   */
  id?: string;
  /**
   * The width of the sidebar
   */
  width: number;

  // Options
  /**
   * The sidebar can be positioned `right` or `left`
   */
  position: 'left' | 'right';
  /**
   * Auto show the sidebar if the viewport width is wider than this value
   */
  autoShowOnWiderThan: number;
  /**
   * Auto hide the sidebar if the viewport width is slimmer than this value
   */
  autoHideOnSlimmerThan: number;
  /**
   * You can force to hide the sidebar on corresponding URL pathnames, e.g. you can hide the sidebar on home with `['/']`.
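   * A hedged example as an element attribute (the paths are illustrative):
   * `force-hide-on-location-pathnames='["/", "/search"]'`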
   */
  forceHideOnLocationPathnames: Array<string>;
  /**
   * Like `force-hide-on-location-pathnames`, but to force the sidebar to open
   */
  forceShowOnLocationPathnames: Array<string>;
  /**
   * If the viewport width is wider than this value the sidebar adds a margin to the container (detected with the `container-selector`) to reduce its content, if the viewport width is slimmer than this value the sidebar opens over the content
   */
  overlayOnSlimmerThan: number;

  // Template methods
  /**
   * Hides / closes the sidebar
   */
  hide: Bs4SidebarComponent['hide'];
  /**
   * Shows / opens the sidebar
   */
  show: Bs4SidebarComponent['show'];
  /**
   * Toggles (closes or opens) the sidebar
   */
  toggle: Bs4SidebarComponent['toggle'];
}

export class Bs4SidebarComponent extends Component {
  public static tagName: string = 'bs4-sidebar';

  protected style: CSSStyleDeclaration;

  protected autobind = true;

  static get observedAttributes() {
    return ['id', 'container-selector', 'position', 'width', 'auto-show-on-wider-than', 'auto-hide-on-slimmer-than', 'force-hide-on-location-pathnames', 'force-show-on-location-pathnames', 'overlay-on-slimmer-than'];
  }

  protected toggleButtonEvents?: EventDispatcher;
  protected routerEvents = new EventDispatcher('main');

  protected scope: Scope = {
    // template properties
    containerSelector: undefined,
    state: 'hidden',
    id: undefined,
    width: 250,

    // Options
    position: 'left',
    autoShowOnWiderThan: 1199,
    autoHideOnSlimmerThan: 1200,
    forceHideOnLocationPathnames: [],
    forceShowOnLocationPathnames: [],
    overlayOnSlimmerThan: 1200,

    // template methods
    hide: this.hide,
    show: this.show,
    toggle: this.toggle,
  };

  constructor(element?: HTMLElement) {
    super(element);
    this.init(Bs4SidebarComponent.observedAttributes);
    this.style = window.getComputedStyle(this.el);
    window.addEventListener('resize', this.onEnvironmentChanges.bind(this), false);
  }

  public setState(state: State) {
    this.scope.state = state;
  }

  public getState() {
    return this.scope.state;
  }

  public hide() {
    this.scope.state = 'hidden';
    this.onStateChange();
  }

  public show() {
    const vw = Utils.getViewportDimensions().w;
    if (vw < this.scope.overlayOnSlimmerThan) {
      this.scope.state = 'overlay-' + this.scope.position as State;
    } else {
      this.scope.state = 'side-' + this.scope.position as State;
    }
    this.onStateChange();
  }

  public toggle() {
    if (this.scope.state === 'hidden') {
      this.show();
    } else {
      this.hide();
    }
  }

  protected onToggle(targetId: string) {
    this.toggle();
  }

  protected initToggleButtonEventDispatcher() {
    if (this.toggleButtonEvents) {
      this.toggleButtonEvents.off('toggle', this.onToggle);
    }
    this.toggleButtonEvents = new EventDispatcher('bs4-toggle-button:' + this.scope.id);
    this.toggleButtonEvents.on('toggle', this.onToggle.bind(this));
  }

  protected initRouterEventDispatcher() {
    this.routerEvents.on('newPageReady', this.onEnvironmentChanges.bind(this));
  }

  protected onHidden() {
    this.setContainersStyle();
    const translateX = this.scope.position === 'left' ? '-100%' : '100%';
    this.el.setAttribute('style', `transform:translateX(${translateX});width:${this.scope.width}px;`);
  }

  protected onSide(direction: State) {
    this.setContainersStyle(undefined, '', direction);
    this.el.setAttribute('style', `transform:translateX(0);width:${this.scope.width}px;`);
  }

  protected onOverlay(direction: State) {
    this.setContainersStyle(undefined, '', direction);
    this.el.setAttribute('style', `transform:translateX(0);width:${this.scope.width}px;`);
  }

  protected onStateChange() {
    switch (this.scope.state) {
      case 'side-left':
      case 'side-right':
        this.onSide(this.scope.state);
        break;
      case 'overlay-left':
      case 'overlay-right':
        this.onOverlay(this.scope.state);
        break;
      default:
        this.onHidden();
        break;
    }
    if (this.toggleButtonEvents) {
      this.toggleButtonEvents.trigger('toggled', this.scope.state);
    }
  }

  protected get width() {
    return this.el.offsetWidth || this.scope.width;
  }

  protected setStateByEnvironment() {
    if (this.scope.forceHideOnLocationPathnames.includes(window.location.pathname)) {
      return this.hide();
    }
    if (this.scope.forceShowOnLocationPathnames.includes(window.location.pathname)) {
      return this.show();
    }
    const vw = Utils.getViewportDimensions().w;
    if (vw < this.scope.autoHideOnSlimmerThan) {
      return this.hide();
    }
    if (vw > this.scope.autoShowOnWiderThan) {
      return this.show();
    }
  }

  /**
   * If viewport size changes, location url changes or something else
   */
  protected onEnvironmentChanges() {
    this.setStateByEnvironment();
  }

  protected getContainers() {
    return this.scope.containerSelector ? document.querySelectorAll<HTMLUnknownElement>(this.scope.containerSelector) : undefined;
  }

  protected initContainers() {
    const containers = this.getContainers();
    this.setContainersStyle(containers);
  }

  protected setContainersStyle(containers?: NodeListOf<HTMLUnknownElement>, style?: string, move?: State) {
    if (!containers) {
      containers = this.getContainers();
    }
    if (containers) {
      for (let i = 0; i < containers.length; i++) {
        const container = containers[i];
        this.setContainerStyle(container, style, move);
      }
    }
  }

  /**
   * Sets the container style; always takes over the transition style of this sidebar
   * @param container
   * @param style
   * @param move
   */
  protected setContainerStyle(container: HTMLUnknownElement, style: string = '', move?: State) {
    if (move) {
      const width = this.width;
      const conStyle = window.getComputedStyle(container);
      switch (move) {
        case 'side-left':
          switch (conStyle.position) {
            case 'fixed':
              style += 'left:' + width + 'px';
              break;
            default:
              style += 'margin-left:' + width + 'px';
              break;
          }
          break;
        case 'side-right':
          switch (conStyle.position) {
            case 'fixed':
              style += 'right:' + width + 'px';
              break;
            default:
              style += 'margin-right:' + width + 'px';
              break;
          }
          break;
        default:
          break;
      }
    }
    return container.setAttribute('style', `transition:${this.style.transition};${style}`);
  }

  protected async beforeBind() {
    this.initRouterEventDispatcher();
    this.onEnvironmentChanges();
  }

  protected async afterBind() {
    this.onEnvironmentChanges();
  }

  protected requiredAttributes() {
    return ['id'];
  }

  protected parsedAttributeChangedCallback(attributeName: string, oldValue: any, newValue: any, namespace: string | null) {
    super.parsedAttributeChangedCallback(attributeName, oldValue, newValue, namespace);
    if (attributeName === 'containerSelector') {
      this.initContainers();
    }
    if (attributeName === 'id') {
      this.initToggleButtonEventDispatcher();
    }
  }

  // deconstructor
  protected disconnectedCallback() {
    super.disconnectedCallback();
  }

  protected template() {
    if
(!this.el.hasChildNodes()) { console.warn('No child elements found, this component as no template so you need to define your own as child of this component.'); } return null; } } <file_sep>export * from './dropdown.service'; export * from './utils.service'; <file_sep>export * from './HistoryManager'; export * from './Dom'; export * from './Prefetch'; import { EventDispatcher, Utils, HttpService } from '@ribajs/core'; import { BaseCache } from '../Cache'; import { HideShowTransition } from '../Transition'; import { Transition } from '../../interfaces'; import { Dom } from './Dom'; import { HistoryManager } from './HistoryManager'; export interface PjaxInstances { [key: string]: Pjax; } /** * Pjax is a static object with main function * * @borrows Dom as Dom */ class Pjax { /** * Class name used to ignore links */ public static ignoreClassLink = 'no-barba'; public static cache = new BaseCache(); public static instances: PjaxInstances = {}; public static getInstance(id: string) { const result = Pjax.instances[id]; if (!result) { throw new Error(`No Pjax instance with id ${id} found!`); } return result; } /** * Determine if the link should be followed */ public static preventCheck(evt: Event, element?: HTMLAnchorElement, href?: string): boolean { if (!window.history.pushState) { return false; } /** * Get href from element if href is not set */ if (!href && element) { href = this.getHref(element); } /** * Create fake html element if element is not set */ if (href && !element) { element = document.createElement('a'); element.setAttribute('href', href); } if (!element) { return false; } if (!href) { return false; } // Ignore case when a hash is being tacked on the current URL if (href.indexOf('#') > -1) { return false; } // In case you're trying to load the same page if (Utils.cleanLink(href) === Utils.cleanLink(location.href)) { return false; } // Middle click, cmd click and ctrl click if ((evt && ((evt as any).which && (evt as any).which > 1) || (evt as any).metaKey || (evt as any).ctrlKey || (evt as any).shiftKey || (evt as any).altKey)) { return false; } // Ignore target with _blank target if (element.target && element.target === '_blank') { return false; } // Check if it's the same domain if (window.location.protocol !== element.protocol || window.location.hostname !== element.hostname) { return false; } // Check if the port is the same if (Utils.getPort() !== Utils.getPort(element.port)) { return false; } // Ignore case where there is download attribute if (element.getAttribute && typeof element.getAttribute('download') === 'string') { return false; } if (element.classList.contains(this.ignoreClassLink)) { return false; } return true; } /** * Get the .href parameter out of an element * and handle special cases (like xlink:href) */ public static getHref(el: HTMLAnchorElement | SVGAElement): string | undefined { if (!el) { return undefined; } if (el.getAttribute && typeof el.getAttribute('xlink:href') === 'string') { return el.getAttribute('xlink:href') || undefined; } if (typeof(el.href) === 'string') { let href = el.href; // normalize url if (href && Utils.isAbsoluteUrl(href)) { const location = Utils.getLocation(); const host = location.protocol + '//' + location.hostname; // if is not an external link if (href.indexOf(host) === 0) { // get relative href href = href.replace(host, ''); } } return href; } return undefined; } public dom?: Dom; public history = new HistoryManager(); /** * Indicate wether or not use the cache */ public cacheEnabled: boolean = true; /** * Indicate if there is an 
animation in progress */ public transitionProgress: boolean = false; private listenAllLinks: boolean; private listenPopstate: boolean; private parseTitle: boolean; private changeBrowserUrl: boolean; private dispatcher: EventDispatcher; private transition?: Transition; private wrapper?: HTMLElement; private viewId: string; /** * Creates an singleton instance of Pjax. */ constructor(id: string, wrapper?: HTMLElement, containerSelector = '[data-namespace]', listenAllLinks: boolean = false, listenPopstate: boolean = true, transition: Transition = new HideShowTransition(), parseTitle: boolean = true, changeBrowserUrl: boolean = true) { this.viewId = id; let instance = this as Pjax; this.dispatcher = new EventDispatcher(this.viewId); this.listenAllLinks = listenAllLinks; this.listenPopstate = listenPopstate; this.parseTitle = parseTitle; this.changeBrowserUrl = changeBrowserUrl; if (Pjax.instances[this.viewId]) { instance = Pjax.instances[this.viewId]; } instance.transition = instance.transition || transition; instance.wrapper = instance.wrapper || wrapper; instance.listenAllLinks = Utils.isBoolean(instance.listenAllLinks) ? instance.listenAllLinks : listenAllLinks; instance.listenPopstate = Utils.isBoolean(instance.listenPopstate) ? instance.listenPopstate : listenPopstate; instance.parseTitle = Utils.isBoolean(instance.parseTitle) ? instance.parseTitle : parseTitle; instance.changeBrowserUrl = Utils.isBoolean(instance.changeBrowserUrl) ? instance.changeBrowserUrl : changeBrowserUrl; if (instance.wrapper) { instance.dom = instance.dom || new Dom(instance.wrapper, containerSelector, this.parseTitle); instance.wrapper.setAttribute('aria-live', 'polite'); } Pjax.instances[this.viewId] = instance; return Pjax.instances[this.viewId]; } /** * Function to be called to start Pjax */ public start() { if (this.wrapper) { this.init(this.wrapper, this.listenAllLinks, this.listenPopstate); } else { console.error(`Can't init pjax without wrapper`); } } /** * Return the currentURL cleaned */ public getCurrentUrl() { return Utils.cleanLink( Utils.getUrl(), ); } /** * Change the URL with pushstate and trigger the state change */ public goTo(url: string, newTab = false) { if (newTab) { const win = window.open(url, '_blank'); if (win) { return win.focus(); } return false; } if (url.indexOf('http') !== 0) { if (this.changeBrowserUrl) { window.history.pushState(null, '', url); } return this.onStateChange(undefined, url); } // fallback this.forceGoTo(url); } /** * Return a transition object */ public getTransition(): Transition { // User customizable return this.transition || new HideShowTransition(); } /** * Load an url, will start an fetch request or load from the cache */ public load(url: string): Promise<HTMLElement> { let fetch; fetch = Pjax.cache.get(url); if (!fetch) { fetch = HttpService.get(url, undefined, 'html'); Pjax.cache.set(url, fetch); } return fetch .then((data: string) => { if (!this.dom) { throw new Error('[Pjax] you need to call the start method first!'); } const container = this.dom.parseResponse(data); this.dom.putContainer(container); if (!this.cacheEnabled) { Pjax.cache.reset(); } return container; }) .catch((error: any) => { console.error(error); // Something went wrong (timeout, 404, 505...) 
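        // Fall back to a full, non-pjax page load so the user still reaches
        // the requested URL even though the fetch failed.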
        this.forceGoTo(url);
        throw error;
      });
  }

  /**
   * Attach the event listeners
   */
  protected bindEvents(listenAllLinks: boolean, listenPopstate: boolean) {
    // you can also use the rv-router for this
    if (listenAllLinks) {
      document.addEventListener('click',
        this.onLinkClick.bind(this),
      );
    }

    if (listenPopstate) {
      window.addEventListener('popstate',
        this.onStateChange.bind(this),
      );
    }
  }

  /**
   * Force the browser to go to a certain url
   */
  protected forceGoTo(url: Location | string) {
    if (url && (url as Location).href) {
      window.location = url as Location;
    }
    if (typeof url === 'string') {
      window.location.href = url;
    }
  }

  /**
   * Callback called from click event
   */
  protected onLinkClick(evt: Event) {
    let el: HTMLAnchorElement = ((evt as Event).target as HTMLAnchorElement);

    // Go up in the nodelist until we
    // find something with an href
    while (el && !Pjax.getHref(el)) {
      el = (el.parentNode as HTMLAnchorElement);
    }

    const href = Pjax.getHref(el);

    if (Pjax.preventCheck(evt, el, href)) {
      evt.stopPropagation();
      evt.preventDefault();

      this.dispatcher.trigger('linkClicked', el, evt);

      if (!href) {
        throw new Error('href is null');
      }

      this.goTo(href);
    }
  }

  /**
   * Method called after a 'popstate' or from .goTo()
   */
  protected onStateChange(event?: Event, newUrl: string = this.getCurrentUrl()) {
    if (this.transitionProgress) {
      this.forceGoTo(newUrl);
    }

    if (this.changeBrowserUrl && this.history.currentStatus().url === newUrl) {
      return false;
    }

    this.history.add(newUrl);

    const newContainer = this.load(newUrl);
    const transition = this.getTransition();

    this.transitionProgress = true;

    this.dispatcher.trigger('initStateChange',
      this.viewId,
      this.history.currentStatus(),
      this.history.prevStatus(),
    );

    if (!this.dom) {
      throw new Error('[Pjax] you need to call the start method first!');
    }

    const transitionInstance = transition.init(
      this.dom.getContainer(document.body),
      newContainer,
    );

    newContainer.then(
      this.onNewContainerLoaded.bind(this),
    );

    transitionInstance.then(
      this.onTransitionEnd.bind(this),
    );
  }

  /**
   * Function called as soon as the new container is ready
   */
  protected onNewContainerLoaded(container: HTMLElement) {
    const currentStatus = this.history.currentStatus();

    if (!this.dom) {
      throw new Error('[Pjax] you need to call the start method first!');
    }

    currentStatus.namespace = this.dom.getNamespace(container);

    this.dispatcher.trigger('newPageReady',
      this.viewId,
      this.history.currentStatus(),
      this.history.prevStatus(),
      container,
      this.dom.currentHTML,
      container.dataset,
      false, // true if this is the first time newPageReady is triggered / true on initialisation
    );
  }

  /**
   * Function called as soon as the transition is finished
   */
  protected onTransitionEnd() {
    this.transitionProgress = false;

    this.dispatcher.trigger('transitionCompleted',
      this.viewId,
      this.history.currentStatus(),
      this.history.prevStatus(),
    );
  }

  /**
   * Init the events
   */
  protected init(wrapper: HTMLElement, listenAllLinks: boolean, listenPopstate: boolean) {
    if (!this.dom) {
      throw new Error('[Pjax] you need to call the start method first!');
    }
    const container = this.dom.getContainer(document.body);
    this.wrapper = wrapper;

    this.history.add(
      this.getCurrentUrl(),
      this.dom.getNamespace(container),
    );

    // Fire for the current view.
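    // The initial sequence mirrors a regular navigation: 'initStateChange' ->
    // 'newPageReady' (with the isInit flag set to true) -> 'transitionCompleted',
    // so subscribers do not need a separate code path for the first page load.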
    this.dispatcher.trigger('initStateChange',
      this.viewId,
      this.history.currentStatus(),
    );

    this.dispatcher.trigger('newPageReady',
      this.viewId,
      this.history.currentStatus(),
      {},
      container,
      this.dom.currentHTML,
      container.dataset,
      true, // true if this is the first time newPageReady is triggered / true on initialisation
    );

    this.dispatcher.trigger('transitionCompleted',
      this.viewId,
      this.history.currentStatus(),
    );

    this.bindEvents(listenAllLinks, listenPopstate);
  }
}

export { Pjax };
<file_sep>// IMPORTANT do not use .babelrc: https://github.com/babel/babel/issues/8711#issuecomment-421918023
module.exports = {
  "presets": [
    [
      "@babel/typescript",
      {
        "allExtensions": true
      }
    ],
    [
      "@babel/preset-env",
      {
        // see https://www.sumcumo.com/tree-shaking-eine-einfuehrung
        modules: false
      }
    ]
  ],
  "plugins": [
    [
      "@babel/plugin-transform-runtime",
      {
        "corejs": 2
      }
    ],
    "@babel/plugin-syntax-export-default-from",
    "@babel/plugin-proposal-class-properties",
    "@babel/plugin-proposal-object-rest-spread",
    "array-includes"
  ],
  "ignore": [
    "**/custom-elements-es5-adapter.js"
  ]
};
<file_sep>/**
 * Object that is going to deal with DOM parsing/manipulation
 */
class Dom {
  /**
   * The name of the data attribute on the container
   */
  public dataNamespace = 'namespace';

  /**
   * Selector used to identify the containers
   *
   * @default
   */
  public containerSelector: string;

  /**
   * Full HTML String of the current page.
   * By default is the innerHTML of the initial loaded page.
   *
   * Each time a new page is loaded, the value is the response of the xhr call.
   */
  public currentHTML?: string;

  private _wrapper: HTMLElement;

  private parseTitle: boolean;

  constructor(wrapper: HTMLElement, containerSelector = '[data-namespace]', parseTitle: boolean) {
    this._wrapper = wrapper;
    this.containerSelector = containerSelector;
    this.parseTitle = parseTitle;
  }

  /**
   * Parse the responseText obtained from the xhr call
   * @see https://stackoverflow.com/a/41038197/1465919
   */
  public parseResponse(responseText: string): HTMLElement {
    this.currentHTML = responseText;
    const wrapper = document.createElement('div') as HTMLElement;
    wrapper.innerHTML = responseText;
    if (this.parseTitle === true) {
      const titleElement = wrapper.querySelector('title');
      if (titleElement && titleElement.textContent) {
        document.title = titleElement.textContent;
      }
    }
    return this.getContainer(wrapper);
  }

  /**
   * Get the main barba wrapper by the ID `wrapperId`
   */
  public getWrapper(): Element {
    return this._wrapper;
  }

  /**
   * Get the container on the current DOM,
   * or from an Element passed via argument
   */
  public getContainer(element?: HTMLTemplateElement | HTMLElement): HTMLElement {
    if (!element) {
      throw new Error('Barba.js: [getContainer] No element to get container from!');
    }
    const container = this.parseContainer(element);
    if (!container) {
      throw new Error('[DOM] No container found');
    }
    return container;
  }

  /**
   * Get the namespace of the container
   */
  public getNamespace(element: HTMLElement): string | null {
    if (element && element.dataset && element.dataset.namespace) {
      return element.dataset.namespace;
    } else {
      return null;
    }
  }

  /**
   * Put the container on the page
   */
  public putContainer(element: HTMLElement, appendChild: 'append' | 'replace' = 'replace') {
    element.style.visibility = 'hidden';
    const wrapper = this.getWrapper();
    wrapper.appendChild(element);
  }

  /**
   * Get container selector
   */
  protected parseContainer(newPage: HTMLTemplateElement | HTMLElement): HTMLElement {
    if (!newPage) {
      const error = new Error('New page not loaded!');
      console.error(error, newPage);
      throw error;
    }
    let result: HTMLElement | null;
    if ((newPage as HTMLTemplateElement).content) {
      result = (newPage as HTMLTemplateElement).content.querySelector(this.containerSelector);
    } else {
      result = newPage.querySelector(this.containerSelector);
    }
    if (!result) {
      const error = new Error(`No container with selector "${this.containerSelector}" found!`);
      console.error(error, newPage);
      throw error;
    }
    return result;
  }
}

export { Dom };
<file_sep>import { Formatter } from '../../interfaces/formatter';
import { Utils } from '../../services/utils';

/**
 * Checks if value is defined
 */
export const isDefinedFormatter: Formatter = {
  name: 'isDefined',
  read: Utils.isDefined,
};
<file_sep>export const debugFormatter = {
  name: 'debug',
  read(toPrint: any, level: 'log' | 'debug' | 'info' | 'error' | 'warn' = 'debug') {
    console[level](toPrint);
    return toPrint;
  },
};
<file_sep>import { PriorityQueue } from './priority-queue.service';
export declare class PQueue {
    queue: PriorityQueue;
    private _carryoverConcurrencyCount;
    private _isIntervalIgnored;
    private _intervalCount;
    private _intervalCap;
    private _interval;
    private _intervalId?;
    private _intervalEnd;
    private _timeoutId;
    private _pendingCount;
    private _concurrency;
    private _isPaused;
    private _resolveEmpty;
    private _resolveIdle;
    constructor(options: any);
    add(fn: any, options?: any): Promise<any>;
    addAll(fns: any, options: any): Promise<[unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown]>;
    start(): void;
    pause(): void;
    clear(): void;
    onEmpty(): Promise<unknown>;
    onIdle(): Promise<any>;
    get _doesIntervalAllowAnother(): boolean;
    get _doesConcurrentAllowAnother(): boolean;
    private _next;
    private _resolvePromises;
    private _onResumeInterval;
    private _intervalPaused;
    private _tryToStartAnother;
    private _initializeIntervalIfNeeded;
    private _onInterval;
    get size(): number;
    get pending(): number;
    get isPaused(): boolean;
}
<file_sep>import { Utils } from '../../services/utils';

/**
 * Gets back a random value of the array
 * @example <div rv-add-class='"["col-2", "col-3", "col-4", "col-5", "col-6"]" | parse | random'>
 */
export const randomFormatter = {
  name: 'random',
  read(array: any[]) {
    if (Utils.isArray(array)) {
      const value = array[Math.floor(Math.random() * array.length)];
      return value;
    }
    return null;
  },
};
<file_sep>/**
 * Custom version of shopify tools like api.jquery.js / option-selection.js
 * @see https://mayert-douglas4935.myshopify.com/pages/api
 */
export declare class ShopifyService {
    static formatMoneyWithDelimiters(num: number, precision?: number, thousands?: string, decimal?: string): string;
    /** singleton instance */
    protected static instance: ShopifyService;
    protected moneyFormat?: string;
    protected moneyWithCurrencyFormat?: string;
    static get moneyWithCurrencyFormat(): any;
    static get moneyFormat(): any;
    constructor(shopSettings?: any);
}
<file_sep>import { CommandInput, ConfigurationLoader as IConfigurationLoader } from '../interfaces';
import { ConfigurationLoader } from '../lib/configuration';
import { FileSystemReader } from '../lib/readers';

export abstract class AbstractAction {
  public abstract handle(
    inputs?: CommandInput[],
    options?: CommandInput[],
    extraFlags?: string[],
  ): Promise<void>;

  protected deepCopyInput(inputs: CommandInput[]) {
    return inputs.map(input => ({...input}));
  }

  protected getInput(inputs: CommandInput[], name: string) {
    const input = inputs.find(input => input.name === name);
    return input;
  }

  protected setInput(inputs: CommandInput[], name: string, value: string | boolean) {
    const input = inputs.find(input => input.name === name);

    // Add new input if input not exists
    if (!input) {
      inputs.push({name, value});
      return this.getInput(inputs, name);
    }

    input.value = value;
    return input;
  }

  /**
   * Sets the input value only if the value has not yet been set
   * @param inputs
   * @param name
   * @param value
   */
  protected setDefaultInput(inputs: CommandInput[], name: string, value: string | boolean) {
    const input = inputs.find(input => input.name === name);
    if (!input) {
      inputs.push({name, value});
      return this.getInput(inputs, name);
    }
    if (typeof(input.value) === 'undefined') {
      input.value = value;
    }
    return input;
  }

  protected concatOptions(inputsSources: CommandInput[][]) {
    const result = new Array<CommandInput>();
    for (const inputs of inputsSources) {
      const toConcat = this.deepCopyInput(inputs);
      for (const input of toConcat) {
        if (typeof(input.value) !== 'undefined') {
          this.setInput(result, input.name, input.value);
        }
      }
    }
    return result;
  }

  protected async loadConfiguration() {
    const loader: IConfigurationLoader = new ConfigurationLoader(
      new FileSystemReader(process.cwd()),
    );
    return loader.load();
  }

  protected async generateFiles(args: CommandInput[], options: CommandInput[]): Promise<void> {
    return Promise.resolve();
  }
}
<file_sep>import { RibaModule } from '@ribajs/core';
export * from './binders';
export * from './interfaces';
export * from './services';

import * as routerBinders from './binders';
import * as services from './services';

export const routerModule = <RibaModule> {
  binders: routerBinders,
  services,
};
export default routerModule;
<file_sep>export * from './polyfills.module';
<file_sep>// TODO
export interface Deferred {
  resolve: any;
  reject: any;
}

/**
 * Just a class with some helpful functions
 *
 * @export
 * @class Utils
 */
export class Utils {

  public static couldBeJson(str?: string | null) {
    if (!str) {
      return false;
    }
    return str.startsWith('{') || str.startsWith('[');
  }

  /**
   * Test if string is a json string
   * @param str
   */
  public static isJson(str?: string | null) {
    if (!str) {
      return false;
    }
    try {
      const val = JSON.parse(str);
      return (Array.isArray(val) || typeof(val) === 'object') ? true : false;
    } catch (error) {
      return false;
    }
  }

  /**
   * Check if value is undefined
   */
  public static isUndefined(value?: any) {
    return typeof(value) === 'undefined';
  }

  /**
   * Check if value is defined
   */
  public static isDefined(value?: any) {
    return !Utils.isUndefined(value);
  }

  /**
   * Check if type is Object
   * @see https://stackoverflow.com/a/4775737/1465919
   */
  public static isObject(obj: object) {
    return Utils.isDefined(obj) && typeof obj === 'object' && obj !== null;
  }

  /**
   * Parse value to string or return undefined if value is null
   * @param value
   */
  public static getString(value: string) {
    return value != null ? value.toString() : undefined;
  }

  /**
   * Parse value to number or return undefined if value is falsy
   * @param value
   */
  public static getNumber(value: string) {
    return value ? parseFloat(value) : undefined;
  }
  /**
   * Parses a json string with the special feature that json strings
   * can also have single quotations for defining the properties and values
   */
  public static parseJsonString(value: string) {
    let object = null;
    if (Utils.couldBeJson(value)) {
      if (Utils.isJson(value)) {
        object = JSON.parse(value) || null;
      } else {
        try {
          // Transform an invalid json string with single quotation to a valid json string with double quotation
          object = JSON.parse(value.replace(/'/g, '"')) || null;
        } catch (error) {
          console.warn(error);
        }
      }
    }
    return object;
  }

  public static times(n: number, cb: () => void) {
    for (let i = 0; i < n; i++) {
      cb();
    }
  }

  /**
   * Get the value of an input element (checkbox, multi-select, contenteditable or plain input)
   */
  public static getInputValue(el: HTMLElement) {
    const results: string[] = [];
    if ((el as HTMLSelectElement).type === 'checkbox') {
      return (el as HTMLInputElement).checked;
    } else if ((el as HTMLSelectElement).type === 'select-multiple') {
      const options: HTMLOptionsCollection = (el as HTMLSelectElement).options;
      for (const key in options) {
        if (options.hasOwnProperty(key)) {
          const option = options[key];
          if (option.selected) {
            results.push(option.value);
          }
        }
      }
      return results;
    } else if (el.getAttribute('contenteditable')) {
      return el.innerHTML; // TODO write test for contenteditable
    } else {
      return (el as HTMLInputElement).value;
    }
  }

  /**
   * Returns a camel-cased version of the string. Used when translating an
   * element's attribute name into a property name for the component's scope.
   * @param str
   */
  public static camelCase = (str: string) => {
    return str.replace(/-([a-z0-9])/g, (grouped) => {
      return grouped[1].toUpperCase();
    });
  }

  /**
   * Check if value is a function
   */
  public static isFunction(value: any) {
    return typeof(value) === 'function';
  }

  /**
   * Check if variable is an Array
   * @see https://stackoverflow.com/a/4775737/1465919
   */
  public static isArray(value: any) {
    return Object.prototype.toString.call( value ) === '[object Array]';
  }

  /**
   * Check whether variable is number or a string with numbers in JavaScript
   * @see https://stackoverflow.com/a/1421988/1465919
   */
  public static isNumber(value?: any): boolean {
    return !isNaN(parseFloat(value)) && !isNaN(value - 0);
  }

  /**
   * Check if type is Boolean
   * @see https://stackoverflow.com/a/28814615/1465919
   */
  public static isBoolean(value?: any) {
    return typeof(value) === typeof(true);
  }

  /**
   * Check if value is a string
   */
  public static isString(value?: any) {
    return this.isDefined(value) && typeof(value) === 'string';
  }

  /**
   * Check if string contains a number
   */
  public static stringHasNumber(value: string) {
    return this.isString(value) && /\d/.test(value);
  }

  /**
   * Check if string contains only numbers
   */
  public static stringHasOnlyNumbers(value?: any) {
    return /^\d+$/.test(value);
  }

  /**
   * Check if string contains only numbers, +, - and ()
   */
  public static stringIsPhoneNumber(value: string) {
    return /^[0-9 ()+-]+$/.test(value);
  }

  /**
   * Just get the digits of a string, useful to remove px pixel from css value
   *
   * @see http://stackoverflow.com/a/1100653/1465919
   */
  public static justDigits(str: string) {
    const num = str.replace(/[^-\d\.]/g, '');
    if (!Utils.isNumber(num)) {
      return 0;
    } else {
      return Number(num);
    }
  }

  /**
   * Merge the contents of two or more objects together into the first object.
   * @param {boolean} deep If true, the merge becomes recursive (aka. deep copy).
   * @param {object} extended An object that will receive the new properties
   * @param {any[]} objects The objects containing additional properties to merge in.
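   * @example
   *   // illustrative calls; results shown as comments
   *   Utils.extend(false, {}, { a: 1 }, { b: 2 });              // { a: 1, b: 2 }
   *   Utils.extend(true, {}, { a: { x: 1 } }, { a: { y: 2 } }); // { a: { x: 1, y: 2 } }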
* @see http://www.damirscorner.com/blog/posts/20180216-VariableNumberOfArgumentsInTypescript.html */ public static extend(deep: boolean, extended: any = {}, ...objects: any[]) { // Merge the object into the extended object const merge = (obj: any) => { for (const prop in obj) { if (obj.hasOwnProperty(prop)) { if (deep && Object.prototype.toString.call(obj[prop]) === '[object Object]') { // If we're doing a deep merge and the property is an object extended[prop] = this.extend(true, extended[prop], obj[prop]); } else { // Otherwise, do a regular merge extended[prop] = obj[prop]; } } } }; // Loop through each object and conduct a merge for (let i = 0; i < objects.length; i++) { merge(objects[i]); } return extended; } /** * Concat the contents of two objects together into the first object and return the concatenated object. * @param {boolean} deep If true, the merge becomes recursive (aka. deep copy). * @param {object} object1 An first object containing properties to concat. * @param {object} object2 The second object containing properties to concat. */ public static concat(deep: boolean, object1?: object, object2?: object): any { object1 = this.extend(deep, object1 || {}, object1 || {}, object2 || {}); return object1; } /** * Clone an object or array * @param deep If true, the merge becomes recursive (aka. deep copy). * @param val The value(s) to clone */ public static clone(deep: boolean, val: any) { if (Utils.isArray(val)) { return val.slice(); } else { return Utils.extend(deep, {}, val); } } /** * Return a new "Deferred" object * https://developer.mozilla.org/en-US/docs/Mozilla/JavaScript_code_modules/Promise.jsm/Deferred * * @memberOf Barba.Utils * @return {Deferred} */ public static deferred(): any { const obj: any = {}; const prom = new Promise((resolve: any, reject: any) => { obj.resolve = resolve; obj.reject = reject; }); obj.promise = prom; return obj; } /** * get hostname an path of address bar * @see http://stackoverflow.com/a/736970/1465919 * * @example * var l = getLocation("http://example.com/path"); * console.debug(l.hostname) * >> "example.com" * console.debug(l.pathname) * >> "/path" */ public static getLocation(url?: string): Location { if (!url) { return window.location; } const l = document.createElement('a'); l.href = url; return l as any as Location; } /** * Return the current url * * @memberOf Barba.Utils * @return {string} currentUrl */ public static getUrl(url?: string): string { const location = Utils.getLocation(url); return location.protocol + '//' + location.host + location.pathname + location.search; } /** * Check if we are on the route */ public static onRoute = (checkUrl?: string) => { if (checkUrl) { const pathname = Utils.getLocation().pathname; return checkUrl === pathname; } return false; } /** * Check if the current location url stats with a url or is equal */ public static onParentRoute = (checkUrl?: string) => { if (checkUrl) { const pathname = Utils.getLocation().pathname; return pathname.startsWith(checkUrl); } return false; } /** * Given an url, return it without the hash * * @memberOf Barba.Utils * @private * @param {string} url * @return {string} newCleanUrl */ public static cleanLink(url: string): string { return url.replace(/#.*/, ''); } /** * Return the port number normalized, eventually you can pass a string to be normalized. * * @memberOf Barba.Utils * @private * @param {String} p * @return {Int} port */ public static getPort(p?: string, url?: string) { const location = Utils.getLocation(url); const port = typeof p !== 'undefined' ? 
p : location.port; const protocol = location.protocol; if (port !== '') { return Number(port); } if (protocol === 'http:') { return 80; } if (protocol === 'https:') { return 443; } } /** * Test if url is absolute or relative * @see https://stackoverflow.com/a/19709846/1465919 */ public static isAbsoluteUrl(url: string) { const isProtokoll = new RegExp('^(?:[a-z]+:)?//', 'i'); const isAbsolute = isProtokoll.test(url) || url.startsWith('mailto:') || url.startsWith('tel:') || url.startsWith('fax:'); return isAbsolute; } public static isExternalUrl = (absoluteUrl: string) => { if (Utils.isAbsoluteUrl(absoluteUrl)) { const location = Utils.getLocation(); const host = location.protocol + '//' + location.hostname; let isExternal = true; if (absoluteUrl.startsWith(host)) { isExternal = false; } return isExternal; } return false; } public static isInternalUrl = (url: string) => { return !Utils.isExternalUrl(url); } /** * get param from hash */ public static getUrlParameter(name: string, url: string) { if (!url) { url = window.location.href; } name = name.replace(/[\[\]]/g, '\\$&'); const regex = new RegExp('[?&]' + name + '(=([^&#]*)|&|#|$)'); const results = regex.exec(url); if (!results) { return null; } if (!results[2]) { return ''; } return decodeURIComponent(results[2].replace(/\+/g, ' ')); } /** * Get hash from address bar or url if set */ public static getHash(url?: string) { return Utils.getLocation(url).hash; } /** * Change hash from address bar */ public static updateHash(hash: string) { return window.location.hash = hash; } /** * Remove hash from address bar */ public static removeHash() { return history.pushState('', document.title, window.location.pathname + window.location.search); } public static getViewportDimensions() { const w = Math.max(document.documentElement ? document.documentElement.clientWidth : 0, window.innerWidth || 0); const h = Math.max(document.documentElement ? 
/** * Get hash from address bar or url if set */ public static getHash(url?: string) { return Utils.getLocation(url).hash; } /** * Change hash from address bar */ public static updateHash(hash: string) { return window.location.hash = hash; } /** * Remove hash from address bar */ public static removeHash() { return history.pushState('', document.title, window.location.pathname + window.location.search); } public static getViewportDimensions() { const w = Math.max(document.documentElement ? document.documentElement.clientWidth : 0, window.innerWidth || 0); const h = Math.max(document.documentElement ? document.documentElement.clientHeight : 0, window.innerHeight || 0); return { h, w, }; } public static escapeHtml(str: string) { const tagsToReplace = { '&': '&amp;', '<': '&lt;', '>': '&gt;', }; return str.replace(/[&<>]/g, (tag) => { return tagsToReplace[tag as '&' | '<' | '>'] || tag; }); } /** * Scrolls smoothly to an element * @see https://stackoverflow.com/a/31987330 * @param to Target element to scroll to * @param offset Offset in pixels which is subtracted from the target position * @param scrollElement Element (or window) which should perform the scrolling */ public static scrollTo(to: HTMLElement, offset: number, scrollElement: Element | (Window & typeof globalThis) | null) { if (!scrollElement) { scrollElement = window; } if (typeof((scrollElement as Window).pageYOffset) === 'number') { // if it is the window that scrolls scrollElement.scroll({ behavior: 'smooth', left: 0, top: (to.getBoundingClientRect().top + (scrollElement as Window).pageYOffset) - offset, }); } else { // if it is another element that scrolls scrollElement.scroll({ behavior: 'smooth', left: 0, top: (to.offsetTop ) - offset, }); } } /** * Cross-browser Document Ready check * @see https://www.competa.com/blog/cross-browser-document-ready-with-vanilla-javascript/ * @param callback */ public static domIsReady(callback: () => void) { if (!callback || typeof(callback) !== 'function') { return new Error('The callback is required!'); } const checkReady = () => { if (document.readyState !== 'loading') { callback(); if ((document as any).attachEvent) { (document as any).detachEvent('onreadystatechange', checkReady); } document.removeEventListener('DOMContentLoaded', checkReady); } }; if ((document as any).attachEvent) { (document as any).attachEvent('onreadystatechange', checkReady); } if (document.addEventListener) { document.addEventListener('DOMContentLoaded', checkReady); } checkReady(); } } <file_sep>import { Binder } from '@ribajs/core'; import { CollapseService } from '../services/collapse.service'; /** * * @see https://getbootstrap.com/docs/4.1/components/collapse/ */ export const collapseBinder: Binder<string> = { name: 'bs4-collapse', routine(el: HTMLElement, targetSelector: string) { const targets = el.querySelectorAll(targetSelector); const collapseService = new CollapseService(targets); const onStateChange = () => { if (collapseService.isCollapsed()) { el.classList.add(CollapseService.CLASSNAME.COLLAPSED); el.setAttribute('aria-expanded', 'false'); } else { el.classList.remove(CollapseService.CLASSNAME.COLLAPSED); el.setAttribute('aria-expanded', 'true'); } }; targets.forEach((target) => { target.addEventListener(CollapseService.EVENT.SHOWN, onStateChange.bind(this)); target.addEventListener(CollapseService.EVENT.HIDDEN, onStateChange.bind(this)); }); el.addEventListener('click', (event) => { event.preventDefault(); collapseService.toggle(); }); onStateChange(); }, };
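// Usage sketch for the binder above (assumed markup; the exact declaration syntax depends on your riba setup):
// <button rv-bs4-collapse="'.navbar-collapse'">Toggle navigation</button>
// A click on the button then toggles all elements matching the selector via the CollapseService.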
<file_sep>/** * This implementation of components replaces the old components of rivets following the Web Components v1 specs * * @see https://developer.mozilla.org/de/docs/Web/Web_Components/Using_custom_elements */ import { EventHandler, Formatter } from '../interfaces'; import { View } from '../view'; import { Riba } from '../riba'; import { Binding } from '../binding'; import { Utils } from '../services/utils'; import { FakeHTMLElement } from './fake-html-element'; export type TemplateFunction = () => Promise<string | null> | string | null; export interface RibaComponentContext { fallback: boolean; view: View; } export interface ObservedAttributeToCheck { initialized: boolean; passed: boolean; } export interface ObservedAttributesToCheck { [key: string]: ObservedAttributeToCheck; } export abstract class Component extends FakeHTMLElement { public static tagName: string; /** * Context of this component, used for debugging */ public context?: RibaComponentContext; protected view?: View; protected templateLoaded: boolean = false; /** * Used to check if all passed observedAttributes are initialized */ protected observedAttributesToCheck: ObservedAttributesToCheck = {}; protected riba?: Riba; protected el: HTMLUnknownElement; protected abstract scope: any; public get bound() { return !!this.view; } /** * If true the component will automatically be bound to riba if all required attributes are set */ protected autobind: boolean = true; private attributeObserverFallback?: MutationObserver; constructor(element?: HTMLUnknownElement, context?: RibaComponentContext) { super(element); this.context = context; if (element) { this.el = element; } else if (window.customElements) { this.el = this as unknown as HTMLElement; } else { throw new Error(`element is required on browsers without custom elements support`); } } /** * Remove this custom element */ public remove() { if (this.el && this.el.parentElement) { this.el.parentElement.removeChild(this.el); if (!(window as any).customElements) { this.disconnectedFallbackCallback(); } } } public disconnectedFallbackCallback() { this.disconnectedCallback(); } protected abstract template(): Promise<string | null> | string | null; /** * Returns a list of attributes which are required before the riba binding starts */ protected requiredAttributes(): string[] { return []; }
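// Override sketch for a subclass (illustrative addition): delay binding until `src` is set:
// protected requiredAttributes() { return ['src']; }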
protected async init(observedAttributes: string[]) { this.initAttributeObserver(observedAttributes); this.getPassedObservedAttributes(observedAttributes); return this.bindIfReady(); } /** * If `autobind` is true, riba is bound automatically in this component once all passed observed and required attributes are initialized */ protected async bindIfReady() { /** * After all required and passed attributes are set we load the template and bind the component */ if (this.allPassedObservedAttributesAreInitialized() && this.checkRequiredAttributes()) { return this.loadTemplate() .then((template) => { if (this.autobind) { return this.bind(); } return null; }); } // console.warn('Not all required and passed attributes are set to load and bind the template', this.observedAttributesToCheck); return null; } /** * Check if the attribute (e.g. `src`) is passed to this custom element; also checks if the attribute was passed with the riba prefix (e.g. `rv-src`) * @param observedAttribute */ protected attributeIsPassed(observedAttribute: string) { // TODO: this.riba is not defined at this point, so get the fullPrefix from riba once it is available const fullPrefix = this.riba ? this.riba.fullPrefix : 'rv-'; return this.el.getAttribute(observedAttribute) !== null || this.el.getAttribute(fullPrefix + observedAttribute) !== null; } /** * Get passed observed attributes, used to check if all passed attributes are initialized * @param observedAttributes */ protected getPassedObservedAttributes(observedAttributes: string[]) { for (const observedAttribute of observedAttributes) { const passed = this.attributeIsPassed(observedAttribute); this.observedAttributesToCheck[observedAttribute] = { passed, initialized: false, }; } } /** * Checks if all passed observed attributes are initialized */ protected allPassedObservedAttributesAreInitialized() { let allInitialized = true; for (const key in this.observedAttributesToCheck) { if (this.observedAttributesToCheck.hasOwnProperty(key)) { if (this.observedAttributesToCheck[key].passed) { allInitialized = allInitialized && this.observedAttributesToCheck[key].initialized; } } } return allInitialized; } /** * Required attributes before the view is bound * * The attributeChangedCallback is called for each attribute and updates the riba view each time, * which can have a big impact on performance, and required attributes may not yet be available, which can lead to errors. * So define required attributes, and the view is only bound for the first time after all these attributes have been transmitted. */ protected checkRequiredAttributes() { let allDefined = true; const requiredAttributes = this.requiredAttributes(); requiredAttributes.forEach((requiredAttribute: string) => { if (!this.scope.hasOwnProperty(requiredAttribute) || !this.scope[requiredAttribute] ) { // console.warn(`Attribute ${requiredAttribute} not set: ${this.scope[requiredAttribute]}`); allDefined = false; } else { // console.warn(`Attribute ${requiredAttribute} is defined: ${this.scope[requiredAttribute]}`); } }); return allDefined; } protected parseAttribute(attr: string | null) { let value: any = attr; if (attr === 'true') { value = true; } else if (attr === 'false') { value = false; } else if (attr === 'null') { value = null; } else if (attr === 'undefined') { value = undefined; } else if (attr === '') { value = undefined; } else if (!isNaN(Number(attr))) { value = Number(attr); // If number is too large store the value as string if (value >= Number.MAX_SAFE_INTEGER) { value = attr; } } else { const jsonString = Utils.parseJsonString(value); value = jsonString ? jsonString : value; } return value; }
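// Examples for the conversions above (illustrative addition):
// parseAttribute('true') -> true; parseAttribute('42') -> 42; parseAttribute('') -> undefined;
// parseAttribute('{"a": 1}') -> { a: 1 } (via Utils.parseJsonString)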
/** * Event handler to listen for the publish binder event for two-way-binding in web components */ protected publish(name: string, newValue: any, namespace: string | null) { this.el.dispatchEvent(new CustomEvent('publish-binder-change:' + name, { detail: { name, newValue, namespace: null, // TODO }})); } /** * Returns an event handler for the bindings (most on-*) inside this component. */ protected eventHandler(self: Component): EventHandler { // IMPORTANT this must be a regular function and not an arrow function return function(this: EventHandler, context: Binding, event: Event, binding: Binding, el: HTMLElement) { this.call(self, context, event, binding.view.models, el); }; } /** * Extra call formatter to avoid the "this" context problem */ protected callFormatterHandler(self: this): any { return { name: 'call', read: (fn: (...args: any[]) => any, ...args: any[]) => { return fn.apply(self, args); }, }; } /** * Extra args formatter to avoid the "this" context problem * * Sets arguments for a function without directly calling it * @param fn The function you wish to call * @param args the parameters you wish to call the function with */ protected argsFormatterHandler(self: this): Formatter { return { name: 'args', read: (fn: (...args: any[]) => any, ...fnArgs: any[]) => { return (event: Event, scope: any, el: HTMLElement, binding: any) => { // append the event handler args to passed args fnArgs.push(event); fnArgs.push(scope); fnArgs.push(el); fnArgs.push(binding); return fn.apply(self, fnArgs); }; }, }; }
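// Template usage sketch for the `call` / `args` formatters above (assumed markup):
// <button rv-on-click="onClick | args 'extra' 42">...</button>
// `args` pre-binds 'extra' and 42; event, view models, element and binding are appended on invocation.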
/** * Default custom Element method * Invoked when the custom element is first connected to the document's DOM. */ protected connectedCallback() { // console.warn('connectedCallback called'); } /** * Default custom Element method * Invoked when the custom element is disconnected from the document's DOM. */ protected disconnectedCallback() { if (this.view) { this.view.unbind(); } if (this.attributeObserverFallback) { this.attributeObserverFallback.disconnect(); } this.el.removeEventListener('binder-changed', this.BinderChangedEventHandler); } /** * Default custom Element method * Invoked when one of the custom element's attributes is added, removed, or changed. * @param attributeName * @param oldValue * @param newValue * @param namespace */ protected attributeChangedCallback(attributeName: string, oldValue: any, newValue: any, namespace: string | null) { if (this.observedAttributesToCheck && this.observedAttributesToCheck[attributeName]) { this.observedAttributesToCheck[attributeName].initialized = true; } newValue = this.parseAttribute(newValue); const parsedAttributeName = Utils.camelCase(attributeName); if (this.scope && this.scope[parsedAttributeName]) { oldValue = this.scope[parsedAttributeName]; } // automatically inject observed attributes to view scope this.scope[parsedAttributeName] = newValue; // call custom attribute changed callback with parsed values this.parsedAttributeChangedCallback(parsedAttributeName, oldValue, newValue, namespace); this.bindIfReady(); } /** * Similar to attributeChangedCallback but attribute arguments are already parsed as they are stored in the scope * @param attributeNames * @param oldValue * @param newValue * @param namespace */ protected parsedAttributeChangedCallback(attributeNames: string | string[], oldValue: any, newValue: any, namespace: string | null) { // console.warn('parsedAttributeChangedCallback called', attributeNames, oldValue, newValue, namespace); } /** * Default custom Element method * Invoked when the custom element is moved to a new document. * Note: Not supported on polyfill: https://github.com/webcomponents/custom-elements#known-bugs-and-limitations * @param oldDocument * @param newDocument */ protected adoptedCallback(oldDocument: Document, newDocument: Document) { // console.warn('adoptedCallback called', oldDocument, newDocument); } protected async loadTemplate() { if (this.templateLoaded) { // console.warn('template already loaded'); return null; } if (!this.checkRequiredAttributes()) { // console.warn('not all required attributes are set to load the template'); return null; } // if innerHTML is null this component uses the innerHTML which it already has! return Promise.resolve(this.template()) .then((template) => { if (template !== null) { this.el.innerHTML = template; } return template; }) .then((template) => { this.templateLoaded = true; return template; }) .catch((error) => { console.error(error); this.templateLoaded = false; return error; }); } protected async bind() { if (this.bound) { // console.warn('component already bound'); return; } if (!this.checkRequiredAttributes()) { // console.warn('not all required attributes are set for bind'); return; } await this.beforeBind() .then(() => { if (!this.el) { throw new Error('this.el is not defined'); } this.riba = new Riba(); const viewOptions = this.riba.getViewOptions({ handler: this.eventHandler(this), formatters: { call: this.callFormatterHandler(this), args: this.argsFormatterHandler(this), }, }); this.view = new View(Array.prototype.slice.call(this.el.childNodes) as unknown as NodeListOf<ChildNode>, this.scope, viewOptions); this.scope = this.view.models; this.view.bind(); return this.view; }) .then((view) => { return this.afterBind(); }) .catch((error) => { console.error(error); }); return this.view; } protected async unbind() { if (this.view) { this.view.unbind(); delete this.view; } } protected async build() { if (this.view) { this.view.build(); } } protected async beforeBind(): Promise<any> { // console.warn('beforeBind', this.bound); } protected async afterBind(): Promise<any> { // console.warn('afterBind', this.bound); } private BinderChangedEventHandler(event: Event) { const data = ( event as CustomEvent ).detail; this.attributeChangedCallback(data.name, data.oldValue, data.newValue, data.namespace); } /** * Event handler to listen for attribute change events as a fallback for MutationObserver */ private initAttributeObserver(observedAttributes: string[]) { if ((window as any).customElements) { // use native implementation } else { if ((window as any).MutationObserver) { // use MutationObserver as fallback this.attributeObserverFallback = new MutationObserver((mutations) => { mutations.forEach((mutation) => { if (mutation.type === 'attributes') { if (mutation.attributeName) { // if this attribute is a watched attribute if (observedAttributes.indexOf(mutation.attributeName) !== -1) { const newValue = this.el.getAttribute(mutation.attributeName); this.attributeChangedCallback(mutation.attributeName, mutation.oldValue, newValue, mutation.attributeNamespace); } } } }); }); this.attributeObserverFallback.observe(this.el, { attributes: true, }); } else { // use attribute change event as fallback for MutationObserver this.el.addEventListener('binder-changed', this.BinderChangedEventHandler); // this.$el.on('binder-changed', this.BinderChangedEventHandler); } // call attributeChangedCallback for all already set static attributes const attributes = this.el.attributes; for (const i in attributes) { if (attributes.hasOwnProperty(i)) { const attribute: Node =
attributes[i]; const name = attribute.nodeName; if (observedAttributes.indexOf(name) !== -1) { const newValue = attribute.nodeValue; this.attributeChangedCallback(name, null, newValue, null); } } } } } } <file_sep>import { Component } from '../component'; import { Type } from '../interfaces/type'; export type ComponentWrapper = (...deps: any[]) => Type<Component>; <file_sep>import { Component } from '@ribajs/core'; export declare abstract class ShopifySectionComponent extends Component { static tagName: string; protected abstract scope: any; constructor(element?: HTMLElement); protected initEventListeners(): void; protected abstract template(): string | null; /** * A section has been added or re-rendered. * Re-execute any JavaScript needed for the section to work and display properly (as if the page had just been loaded). */ protected onSectionLoad(event: Event): void; protected onSectionUnload(event: Event): void; protected onSectionSelect(event: Event): void; protected onSectionDeselect(event: Event): void; protected onSectionReorder(event: Event): void; protected onBlockSelect(event: Event): void; protected onBlockDeselect(event: Event): void; } <file_sep>import { coreModule, Riba } from '@ribajs/core'; import { <%= classify(name) %>Module } from './<%= name %>.module'; const riba = new Riba(); const model = {}; // Register modules riba.module.regist(coreModule); riba.module.regist(<%= classify(name) %>Module); const bindToElement = document.getElementById('rv-<%= name %>'); riba.bind(bindToElement, model); <file_sep>/* tslint:disable */ /// <reference types="jquery" /> /*! * jQuery Mobile Events * by <NAME> * https://github.com/benmajor/jQuery-Touch-Events * * Copyright 2011-2017, <NAME> * Licensed under the MIT License: * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * */ export default ($: any) => { if (!$) { console.error('JQuery is required for touch events!'); return; } 'use strict'; $.attrFn = $.attrFn || {}; let touchCapable = ('ontouchstart' in window), settings = { tap_pixel_range: 5, swipe_h_threshold: 50, swipe_v_threshold: 50, taphold_threshold: 750, doubletap_int: 500, shake_threshold: 15, touch_capable: touchCapable, orientation_support: ('orientation' in window && 'onorientationchange' in window), startevent: (touchCapable) ? 'touchstart' : 'mousedown', endevent: (touchCapable) ? 'touchend' : 'mouseup', moveevent: (touchCapable) ? 'touchmove' : 'mousemove', tapevent: (touchCapable) ? 'tap' : 'click', scrollevent: (touchCapable) ? 
'touchmove' : 'scroll', hold_timer: undefined as number | undefined, tap_timer: undefined as number | undefined, }; // Declare touch namespace: $.touch = {}; // Convenience functions: $.isTouchCapable = function () { return settings.touch_capable; }; $.getStartEvent = function () { return settings.startevent; }; $.getEndEvent = function () { return settings.endevent; }; $.getMoveEvent = function () { return settings.moveevent; }; $.getTapEvent = function () { return settings.tapevent; }; $.getScrollEvent = function () { return settings.scrollevent; }; // SETTERS: // Set the X threshold of swipe events: $.touch.setSwipeThresholdX = function (threshold: number) { if (typeof threshold !== 'number') { throw new Error('Threshold parameter must be a type of number'); } settings.swipe_h_threshold = threshold; }; // Set the Y threshold of swipe events: $.touch.setSwipeThresholdY = function (threshold: number) { if (typeof threshold !== 'number') { throw new Error('Threshold parameter must be a type of number'); } settings.swipe_v_threshold = threshold; }; // Set the double tap interval: $.touch.setDoubleTapInt = function (interval: number) { if (typeof interval !== 'number') { throw new Error('Interval parameter must be a type of number'); } settings.doubletap_int = interval; }; // Set the taphold threshold: $.touch.setTapHoldThreshold = function (threshold: number) { if (typeof threshold !== 'number') { throw new Error('Threshold parameter must be a type of number'); } settings.taphold_threshold = threshold; }; // Set the pixel range for tapas: $.touch.setTapRange = function (range: number) { if (typeof range !== 'number') { throw new Error('Ranger parameter must be a type of number'); } settings.tap_pixel_range = range; }; // Add Event shortcuts: $.each(['tapstart', 'tapend', 'tapmove', 'tap', 'singletap', 'doubletap', 'taphold', 'swipe', 'swipeup', 'swiperight', 'swipedown', 'swipeleft', 'swipeend', 'scrollstart', 'scrollend', 'orientationchange', 'tap2', 'taphold2'], function (i: number, name: string) { $.fn[name] = function (fn: any) { return fn ? this.on(name, fn) : this.trigger(name); }; $.attrFn[name] = true; }); // tapstart Event: $.event.special.tapstart = { setup: function () { var thisObject = this, $this = $(thisObject); $this.on(settings.startevent, function tapStartFunc(e: any) { $this.data('callee', tapStartFunc); if (e.which && e.which !== 1) { return false; } var origEvent = e.originalEvent, touchData = { 'position': { 'x': ((settings.touch_capable) ? origEvent.touches[0].pageX : e.pageX), 'y': (settings.touch_capable) ? origEvent.touches[0].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? $this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target }; triggerCustomEvent(thisObject, 'tapstart', e, touchData); return true; }); }, remove: function () { $(this).off(settings.startevent, $(this).data.callee); } }; // tapmove Event: $.event.special.tapmove = { setup: function () { var thisObject = this, $this = $(thisObject); $this.on(settings.moveevent, function tapMoveFunc(e: any) { $this.data('callee', tapMoveFunc); var origEvent = e.originalEvent, touchData = { 'position': { 'x': ((settings.touch_capable) ? 
origEvent.touches[0].pageX : e.pageX), 'y': (settings.touch_capable) ? origEvent.touches[0].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? $this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target }; triggerCustomEvent(thisObject, 'tapmove', e, touchData); return true; }); }, remove: function () { $(this).off(settings.moveevent, $(this).data.callee); } }; // tapend Event: $.event.special.tapend = { setup: function () { var thisObject = this, $this = $(thisObject); $this.on(settings.endevent, function tapEndFunc(e: any) { // Touch event data: $this.data('callee', tapEndFunc); var origEvent = e.originalEvent; var touchData = { 'position': { 'x': (settings.touch_capable) ? origEvent.changedTouches[0].pageX : e.pageX, 'y': (settings.touch_capable) ? origEvent.changedTouches[0].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? $this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target }; triggerCustomEvent(thisObject, 'tapend', e, touchData); return true; }); }, remove: function () { $(this).off(settings.endevent, $(this).data.callee); } }; // taphold Event: $.event.special.taphold = { setup: function () { var thisObject = this, $this = $(thisObject), origTarget: any, start_pos = { x: 0, y: 0 }, end_x = 0, end_y = 0; $this.on(settings.startevent, function tapHoldFunc1(e: any) { if (e.which && e.which !== 1) { return false; } else { $this.data('tapheld', false); origTarget = e.target; var origEvent = e.originalEvent; var start_time = Date.now(); start_pos.x = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageX : e.pageX; start_pos.y = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageY : e.pageY; end_x = start_pos.x; end_y = start_pos.y; // Get the element's threshold: var ele_threshold = ($this.parent().data('threshold')) ? $this.parent().data('threshold') : $this.data('threshold'), threshold = (typeof ele_threshold !== 'undefined' && ele_threshold !== false && parseInt(ele_threshold)) ? parseInt(ele_threshold) : settings.taphold_threshold; settings.hold_timer = window.setTimeout(function () { var diff_x = (start_pos.x - end_x), diff_y = (start_pos.y - end_y); if (e.target == origTarget && ((start_pos.x == end_x && start_pos.y == end_y) || (diff_x >= -(settings.tap_pixel_range) && diff_x <= settings.tap_pixel_range && diff_y >= -(settings.tap_pixel_range) && diff_y <= settings.tap_pixel_range))) { $this.data('tapheld', true); var end_time = Date.now(); var duration = end_time - start_time, touches = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches : [e], touchData = []; for (var i = 0; i < touches.length; i++) { var touch = { 'position': { 'x': (settings.touch_capable) ? origEvent.changedTouches[i].pageX : e.pageX, 'y': (settings.touch_capable) ? origEvent.changedTouches[i].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? 
Math.round(origEvent.changedTouches[i].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[i].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? $this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target, 'duration': duration }; touchData.push(touch); } var evt_name = (touches.length == 2) ? 'taphold2' : 'taphold'; $this.data('callee1', tapHoldFunc1); triggerCustomEvent(thisObject, evt_name, e, touchData); } }, threshold); return true; } }).on(settings.endevent, function tapHoldFunc2() { $this.data('callee2', tapHoldFunc2); $this.data('tapheld', false); window.clearTimeout(settings.hold_timer); }) .on(settings.moveevent, function tapHoldFunc3(e: any) { $this.data('callee3', tapHoldFunc3); end_x = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageX : e.pageX; end_y = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageY : e.pageY; }); }, remove: function () { $(this).off(settings.startevent, $(this).data.callee1).off(settings.endevent, $(this).data.callee2).off(settings.moveevent, $(this).data.callee3); } }; // doubletap Event: $.event.special.doubletap = { setup: function () { var thisObject = this, $this = $(thisObject), action: number, firstTap: any = null, origEvent: any, cooling = false; $this.on(settings.startevent, function doubleTapFunc1(e: any) { if (e.which && e.which !== 1) { return false; } $this.data('doubletapped', false); $this.data('callee1', doubleTapFunc1); origEvent = e.originalEvent; if (!firstTap) { firstTap = { 'position': { 'x': (settings.touch_capable) ? origEvent.touches[0].pageX : e.pageX, 'y': (settings.touch_capable) ? origEvent.touches[0].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? $this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target, 'element': e.originalEvent.srcElement, 'index': $(e.target).index() }; } return true; }).on(settings.endevent, function doubleTapFunc2(e: any) { var now = Date.now(); var lastTouch = $this.data('lastTouch') || now + 1; var delta = now - lastTouch; window.clearTimeout(action); $this.data('callee2', doubleTapFunc2); if (delta < settings.doubletap_int && ($(e.target).index() == firstTap.index) && delta > 100) { $this.data('doubletapped', true); window.clearTimeout(settings.tap_timer); // Now get the current event: var lastTap = { 'position': { 'x': (settings.touch_capable) ? e.originalEvent.changedTouches[0].pageX : e.pageX, 'y': (settings.touch_capable) ? e.originalEvent.changedTouches[0].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? 
$this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target, 'element': e.originalEvent.srcElement, 'index': $(e.target).index() }; var touchData = { 'firstTap': firstTap, 'secondTap': lastTap, 'interval': lastTap.time - firstTap.time }; if (!cooling) { triggerCustomEvent(thisObject, 'doubletap', e, touchData); firstTap = null; } cooling = true; } else { $this.data('lastTouch', now); action = window.setTimeout(function () { firstTap = null; window.clearTimeout(action); }, settings.doubletap_int, [e]); } $this.data('lastTouch', now); }); }, remove: function () { $(this).off(settings.startevent, $(this).data.callee1).off(settings.endevent, $(this).data.callee2); } }; // singletap Event: // This is used in conjuction with doubletap when both events are needed on the same element $.event.special.singletap = { setup: function () { var thisObject = this, $this = $(thisObject), origTarget: any = null, startTime: any = null, start_pos = { x: 0, y: 0 }; $this.on(settings.startevent, function singleTapFunc1(e: any) { if (e.which && e.which !== 1) { return false; } else { startTime = Date.now(); origTarget = e.target; $this.data('callee1', singleTapFunc1); // Get the start x and y position: start_pos.x = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageX : e.pageX; start_pos.y = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageY : e.pageY; return true; } }).on(settings.endevent, function singleTapFunc2(e: any) { $this.data('callee2', singleTapFunc2); if (e.target === origTarget) { // Get the end point: var end_pos_x = (e.originalEvent.changedTouches) ? e.originalEvent.changedTouches[0].pageX : e.pageX, end_pos_y = (e.originalEvent.changedTouches) ? e.originalEvent.changedTouches[0].pageY : e.pageY; // We need to check if it was a taphold: settings.tap_timer = window.setTimeout(function () { var diff_x = (start_pos.x - end_pos_x), diff_y = (start_pos.y - end_pos_y); if (!$this.data('doubletapped') && !$this.data('tapheld') && (((start_pos.x == end_pos_x) && (start_pos.y == end_pos_y)) || (diff_x >= -(settings.tap_pixel_range) && diff_x <= settings.tap_pixel_range && diff_y >= -(settings.tap_pixel_range) && diff_y <= settings.tap_pixel_range))) { var origEvent = e.originalEvent; var touchData = { 'position': { 'x': (settings.touch_capable) ? origEvent.changedTouches[0].pageX : e.pageX, 'y': (settings.touch_capable) ? origEvent.changedTouches[0].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? $this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target }; // Was it a taphold? 
if ((touchData.time - startTime) < settings.taphold_threshold) { triggerCustomEvent(thisObject, 'singletap', e, touchData); } } }, settings.doubletap_int); } }); }, remove: function () { $(this).off(settings.startevent, $(this).data.callee1).off(settings.endevent, $(this).data.callee2); } }; // tap Event: $.event.special.tap = { setup: function () { var thisObject = this, $this = $(thisObject), started = false, origTarget: any = null, start_time: any, start_pos = { x: 0, y: 0 }, touches: any; $this.on(settings.startevent, function tapFunc1(e: any) { $this.data('callee1', tapFunc1); if (e.which && e.which !== 1) { return false; } else { started = true; start_pos.x = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageX : e.pageX; start_pos.y = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageY : e.pageY; start_time = Date.now(); origTarget = e.target; touches = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches : [e]; return true; } }).on(settings.endevent, function tapFunc2(e: any) { $this.data('callee2', tapFunc2); // Only trigger if they've started, and the target matches: var end_x = (e.originalEvent.targetTouches) ? e.originalEvent.changedTouches[0].pageX : e.pageX, end_y = (e.originalEvent.targetTouches) ? e.originalEvent.changedTouches[0].pageY : e.pageY, diff_x = (start_pos.x - end_x), diff_y = (start_pos.y - end_y); if (origTarget == e.target && started && ((Date.now() - start_time) < settings.taphold_threshold) && ((start_pos.x == end_x && start_pos.y == end_y) || (diff_x >= -(settings.tap_pixel_range) && diff_x <= settings.tap_pixel_range && diff_y >= -(settings.tap_pixel_range) && diff_y <= settings.tap_pixel_range))) { var origEvent = e.originalEvent; var touchData = []; for (var i = 0; i < touches.length; i++) { var touch = { 'position': { 'x': (settings.touch_capable) ? origEvent.changedTouches[i].pageX : e.pageX, 'y': (settings.touch_capable) ? origEvent.changedTouches[i].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? Math.round(origEvent.changedTouches[i].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[i].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? $this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target }; touchData.push(touch); } var evt_name = (touches.length == 2) ? 'tap2' : 'tap'; triggerCustomEvent(thisObject, evt_name, e, touchData); } }); }, remove: function () { $(this).off(settings.startevent, $(this).data.callee1).off(settings.endevent, $(this).data.callee2); } }; // swipe Event (also handles swipeup, swiperight, swipedown and swipeleft): $.event.special.swipe = { setup: function () { var thisObject = this, $this = $(thisObject), started = false, hasSwiped = false, originalCoord = { x: 0, y: 0 }, finalCoord = { x: 0, y: 0 }, startEvnt: any; // Screen touched, store the original coordinate function touchStart(e: any) { $this = $(e.currentTarget); $this.data('callee1', touchStart); originalCoord.x = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageX : e.pageX; originalCoord.y = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageY : e.pageY; finalCoord.x = originalCoord.x; finalCoord.y = originalCoord.y; started = true; var origEvent = e.originalEvent; // Read event data into our startEvt: startEvnt = { 'position': { 'x': (settings.touch_capable) ? 
origEvent.touches[0].pageX : e.pageX, 'y': (settings.touch_capable) ? origEvent.touches[0].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? $this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target }; } // Store coordinates as finger is swiping function touchMove(e: any) { $this = $(e.currentTarget); $this.data('callee2', touchMove); finalCoord.x = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageX : e.pageX; finalCoord.y = (e.originalEvent.targetTouches) ? e.originalEvent.targetTouches[0].pageY : e.pageY; var swipedir; // We need to check if the element to which the event was bound contains a data-xthreshold | data-vthreshold: var ele_x_threshold = ($this.parent().data('xthreshold')) ? $this.parent().data('xthreshold') : $this.data('xthreshold'), ele_y_threshold = ($this.parent().data('ythreshold')) ? $this.parent().data('ythreshold') : $this.data('ythreshold'), h_threshold = (typeof ele_x_threshold !== 'undefined' && ele_x_threshold !== false && parseInt(ele_x_threshold)) ? parseInt(ele_x_threshold) : settings.swipe_h_threshold, v_threshold = (typeof ele_y_threshold !== 'undefined' && ele_y_threshold !== false && parseInt(ele_y_threshold)) ? parseInt(ele_y_threshold) : settings.swipe_v_threshold; if (originalCoord.y > finalCoord.y && (originalCoord.y - finalCoord.y > v_threshold)) { swipedir = 'swipeup'; } if (originalCoord.x < finalCoord.x && (finalCoord.x - originalCoord.x > h_threshold)) { swipedir = 'swiperight'; } if (originalCoord.y < finalCoord.y && (finalCoord.y - originalCoord.y > v_threshold)) { swipedir = 'swipedown'; } if (originalCoord.x > finalCoord.x && (originalCoord.x - finalCoord.x > h_threshold)) { swipedir = 'swipeleft'; } if (swipedir != undefined && started) { originalCoord.x = 0; originalCoord.y = 0; finalCoord.x = 0; finalCoord.y = 0; started = false; // Read event data into our endEvnt: var origEvent = e.originalEvent; var endEvnt = { 'position': { 'x': (settings.touch_capable) ? origEvent.touches[0].pageX : e.pageX, 'y': (settings.touch_capable) ? origEvent.touches[0].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? 
$this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target }; // Calculate the swipe amount (normalized): var xAmount = Math.abs(startEvnt.position.x - endEvnt.position.x), yAmount = Math.abs(startEvnt.position.y - endEvnt.position.y); var touchData = { 'startEvnt': startEvnt, 'endEvnt': endEvnt, 'direction': swipedir.replace('swipe', ''), 'xAmount': xAmount, 'yAmount': yAmount, 'duration': endEvnt.time - startEvnt.time }; hasSwiped = true; $this.trigger('swipe', touchData).trigger(swipedir, touchData); } } function touchEnd(e: any) { $this = $(e.currentTarget); var swipedir = ""; $this.data('callee3', touchEnd); if (hasSwiped) { // We need to check if the element to which the event was bound contains a data-xthreshold | data-vthreshold: var ele_x_threshold = $this.data('xthreshold'), ele_y_threshold = $this.data('ythreshold'), h_threshold = (typeof ele_x_threshold !== 'undefined' && ele_x_threshold !== false && parseInt(ele_x_threshold)) ? parseInt(ele_x_threshold) : settings.swipe_h_threshold, v_threshold = (typeof ele_y_threshold !== 'undefined' && ele_y_threshold !== false && parseInt(ele_y_threshold)) ? parseInt(ele_y_threshold) : settings.swipe_v_threshold; var origEvent = e.originalEvent; var endEvnt = { 'position': { 'x': (settings.touch_capable) ? origEvent.changedTouches[0].pageX : e.pageX, 'y': (settings.touch_capable) ? origEvent.changedTouches[0].pageY : e.pageY }, 'offset': { 'x': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageX - ($this.offset() ? $this.offset().left : 0)) : Math.round(e.pageX - ($this.offset() ? $this.offset().left : 0)), 'y': (settings.touch_capable) ? Math.round(origEvent.changedTouches[0].pageY - ($this.offset() ? $this.offset().top : 0)) : Math.round(e.pageY - ($this.offset() ? $this.offset().top : 0)) }, 'time': Date.now(), 'target': e.target }; // Read event data into our endEvnt: if (startEvnt.position.y > endEvnt.position.y && (startEvnt.position.y - endEvnt.position.y > v_threshold)) { swipedir = 'swipeup'; } if (startEvnt.position.x < endEvnt.position.x && (endEvnt.position.x - startEvnt.position.x > h_threshold)) { swipedir = 'swiperight'; } if (startEvnt.position.y < endEvnt.position.y && (endEvnt.position.y - startEvnt.position.y > v_threshold)) { swipedir = 'swipedown'; } if (startEvnt.position.x > endEvnt.position.x && (startEvnt.position.x - endEvnt.position.x > h_threshold)) { swipedir = 'swipeleft'; } // Calculate the swipe amount (normalized): var xAmount = Math.abs(startEvnt.position.x - endEvnt.position.x), yAmount = Math.abs(startEvnt.position.y - endEvnt.position.y); var touchData = { 'startEvnt': startEvnt, 'endEvnt': endEvnt, 'direction': swipedir.replace('swipe', ''), 'xAmount': xAmount, 'yAmount': yAmount, 'duration': endEvnt.time - startEvnt.time }; $this.trigger('swipeend', touchData); } started = false; hasSwiped = false; } $this.on(settings.startevent, touchStart); $this.on(settings.moveevent, touchMove); $this.on(settings.endevent, touchEnd); }, remove: function () { $(this).off(settings.startevent, $(this).data.callee1).off(settings.moveevent, $(this).data.callee2).off(settings.endevent, $(this).data.callee3); } }; // scrollstart Event (also handles scrollend): $.event.special.scrollstart = { setup: function () { var thisObject = this, $this = $(thisObject), scrolling: any, timer: any; function trigger(event: Event, state: boolean) { scrolling = state; triggerCustomEvent(thisObject, scrolling ? 
'scrollstart' : 'scrollend', event); } // iPhone triggers scroll after a small delay; use touchmove instead $this.on(settings.scrollevent, function scrollFunc(event: any) { $this.data('callee', scrollFunc); if (!scrolling) { trigger(event, true); } clearTimeout(timer); timer = setTimeout(function () { trigger(event, false); }, 50); }); }, remove: function () { $(this).off(settings.scrollevent, $(this).data.callee); } }; // This is the orientation change (largely borrowed from jQuery Mobile): var win = $(window), get_orientation: any, last_orientation: any, initial_orientation_is_landscape, initial_orientation_is_default, portrait_map: any = { '0': true, '180': true }; if (settings.orientation_support) { var ww = window.innerWidth || win.width(), wh = window.innerHeight || win.height(), landscape_threshold = 50; initial_orientation_is_landscape = ww > wh && (ww - wh) > landscape_threshold; initial_orientation_is_default = portrait_map[window.orientation]; if ((initial_orientation_is_landscape && initial_orientation_is_default) || (!initial_orientation_is_landscape && !initial_orientation_is_default)) { portrait_map = { '-90': true, '90': true }; } } $.event.special.orientationchange = { setup: function () { // If the event is supported natively, return false so that jQuery // will on to the event using DOM methods. if (settings.orientation_support) { return false; } // Get the current orientation to avoid initial double-triggering. last_orientation = get_orientation(); win.on('throttledresize', handler); return true; }, teardown: function () { if (settings.orientation_support) { return false; } win.off('throttledresize', handler); return true; }, add: function (handleObj: any) { // Save a reference to the bound event handler. var old_handler = handleObj.handler; handleObj.handler = function (event: any) { event.orientation = get_orientation(); return old_handler.apply(this, arguments); }; } }; // If the event is not supported natively, this handler will be bound to // the window resize event to simulate the orientationchange event. function handler() { // Get the current orientation. var orientation = get_orientation(); if (orientation !== last_orientation) { // The orientation has changed, so trigger the orientationchange event. last_orientation = orientation; win.trigger("orientationchange"); } } $.event.special.orientationchange.orientation = get_orientation = function () { var isPortrait: any = true, elem = document.documentElement; if (settings.orientation_support) { isPortrait = portrait_map[window.orientation]; } else { isPortrait = elem && elem.clientWidth / elem.clientHeight < 1.1; } return isPortrait ? 
'portrait' : 'landscape'; }; // throttle Handler: $.event.special.throttledresize = { setup: function () { $(this).on('resize', throttle_handler); }, teardown: function () { $(this).off('resize', throttle_handler); } }; var throttle = 250, throttle_handler = function (this: any) { curr = Date.now(); diff = curr - lastCall; if (diff >= throttle) { lastCall = curr; $(this).trigger('throttledresize'); } else { if (heldCall) { window.clearTimeout(heldCall); } // Promise a held call will still execute heldCall = window.setTimeout(handler, throttle - diff); } }, lastCall = 0, heldCall: any, curr, diff; // Trigger a custom event: function triggerCustomEvent(obj: any, eventType: string, event: any, touchData?: any) { var originalType = event.type; event.type = eventType; $.event.dispatch.call(obj, event, touchData); event.type = originalType; } // Correctly on anything we've overloaded: $.each({ scrollend: 'scrollstart', swipeup: 'swipe', swiperight: 'swipe', swipedown: 'swipe', swipeleft: 'swipe', swipeend: 'swipe', tap2: 'tap', taphold2: 'taphold' }, function (e: any, srcE: any) { $.event.special[e] = { setup: function () { $(this).on(srcE, $.noop); } }; }); return $; } <file_sep>import { HttpService } from '@ribajs/core'; import { Pjax } from '.'; /** * Prefetch */ class Prefetch { /** singleton instance */ private static instance: Prefetch; /** * Class name used to ignore prefetch on links * * @default */ public ignoreClassLink = 'no-barba-prefetch'; /** * Creates a singleton instance of Prefetch. */ constructor() { if (Prefetch.instance) { return Prefetch.instance; } Prefetch.instance = this; } /** * Init the event listener on mouseover and touchstart * for the prefetch * */ public init(autobindLinks = false) { if (!window.history.pushState) { return false; } // We do this with rv-route if (autobindLinks) { document.body.addEventListener('mouseover', this.onLinkEnter.bind(this)); document.body.addEventListener('touchstart', this.onLinkEnter.bind(this)); } }
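// Usage sketch (illustrative addition):
// const prefetch = new Prefetch(); // always returns the singleton instance
// prefetch.init(); // with autobindLinks left false, prefetching is triggered via the rv-route binder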
/** * Callback for the mouseover/touchstart, please use the rv-route binder instead * */ public onLinkEnter(evt: Event, url?: string, el?: HTMLAnchorElement) { if (!url) { if (!el && evt) { el = ((evt as Event).target as HTMLAnchorElement) || (evt as any).currentTarget; } if (!el) { throw new Error('HTML Element not set'); } while (el && !Pjax.getHref(el)) { el = (el.parentNode as HTMLAnchorElement); // TODO testme } if (!el || el.classList.contains(this.ignoreClassLink)) { return; } url = Pjax.getHref(el); } if (!url) { console.warn(`Url is not defined, you can't cache the link without the url. Please make sure your element has the href attribute or pass the url directly to this function.`); } // Check if the link is eligible for Pjax if (url && Pjax.preventCheck(evt, el, url) && !Pjax.cache.get(url)) { const xhr = HttpService.get(url, undefined, 'html'); Pjax.cache.set(url, xhr); } else { if (url) { // if (!Pjax.preventCheck(evt, el, url)) { // console.warn('preventCheck failed', Pjax.preventCheck(evt, el, url)); // } } } } } export { Prefetch }; <file_sep>export declare function lowerBound(array: any, value: any, comp: any): number; export declare class PriorityQueue { private _queue; constructor(); enqueue(run: any, options: any): void; dequeue(): any; get size(): number; } <file_sep>// IMPORTANT do not use .babelrc: https://github.com/babel/babel/issues/8711#issuecomment-421918023 module.exports = { "presets": [ "@babel/typescript", [ "@babel/preset-env", { "targets": { "ie": "11", "safari": "10", "chrome": "52", "edge": "16", "firefox": "59" } } ] ], "plugins": [ [ "@babel/plugin-transform-runtime", { "corejs": 2 } ], "@babel/plugin-proposal-class-properties", "@babel/plugin-proposal-object-rest-spread", "array-includes" ] };<file_sep>import { Component, EventDispatcher, } from '@ribajs/core'; type State = 'undefined' | 'overlay-left' | 'overlay-right' | 'side-left' | 'side-right' | 'hidden'; interface Scope { targetId?: string; toggle: Bs4ToggleButtonComponent['toggle']; state: State; isClosed: boolean; } export class Bs4ToggleButtonComponent extends Component { public static tagName: string = 'bs4-toggle-button'; protected autobind = true; static get observedAttributes() { return ['target-id']; } protected eventDispatcher?: EventDispatcher; protected scope: Scope = { targetId: undefined, toggle: this.toggle, state: 'undefined', isClosed: false, }; constructor(element?: HTMLElement) { super(element); // Bind once so that `on` and `off` receive the identical listener reference this.onToggledEvent = this.onToggledEvent.bind(this); this.init(Bs4ToggleButtonComponent.observedAttributes); } public toggle() { if (this.eventDispatcher) { this.eventDispatcher.trigger('toggle', this.scope.targetId); } } protected onToggledEvent(state: State) { this.scope.state = state; this.scope.isClosed = state === 'hidden'; } protected initEventDispatcher(id: string) { if (this.eventDispatcher) { this.eventDispatcher.off('toggled', this.onToggledEvent); } this.eventDispatcher = new EventDispatcher('bs4-toggle-button:' + id); this.eventDispatcher.on('toggled', this.onToggledEvent); } protected requiredAttributes() { return ['targetId']; } protected attributeChangedCallback(attributeName: string, oldValue: any, newValue: any, namespace: string | null) { super.attributeChangedCallback(attributeName, oldValue, newValue, namespace); } protected parsedAttributeChangedCallback(attributeName: string, oldValue: any, newValue: any, namespace: string | null) { super.parsedAttributeChangedCallback(attributeName, oldValue, newValue, namespace); if (attributeName === 'targetId' && newValue) { this.initEventDispatcher(newValue); } } // Cleanup when the element is removed from the DOM (destructor-like) protected disconnectedCallback() { super.disconnectedCallback(); if (this.eventDispatcher) { this.eventDispatcher.off('toggled', this.onToggledEvent); } } protected template() { if (!this.el.hasChildNodes()) { console.warn('No child elements found, this component has no template so you need to define your own as a child of this component.'); } return null; } }
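// Usage sketch (assumed markup; the child template wires up `toggle` itself):
// <bs4-toggle-button target-id="main-sidebar">
//   <button type="button" rv-on-click="toggle">Menu</button>
// </bs4-toggle-button>
// `toggle` then triggers on the 'bs4-toggle-button:main-sidebar' event channel.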
<file_sep>"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); // Port of lower_bound from http://en.cppreference.com/w/cpp/algorithm/lower_bound
// Used to compute insertion index to keep queue sorted after insertion
function lowerBound(array, value, comp) { let first = 0; let count = array.length; while (count > 0) { const step = (count / 2) | 0; let it = first + step; if (comp(array[it], value) <= 0) { first = ++it; count -= step + 1; } else { count = step; } } return first; } exports.lowerBound = lowerBound; class PriorityQueue { constructor() { this._queue = []; } enqueue(run, options) { options = Object.assign({ priority: 0, }, options); const element = { priority: options.priority, run }; if (this.size && this._queue[this.size - 1].priority >= options.priority) { this._queue.push(element); return; } const index = lowerBound(this._queue, element, (a, b) => b.priority - a.priority); this._queue.splice(index, 0, element); } dequeue() { return this._queue.shift().run; } get size() { return this._queue.length; } } exports.PriorityQueue = PriorityQueue; <file_sep>export declare interface ShopifyImage { alt: string; created_at: Date; height: number; id: number; src: string; updated_at: Date; width: number; } <file_sep>"use strict"; // https://github.com/sindresorhus/p-queue Object.defineProperty(exports, "__esModule", { value: true }); const priority_queue_service_1 = require("./priority-queue.service"); class PQueue { constructor(options) { this.queue = new priority_queue_service_1.PriorityQueue(); // eslint-disable-line new-cap this._intervalCount = 0; this._intervalEnd = 0; this._pendingCount = 0; options = Object.assign({ carryoverConcurrencyCount: false, intervalCap: Infinity, interval: 0, concurrency: Infinity, autoStart: true, queueClass: priority_queue_service_1.PriorityQueue, }, options); if (!(typeof options.concurrency === 'number' && options.concurrency >= 1)) { throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${options.concurrency}\` (${typeof options.concurrency})`); } if (!(typeof options.intervalCap === 'number' && options.intervalCap >= 1)) { throw new TypeError(`Expected \`intervalCap\` to be a number from 1 and up, got \`${options.intervalCap}\` (${typeof options.intervalCap})`); } if (!(typeof options.interval === 'number' && Number.isFinite(options.interval) && options.interval >= 0)) { throw new TypeError(`Expected \`interval\` to be a finite number >= 0, got \`${options.interval}\` (${typeof options.interval})`); } this._carryoverConcurrencyCount = options.carryoverConcurrencyCount; this._isIntervalIgnored = options.intervalCap === Infinity || options.interval === 0; this._intervalCount = 0; this._intervalCap = options.intervalCap; this._interval = options.interval; this._intervalId = undefined; this._intervalEnd = 0; this._timeoutId = null; this.queue = new options.queueClass(); // eslint-disable-line new-cap this._pendingCount = 0; this._concurrency = options.concurrency; this._isPaused = options.autoStart === false; this._resolveEmpty = () => { }; // tslint:disable-line this._resolveIdle = () => { }; // tslint:disable-line } add(fn, options) { return new Promise((resolve, reject) => { const run = () => { this._pendingCount++; this._intervalCount++; try { Promise.resolve(fn()).then((val) => { resolve(val); this._next(); }, (err) => { reject(err); this._next(); }); } catch (err) { reject(err); this._next(); } }; this.queue.enqueue(run, options); this._tryToStartAnother(); }); } addAll(fns, options) { return Promise.all(fns.map((fn) => this.add(fn, options))); } start() { if (!this._isPaused) { return; } this._isPaused = false; while (this._tryToStartAnother()) { } // tslint:disable-line } pause() { this._isPaused = true; } clear()
{ this.queue = new priority_queue_service_1.PriorityQueue(); } onEmpty() { // Instantly resolve if the queue is empty if (this.queue.size === 0) { return Promise.resolve(); } return new Promise((resolve) => { const existingResolve = this._resolveEmpty; this._resolveEmpty = () => { existingResolve(); resolve(); }; }); } onIdle() { // Instantly resolve if none pending and if nothing else is queued if (this._pendingCount === 0 && this.queue.size === 0) { return Promise.resolve(); } return new Promise((resolve) => { const existingResolve = this._resolveIdle; this._resolveIdle = () => { existingResolve(); resolve(); }; }); } // private _resolveEmpty = () => { }; // tslint:disable-line // private _resolveIdle = () => { }; // tslint:disable-line get _doesIntervalAllowAnother() { return this._isIntervalIgnored || this._intervalCount < this._intervalCap; } get _doesConcurrentAllowAnother() { return this._pendingCount < this._concurrency; } _next() { this._pendingCount--; this._tryToStartAnother(); } _resolvePromises() { this._resolveEmpty(); this._resolveEmpty = () => { }; // tslint:disable-line if (this._pendingCount === 0) { this._resolveIdle(); this._resolveIdle = () => { }; // tslint:disable-line } } _onResumeInterval() { this._onInterval(); this._initializeIntervalIfNeeded(); this._timeoutId = null; } _intervalPaused() { const now = Date.now(); if (typeof (this._intervalId) === undefined) { const delay = this._intervalEnd - now; if (delay < 0) { // Act as the interval was done // We don't need to resume it here, // because it'll be resumed on line 160 this._intervalCount = (this._carryoverConcurrencyCount) ? this._pendingCount : 0; } else { // Act as the interval is pending if (this._timeoutId === null) { this._timeoutId = setTimeout(() => this._onResumeInterval(), delay); } return true; } } return false; } _tryToStartAnother() { if (this.queue.size === 0) { // We can clear the interval ("pause") // because we can redo it later ("resume") clearInterval(this._intervalId); this._intervalId = undefined; this._resolvePromises(); return false; } if (!this._isPaused) { const canInitializeInterval = !this._intervalPaused(); if (this._doesIntervalAllowAnother && this._doesConcurrentAllowAnother) { this.queue.dequeue()(); if (canInitializeInterval) { this._initializeIntervalIfNeeded(); } return true; } } return false; } _initializeIntervalIfNeeded() { if (this._isIntervalIgnored || this._intervalId !== null) { return; } this._intervalId = setInterval(() => this._onInterval(), this._interval); this._intervalEnd = Date.now() + this._interval; } _onInterval() { if (this._intervalCount === 0 && this._pendingCount === 0) { clearInterval(this._intervalId); this._intervalId = undefined; } this._intervalCount = (this._carryoverConcurrencyCount) ? this._pendingCount : 0; while (this._tryToStartAnother()) { } // tslint:disable-line } get size() { return this.queue.size; } get pending() { return this._pendingCount; } get isPaused() { return this._isPaused; } } exports.PQueue = PQueue; <file_sep>import { ShopifyProduct, ShopifyProductVariant } from '../interfaces'; export interface ProductsCache { [handle: string]: ShopifyProduct; } export declare class ShopifyProductService { /** * Get product object by handle * @param handle product handle */ static get(handle: string): Promise<ShopifyProduct>; /** * Check if the option values fits to the current variant. 
<file_sep>import { ShopifyProduct, ShopifyProductVariant, ShopifyProductVariantOption } from '../interfaces';
export interface ProductsCache {
    [handle: string]: ShopifyProduct;
}
export declare class ShopifyProductService {
    /**
     * Get product object by handle
     * @param handle product handle
     */
    static get(handle: string): Promise<ShopifyProduct>;
    /**
     * Check if the option values fit the current variant.
     * @param variant
     * @param optionValues
     * @return Returns true if the option values fit the variant
     */
    static fitsVariantOptions(variant: ShopifyProductVariant, optionValues: string[]): boolean;
    /**
     * Get product variant of (selected) option values
     * @param optionValues (selected) option values
     */
    static getVariantOfOptions(product: ShopifyProduct, optionValues: string[]): ShopifyProductVariant | null;
    /**
     * Get variant object by variant id
     * @param id Variant id
     */
    static getVariant(product: ShopifyProduct, id: number): ShopifyProductVariant | null;
    /**
     * Get product option by name
     * @param product product which holds the options
     * @param name option name
     */
    static getOption(product: ShopifyProduct, name: string): ShopifyProductVariantOption | null;
    /**
     * Prepare the product: remove the protocol from featured_image, lowercase the option names
     * @param product product object
     */
    static prepair(product: ShopifyProduct): ShopifyProduct;
    protected static cache: ProductsCache;
}
<file_sep>import { ShopifyImage } from './image';
export declare interface ShopifyProductImage extends ShopifyImage {
    position: number;
    product_id: number;
    variant_ids: number[];
}
export declare enum ShopifyProductWeightUnit {
    GRAMS = "g",
    KILOGRAMS = "kg",
    OUNCES = "oz",
    POUNDS = "lb"
}
export declare interface ShopifyProductVariant {
    available: boolean;
    barcode: string;
    compare_at_price: number | null;
    featured_image: ShopifyProductImage | null;
    created_at: Date;
    fulfillment_service?: string;
    grams?: number;
    id: number;
    inventory_management: string | 'shopify';
    inventory_policy?: string;
    inventory_quantity?: number;
    option1: string | null;
    option2: string | null;
    option3: string | null;
    options: string[];
    price: number;
    public_title: string;
    requires_shipping: boolean;
    sku: string;
    taxable: boolean;
    title: string;
    updated_at: Date;
    weight: number;
    weight_unit?: ShopifyProductWeightUnit;
}
export declare interface ShopifyProductVariantOption {
    name: string;
    position: number;
    values: string[];
}
export declare interface ShopifyProduct {
    available: boolean;
    compare_at_price: number | null;
    compare_at_price_max: number;
    compare_at_price_min: number;
    compare_at_price_varies: boolean;
    created_at: Date;
    description: string;
    featured_image: string;
    handle: string;
    id: number;
    images: string[];
    options: ShopifyProductVariantOption[];
    price: number;
    price_max: number;
    price_min: number;
    price_varies: boolean;
    published_at: Date;
    tags: string[];
    title: string;
    type: string;
    updated_at?: Date;
    url: string;
    variants: ShopifyProductVariant[];
    vendor: string;
}
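// --- Editor's illustrative sketch (not part of the original repo) ---
// Resolving a variant from selected option values with the service declared
// above; 'my-product' and the option values are made-up placeholder data.
// Runnable if uncommented:
//
// import { ShopifyProductService } from './product.service';
// ShopifyProductService.get('my-product').then((product) => {
//     const variant = ShopifyProductService.getVariantOfOptions(product, ['Red', 'XL']);
//     if (variant) {
//         console.log(variant.id, variant.price);
//     }
// });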
<file_sep>import { Component, EventDispatcher, Binder } from '@ribajs/core';
import { CollapseService } from '../../services/collapse.service';

export class Bs4NavbarComponent extends Component {

    public static tagName: string = 'bs4-navbar';

    protected scope: any = {
        toggle: this.toggle,
        show: this.show,
        hide: this.hide,
        isCollapsed: true,
        collapseSelector: '.navbar-collapse',
    };

    protected collapse?: NodeListOf<Element>;
    protected collapseService?: CollapseService;
    protected router: EventDispatcher;

    // Bound once so add/removeEventListener (and router on/off) see the same
    // function reference; the original passed fresh `.bind(this)` results to
    // the remove calls, so the listeners were never actually unregistered.
    protected onStateChangeBound = this.onStateChange.bind(this);
    protected onNewPageReadyBound = this.onNewPageReady.bind(this);

    static get observedAttributes() {
        return ['collapse-selector'];
    }

    constructor(element?: HTMLElement) {
        super(element);
        this.router = new EventDispatcher('main');
        this.router.on('newPageReady', this.onNewPageReadyBound);
        this.setCollapseElement();
        this.onStateChange();
        this.init(Bs4NavbarComponent.observedAttributes);
    }

    public toggle(context?: Binder<any>, event?: Event) {
        if (this.collapseService) {
            this.collapseService.toggle();
        }
        if (event) {
            event.preventDefault();
            event.stopPropagation();
        }
    }

    public show(context?: Binder<any>, event?: Event) {
        if (this.collapseService) {
            this.collapseService.show();
        }
        if (event) {
            event.preventDefault();
            event.stopPropagation();
        }
    }

    public hide(context?: Binder<any>, event?: Event) {
        if (this.collapseService) {
            this.collapseService.hide();
        }
        if (event) {
            event.preventDefault();
            event.stopPropagation();
        }
    }

    protected setCollapseElement() {
        // Remove old event listeners
        this.removeCollapseEventListeners();
        this.collapse = this.el.querySelectorAll(this.scope.collapseSelector) || undefined;
        // Add new event listeners
        this.addCollapseEventListeners();
        if (this.collapse) {
            this.collapseService = new CollapseService(this.collapse);
        }
    }

    protected addCollapseEventListeners() {
        if (this.collapse) {
            this.collapse.forEach((collapse) => {
                collapse.addEventListener(CollapseService.EVENT.SHOWN, this.onStateChangeBound);
                collapse.addEventListener(CollapseService.EVENT.HIDDEN, this.onStateChangeBound);
            });
        }
    }

    protected removeCollapseEventListeners() {
        if (this.collapse) {
            this.collapse.forEach((collapse) => {
                collapse.removeEventListener(CollapseService.EVENT.SHOWN, this.onStateChangeBound);
                collapse.removeEventListener(CollapseService.EVENT.HIDDEN, this.onStateChangeBound);
            });
        }
    }

    protected disconnectedCallback() {
        super.disconnectedCallback();
        this.removeCollapseEventListeners();
        this.router.off('newPageReady', this.onNewPageReadyBound);
    }

    protected onStateChange() {
        if (this.collapseService) {
            this.scope.isCollapsed = this.collapseService.isCollapsed();
        }
        if (this.scope.isCollapsed) {
            this.el.classList.add(CollapseService.CLASSNAME.COLLAPSED);
            this.el.setAttribute('aria-expanded', 'false');
        } else {
            this.el.classList.remove(CollapseService.CLASSNAME.COLLAPSED);
            this.el.setAttribute('aria-expanded', 'true');
        }
    }

    protected onNewPageReady() {
        if (this.collapseService) {
            this.collapseService.hide();
        }
    }

    protected parsedAttributeChangedCallback(attributeName: string | string[], oldValue: any, newValue: any, namespace: string | null) {
        super.parsedAttributeChangedCallback(attributeName, oldValue, newValue, namespace);
        if (attributeName === 'collapseSelector') {
            this.setCollapseElement();
        }
    }

    protected template() {
        return null;
    }
}
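// --- Editor's illustrative sketch (not part of the original repo) ---
// Typical markup for the component above: the collapse target is found via the
// observed `collapse-selector` attribute (defaulting to '.navbar-collapse'),
// and `toggle` is exposed on the component scope. The binder attribute prefix
// shown here is an assumption, not confirmed by this dump:
//
// <bs4-navbar collapse-selector=".navbar-collapse">
//   <button rv-on-click="toggle">Menu</button>
//   <div class="navbar-collapse">...</div>
// </bs4-navbar>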
<file_sep>import { Binder } from '../interfaces';

export interface BinderAttributeChangedEvent {
    detail: {
        name: string;
        oldValue: string;
        newValue: string;
        namespace: null,
    };
}

/**
 * Event handler to listen for the publish binder event for two-way binding in web components
 */
const publishBinderChangeEventHandler = function(this: any, event: Event) {
    const data = (event as CustomEvent).detail;
    const oldValue = this.observer.value();
    if (oldValue !== data.newValue) {
        // TODO this overwrites also the _rv counter
        this.observer.setValue(data.newValue);
    }
};

/**
 * Sets the attribute on the element. If no binder above is matched it will fall
 * back to using this binder.
 */
export const starBinder: Binder<string> = {
    name: '*',
    bind(el) {
        // Listen for changes from the web component; keep the bound handler in
        // customData so unbind can remove the exact same reference
        this.customData = publishBinderChangeEventHandler.bind(this);
        el.addEventListener('publish-binder-change:' + this.type, this.customData);
    },
    unbind(el: HTMLElement) {
        // Fix: the original removed a freshly bound function under a different
        // event name, so the listener registered in bind() was never removed
        el.removeEventListener('publish-binder-change:' + this.type, this.customData);
        delete this.customData;
    },
    routine(el: HTMLElement, newValue: string) {
        if (!this.type) {
            throw new Error('Can\'t set attribute of ' + this.type);
        }
        const oldValue = el.getAttribute(this.type);
        if (newValue != null) {
            if (oldValue !== newValue) {
                el.setAttribute(this.type, newValue);
            }
        } else {
            el.removeAttribute(this.type);
        }
        if (oldValue !== newValue) {
            // Fallback for MutationObserver and attributeChangedCallback: trigger an event so web
            // components can catch it and call their attributeChangedCallback method
            el.dispatchEvent(new CustomEvent('binder-changed', { detail: {
                name: this.type,
                oldValue,
                newValue,
                namespace: null, // TODO
            }} as BinderAttributeChangedEvent));
        }
    },
};
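// --- Editor's illustrative sketch (not part of the original repo) ---
// How a web component could push a value back through the fallback binder's
// two-way channel: dispatch 'publish-binder-change:<attribute>' with the new
// value in `detail`. The element and attribute names here are hypothetical;
// only the event name pattern comes from the binder above.
//
// const el = document.querySelector('my-component') as HTMLElement;
// el.dispatchEvent(new CustomEvent('publish-binder-change:value', {
//     detail: { newValue: 'updated-from-component' },
// }));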
8ee7cedec64df9e24e039504be861397e397fce7
[ "JavaScript", "TypeScript" ]
71
TypeScript
pwFoo/riba-tinybind-fork
677d43fb7ac49c01ddad7c7131cda80134be4576
bbf3ab5d063e3bd8d4526f6ca9f09951148fe3ed
refs/heads/master
<repo_name>xiaojue52/Android<file_sep>/Android1219/src/com/example/android1219/MainActivity.java
package com.example.android1219;

import com.example.android1219.provider.TestProviderActivity;
import com.example.android1219.service.SecondActivity;
import com.example.android1219.service.ThirdActivity;
import com.example.android1219.socket.SocketActivity;
import com.example.android1219.sql.SqlActivity;
import com.example.android1219.thread.TestThreadActivity;
import com.example.android1219.httpclient.TestHttpClientActivity;
import com.example.android1219.listView.TestListView;

import android.os.Bundle;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.Toast;

public class MainActivity extends Activity {

    private Context context;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        context = this;
        setContentView(R.layout.activity_main);
        Button btn = (Button) this.findViewById(R.id.third);
        Button four = (Button) this.findViewById(R.id.four);
        Button five = (Button) this.findViewById(R.id.five);
        Button six = (Button) this.findViewById(R.id.six);
        Button seven = (Button) this.findViewById(R.id.seven);
        Button eight = (Button) this.findViewById(R.id.eight);
        Button nine = (Button) this.findViewById(R.id.nine);
        nine.setOnClickListener(TestHttpClient);
        four.setOnClickListener(TestContentProvider);
        five.setOnClickListener(TestSql);
        six.setOnClickListener(TestListView);
        seven.setOnClickListener(TestThread);
        eight.setOnClickListener(TestSocket);
        btn.setOnClickListener(BtnOnclick);
    }

    // Renamed from the original misspelling "TeslHttpClient"
    private OnClickListener TestHttpClient = new OnClickListener() {
        @Override
        public void onClick(View v) {
            // TODO Auto-generated method stub
            Intent it = new Intent(context, TestHttpClientActivity.class);
            startActivity(it);
        }
    };

    private OnClickListener TestContentProvider = new OnClickListener() {
        @Override
        public void onClick(View v) {
            // TODO Auto-generated method stub
            Intent it = new Intent(context, TestProviderActivity.class);
            startActivity(it);
        }
    };

    private OnClickListener TestSql = new OnClickListener() {
        @Override
        public void onClick(View v) {
            // TODO Auto-generated method stub
            Intent it = new Intent(context, SqlActivity.class);
            startActivity(it);
        }
    };

    private OnClickListener TestListView = new OnClickListener() {
        @Override
        public void onClick(View v) {
            // TODO Auto-generated method stub
            Intent it = new Intent(context, TestListView.class);
            startActivity(it);
        }
    };

    private OnClickListener TestThread = new OnClickListener() {
        @Override
        public void onClick(View v) {
            // TODO Auto-generated method stub
            Intent it = new Intent(context, TestThreadActivity.class);
            startActivity(it);
        }
    };

    private OnClickListener TestSocket = new OnClickListener() {
        @Override
        public void onClick(View v) {
            // TODO Auto-generated method stub
            Intent it = new Intent(context, SocketActivity.class);
            startActivity(it);
        }
    };

    private Button.OnClickListener BtnOnclick = new Button.OnClickListener() {
        @Override
        public void onClick(View v) {
            // TODO Auto-generated method stub
            Intent it = new Intent(context, ThirdActivity.class);
            startActivityForResult(it, 1);
        }
    };

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == 1) {
            if (resultCode == RESULT_OK) {
                String result = data.getStringExtra("ret");
                Toast.makeText(context, result, Toast.LENGTH_LONG).show();
            }
            if (resultCode == RESULT_CANCELED) {
                //Write your code if there's no result
            }
        }
        if (requestCode == 2) {
            if (resultCode == RESULT_OK) {
                String ret = data.getStringExtra("ret");
                Toast.makeText(context, ret, Toast.LENGTH_LONG).show();
            }
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
    }

    public void onClick(View view) {
        Intent it = new Intent(this, SecondActivity.class);
        startActivityForResult(it, 2);
    }
}
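// --- Editor's illustrative sketch (not part of the original repo) ---
// What MainActivity's onActivityResult (requestCode 2) expects the callee to
// do. SecondActivity's real body is not included in this dump, so this is
// only an assumed counterpart inside that activity:
//
// Intent result = new Intent();
// result.putExtra("ret", "hello from SecondActivity");
// setResult(RESULT_OK, result);
// finish();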
<file_sep>/20140304/JYPMIS/src/com/jypmis/vo/SbgxVO.java
package com.jypmis.vo;

public class SbgxVO {
    public String bgxid;
    public String bgxmc;
    public String glbm;
    public String zt;
}
<file_sep>/animationList/src/com/loading/LoadInterface.java
package com.loading;

public interface LoadInterface {
    public void onFinished(String ret, boolean neterror);
}
<file_sep>/DataServer/src/com/project/service/CollectCourseService.java
package com.project.service;

import java.util.List;

import com.project.po.CollectCourse;

public interface CollectCourseService {

    public void saveCollectCourse(CollectCourse arg0);

    public void deleteCollectCourse(CollectCourse arg0);

    public void updateCollectCourse(CollectCourse arg0);

    public CollectCourse getCollectCourseById(Integer arg0);

    public List<CollectCourse> getCollectCoursesByHql(String hql);

    @SuppressWarnings("rawtypes")
    public List getList(String hql);
}
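// --- Editor's illustrative sketch (not part of the original repo) ---
// Typical call against the service interface above, assuming a Spring-wired
// implementation named `collectCourseService`; the HQL string and the
// CollectCourse property it filters on are made up for illustration:
//
// List<CollectCourse> mine =
//         collectCourseService.getCollectCoursesByHql(
//                 "from CollectCourse c where c.userId = 42");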
<file_sep>/JYPMIS0310/PMIS/src/com/jiyuan/pmis/reports/MyReportsActivity.java
package com.jiyuan.pmis.reports;

import java.util.ArrayList;
import java.util.List;

import org.ksoap2.serialization.PropertyInfo;

import com.google.gson.Gson;
import com.jiyuan.pmis.MainApplication;
import com.jiyuan.pmis.R;
import com.jiyuan.pmis.adapter.SimpleBaseExpandableListAdapter;
import com.jiyuan.pmis.adapter.SimpleSpinnerAdapter;
import com.jiyuan.pmis.constant.Constant;
import com.jiyuan.pmis.exception.PmisException;
import com.jiyuan.pmis.project.SelectProjectsActivity;
import com.jiyuan.pmis.soap.Soap;
import com.jiyuan.pmis.structure.ExpandListItem;
import com.jiyuan.pmis.structure.Item;
import com.jiyuan.pmis.structure.Project;
import com.jiyuan.pmis.structure.Report;
import com.jiyuan.pmis.structure.ReportSearchField;
import com.jiyuan.pmis.structure.ReportSort;
import com.jiyuan.pmis.structure.ReportType;
import com.jiyuan.pmis.structure.SpinnerItem;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.CheckBox;
import android.widget.ExpandableListView;
import android.widget.ListView;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;

public class MyReportsActivity extends Activity {

    private ExpandableListView my_reports_listView;
    private Spinner spinner_my_reports_report_types;
    private Context context;
    private Project project;
    private MainApplication app;
    private TextView textview_my_reports_startTime, textview_my_reports_endTime;
    private CheckBox checkbox_my_reports_refuse, checkbox_my_reports_waiting, checkbox_my_reports_pass;
    private TextView textview_my_reports_project;
    //private SeparatedListAdapter adapter;
    private SimpleSpinnerAdapter adapter;
    private SimpleBaseExpandableListAdapter expandableadapter;
    private boolean isFirst = false;

    public static final int start_from_my_report_to_select_project = 10;
    public static final int start_from_my_report_to_my_report_details = 11;

    @Override
    protected void onCreate(Bundle b) {
        super.onCreate(b);
        this.setContentView(R.layout.activity_my_reports);
        this.context = this;
        this.my_reports_listView = (ExpandableListView) this.findViewById(R.id.my_reports_listView);
        this.my_reports_listView.setGroupIndicator(null);
        this.initData();
        this.isFirst = true;
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (!this.isFirst)
            this.search(null);
        this.isFirst = false;
    }

    public void search(View v) {
        ReportSearchField r = this.getReportSearchField();
        List<ExpandListItem> values = listReports(r);
        this.expandableadapter.setValues(values);
        this.expandableadapter.notifyDataSetChanged();
    }

    private ReportSearchField getReportSearchField() {
        ReportSearchField r = new ReportSearchField();
        if (this.inProject)
            r.xmid = project.xmid;
        else
            r.xmid = "-1";
        r.kssj = this.textview_my_reports_startTime.getText().toString();
        r.jssj = this.textview_my_reports_endTime.getText().toString();
        if (this.checkbox_my_reports_refuse.isChecked()) {
            r.xzwtg = "1";
        } else
            r.xzwtg = "0";
        if (this.checkbox_my_reports_waiting.isChecked()) {
            r.xzdsh = "1";
        } else
            r.xzdsh = "0";
        if (this.checkbox_my_reports_pass.isChecked()) {
            r.xzysh = "1";
        } else
            r.xzysh = "0";
        r.type = "0";
        r.yhid = app.getUser().yhid;
        r.bgxid = ((SpinnerItem) this.spinner_my_reports_report_types.getSelectedItem()).key;
        return r;
    }

    public void back(View v) {
        finish();
    }

    public void selectDate(View v) {
        Constant.selectDate(context, (TextView) v);
    }

    private List<ExpandListItem> listReports(ReportSearchField r) {
        List<ReportSort> sorts = new ArrayList<ReportSort>();
        List<ExpandListItem> values = new ArrayList<ExpandListItem>();
        try {
            sorts = this.getReports(r);
        } catch (PmisException e) {
            Toast.makeText(this.context, e.getMessage(), Toast.LENGTH_SHORT).show();
            return values;
        }
        for (int i = 0; i < sorts.size(); i++) {
            ExpandListItem expandListItem = new ExpandListItem();
            List<Report> reports = sorts.get(i).list;
            List<Item> items = new ArrayList<Item>();
            expandListItem.title = sorts.get(i).title;
            expandListItem.count = sorts.get(i).count;
            expandListItem.showCheckBox = false;
            for (int j = 0; j < reports.size(); j++) {
                Item item = new Item();
                item.key = reports.get(j).bgid;
                item.firstLineText = reports.get(j).gzrq.substring(5) + " " + reports.get(j).gzxs + "小时";
                item.secondLineText = reports.get(j).gznr;
                item.showCheckbox = false;
                if (reports.get(j).zt.equals("0")) {
                    item.showTail = true;
                    item.taiColor = Color.parseColor("#009bd9");
                    item.tailText = "待审核";
                } else if (reports.get(j).zt.equals("1")) {
                    item.showTail = true;
                    item.taiColor = Color.parseColor("#8ec156");
                    item.tailText = "已审核";
                } else {
                    item.showTail = true;
                    item.taiColor = Color.parseColor("#ff8974");
                    item.tailText = "未通过";
                }
                items.add(item);
            }
            expandListItem.items = items;
            values.add(expandListItem);
        }
        return values;
    }
    private List<ReportSort> getReports(ReportSearchField r) throws PmisException {
        Report[] reports = new Report[]{};
        final String METHOD_NAME = "getReports";
        Soap soap = new Soap(Constant.report_namespace, METHOD_NAME);
        List<PropertyInfo> args = new ArrayList<PropertyInfo>();
        PropertyInfo arg0 = new PropertyInfo();
        arg0.setName("reportSearchFieldStr");
        arg0.setValue(new Gson().toJson(r));
        args.add(arg0);
        soap.setPropertys(args);
        String ret = "";
        try {
            ret = soap.getResponse(Constant.report_url, Constant.report_url + "/" + METHOD_NAME);
        } catch (Exception e) {
            // TODO Auto-generated catch block
            throw new PmisException("获取报工列表失败!");
        }
        try {
            reports = new Gson().fromJson(ret, Report[].class);
        } catch (Exception e) {
            throw new PmisException("当前没有报工!");
        }
        List<Report> listReports = new ArrayList<Report>();
        for (int i = 0; i < reports.length; i++) {
            listReports.add(reports[i]);
        }
        List<ReportSort> sorts = new ArrayList<ReportSort>();
        while (listReports.size() > 0) {
            List<Report> list = new ArrayList<Report>();
            Report b = listReports.get(0);
            list.add(b);
            listReports.remove(0);
            int i = 0;
            while (listReports.size() != i) {
                if (b.xmjc.equals((listReports).get(i).xmjc)) {
                    list.add((listReports).get(i));
                    listReports.remove(i);
                    i--;
                }
                i++;
            }
            ReportSort sort = new ReportSort();
            sort.title = b.xmjc;
            sort.list = list;
            sort.count = list.size();
            if (b.xmjc != null && b.xmjc.equals("--")) {
                ReportType[] types = app.getReportTypes();
                for (int j = 0; j < types.length; j++) {
                    ReportSort typeSort = new ReportSort();
                    typeSort.title = types[j].bgxmc;
                    List<Report> typeList = new ArrayList<Report>();
                    for (int k = 0; k < list.size(); k++) {
                        if (types[j].bgxid.equals(list.get(k).bgxid)) {
                            typeList.add(list.get(k));
                        }
                    }
                    typeSort.list = typeList;
                    typeSort.count = typeList.size();
                    if (typeSort.count > 0)
                        //sorts.add(typeSort);
                        sorts.add(0, typeSort);
                }
            } else
                sorts.add(sort);
        }
        Log.e("pmis.....", new Gson().toJson(sorts));
        return sorts;
    }

    private void initData() {
        app = (MainApplication) this.getApplication();
        project = new Project();
        project.xmid = "-1";
        project.xmjc = "全部";
        View headerView = View.inflate(this, R.layout.my_report_search_box, null);
        headerView.setLayoutParams(new ListView.LayoutParams(ListView.LayoutParams.MATCH_PARENT,
                ListView.LayoutParams.WRAP_CONTENT));
        this.textview_my_reports_startTime = (TextView) headerView.findViewById(R.id.textview_my_reports_startTime);
        this.textview_my_reports_endTime = (TextView) headerView.findViewById(R.id.textview_my_reports_endTime);
        this.checkbox_my_reports_refuse = (CheckBox) headerView.findViewById(R.id.checkbox_my_reports_refuse);
        this.checkbox_my_reports_waiting = (CheckBox) headerView.findViewById(R.id.checkbox_my_reports_waiting);
        this.checkbox_my_reports_pass = (CheckBox) headerView.findViewById(R.id.checkbox_my_reports_pass);
        this.spinner_my_reports_report_types = (Spinner) headerView.findViewById(R.id.spinner_my_reports_report_types);
        this.textview_my_reports_project = (TextView) headerView.findViewById(R.id.textview_my_reports_project);
        //this.textview_my_reports_projects.setText(project.xmjc);
        this.textview_my_reports_startTime.setText(Constant.getBeforeCurrentDataString("yyyy-MM-dd", 7));
        this.textview_my_reports_endTime.setText(Constant.getCurrentDataString("yyyy-MM-dd"));
        this.checkbox_my_reports_refuse.setChecked(true);
        this.checkbox_my_reports_pass.setChecked(true);
        this.checkbox_my_reports_waiting.setChecked(true);
        this.textview_my_reports_project.setText(project.xmjc);
        ReportType[] types = app.getReportTypes();
        List<SpinnerItem> typeValues = new ArrayList<SpinnerItem>();
        SpinnerItem first = new SpinnerItem();
        first.key = "-1";
        first.value = "全部";
        first.zt = "1";
        first.size = 12;
        typeValues.add(first);
        for (int i = 0; i < types.length; i++) {
            SpinnerItem item = new SpinnerItem();
            item.key = types[i].bgxid;
            item.value = types[i].bgxmc;
            item.zt = types[i].zt;
            item.size = 12;
            typeValues.add(item);
        }
        adapter = new SimpleSpinnerAdapter(this, R.layout.spinner_item, typeValues);
        this.spinner_my_reports_report_types.setAdapter(adapter);
        List<ExpandListItem> values = new ArrayList<ExpandListItem>();
        expandableadapter = new SimpleBaseExpandableListAdapter(this, values);
        this.my_reports_listView.addHeaderView(headerView);
        this.my_reports_listView.setAdapter(expandableadapter);
        //this.review_reports_listView.setOnItemClickListener(item_listener);
        this.my_reports_listView.setOnChildClickListener(onChildClickListener);
        this.spinner_my_reports_report_types.setOnItemSelectedListener(onItemSelectedListener);
        this.textview_my_reports_project.setOnClickListener(project_select_Listener);
    }

    private ExpandableListView.OnChildClickListener onChildClickListener = new ExpandableListView.OnChildClickListener() {
        @Override
        public boolean onChildClick(ExpandableListView parent, View v, int groupPosition, int childPosition, long id) {
            // TODO Auto-generated method stub
            Intent it = new Intent(context, MyReportDetailsActivity.class);
            it.putExtra("bgid", ((Item) expandableadapter.getChild(groupPosition, childPosition)).key);
            startActivityForResult(it, start_from_my_report_to_my_report_details);
            return false;
        }
    };

    private boolean inProject = true;

    private Spinner.OnItemSelectedListener onItemSelectedListener = new Spinner.OnItemSelectedListener() {
        @Override
        public void onItemSelected(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
            // TODO Auto-generated method stub
            SimpleSpinnerAdapter adapter = (SimpleSpinnerAdapter) arg0.getAdapter();
            SpinnerItem item = adapter.getItem(arg2);
            if (item.zt.equals("0")) {
                inProject = false;
                textview_my_reports_project.setTextColor(Color.GRAY);
            } else {
                inProject = true;
                textview_my_reports_project.setTextColor(Color.parseColor("#2867a0"));
            }
            //Toast.makeText(context, String.valueOf(arg2), Toast.LENGTH_SHORT).show();
            textview_my_reports_project.setClickable(inProject);
        }

        @Override
        public void onNothingSelected(AdapterView<?> arg0) {
            // TODO Auto-generated method stub
            //Toast.makeText(context, "wu", Toast.LENGTH_SHORT).show();
            SimpleSpinnerAdapter adapter = (SimpleSpinnerAdapter) arg0.getAdapter();
            SpinnerItem item = adapter.getItem(0);
            if (item.zt.equals("0")) {
                inProject = false;
                textview_my_reports_project.setTextColor(Color.GRAY);
            } else {
                inProject = true;
                textview_my_reports_project.setTextColor(Color.parseColor("#2867a0"));
            }
            textview_my_reports_project.setClickable(inProject);
        }
    };

    private OnClickListener project_select_Listener = new OnClickListener() {
        @Override
        public void onClick(View v) {
            // TODO Auto-generated method stub
            //search = new Search(context,v,onItemClickListener,app.getAllProjectInfos(),1);
            Intent it = new Intent(context, SelectProjectsActivity.class);
            it.putExtra("search", "1");
            startActivityForResult(it, start_from_my_report_to_select_project);
        }
    };

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        //getActivity();
        if (requestCode == start_from_my_report_to_select_project) {
            this.isFirst = true;
            if (resultCode == Activity.RESULT_OK) {
                project.xmid = data.getStringExtra("xmid");
                this.textview_my_reports_project.setText(data.getStringExtra("xmjc"));
            }
            if (resultCode == Activity.RESULT_CANCELED) {
                // Write your code if there's no result
            }
        } else if (requestCode == start_from_my_report_to_my_report_details) {
            this.isFirst = false;
            if (resultCode == Activity.RESULT_OK) {
                this.isFirst = true;
            }
            if (resultCode == Activity.RESULT_CANCELED) {
                // Write your code if there's no result
            }
        }
    }
}
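// --- Editor's illustrative sketch (not part of the original repo) ---
// The SOAP layer in getReports() above returns a JSON string that is mapped
// straight onto a Report[] array with Gson; the field values here are
// placeholders, not real data:
//
// String json = "[{\"bgid\":\"1\",\"gzrq\":\"2014-03-01\",\"gzxs\":\"8\",\"zt\":\"0\"}]";
// Report[] reports = new Gson().fromJson(json, Report[].class);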
<file_sep>/JYPMIS0310/PMIS/src/com/jiyuan/pmis/constant/Constant.java
package com.jiyuan.pmis.constant;

import android.annotation.SuppressLint;
import android.app.DatePickerDialog;
import android.content.Context;
import android.content.SharedPreferences;
import android.widget.DatePicker;
import android.widget.TextView;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

import com.jiyuan.pmis.structure.ServerInfo;

@SuppressLint("SimpleDateFormat")
public class Constant {

    public static final int REQUEST_CODE = 1;
    public static final int MY_REPORT_REQUEST_CODE = 2;
    public static final int REVIEW_REPORT_REQUEST_CODE = 3;
    public static final String SUCCESS = "1";
    public static final String ERROR = "-1";

    public static String host = "172.16.58.3:80";
    public static String projectName = "JYPMIS";

    public static String department_namespace = "http://department.func.jypmis.com";
    public static String department_url = "http://" + host + "/" + projectName + "/services/Department";
    public static String login_namespace = "http://project.func.jypmis.com";
    public static String login_url = "http://" + host + "/" + projectName + "/services/Login";
    public static String project_namespace = "http://project.func.jypmis.com";
    public static String project_url = "http://" + host + "/" + projectName + "/services/Project";
    public static String report_namespace = "http://report.func.jypmis.com";
    public static String report_url = "http://" + host + "/" + projectName + "/services/Report";

    public static void configServer(ServerInfo serverInfo) {
        host = serverInfo.server_addr + ":" + serverInfo.server_port;
        department_url = "http://" + host + "/" + projectName + "/services/Department";
        login_url = "http://" + host + "/" + projectName + "/services/Login";
        project_url = "http://" + host + "/" + projectName + "/services/Project";
        report_url = "http://" + host + "/" + projectName + "/services/Report";
    }

    @SuppressLint("SimpleDateFormat")
    public static String getCurrentDataString(String format) {
        DateFormat df = new SimpleDateFormat(format);
        final Calendar c = Calendar.getInstance();
        Date date = c.getTime();
        return df.format(date);
    }

    public static String getBeforeCurrentDataString(String format, int day) {
        DateFormat df = new SimpleDateFormat(format);
        final Calendar c = Calendar.getInstance();
        int d = c.get(Calendar.DAY_OF_MONTH);
        c.set(Calendar.DAY_OF_MONTH, d - day);
        Date date = c.getTime();
        return df.format(date);
    }

    public static String toDateString(Date date, String format) {
        DateFormat df = new SimpleDateFormat(format);
        return df.format(date);
    }

    public static String doCode(String arg) {
        char[] array = arg.toCharArray();
        String str = "";
        for (int i = 0; i < array.length; i++) {
            char ch = (char) (array[i] + 10);
            str += ch;
        }
        return str;
    }

    public static String deCode(String arg) {
        char[] array = arg.toCharArray();
        String str = "";
        for (int i = 0; i < array.length; i++) {
            char ch = (char) (array[i] - 10);
            str += ch;
        }
        return str;
    }

    public static void selectDate(Context context, final TextView v) {
        // new DateLayout(this.context,this.getView(),v);
        Calendar c = Calendar.getInstance();
        new DatePickerDialog(context, new DatePickerDialog.OnDateSetListener() {
            @Override
            public void onDateSet(DatePicker arg0, int arg1, int arg2, int arg3) {
                // TODO Auto-generated method stub
                //Toast.makeText(context, arg1+";"+arg2+";"+arg3, Toast.LENGTH_SHORT).show();
                Calendar ca = Calendar.getInstance();
                ca.set(Calendar.YEAR, arg1);
                ca.set(Calendar.MONTH, arg2);
                ca.set(Calendar.DAY_OF_MONTH, arg3);
                v.setText(Constant.toDateString(ca.getTime(), "yyyy-MM-dd"));
            }
        }, c.get(Calendar.YEAR), c.get(Calendar.MONTH), c.get(Calendar.DAY_OF_MONTH)).show();
    }

    public static final String server_identifier = "server_identifier";

    public static void saveServerInfo(ServerInfo serverInfo, Context context) {
        SharedPreferences settings = context.getSharedPreferences(server_identifier, 0);
        SharedPreferences.Editor editor = settings.edit();
        editor.putString(ServerInfo.server_addr_key, serverInfo.server_addr);
        editor.putString(ServerInfo.server_port_key, serverInfo.server_port);
        // Commit the edits!
        editor.commit();
        configServer(serverInfo);
    }

    public static ServerInfo getServerInfo(Context context) {
        SharedPreferences settings = context.getSharedPreferences(server_identifier, 0);
        ServerInfo serverInfo = new ServerInfo();
        serverInfo.server_addr = settings.getString(ServerInfo.server_addr_key, "192.168.1.22");
        serverInfo.server_port = settings.getString(ServerInfo.server_port_key, "8080");
        return serverInfo;
    }
}
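// --- Editor's illustrative sketch (not part of the original repo) ---
// doCode/deCode above are a simple +10/-10 character shift, so they are exact
// inverses of each other (light obfuscation, not real encryption):
//
// String secret = Constant.doCode("abc");  // -> "klm"
// String plain = Constant.deCode(secret);  // -> "abc"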
<file_sep>/DataServer/src/com/project/dao/impl/CourseDAOImpl.java
package com.project.dao.impl;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;

import com.project.dao.CourseDAO;
import com.project.po.Course;

public class CourseDAOImpl extends HibernateDaoSupport implements CourseDAO {

    public void deleteCourse(Course arg0) {
        // TODO Auto-generated method stub
        String sql1 = "delete from data_collect_course where course_id = " + arg0.getId() + ";";
        String sql2 = "delete from data_course where id = " + arg0.getId() + ";";
        Session session = this.getHibernateTemplate().getSessionFactory().openSession();
        Connection conn = session.connection();
        try {
            PreparedStatement stmt1 = conn.prepareStatement(sql1);
            PreparedStatement stmt2 = conn.prepareStatement(sql2);
            conn.setAutoCommit(false);
            stmt1.execute();
            conn.commit();
            stmt2.execute();
            conn.commit();
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        session.close();
    }

    public void saveCourse(Course arg0) {
        // TODO Auto-generated method stub
        this.getHibernateTemplate().save(arg0);
    }

    public void updateCourse(Course arg0) {
        // TODO Auto-generated method stub
        this.getHibernateTemplate().update(arg0);
    }

    @Override
    public Course getCourseById(Integer arg0) {
        // TODO Auto-generated method stub
        return this.getHibernateTemplate().get(Course.class, arg0);
    }

    @SuppressWarnings("unchecked")
    @Override
    public List<Course> getCoursesByHql(String hql) {
        // TODO Auto-generated method stub
        return this.getHibernateTemplate().find(hql);
    }

    @SuppressWarnings("rawtypes")
    @Override
    public List getList(String hql) {
        // TODO Auto-generated method stub
        Query query = this.getSession().createSQLQuery(hql);
        return query.list();
    }
}
<file_sep>/PMIS2/src/com/jiyuan/pmis/structure/ExpandListItem.java
package com.jiyuan.pmis.structure;

import java.util.ArrayList;
import java.util.List;

public class ExpandListItem {
    public String title;
    public List<Item> items = new ArrayList<Item>();
}
<file_sep>/JYPMIS0310/JYPMIS/src/com/jypmis/func/department/DepartmentImpl.java
package com.jypmis.func.department;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import com.google.gson.Gson;
import com.jypmis.dbconn.OracleDBCONN;
import com.jypmis.exception.MyRuntimeException;
import com.jypmis.po.Sbm;
import com.jypmis.vo.SbmVO;

public class DepartmentImpl implements IDepartment {

    private static final String QUERYDEPARTMENTS = "select bmid,bmmc from sbm ";

    Connection conn = null;
    PreparedStatement ps = null;
    ResultSet rs = null;

    public String getDepartments() {
        // TODO Auto-generated method stub
        //System.out.println("客户端查询部门信息");
        Sbm sbm = new Sbm();
        List<Sbm> listsbm = findDepartments();
        if (listsbm == null || listsbm.size() == 0) {
            //System.out.println("没有部门信息");
            return "-1";
        } else {
            Iterator<Sbm> itsbm = listsbm.iterator();
            String jsonString = null;
            List<SbmVO> list = new ArrayList<SbmVO>();
            Gson gson = new Gson();
            while (itsbm.hasNext()) {
                sbm = itsbm.next();
                // //Systemem.out.println("部门ID:" + sbm.getBmid() + ",部门名称:"
                // + sbm.getBmmc());
                SbmVO sbmvo = new SbmVO();
                sbmvo.bmid = sbm.getBmid();
                sbmvo.bmmc = sbm.getBmmc();
                list.add(sbmvo);
                jsonString = gson.toJson(list);
            }
            //System.out.println(jsonString);
            return jsonString;
        }
    }

    /**
     * Query department info.
     * Required fields in the result: department id, department name.
     * @return
     */
    public List<Sbm> findDepartments() {
        // TODO Auto-generated method stub
        //System.out.println("查询部门信息...");
        List<Sbm> listsbm = new ArrayList<Sbm>();
        conn = OracleDBCONN.getInstance().getOracleDBCONN();
        try {
            ps = conn.prepareStatement(QUERYDEPARTMENTS);
            rs = ps.executeQuery();
            while (rs.next()) {
                Sbm sbm = new Sbm();
                sbm.setBmid(rs.getString("bmid"));
                sbm.setBmmc(rs.getString("bmmc"));
                listsbm.add(sbm);
            }
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            throw new MyRuntimeException("查询部门信息失败");
        } finally {
            OracleDBCONN.getInstance().close(conn, ps, rs); // release resources
        }
        return listsbm;
    }
}
<file_sep>/JYPMIS0310/PMIS/src/com/jiyuan/pmis/project/SelectProjectsSearchFragment.java
package com.jiyuan.pmis.project;

import java.util.ArrayList;
import java.util.List;

import com.jiyuan.pmis.MainApplication;
import com.jiyuan.pmis.R;
import com.jiyuan.pmis.search.Pinyin4jUtil;
import com.jiyuan.pmis.search.SimpleSearchAdapter;
import com.jiyuan.pmis.sqlite.ProjectInfo;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;

@SuppressLint("DefaultLocale")
public class SelectProjectsSearchFragment extends Fragment implements OnEditorActionListener {

    private Context context;
    private MainApplication app;
    private Activity activity;

    ListView mListView;
    SimpleSearchAdapter mAdapter;
    EditText mtxt;
    List<ProjectInfo> mAllData;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        /**
         * Inflate the layout for this fragment
         */
        this.context = this.getActivity();
        this.activity = this.getActivity();
        app = (MainApplication) this.getActivity().getApplication();
        View v = inflater.inflate(R.layout.select_projects_search_fragment, container, false);
        this.mListView = (ListView) v.findViewById(R.id.select_projects_search_listView);
        this.mtxt = (EditText) v.findViewById(R.id.edittext_select_projects_search_project_name);
        this.mAllData = this.app.getAllProjectInfos();
        mAdapter = new SimpleSearchAdapter((Activity) this.context);
        mtxt.addTextChangedListener(new TextWatcher() {
            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                if (0 != mtxt.getText().length()) {
                    String spnId = mtxt.getText().toString();
                    setSearchResult(spnId);
                } else {
                    setData();
                }
            }
        });
        setData();
        /*Department[] ments = app.getDepartments();
        List<SpinnerItem> values = new ArrayList<SpinnerItem>();
        SpinnerItem first = new SpinnerItem();
        first.key = "-1";
        first.value = "全部";
        values.add(first);
        for(int i=0;i<ments.length;i++){
            SpinnerItem item = new SpinnerItem();
            item.key = ments[i].bmid;
            item.value = ments[i].bmmc;
            values.add(item);
        }
        this.select_projects_search_listView.setOnItemClickListener(item_listener);
        items = new ArrayList<Item>();
        SimpleAdapter listAdapter = new SimpleAdapter(this.context,items);
        // Listen for Click events
        this.select_projects_search_listView.setAdapter(listAdapter);*/
        return v;
    }

    /* private void listProjects() {
        String xmjc = this.edittext_select_projects_search_project_name.getText().toString();
        Project[] projects = null;
        for (int i=0;i<projects.length;i++){
            Item item = new Item();
            item.firstLineText = projects[i].xmmc;
            item.secondLineText = projects[i].xmjc;
            item.key = projects[i].xmid;
            item.showCheckbox = false;
            items.add(item);
        }
        // Create the ListView Adapter
    }

    private Project[] getProjects(String bmid,String xmjc) throws PmisException{
        final String METHOD_NAME = "getProjects";
        Soap soap = new Soap(Constant.project_namespace,METHOD_NAME);
        List<PropertyInfo> args = new ArrayList<PropertyInfo>();
        PropertyInfo arg0 = new PropertyInfo();
        arg0.setName("bmid");
        arg0.setValue(bmid);
        arg0.setType(String.class);
        args.add(arg0);
        PropertyInfo arg1 = new PropertyInfo();
        arg1.setName("xmjc");
        arg1.setValue(xmjc);
        arg1.setType(String.class);
        args.add(arg1);
        soap.setPropertys(args);
        String ret = "";
        try {
            ret = soap.getResponse(Constant.project_url, Constant.project_url+"/"+METHOD_NAME);
        } catch (Exception e) {
            // TODO Auto-generated catch block
            throw new PmisException("获取项目列表是失败!");
        }
        Gson gson = new Gson();
        try {
            return gson.fromJson(ret, Project[].class);
        }catch(Exception e){
            throw new PmisException("当前没有项目!");
        }
    }

    private OnItemClickListener item_listener = new OnItemClickListener(){
        @Override
        public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
            // TODO Auto-generated method stub
            SeparatedListAdapter adapter = (SeparatedListAdapter) arg0.getAdapter();
            Item item = (Item)adapter.getItem(arg2);
            //Log.v("pmis", item.toString());
            Intent it = new Intent();
            //it.putExtra("ret", "2");
            it.putExtra("xmid", item.key);
            it.putExtra("xmjc", item.secondLineText);
            DatabaseHandler db = new DatabaseHandler(context);
            //ProjectInfo info = db.getProjectInfo();
            if(!db.projectInfoExist(item.key)){
                ProjectInfo info = new ProjectInfo();
                info.setXmid(item.key);
                info.setXmjc(item.secondLineText);
                info.setXmmc(item.firstLineText);
                db.addProjectInfo(info);
            }
            activity.setResult(Activity.RESULT_OK,it);
            activity.finish();
        }
    };*/

    public void setData() {
        if (mAllData == null)
            mAllData = new ArrayList<ProjectInfo>();
        mAdapter = new SimpleSearchAdapter((Activity) this.context);
        for (int i = 0; i < mAllData.size() && i < 30; i++) {
            mAdapter.addItem(mAllData.get(i));
        }
        mListView.setOnItemClickListener(onItemClickListener);
        mListView.setAdapter(mAdapter);
    }

    private OnItemClickListener onItemClickListener = new OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
            // TODO Auto-generated method stub
            SimpleSearchAdapter adapter = (SimpleSearchAdapter) arg0.getAdapter();
            ProjectInfo item = (ProjectInfo) adapter.getItem(arg2);
            Intent it = new Intent();
            it.putExtra("xmid", item.getXmid());
            it.putExtra("xmjc", item.getXmjc());
            activity.setResult(Activity.RESULT_OK, it);
            activity.finish();
        }
    };

    @SuppressLint("DefaultLocale")
    public void setSearchResult(String str) {
        mAdapter = new SimpleSearchAdapter((Activity) this.context);
        for (ProjectInfo temp : mAllData) {
            if (temp.getXmjc().toLowerCase().contains(str.toLowerCase())
                    || Pinyin4jUtil.converterToFirstSpell(temp.getXmjc()).toLowerCase().contains(str.toLowerCase())) {
                mAdapter.addItem(temp);
            }
        }
        mListView.setAdapter(mAdapter);
    }

    @Override
    public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
        // TODO Auto-generated method stub
        return false;
    }
}
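// --- Editor's illustrative sketch (not part of the original repo) ---
// The live filter in setSearchResult above matches either the raw project
// abbreviation or its pinyin initials. Pinyin4jUtil belongs to this repo but
// is not included in this dump, so its return value here is assumed:
//
// String initials = Pinyin4jUtil.converterToFirstSpell("北京项目"); // presumably "bjxm"
// boolean hit = initials.toLowerCase().contains("bj");             // -> true, item is kept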
<file_sep>/DataServer/src/com/project/dao/impl/CollectCourseDAOImpl.java
package com.project.dao.impl;

import java.util.List;

import org.hibernate.Query;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;

import com.project.dao.CollectCourseDAO;
import com.project.po.CollectCourse;

public class CollectCourseDAOImpl extends HibernateDaoSupport implements CollectCourseDAO {

    public void deleteCollectCourse(CollectCourse arg0) {
        // TODO Auto-generated method stub
        this.getHibernateTemplate().delete(arg0);
    }

    public void saveCollectCourse(CollectCourse arg0) {
        // TODO Auto-generated method stub
        this.getHibernateTemplate().save(arg0);
    }

    public void updateCollectCourse(CollectCourse arg0) {
        // TODO Auto-generated method stub
        this.getHibernateTemplate().update(arg0);
    }

    @Override
    public CollectCourse getCollectCourseById(Integer arg0) {
        // TODO Auto-generated method stub
        return this.getHibernateTemplate().get(CollectCourse.class, arg0);
    }

    @SuppressWarnings("unchecked")
    @Override
    public List<CollectCourse> getCollectCoursesByHql(String hql) {
        // TODO Auto-generated method stub
        return this.getHibernateTemplate().find(hql);
    }

    @SuppressWarnings("rawtypes")
    @Override
    public List getList(String hql) {
        // TODO Auto-generated method stub
        Query query = this.getSession().createSQLQuery(hql);
        return query.list();
    }
}
<file_sep>/JYPMIS0310/JYPMIS/src/com/jypmis/func/report/ReportImpl.java
package com.jypmis.func.report;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.sql.Date;
import java.util.Iterator;
import java.util.List;
//Generated by MyEclipse

import com.google.gson.Gson;
import com.jypmis.dbconn.OracleDBCONN;
import com.jypmis.exception.MyRuntimeException;
import com.jypmis.func.login.LoginImpl;
import com.jypmis.po.Bbg;
import com.jypmis.po.Bxmjbxx;
import com.jypmis.po.Sbgx;
import com.jypmis.po.Sbm;
import com.jypmis.vo.BbgVO;
import com.jypmis.vo.ReportSearchFieldVO;
import com.jypmis.vo.SbgxVO;
import com.jypmis.ztools.BHGenerator;
import com.jypmis.ztools.ReportQueryString;
import com.jypmis.ztools.TransTimetype;

public class ReportImpl implements IReport {

    final ReportQueryString cs = new ReportQueryString();
    final String QUERYREPORTYPES = cs.getQUERYREPORTYPES();
    final String INSERTREPORTS = cs.getINSERTREPORTS();
    final String DELETEREPORT = cs.getDELETEREPORT();
    final String UPDATEWODEREPORT = cs.getUPDATEWODEREPORT();
    final String QUERYREPORTBYSEARCHSTR1 = cs.getQUERYREPORTBYSEARCHSTR1(); // reports within the report-time range
    final String QUERYREPORTBYSEARCHSTR0 = cs.getQUERYREPORTBYSEARCHSTR0(); // reports within the work-date range
    final String QUERYREPORTBYID = cs.getQUERYREPORTBYID();
    final String QUERYREPORTWEITONGGUOCOUNT = cs.getQUERYREPORTWEITONGGUOCOUNT(); // count of unreviewed reports
    final String QUERYREPORTDAISHENHECOUNT = cs.getQUERYREPORTDAISHENHECOUNT(); // count of reports pending this reviewer
    final String WEITONGGUO = cs.getWEITONGGUO();
    final String DAISHENHE = cs.getDAISHENHE();
    final String YITONGGUO = cs.getYITONGGUO();

    Connection conn = null;
    PreparedStatement ps = null;
    ResultSet rs = null;

    /**
     * Add a new report
     */
    @SuppressWarnings("static-access")
    public String saveReport(String userid, String reportStr) {
        // TODO Auto-generated method stub
        //System.out.println("客户端新增报工内容:" + userid + "和" + reportStr);
        conn = OracleDBCONN.getInstance().getOracleDBCONN();
        Gson gson = new Gson();
        BbgVO bg = gson.fromJson(reportStr, BbgVO.class);
        String bgr = userid;
        // String shr = null;
        // if (bg.xmid.equals("-1")) {
        // System.out.println("新增的报工与项目无关,查找部门主任作为审核人");
        // shr = log.findDepManagerbyUserid(userid);
        // } else {
        // System.out.println("新增的报工跟项目有关,查找对应xmid下的项目经理作为审核人");
        // shr = log.findProjectManagerByXMID(bg.xmid);
        // }
        BHGenerator bh = new BHGenerator();
        String bgid = bh.getBh(); // generate and assign the report id
        TransTimetype trans = new TransTimetype();
        String currentTime = trans.getCurrentTime();
        Timestamp bgsj = trans.String2Timestamp(currentTime);
        Date gzrq = trans.String2Date(bg.gzrq);
        try {
            final String INSERTREPORTS = "insert into bbg(bgid,bglx,xmid,gzrq,gzxs,gzdd,gznr,bgr,bgsj,zt) "
                    + "values(?,?,?,?,?,?,?,?,?,?)";
            ps = conn.prepareStatement(INSERTREPORTS);
            ps.setString(1, bgid);
            ps.setString(2, bg.bgxid);
            ps.setString(3, bg.xmid);
            ps.setDate(4, gzrq);
            ps.setFloat(5, Float.valueOf(bg.gzxs));
            ps.setString(6, bg.gzdd);
            ps.setString(7, bg.gznr);
            ps.setString(8, bgr);
            ps.setTimestamp(9, bgsj);
            ps.setString(10, "0");
            //ps.setString(11, shr);
            ps.executeUpdate();
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            OracleDBCONN.getInstance().close(conn, ps, rs); // release resources
            return "-1";
            // throw new MyRuntimeException("新增报工失败");
        } finally {
            OracleDBCONN.getInstance().close(conn, ps, rs); // release resources
        }
        //System.out.println("新增报工成功,报工编号为:" + bgid);
        return "1";
    }

    /**
     * Delete a report
     */
    public String deleteReport(String reportid) {
        // TODO Auto-generated method stub
        //System.out.println("客户端删除了报工ID为:" + reportid + "的报工");
        conn = OracleDBCONN.getInstance().getOracleDBCONN();
        try {
            ps = conn.prepareStatement(DELETEREPORT);
            ps.setString(1, reportid);
            ps.executeUpdate();
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            OracleDBCONN.getInstance().close(conn, ps, rs); // release resources
            return "-1";
        } finally {
            OracleDBCONN.getInstance().close(conn, ps, rs); // release resources
        }
        return "1";
    }
    /**
     * Update a report
     */
    public String updateReport(String yhid, String reportStr, String type) {
        //System.out.println("客户端修改报工内容,传来:" + yhid + "和" + reportStr);
        conn = OracleDBCONN.getInstance().getOracleDBCONN();
        Gson gson = new Gson();
        BbgVO bg = gson.fromJson(reportStr, BbgVO.class);
        String shr = yhid;
        TransTimetype trans = new TransTimetype();
        Date gzrq = trans.String2Date(bg.gzrq);
        String currentTime = trans.getCurrentTime();
        Timestamp bgsj = trans.String2Timestamp(currentTime);
        Timestamp shsj = trans.String2Timestamp(currentTime);
        final String UPDATEWODEREPORT0 = "update bbg set bglx=?,xmid=?,gzrq=?,gzxs=?,gzdd=?,gznr=?"
                + ",zt=?,shxx=?,bgsj=?,shsj=? where bgid=?"; // report update (by the reporter)
        final String UPDATEWODEREPORT1 = "update bbg set bglx=?,xmid=?,gzrq=?,gzxs=?,gzdd=?,gznr=?"
                + ",zt=?,shr=?,shxx=?,bgsj=?,shsj=? where bgid=?"; // report update (by the reviewer)
        if (type.equals("0")) {
            try {
                ps = conn.prepareStatement(UPDATEWODEREPORT0);
                ps.setString(1, bg.bgxid);
                ps.setString(2, bg.xmid);
                ps.setDate(3, gzrq);
                ps.setFloat(4, Float.valueOf(bg.gzxs));
                ps.setString(5, bg.gzdd);
                ps.setString(6, bg.gznr);
                if (bg.zt.equals(WEITONGGUO)) {
                    ps.setString(7, DAISHENHE); // after an edit, set the status back to pending review
                } else
                    ps.setString(7, bg.zt);
                ps.setString(8, bg.shxx);
                ps.setTimestamp(9, bgsj); // refresh the report time to now
                ps.setTimestamp(10, trans.String2Timestamp(bg.shsj));
                ps.setString(11, bg.bgid);
                ps.executeUpdate();
            } catch (SQLException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
                OracleDBCONN.getInstance().close(conn, ps, rs);
                return "-1";
            }
        } else {
            try {
                ps = conn.prepareStatement(UPDATEWODEREPORT1);
                ps.setString(1, bg.bgxid);
                ps.setString(2, bg.xmid);
                ps.setDate(3, gzrq);
                ps.setFloat(4, Float.valueOf(bg.gzxs));
                ps.setString(5, bg.gzdd);
                ps.setString(6, bg.gznr);
                ps.setString(7, bg.zt);
                ps.setString(8, shr);
                ps.setString(9, bg.shxx);
                ps.setTimestamp(10, trans.String2Timestamp(bg.bgsj));
                ps.setTimestamp(11, shsj); // refresh the review time to now
                ps.setString(12, bg.bgid);
                ps.executeUpdate();
            } catch (SQLException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
                OracleDBCONN.getInstance().close(conn, ps, rs);
                return "-1";
            }
        }
        //System.out.println("修改了编号为:" + bg.bgid + "的报工");
        OracleDBCONN.getInstance().close(conn, ps, rs);
        return "1";
    }
    /**
     * Look up the report with the given bgid
     */
    public String showReport(String bgid) {
        // TODO Auto-generated method stub
        List<Bbg> listbbg = findReportByID(bgid);
        if (listbbg == null || listbbg.size() == 0) {
            //System.out.println("未查到报工记录,请重新检索");
            return "-1";
        } else {
            Iterator<Bbg> it = listbbg.iterator();
            Bbg bbg = new Bbg();
            Gson gson = new Gson();
            String jsonString = null;
            while (it.hasNext()) {
                bbg = it.next();
                BbgVO bgvo = new BbgVO();
                bgvo.bgid = bbg.getBgid();
                bgvo.bgxid = bbg.getBglx();
                bgvo.xmid = bbg.getXmid();
                bgvo.xmjc = bbg.getXmjc();
                bgvo.gzdd = bbg.getGzdd();
                LoginImpl log = new LoginImpl();
                String shr = log.findUsernameByUserid(bbg.getShr()); // reviewer name
                bgvo.shr = shr; // reviewer
                bgvo.shxx = bbg.getShxx();
                bgvo.zt = bbg.getZt();
                bgvo.gznr = bbg.getGznr();
                bgvo.gzxs = bbg.getGzxs().toString();
                bgvo.gzrq = bbg.getGzrq();
                bgvo.bgsj = bbg.getBgsj();
                bgvo.shsj = bbg.getShsj();
                //LoginImpl log = new LoginImpl();
                String bgr = log.findUsernameByUserid(bbg.getBgr()); // reporter name
                bgvo.bgr = bgr;
                jsonString = gson.toJson(bgvo);
            }
            //System.out.println("我的"+jsonString+"woded32342");
            return jsonString;
        }
    }

    /**
     * Search reports by criteria
     */
    public String getReports(String reportSearchFieldStr) {
        //System.out.println("客户端按条件查询报工信息" + reportSearchFieldStr);
        // TODO Auto-generated method stub
        List<Bbg> listbbg = findReportBySearchStr(reportSearchFieldStr);
        Iterator<Bbg> it = listbbg.iterator();
        if (listbbg == null || listbbg.size() == 0) {
            //System.out.println("未查到报工记录,请重新检索");
            return "-1";
        } else {
            Bbg bbg = new Bbg();
            Gson gson = new Gson();
            String jsonString = null;
            //List<BbgSortVO> listsort=new ArrayList<BbgSortVO>();
            List<BbgVO> listAll = new ArrayList<BbgVO>();
            while (it.hasNext()) {
                bbg = it.next();
                BbgVO bgvo = new BbgVO();
                bgvo.gzrq = bbg.getGzrq();
                bbg.setBgr(bbg.getBgr());
                bgvo.gzxs = bbg.getGzxs().toString();
                bgvo.gznr = bbg.getGznr();
                bgvo.zt = bbg.getZt();
                bgvo.xmjc = bbg.getXmjc();
                bgvo.bgid = bbg.getBgid();
                bgvo.shxx = bbg.getShxx();
                LoginImpl log = new LoginImpl();
                String bgr = log.findUsernameByUserid(bbg.getBgr()); // reporter name
                bgvo.bgr = bgr;
                bgvo.bgxid = bbg.getBglx();
                listAll.add(bgvo);
                //System.out.println("bgvo:"+gson.toJson(bgvo));
            }
            /*while(listAll.size()>0){
                List<BbgVO> list = new ArrayList<BbgVO>();
                BbgVO b = listAll.get(0);
                list.add(b);
                listAll.remove(0);
                int i = 0;
                while(listAll.size()!=i){
                    if(b.xmjc.equals(listAll.get(i).xmjc)){
                        list.add(listAll.get(i));
                        listAll.remove(i);
                        i--;
                    }
                    i++;
                }
                BbgSortVO vo = new BbgSortVO();
                vo.xmjc = b.xmjc;
                vo.list = list;
                listsort.add(vo);
            }*/
            jsonString = gson.toJson(listAll);
            //System.out.println(jsonString);
            return jsonString;
        }
    }
    /**
     * Count reports by status (rejected / pending review) - used for the badges on the main page
     */
    public int getReportCount(String yhid, String zt) {
        // TODO Auto-generated method stub
        //System.out.println("客户端传来"+yhid+",状态"+zt);
        conn = OracleDBCONN.getInstance().getOracleDBCONN();
        // LoginImpl log = new LoginImpl();
        // String bgr = log.findUsernameByUserid(yhid);// 根据用户id查询用户姓名作报工人
        // String shr = log.findUsernameByUserid(yhid);
        String bgr = yhid;
        int count = 0;
        try {
            if (zt.equals(WEITONGGUO)) {
                //System.out.println("查询我的报工未通过的报工记录数");
                ps = conn.prepareStatement(QUERYREPORTWEITONGGUOCOUNT);
                ps.setString(1, bgr);
                ps.setString(2, zt);
                rs = ps.executeQuery();
                if (rs.next())
                    count = rs.getInt(1);
                else
                    return 0;
            } else if (zt.equals(DAISHENHE)) {
                //System.out.println("查询审核报工中待审核的报工记录数");
                Gson gson = new Gson();
                ReportSearchFieldVO rs1 = new ReportSearchFieldVO();
                rs1.xmid = "-1";
                rs1.kssj = "2000-03-21";
                rs1.jssj = "2016-03-21";
                rs1.xzwtg = "0";
                rs1.xzdsh = "1";
                rs1.xzysh = "0";
                rs1.yhid = yhid;
                rs1.type = "1";
                rs1.xzdy = "1";
                rs1.xzxy = "1";
                rs1.xzeq = "1";
                rs1.bgxid = "-1";
                String reportSearchFieldStr = gson.toJson(rs1);
                List<Bbg> listbbg = this.findReportBySearchStr(reportSearchFieldStr);
                if (listbbg.size() > 0) {
                    //System.out.println("待审核个数:"+listbbg.size());
                    count = listbbg.size();
                }
                //this.checkYHJSbyYHID(yhid);
                // ps = conn.prepareStatement(QUERYREPORTDAISHENHECOUNT);
                // ps.setString(1, shr);
                // ps.setString(2, zt);
            } else
                return 0;
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            throw new MyRuntimeException("查询报工记录数失败");
        } finally {
            OracleDBCONN.getInstance().close(conn, ps, rs); // release resources
        }
        return count;
    }

    /**
     * Get the report types
     */
    public String getReportTypes() {
        // TODO Auto-generated method stub
        //System.out.println("客户端查询报工类型");
        Sbgx sbgx = new Sbgx();
        List<Sbgx> listsbgx = findReportTypes();
        if (listsbgx == null || listsbgx.size() == 0) {
            //System.out.println("没有报工类型");
            return "-1";
        } else {
            Iterator<Sbgx> itsbgx = listsbgx.iterator();
            String jsonString = null;
            List<SbgxVO> list = new ArrayList<SbgxVO>();
            Gson gson = new Gson();
            while (itsbgx.hasNext()) {
                sbgx = itsbgx.next();
                // System.out.println("报工项ID:" + sbgx.getBgxid() + ",报工项名称:"
                // + sbgx.getBgxmc());
                SbgxVO sbgxvo = new SbgxVO();
                sbgxvo.bgxid = sbgx.getBgxid();
                sbgxvo.bgxmc = sbgx.getBgxmc();
                sbgxvo.zt = sbgx.getZt();
                list.add(sbgxvo);
                jsonString = gson.toJson(list);
                // System.out.println("jsonString:"+jsonString);
            }
            //System.out.println(jsonString);
            return jsonString;
        }
    }

    public List<Sbgx> findReportTypes() {
        // TODO Auto-generated method stub
        //System.out.println("查询报工类型...");
        List<Sbgx> listsbgx = new ArrayList<Sbgx>();
        conn = OracleDBCONN.getInstance().getOracleDBCONN();
        try {
            ps = conn.prepareStatement(QUERYREPORTYPES);
            rs = ps.executeQuery();
            while (rs.next()) {
                Sbgx sbgx = new Sbgx();
                sbgx.setBgxid(rs.getString("bgxid"));
                sbgx.setBgxmc(rs.getString("bgxmc"));
                sbgx.setZt(rs.getString("zt"));
                listsbgx.add(sbgx);
            }
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            throw new MyRuntimeException("查询报工类型失败");
        } finally {
            OracleDBCONN.getInstance().close(conn, ps, rs); // release resources
        }
        return listsbgx;
    }
    public List<Bbg> findReportByID(String bgid) {
        // TODO Auto-generated method stub
        //System.out.println("查询" + bgid + "的报工信息...");
        List<Bbg> listbbg = new ArrayList<Bbg>();
        conn = OracleDBCONN.getInstance().getOracleDBCONN();
        try {
            ps = conn.prepareStatement(QUERYREPORTBYID);
            ps.setString(1, bgid);
            rs = ps.executeQuery();
            TransTimetype trans = new TransTimetype();
            //System.out.println(rs.getTimestamp("bgsj"));
            String bgsj = null;
            //System.out.println(bgsj);
            String shsj = null;
            while (rs.next()) {
                Bbg bbg = new Bbg();
                bgsj = trans.Timestamp2String(rs.getTimestamp("bgsj"));
                shsj = trans.Timestamp2String(rs.getTimestamp("shsj"));
                bbg.setBgid(bgid); //,bgsj,shsj
                bbg.setBglx(rs.getString("bglx"));
                bbg.setXmid(rs.getString("xmid"));
                bbg.setXmjc(rs.getString("xmjc"));
                bbg.setGzdd(rs.getString("gzdd"));
                bbg.setShr(rs.getString("shr"));
                bbg.setShxx(rs.getString("shxx"));
                bbg.setZt(rs.getString("zt"));
                bbg.setBgsj(bgsj);
                bbg.setShsj(shsj);
                bbg.setGznr(rs.getString("gznr"));
                bbg.setGzxs(rs.getFloat("gzxs"));
                String gzrq = trans.Date2String(rs.getDate("gzrq"));
                bbg.setGzrq(gzrq);
                bbg.setBgr(rs.getString("bgr"));
                listbbg.add(bbg);
            }
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            throw new MyRuntimeException("查询" + bgid + "报工失败");
        } finally {
            OracleDBCONN.getInstance().close(conn, ps, rs); // release resources
        }
        return listbbg;
    }

    /**
     * Search reports by criteria: report-time period or work-date interval, work hours, etc.
     */
    public List<Bbg> findReportBySearchStr(String reportSearchFieldStr) {
        // TODO Auto-generated method stub
        //System.out.println("按条件查询报工信息...");
        List<Bbg> listbbg = new ArrayList<Bbg>();
        conn = OracleDBCONN.getInstance().getOracleDBCONN();
        Gson gson = new Gson();
        ReportSearchFieldVO rsfvo = gson.fromJson(reportSearchFieldStr, ReportSearchFieldVO.class);
        // LoginImpl log = new LoginImpl();
        // String bgr = log.findUsernameByUserid(rsfvo.yhid);// 作为报工人
        String bgr = rsfvo.yhid; // as the reporter
        // String shr = log.findUsernameByUserid(rsfvo.yhid);// 作为审核人
        // String shr=rsfvo.yhid;
        String type = rsfvo.type;
        // which review-status filters are selected
        String xzwtg = "";
        String xzdsh = "";
        String xzysh = "";
        try {
            if (rsfvo.xzwtg.equals("1")) {
                xzwtg = "-1";
            } else {
                xzwtg = "";
            }
            if (rsfvo.xzdsh.equals("1")) {
                xzdsh = "0";
            } else {
                xzdsh = "";
            }
            if (rsfvo.xzysh.equals("1")) {
                xzysh = "1";
            } else {
                xzysh = "";
            }
        } catch (Exception e) {
            xzwtg = "";
            xzdsh = "0";
            xzysh = "";
        }
        String querystr = "";
        int itemp = 0;
        try {
            itemp = Integer.parseInt(rsfvo.xzdy + rsfvo.xzxy + rsfvo.xzeq);
        } catch (Exception e) {
            itemp = Integer.parseInt("111");
        }
        // which work-hours comparisons are selected
        switch (itemp) {
        case 111:
            querystr += " and (gzxs>8 or gzxs<8 or gzxs=8)"; // selected >, <, =
            break;
        case 110:
            querystr += " and (gzxs>8 or gzxs<8 )"; // selected >, <
            break;
        case 101:
            querystr += " and (gzxs>8 or gzxs=8 )"; // selected >, =
            break;
        case 100:
            querystr += " and gzxs>8"; // selected >
            break;
        case 11:
            querystr += " and (gzxs<8 or gzxs=8)"; // selected <, =
            break;
        case 10:
            querystr += " and gzxs<8"; // selected <
            break;
        case 1:
            querystr += " and gzxs=8"; // selected =
            break;
        case 0:
            querystr += ""; // none selected
            break;
        }
        TransTimetype trans = new TransTimetype();
        // query time range: e.g. the last day or two
        try {
            if (type.equals("1")) {
                //System.out.println("作为审核人");
                // check the current user's roles
                String dshsqlstr = this.getMyDSHSQLStr(rsfvo); // reports waiting for my review
                //System.out.println("查询待审核报工:"+dshsqlstr);
                if (dshsqlstr.equals("")) {
                    //System.out.println("无待审核报工");
                } else {
                    ps = conn.prepareStatement(dshsqlstr);
                    rs = ps.executeQuery();
                }
            } else {
                //System.out.println("报工人查看工作日期间隔内的报工");
                // work-date range:
                Date kssj = null;
                try {
                    kssj = trans.String2Date(rsfvo.kssj); // start date
                } catch (Exception e) {
                    kssj = trans.String2Date("1975-01-01");
                }
                Date jssj = trans.String2Date(rsfvo.jssj); // end date
                //QUERYREPORTBYSEARCHSTR0="select gzrq,bgr,gzxs,gznr,zt,xmjc,bgid,shxx "+
                //"from bbg a,bxmjbxx b where a.xmid=b.xmid and (gzrq between ? and ?)"+
                //"and (zt=? or zt=? or zt=?) and bgr=? ";
                if (!rsfvo.xmid.equals("-1")) {
                    // filter by a specific project
                    ps = conn.prepareStatement(QUERYREPORTBYSEARCHSTR0 + querystr + " and a.xmid='" + rsfvo.xmid + "'");
                } else {
                    String bgxStr = "";
                    if (!rsfvo.bgxid.equals("-1")) {
                        // filter by a specific report type
                        bgxStr = " and a.bglx ='" + rsfvo.bgxid + "'";
                    }
                    ps = conn.prepareStatement(QUERYREPORTBYSEARCHSTR0 + querystr + bgxStr);
                }
                ps.setDate(1, kssj);
                ps.setDate(2, jssj);
                ps.setString(6, bgr);
                ps.setString(3, xzwtg);
                ps.setString(4, xzdsh);
                ps.setString(5, xzysh);
                rs = ps.executeQuery();
            }
            if (rs != null) { // guard: rs stays null when there is no pending-review SQL to run (avoids an NPE)
                while (rs.next()) {
                    Bbg bbg = new Bbg();
                    String gzrq = trans.Date2String(rs.getDate("gzrq"));
                    bbg.setGzrq(gzrq);
                    bbg.setGzxs(rs.getFloat("gzxs"));
                    bbg.setGznr(rs.getString("gznr"));
                    bbg.setZt(rs.getString("zt"));
                    bbg.setXmjc(rs.getString("xmjc"));
                    bbg.setBgid(rs.getString("bgid"));
                    bbg.setShxx(rs.getString("shxx"));
                    bbg.setBgr(rs.getString("bgr"));
                    bbg.setBglx(rs.getString("bglx"));
                    listbbg.add(bbg);
                }
            }
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            throw new MyRuntimeException("按条件查询报工失败");
        } finally {
            OracleDBCONN.getInstance().close(conn, ps, rs); // release resources
        }
        return listbbg;
    }
	/**
	 * Build the SQL that selects reports pending the current user's review.
	 * Which sub-queries are combined depends on the roles the user holds;
	 * an empty string means the user has nothing to review.
	 */
	public String getMyDSHSQLStr(ReportSearchFieldVO rsfvo) {
		TransTimetype trans = new TransTimetype();
		Timestamp kssj = trans.String2Timestamp(rsfvo.kssj + " 00:00:01"); // start of range
		Timestamp jssj = trans.String2Timestamp(rsfvo.jssj + " 23:59:59"); // end of range
		String querystr = "";
		int itemp1 = 0;
		try {
			itemp1 = Integer.parseInt(rsfvo.xzdy + rsfvo.xzxy + rsfvo.xzeq);
		} catch (Exception e) {
			itemp1 = Integer.parseInt("111");
		}
		// Decode the packed work-hours flags, as in findReportBySearchStr.
		switch (itemp1) {
		case 111:
			querystr += " and (gzxs>8 or gzxs<8 or gzxs=8) "; // >, < and = selected
			break;
		case 110:
			querystr += " and (gzxs>8 or gzxs<8 ) "; // > and < selected
			break;
		case 101:
			querystr += " and (gzxs>8 or gzxs=8 ) "; // > and = selected
			break;
		case 100:
			querystr += " and gzxs>8 "; // > selected
			break;
		case 11:
			querystr += " and (gzxs<8 or gzxs=8) "; // < and = selected
			break;
		case 10:
			querystr += " and gzxs<8 "; // < selected
			break;
		case 1:
			querystr += " and gzxs=8 "; // = selected
			break;
		case 0:
			querystr += ""; // nothing selected
			break;
		}
		String yhid = rsfvo.yhid;
		// Base query: reports in state 0 (pending review) within the time range,
		// plus the extra work-hours filter built above.
		String SSQL = "select gzrq,bgr,gzxs,gznr,zt,a.xmid,nvl(xmjc, '--') xmjc,bgid,shxx,bglx "
				+ "from bbg a left join bxmjbxx b on a.xmid=b.xmid where "
				+ "(a.bgsj between to_timestamp('" + kssj + "', 'yyyy-mm-dd hh24:mi:ss.ff') and to_timestamp('" + jssj + "', 'yyyy-mm-dd hh24:mi:ss.ff'))"
				+ "and zt='0' " + querystr;
		// String osql = " order by a.bgsj desc"; // result ordering (currently unused)
		// Project manager: project reports (bglx='0001') of projects he or she manages.
		String case1sql = SSQL + " and xmjl='" + yhid + "' and bgr!='" + yhid + "' and a.bglx='0001'";
		// Sub-department head: non-project reports of his or her sub-department members.
		String case10sql = SSQL + " and a.bgr in (select yhid as bgr from sbm p,sxtyh q "
				+ "where p.bmid=q.bmid and length(bmbh)>4 and bmzr='" + yhid + "')"
				+ "and bgr!='" + yhid + "' and a.bglx!='0001'";
		String case11sql = case1sql + " union " + case10sql; // sub-department head + project manager
		// Department head: non-project reports of direct members, project reports of
		// the managers of projects owned by the department, and non-project reports
		// of the heads of its sub-departments.
		String case100sql = SSQL + "and a.bgr in (select yhid as bgr from sbm p,sxtyh q where p.bmid=q.bmid and length(bmbh)<=4 and bmzr='" + yhid + "')"
				+ " and a.bglx!='0001' " + " union " + SSQL + "and a.bgr in (select xmjl from bxmjbxx a,sbm b where a.zrbm=b.bmid and length(bmbh)<=4 and bmzr='" + yhid + "') "
				+ "and a.bglx='0001' " + " union " + SSQL
				+ "and a.bgr in ( select bmzr from sbm where substr(bmbh, 0, length(bmbh)-4) in (select bmbh from sbm where length(bmbh)=4 and bmzr='" + yhid + "')) "
				+ "and a.bglx!='0001' ";
		String case101sql = case100sql + " union " + case1sql; // department head + project manager
		String case110sql = case100sql + " union " + case10sql; // department head + sub-department head
		String case111sql = case110sql + " union " + case1sql; // all three roles
		String compsql = "";
		int itemp = 0;
		try {
			itemp = Integer.parseInt(checkYHJSbyYHID(yhid));
		} catch (Exception e) {
			itemp = Integer.parseInt("000"); // none of the three roles
		}
		switch (itemp) {
		case 111:
			compsql = case111sql; // department head + sub-department head + project manager
			break;
		case 110:
			compsql = case110sql; // department head + sub-department head
			break;
		case 101:
			compsql = case101sql; // department head + project manager
			break;
		case 100:
			compsql = case100sql; // department head
			break;
		case 11:
			compsql = case11sql; // sub-department head + project manager
			break;
		case 10:
			compsql = case10sql; // sub-department head
			break;
		case 1:
			compsql = case1sql; // project manager
			break;
		default:
			break;
		}
		return compsql;
	}

	public static void main(String[] args) {
		// Smoke test against user id 263.
		ReportImpl rep = new ReportImpl();
		Gson json = new Gson();
		System.out.println(rep.getReportCount("263", "0"));
		ReportSearchFieldVO rsfvo = new ReportSearchFieldVO();
		rsfvo.yhid = "263";
		System.out.println(json.toJson(rep.getBMZRs("263")));
		System.out.println(rep.checkYHJSbyYHID("263"));
	}
}
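/*
 * A minimal usage sketch (assuming a JSON body shaped like the
 * ReportSearchFieldVO fields read above; all values are hypothetical):
 *
 *   ReportImpl dao = new ReportImpl();
 *   String json = "{\"yhid\":\"263\",\"type\":\"1\","
 *       + "\"kssj\":\"2014-03-01\",\"jssj\":\"2014-03-31\","
 *       + "\"xzdy\":\"1\",\"xzxy\":\"1\",\"xzeq\":\"1\","
 *       + "\"xzwtg\":\"0\",\"xzdsh\":\"1\",\"xzysh\":\"0\","
 *       + "\"xmid\":\"-1\",\"bgxid\":\"-1\"}";
 *   List<Bbg> pending = dao.findReportBySearchStr(json);
 */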
<file_sep>/Android1219/src/com/example/android1219/Thread/TestThreadActivity.java
package com.example.android1219.thread;

import com.example.android1219.R;

import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;

public class TestThreadActivity extends Activity {

	private Context context;
	private Button test_updateUI, test_handler, test_newClass;
	private TextView test_updateUI_text;

	// Handler bound to the main (UI) thread; worker threads post results here.
	private Handler handler = new Handler() {
		@Override
		public void handleMessage(Message msg) {
			if (msg.what == 3) {
				Toast.makeText(context, "slept for 3 s", Toast.LENGTH_LONG).show();
			}
		}
	};

	public TestThreadActivity() {
		context = this;
	}

	@Override
	protected void onCreate(Bundle saveBundle) {
		super.onCreate(saveBundle);
		this.setContentView(R.layout.thread_activity);
		this.test_updateUI = (Button) this.findViewById(R.id.test_updateUI);
		this.test_updateUI_text = (TextView) this.findViewById(R.id.test_updateUI_text);
		this.test_updateUI.setOnClickListener(Test_UpdateUI);
		this.test_handler = (Button) this.findViewById(R.id.test_handler);
		this.test_handler.setOnClickListener(Test_Handler);
		this.test_newClass = (Button) this.findViewById(R.id.test_newClass);
		this.test_newClass.setOnClickListener(Test_NewClass);
		loopThread = new LoopThread();
		loopThread.start();
	}

	LoopThread loopThread;

	// Demonstrates that an anonymous subclass's overridden run() is the one
	// invoked by code inherited from the parent class.
	private OnClickListener Test_NewClass = new OnClickListener() {
		@Override
		public void onClick(View v) {
			Test test = new Test() {
				@Override
				public void run() {
					Toast.makeText(context, "this is override method", Toast.LENGTH_LONG).show();
				}
			};
			test.run2();
		}
	};

	// Sleep on a worker thread, then notify the UI thread through the Handler.
	private OnClickListener Test_Handler = new OnClickListener() {
		@Override
		public void onClick(View v) {
			Runnable run = new Runnable() {
				@Override
				public void run() {
					try {
						Thread.sleep(3000);
					} catch (InterruptedException e) {
						e.printStackTrace();
					}
					handler.sendEmptyMessage(3);
				}
			};
			Thread thread = new Thread(run);
			thread.start();
		}
	};

	// Update the UI from a worker thread via runOnUiThread() and View.post().
	private OnClickListener Test_UpdateUI = new OnClickListener() {
		@Override
		public void onClick(View v) {
			if (loopThread.handler != null) {
				// The handler is created on the worker thread and may still be
				// null if the button is clicked before LoopThread initializes.
				loopThread.handler.sendEmptyMessage(1);
			}
			Thread thread = new Thread(new Runnable() {
				@Override
				public void run() {
					TestThreadActivity.this.runOnUiThread(new Runnable() {
						@Override
						public void run() {
							Toast.makeText(context, "i am from a new thread", Toast.LENGTH_LONG).show();
						}
					});
					try {
						Thread.sleep(3000);
						test_updateUI_text.post(new Runnable() {
							@Override
							public void run() {
								test_updateUI_text.setText("i am from another thread too");
							}
						});
					} catch (InterruptedException e) {
						e.printStackTrace();
					}
				}
			});
			thread.start();
		}
	};

	@Override
	protected void onResume() {
		super.onResume();
	}

	@Override
	protected void onStart() {
		super.onStart();
	}

	@Override
	protected void onPause() {
		super.onPause();
	}

	@Override
	protected void onStop() {
		super.onStop();
	}

	@Override
	protected void onDestroy() {
		super.onDestroy();
	}

	class Test {
		public Test() {
			this.run();
		}

		public void run() {
			Log.v("Test", "parent");
		}

		public void run2() {
			Toast.makeText(context, "parent method", Toast.LENGTH_LONG).show();
		}
	}

	// A worker thread with its own message queue: Looper.prepare() must run
	// before the Handler is created so the Handler binds to this thread.
	class LoopThread extends Thread {
		public Handler handler;

		@Override
		public void run() {
			Looper.prepare();
			handler = new Handler() {
				@Override
				public void handleMessage(Message mes) {
					// Runs on this worker thread's looper.
					Toast.makeText(context, "i am from loopthread class method", Toast.LENGTH_LONG).show();
				}
			};
			Looper.loop();
		}
	}
}
<file_sep>/20140304/数据库.sql
prompt PL/SQL Developer import file
prompt Created on 2014-03-24 by Administrator
set feedback off
set define off
prompt Disabling triggers for BBG...
alter table BBG disable all triggers;
prompt Disabling triggers for BBGSHTHJL...
alter table BBGSHTHJL disable all triggers;
prompt Disabling triggers for BXMJBXX...
alter table BXMJBXX disable all triggers; prompt Disabling triggers for BCGJH... alter table BCGJH disable all triggers; prompt Disabling triggers for BHTXX... alter table BHTXX disable all triggers; prompt Disabling triggers for BHKJH... alter table BHKJH disable all triggers; prompt Disabling triggers for BJDJH... alter table BJDJH disable all triggers; prompt Disabling triggers for BKPJH... alter table BKPJH disable all triggers; prompt Disabling triggers for BLSXMCB... alter table BLSXMCB disable all triggers; prompt Disabling triggers for BLWWBCGJH... alter table BLWWBCGJH disable all triggers; prompt Disabling triggers for BRY... alter table BRY disable all triggers; prompt Disabling triggers for BZDBB... alter table BZDBB disable all triggers; prompt Disabling triggers for HCGJH... alter table HCGJH disable all triggers; prompt Disabling triggers for HHKJH... alter table HHKJH disable all triggers; prompt Disabling triggers for HHTXX... alter table HHTXX disable all triggers; prompt Disabling triggers for HJDJH... alter table HJDJH disable all triggers; prompt Disabling triggers for HKPJH... alter table HKPJH disable all triggers; prompt Disabling triggers for HXMJBXX... alter table HXMJBXX disable all triggers; prompt Disabling triggers for PMISUSER... alter table PMISUSER disable all triggers; prompt Disabling triggers for SBGX... alter table SBGX disable all triggers; prompt Disabling triggers for SBM... alter table SBM disable all triggers; prompt Disabling triggers for SCD... alter table SCD disable all triggers; prompt Disabling triggers for SJS... alter table SJS disable all triggers; prompt Disabling triggers for SCD_JS... alter table SCD_JS disable all triggers; prompt Disabling triggers for SDMK... alter table SDMK disable all triggers; prompt Disabling triggers for SFILTER... alter table SFILTER disable all triggers; prompt Disabling triggers for SGW... alter table SGW disable all triggers; prompt Disabling triggers for SJDJD... alter table SJDJD disable all triggers; prompt Disabling triggers for SKHXX... alter table SKHXX disable all triggers; prompt Disabling triggers for SWBDW... alter table SWBDW disable all triggers; prompt Disabling triggers for SXTYH... alter table SXTYH disable all triggers; prompt Disabling triggers for SXTYH_JS... alter table SXTYH_JS disable all triggers; prompt Disabling triggers for SYJFK... alter table SYJFK disable all triggers; prompt Disabling triggers for SZCPX... alter table SZCPX disable all triggers; prompt Disabling foreign key constraints for BCGJH... alter table BCGJH disable constraint FK_BCGJH_FK_CGYQ_H_BXMJBXX; prompt Disabling foreign key constraints for BHTXX... alter table BHTXX disable constraint FK_BHTXX_FK_HTXX_X_BXMJBXX; prompt Disabling foreign key constraints for BHKJH... alter table BHKJH disable constraint FK_BHKJH_FK_HKJH_H_BHTXX; alter table BHKJH disable constraint FK_BHKJH_FK_HKYQ_H_BXMJBXX; prompt Disabling foreign key constraints for BJDJH... alter table BJDJH disable constraint FK_BJDJH_FK_JDYQ_H_BXMJBXX; prompt Disabling foreign key constraints for BKPJH... alter table BKPJH disable constraint FK_BKPJH_FK_KPJH_H_BHTXX; alter table BKPJH disable constraint FK_BKPJH_FK_KPYQ_H_BXMJBXX; prompt Disabling foreign key constraints for BLWWBCGJH... alter table BLWWBCGJH disable constraint FK_BLWWBCGJ_REFERENCE_BXMJBXX; prompt Disabling foreign key constraints for SCD_JS... 
alter table SCD_JS disable constraint FK_SCD_JS_FK_CDJS_J_SJS; alter table SCD_JS disable constraint FK_SCD_JS_FK_YHJS_J_SCD; prompt Disabling foreign key constraints for SXTYH... alter table SXTYH disable constraint FK_SXTYH_FK_YH_BMI_SBM; alter table SXTYH disable constraint FK_SXTYH_FK_YH_WBD_SWBDW; prompt Disabling foreign key constraints for SXTYH_JS... alter table SXTYH_JS disable constraint FK_SXTYH_JS_FK_YHJS_J_SJS; alter table SXTYH_JS disable constraint FK_SXTYH_JS_FK_YHJS_Y_SXTYH; prompt Deleting SZCPX... delete from SZCPX; commit; prompt Deleting SYJFK... delete from SYJFK; commit; prompt Deleting SXTYH_JS... delete from SXTYH_JS; commit; prompt Deleting SXTYH... delete from SXTYH; commit; prompt Deleting SWBDW... delete from SWBDW; commit; prompt Deleting SKHXX... delete from SKHXX; commit; prompt Deleting SJDJD... delete from SJDJD; commit; prompt Deleting SGW... delete from SGW; commit; prompt Deleting SFILTER... delete from SFILTER; commit; prompt Deleting SDMK... delete from SDMK; commit; prompt Deleting SCD_JS... delete from SCD_JS; commit; prompt Deleting SJS... delete from SJS; commit; prompt Deleting SCD... delete from SCD; commit; prompt Deleting SBM... delete from SBM; commit; prompt Deleting SBGX... delete from SBGX; commit; prompt Deleting PMISUSER... delete from PMISUSER; commit; prompt Deleting HXMJBXX... delete from HXMJBXX; commit; prompt Deleting HKPJH... delete from HKPJH; commit; prompt Deleting HJDJH... delete from HJDJH; commit; prompt Deleting HHTXX... delete from HHTXX; commit; prompt Deleting HHKJH... delete from HHKJH; commit; prompt Deleting HCGJH... delete from HCGJH; commit; prompt Deleting BZDBB... delete from BZDBB; commit; prompt Deleting BRY... delete from BRY; commit; prompt Deleting BLWWBCGJH... delete from BLWWBCGJH; commit; prompt Deleting BLSXMCB... delete from BLSXMCB; commit; prompt Deleting BKPJH... delete from BKPJH; commit; prompt Deleting BJDJH... delete from BJDJH; commit; prompt Deleting BHKJH... delete from BHKJH; commit; prompt Deleting BHTXX... delete from BHTXX; commit; prompt Deleting BCGJH... delete from BCGJH; commit; prompt Deleting BXMJBXX... delete from BXMJBXX; commit; prompt Deleting BBGSHTHJL... delete from BBGSHTHJL; commit; prompt Deleting BBG... delete from BBG; commit; prompt Loading BBG... insert into BBG (BGID, BGLX, XMID, GZRQ, GZXS, GZDD, GZNR, BGR, BGSJ, ZT, SHR, SHSJ, SHXX, CZXTH, SJBZM) values ('20140324143631635001', '0003', '-1', to_date('24-03-2014', 'dd-mm-yyyy'), 8, 'tyu', 'dfrty', '20140227115137078017', to_timestamp('24-03-2014 14:36:31.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', null, null, null, 'czxth0001', 'sjbzm0001'); insert into BBG (BGID, BGLX, XMID, GZRQ, GZXS, GZDD, GZNR, BGR, BGSJ, ZT, SHR, SHSJ, SHXX, CZXTH, SJBZM) values ('20140324143655100002', '0003', '-1', to_date('24-03-2014', 'dd-mm-yyyy'), 8, 'tyu', 'dftyui', '20140227115137078017', to_timestamp('24-03-2014 14:36:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', null, null, null, 'czxth0001', 'sjbzm0001'); insert into BBG (BGID, BGLX, XMID, GZRQ, GZXS, GZDD, GZNR, BGR, BGSJ, ZT, SHR, SHSJ, SHXX, CZXTH, SJBZM) values ('20140324143739376003', '0003', '-1', to_date('24-03-2014', 'dd-mm-yyyy'), 8, 'tyu', 'dfgh', '20140227115137078017', to_timestamp('24-03-2014 14:37:39.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', null, null, null, 'czxth0001', 'sjbzm0001'); commit; prompt 3 records loaded prompt Loading BBGSHTHJL... prompt Table is empty prompt Loading BXMJBXX... 
insert into BXMJBXX (XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS) values ('20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113319687007', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '57', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()'); insert into BXMJBXX (XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS) values ('20140227141147015050', 'B34635140007', '国家电网客户服务部中心信息系统运维', '客服中心运维', '0006', '20140227113319687007', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '5', '王旭东', '11111111111', 5, 12, 100, 20, '0', '0001', '国家电网客户服务部中心信息系统运维'); insert into BXMJBXX (XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS) values ('20140227141229531052', 'B34635140007', '国家电网客户服务部中心信息系统运维', '客服运维', '0006', '20140227113319687007', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '5', '王旭东', '11111111111', 5, 12, 100, 20, '0', '0001', '国家电网客户服务部中心信息系统运维'); commit; prompt 3 records loaded prompt Loading BCGJH... insert into BCGJH (CGJHID, XMID, HWMC, XH, DW, JSTX, QTXX, SL, DJ, YQDHSJ) values ('20140227162103156089', '20140227141121765048', '笔记本电脑', 'EM6400', '台', '双核1g内存', '3年原厂维保,个人垫付全款的30%', 1, .8, to_date('07-03-2014', 'dd-mm-yyyy')); insert into BCGJH (CGJHID, XMID, HWMC, XH, DW, JSTX, QTXX, SL, DJ, YQDHSJ) values ('20140227162514593091', '20140227141121765048', '操作系统', 'win7', '套', '企业级正版带lisence', '维保人员上门安装,带配置好安全信息', 1, 1.2, to_date('06-03-2014', 'dd-mm-yyyy')); insert into BCGJH (CGJHID, XMID, HWMC, XH, DW, JSTX, QTXX, SL, DJ, YQDHSJ) values ('20140228110320203019', '20140227141121765048', '机架', '8U', '组', '黑色、金属材质', '提供一年维保', 1, .5, to_date('15-02-2014', 'dd-mm-yyyy')); commit; prompt 3 records loaded prompt Loading BHTXX... insert into BHTXX (HTID, XMID, HTMC, HTBH, HTQDSJ, HTE, ERPXMBH, HTLYJFSJ, HTLYJSSJ, KHMC, XSDDH, HTZT, SL, XSJL) values ('20140227143136859054', '20140227141121765048', '国网客服中心信息系统运维', '2014-01-01-yw001', to_date('01-01-2014', 'dd-mm-yyyy'), 48, 'B34635140007', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), '1', '500192876', 'YQD', 6, null); insert into BHTXX (HTID, XMID, HTMC, HTBH, HTQDSJ, HTE, ERPXMBH, HTLYJFSJ, HTLYJSSJ, KHMC, XSDDH, HTZT, SL, XSJL) values ('20140228103510578010', '20140227141121765048', '合同1', 'HT1223', null, 100, 'ERP123', null, null, '28', 'SX111', 'WQD', 6, null); commit; prompt 2 records loaded prompt Loading BHKJH... 
insert into BHKJH (HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT) values ('20140227143852468065', '20140227141121765048', '20140227143136859054', '合同50%履约回款', '日期', to_date('01-07-2014', 'dd-mm-yyyy'), 60, '1'); insert into BHKJH (HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT) values ('20140227143852531067', '20140227141121765048', '20140227143136859054', '合同后期履约回款', '日期', to_date('01-09-2014', 'dd-mm-yyyy'), 30, '1'); insert into BHKJH (HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT) values ('20140227143852531069', '20140227141121765048', '20140227143136859054', '合同完整履约回款', '客户签订履约报告', to_date('31-12-2014', 'dd-mm-yyyy'), 10, '1'); insert into BHKJH (HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT) values ('20140227143852546071', '20140227141121765048', '20140227143136859054', null, null, null, 0, '0'); insert into BHKJH (HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT) values ('20140227143852546073', '20140227141121765048', '20140227143136859054', null, null, null, 0, '0'); insert into BHKJH (HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT) values ('20140228111815609030', '20140227141121765048', '20140228103510578010', '立项阶段', '客户签订进场单', to_date('28-02-2014', 'dd-mm-yyyy'), 30, '1'); insert into BHKJH (HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT) values ('20140228111815640032', '20140227141121765048', '20140228103510578010', '服务履约', '履约报告完成', to_date('01-11-2014', 'dd-mm-yyyy'), 60, '1'); insert into BHKJH (HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT) values ('20140228111815656034', '20140227141121765048', '20140228103510578010', '验收', '项目验收报告', to_date('31-12-2014', 'dd-mm-yyyy'), 10, '1'); insert into BHKJH (HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT) values ('20140228111815656036', '20140227141121765048', '20140228103510578010', null, null, null, 0, '0'); insert into BHKJH (HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT) values ('20140228111815656038', '20140227141121765048', '20140228103510578010', null, null, null, 0, '0'); commit; prompt 10 records loaded prompt Loading BJDJH... insert into BJDJH (JDJHID, XMID, JDMC, JHKSSJ, JHJSSJ, SJKSSJ, SJJSSJ, GZNRJBZ) values ('20140227144046000085', '20140227141121765048', '21', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('28-02-2014', 'dd-mm-yyyy'), null, null, '项目启动,资源进场。'); insert into BJDJH (JDJHID, XMID, JDMC, JHKSSJ, JHJSSJ, SJKSSJ, SJJSSJ, GZNRJBZ) values ('20140228103810187013', '20140227141121765048', '立项阶段', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-01-2014', 'dd-mm-yyyy'), null, null, '为运维项目立项及入场准备。' || chr(13) || '' || chr(10) || '客户签了入场单。'); insert into BJDJH (JDJHID, XMID, JDMC, JHKSSJ, JHJSSJ, SJKSSJ, SJJSSJ, GZNRJBZ) values ('20140228103902000015', '20140227141121765048', '服务实施', to_date('01-02-2014', 'dd-mm-yyyy'), to_date('31-10-2014', 'dd-mm-yyyy'), null, null, '对合同的服务进行履约实施。' || chr(13) || '' || chr(10) || '每个月的服务报告'); insert into BJDJH (JDJHID, XMID, JDMC, JHKSSJ, JHJSSJ, SJKSSJ, SJJSSJ, GZNRJBZ) values ('20140228103945250017', '20140227141121765048', '验收阶段', to_date('01-10-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '完成验收工作。' || chr(13) || '' || chr(10) || '客户签收验收单。'); commit; prompt 4 records loaded prompt Loading BKPJH... 
insert into BKPJH (KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT) values ('20140227143657968057', '20140227141121765048', '20140227143136859054', '0002', 60, to_date('01-07-2014', 'dd-mm-yyyy'), '1'); insert into BKPJH (KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT) values ('20140227143658031059', '20140227141121765048', '20140227143136859054', '0002', 30, to_date('01-09-2014', 'dd-mm-yyyy'), '1'); insert into BKPJH (KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT) values ('20140227143658046061', '20140227141121765048', '20140227143136859054', '0002', 10, to_date('31-12-2014', 'dd-mm-yyyy'), '1'); insert into BKPJH (KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT) values ('20140227143658046063', '20140227141121765048', '20140227143136859054', '0002', 0, null, '0'); insert into BKPJH (KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT) values ('20140228111534484022', '20140227141121765048', '20140228103510578010', '0001', 60, to_date('01-06-2014', 'dd-mm-yyyy'), '1'); insert into BKPJH (KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT) values ('20140228111534515024', '20140227141121765048', '20140228103510578010', '0001', 30, to_date('01-11-2014', 'dd-mm-yyyy'), '1'); insert into BKPJH (KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT) values ('20140228111534515026', '20140227141121765048', '20140228103510578010', '0001', 10, to_date('31-12-2014', 'dd-mm-yyyy'), '1'); insert into BKPJH (KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT) values ('20140228111534515028', '20140227141121765048', '20140228103510578010', '0001', 0, null, '0'); commit; prompt 8 records loaded prompt Loading BLSXMCB... prompt Table is empty prompt Loading BLWWBCGJH... insert into BLWWBCGJH (LWWBCGJHID, XMID, YHID, GW, GWJB, FL, GZL, XQSL, KSSJ, JSSJ, JNYQ, BZ) values ('20140227163848000093', '20140227141121765048', '-1', '0020', '0003', 1500, 20, null, to_date('01-02-2014', 'dd-mm-yyyy'), to_date('20-02-2014', 'dd-mm-yyyy'), '架构设计', '性格内向,为人诚实'); insert into BLWWBCGJH (LWWBCGJHID, XMID, YHID, GW, GWJB, FL, GZL, XQSL, KSSJ, JSSJ, JNYQ, BZ) values ('20140227164156468095', '20140227141121765048', '-1', '0008', '0002', 2000, 20, null, to_date('01-02-2014', 'dd-mm-yyyy'), to_date('20-02-2014', 'dd-mm-yyyy'), null, null); insert into BLWWBCGJH (LWWBCGJHID, XMID, YHID, GW, GWJB, FL, GZL, XQSL, KSSJ, JSSJ, JNYQ, BZ) values ('20140227174401031100', '20140227141229531052', '-1', '0002', '0003', 123, 123, null, to_date('13-02-2014', 'dd-mm-yyyy'), to_date('28-02-2014', 'dd-mm-yyyy'), null, null); insert into BLWWBCGJH (LWWBCGJHID, XMID, YHID, GW, GWJB, FL, GZL, XQSL, KSSJ, JSSJ, JNYQ, BZ) values ('20140228111038062021', '20140227141121765048', '-1', '0008', '0003', 2000, 100, null, to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), 'ITIL服务经理资质', '五年工作经验,211大学毕业。'); insert into BLWWBCGJH (LWWBCGJHID, XMID, YHID, GW, GWJB, FL, GZL, XQSL, KSSJ, JSSJ, JNYQ, BZ) values ('20140228114150031041', '20140227141229531052', '20140228114002500040', '0003', '0002', 0, 20, 1, to_date('01-02-2014', 'dd-mm-yyyy'), to_date('20-02-2014', 'dd-mm-yyyy'), null, null); commit; prompt 5 records loaded prompt Loading BRY... 
insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('20140227130522375021', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('20140227130455515019', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('20140227130508390020', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('20140227130809125022', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('20140227130824468023', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('20140227130834828024', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('20140228114002500040', null, '0003', 0, '0002'); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('3', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('4', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('5', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('6', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('1', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('2', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('7', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('8', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('9', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('10', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('11', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('12', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('13', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('14', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('15', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('16', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('17', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('18', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('19', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('20', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('21', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('22', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('23', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('24', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('25', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('26', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('27', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('28', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('29', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('30', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('31', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('32', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('33', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('34', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('35', null, null, null, null); insert 
into BRY (YHID, SJHM, GW, FL, GWJB) values ('36', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('37', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('38', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('39', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('40', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('41', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('42', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('43', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('44', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('45', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('46', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('47', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('48', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('49', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('50', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('51', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('52', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('53', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('54', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('55', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('56', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('57', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('58', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('59', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('60', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('61', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('62', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('63', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('64', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('65', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('66', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('67', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('68', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('69', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('70', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('71', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('72', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('73', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('74', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('75', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('76', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('77', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('78', null, null, null, null); insert into BRY (YHID, SJHM, GW, 
FL, GWJB) values ('79', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('80', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('81', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('82', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('83', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('84', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('85', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('86', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('87', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('88', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('89', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('90', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('91', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('92', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('93', null, null, null, null); commit; prompt 100 records committed... insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('94', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('95', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('96', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('97', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('98', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('99', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('100', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('101', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('102', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('103', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('104', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('105', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('106', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('107', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('108', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('109', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('110', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('111', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('112', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('113', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('114', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('115', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('116', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('117', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('118', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('119', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('120', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('121', null, null, null, null); 
insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('122', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('123', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('124', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('125', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('126', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('127', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('128', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('129', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('130', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('131', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('132', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('133', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('134', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('135', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('136', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('137', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('138', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('139', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('140', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('141', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('142', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('143', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('144', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('145', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('146', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('147', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('148', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('149', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('150', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('151', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('152', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('153', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('154', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('155', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('156', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('157', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('158', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('159', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('160', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('161', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('162', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('163', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('164', null, 
null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('165', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('166', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('167', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('168', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('169', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('170', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('171', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('172', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('173', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('174', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('175', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('176', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('177', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('178', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('179', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('180', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('181', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('182', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('183', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('184', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('185', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('186', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('187', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('188', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('189', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('190', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('191', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('192', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('193', null, null, null, null); commit; prompt 200 records committed... 
insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('194', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('195', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('196', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('197', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('198', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('199', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('200', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('201', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('202', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('203', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('204', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('205', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('206', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('207', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('208', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('209', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('210', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('211', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('212', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('213', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('214', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('215', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('216', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('217', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('218', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('219', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('220', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('221', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('222', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('223', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('224', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('225', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('226', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('227', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('228', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('229', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('230', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('231', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('232', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('233', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('234', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('235', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('236', null, null, null, 
null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('237', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('238', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('239', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('240', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('241', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('242', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('243', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('244', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('245', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('246', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('247', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('248', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('249', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('250', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('251', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('252', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('253', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('254', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('255', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('256', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('257', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('258', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('259', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('260', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('261', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('262', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('263', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('264', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('265', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('266', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('267', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('268', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('269', null, null, 0, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('270', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('271', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('272', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('273', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('274', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('275', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('276', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('277', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('278', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('279', null, null, null, null); 
insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('280', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('281', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('282', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('283', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('284', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('285', null, null, null, null); insert into BRY (YHID, SJHM, GW, FL, GWJB) values ('286', null, null, null, null); commit; prompt 293 records loaded prompt Loading BZDBB... insert into BZDBB (ZDBBID, PTLX, BBH, WJDX) values (1, 'iphone', '1.1.1.051021_beta', '0.556'); insert into BZDBB (ZDBBID, PTLX, BBH, WJDX) values (2, 'android', '1.1.1.051021_beta', '0.774'); commit; prompt 2 records loaded prompt Loading HCGJH... insert into HCGJH (CGJHHJID, CGJHID, XMID, HWMC, XH, DW, JSTX, QTXX, SL, DJ, YQDHSJ, CZR, CZSJ, CZLX) values ('20140227162103203090', '20140227162103156089', '20140227141121765048', '笔记本电脑', 'EM6400', '台', '双核1g内存', '3年原厂维保,个人垫付全款的30%', 1, .8, to_date('07-03-2014', 'dd-mm-yyyy'), '1', to_date('27-02-2014 16:21:03', 'dd-mm-yyyy hh24:mi:ss'), '0'); insert into HCGJH (CGJHHJID, CGJHID, XMID, HWMC, XH, DW, JSTX, QTXX, SL, DJ, YQDHSJ, CZR, CZSJ, CZLX) values ('20140227162514609092', '20140227162514593091', '20140227141121765048', '操作系统', 'win7', '套', '企业级正版带lisence', '维保人员上门安装,带配置好安全信息', 1, 1.2, to_date('06-03-2014', 'dd-mm-yyyy'), '1', to_date('27-02-2014 16:25:14', 'dd-mm-yyyy hh24:mi:ss'), '0'); insert into HCGJH (CGJHHJID, CGJHID, XMID, HWMC, XH, DW, JSTX, QTXX, SL, DJ, YQDHSJ, CZR, CZSJ, CZLX) values ('20140228092306390004', '20140227162514593091', '20140227141121765048', '操作系统', 'win7', '套', '企业级正版带lisence', '维保人员上门安装,带配置好安全信息', 1, 1.2, to_date('06-03-2014', 'dd-mm-yyyy'), '1', to_date('28-02-2014 09:23:06', 'dd-mm-yyyy hh24:mi:ss'), '1'); insert into HCGJH (CGJHHJID, CGJHID, XMID, HWMC, XH, DW, JSTX, QTXX, SL, DJ, YQDHSJ, CZR, CZSJ, CZLX) values ('20140228110320218020', '20140228110320203019', '20140227141121765048', '机架', '8U', '组', '黑色、金属材质', '提供一年维保', 1, .5, to_date('15-02-2014', 'dd-mm-yyyy'), '217', to_date('28-02-2014 11:03:20', 'dd-mm-yyyy hh24:mi:ss'), '0'); commit; prompt 4 records loaded prompt Loading HHKJH... 
insert into HHKJH (HKJHHJID, HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT, CZR, CZLX, CZSJ) values ('20140227143852515066', '20140227143852468065', '20140227141121765048', '20140227143136859054', '合同50%履约回款', '日期', to_date('01-07-2014', 'dd-mm-yyyy'), 60, '1', '1', '0', to_timestamp('27-02-2014 14:38:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HHKJH (HKJHHJID, HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT, CZR, CZLX, CZSJ) values ('20140227143852531068', '20140227143852531067', '20140227141121765048', '20140227143136859054', '合同后期履约回款', '日期', to_date('01-09-2014', 'dd-mm-yyyy'), 30, '1', '1', '0', to_timestamp('27-02-2014 14:38:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HHKJH (HKJHHJID, HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT, CZR, CZLX, CZSJ) values ('20140227143852546070', '20140227143852531069', '20140227141121765048', '20140227143136859054', '合同完整履约回款', '客户签订履约报告', to_date('31-12-2014', 'dd-mm-yyyy'), 10, '1', '1', '0', to_timestamp('27-02-2014 14:38:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HHKJH (HKJHHJID, HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT, CZR, CZLX, CZSJ) values ('20140227143852546072', '20140227143852546071', '20140227141121765048', '20140227143136859054', null, null, null, 0, '0', '1', '0', to_timestamp('27-02-2014 14:38:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HHKJH (HKJHHJID, HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT, CZR, CZLX, CZSJ) values ('20140227143852546074', '20140227143852546073', '20140227141121765048', '20140227143136859054', null, null, null, 0, '0', '1', '0', to_timestamp('27-02-2014 14:38:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HHKJH (HKJHHJID, HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT, CZR, CZLX, CZSJ) values ('20140228111815640031', '20140228111815609030', '20140227141121765048', '20140228103510578010', '立项阶段', '客户签订进场单', to_date('28-02-2014', 'dd-mm-yyyy'), 30, '1', '217', '0', to_timestamp('28-02-2014 11:18:15.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HHKJH (HKJHHJID, HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT, CZR, CZLX, CZSJ) values ('20140228111815656033', '20140228111815640032', '20140227141121765048', '20140228103510578010', '服务履约', '履约报告完成', to_date('01-11-2014', 'dd-mm-yyyy'), 60, '1', '217', '0', to_timestamp('28-02-2014 11:18:15.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HHKJH (HKJHHJID, HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT, CZR, CZLX, CZSJ) values ('20140228111815656035', '20140228111815656034', '20140227141121765048', '20140228103510578010', '验收', '项目验收报告', to_date('31-12-2014', 'dd-mm-yyyy'), 10, '1', '217', '0', to_timestamp('28-02-2014 11:18:15.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HHKJH (HKJHHJID, HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT, CZR, CZLX, CZSJ) values ('20140228111815656037', '20140228111815656036', '20140227141121765048', '20140228103510578010', null, null, null, 0, '0', '217', '0', to_timestamp('28-02-2014 11:18:15.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HHKJH (HKJHHJID, HKJHID, XMID, HTID, HKJDMC, HKQDBZJNR, JHSJ, HKBL, ZT, CZR, CZLX, CZSJ) values ('20140228111815671039', '20140228111815656038', '20140227141121765048', '20140228103510578010', null, null, null, 0, '0', '217', '0', to_timestamp('28-02-2014 11:18:15.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); commit; prompt 10 records loaded prompt Loading HHTXX... 
insert into HHTXX (HTHJID, HTID, XMID, HTMC, HTBH, HTQDSJ, HTE, ERPXMBH, HTLYJFSJ, HTLYJSSJ, KHMC, CZR, CZSJ, CZLX, XSDDH, HTZT) values ('20140227143136875055', '20140227143136859054', '20140227141121765048', '国网客服中心信息系统运维', '2014-01-01-yw001', to_date('01-01-2014', 'dd-mm-yyyy'), 48, 'B34635140007', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), '1', '1', to_date('27-02-2014 14:31:36', 'dd-mm-yyyy hh24:mi:ss'), '0', '500192876', 'YQD'); insert into HHTXX (HTHJID, HTID, XMID, HTMC, HTBH, HTQDSJ, HTE, ERPXMBH, HTLYJFSJ, HTLYJSSJ, KHMC, CZR, CZSJ, CZLX, XSDDH, HTZT) values ('20140228103510593011', '20140228103510578010', '20140227141121765048', '合同1', 'HT1223', null, 100, 'ERP123', null, null, '28', '217', to_date('28-02-2014 10:35:10', 'dd-mm-yyyy hh24:mi:ss'), '0', 'SX111', 'YQD'); insert into HHTXX (HTHJID, HTID, XMID, HTMC, HTBH, HTQDSJ, HTE, ERPXMBH, HTLYJFSJ, HTLYJSSJ, KHMC, CZR, CZSJ, CZLX, XSDDH, HTZT) values ('20140228103519015012', '20140228103510578010', null, '合同1', 'HT1223', null, 100, 'ERP123', null, null, '28', '217', to_date('28-02-2014 10:35:19', 'dd-mm-yyyy hh24:mi:ss'), '1', 'SX111', 'WQD'); commit; prompt 3 records loaded prompt Loading HJDJH... insert into HJDJH (JDJHHJID, JDJHID, XMID, JDMC, JHKSSJ, JHJSSJ, SJKSSJ, SJJSSJ, GZNRJBZ, CZR, CZLX, CZSJ) values ('20140227144046046086', '20140227144046000085', '20140227141121765048', '21', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('28-02-2014', 'dd-mm-yyyy'), null, null, '项目启动,资源进场。', '1', '0', to_timestamp('27-02-2014 14:40:46.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HJDJH (JDJHHJID, JDJHID, XMID, JDMC, JHKSSJ, JHJSSJ, SJKSSJ, SJJSSJ, GZNRJBZ, CZR, CZLX, CZSJ) values ('20140228092301906003', '20140227144046000085', '20140227141121765048', '21', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('28-02-2014', 'dd-mm-yyyy'), null, null, '项目启动,资源进场。', '1', '1', to_timestamp('28-02-2014 09:23:01.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HJDJH (JDJHHJID, JDJHID, XMID, JDMC, JHKSSJ, JHJSSJ, SJKSSJ, SJJSSJ, GZNRJBZ, CZR, CZLX, CZSJ) values ('20140228103810187014', '20140228103810187013', '20140227141121765048', '立项阶段', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-01-2014', 'dd-mm-yyyy'), null, null, '为运维项目立项及入场准备。' || chr(13) || '' || chr(10) || '客户签了入场单。', '217', '0', to_timestamp('28-02-2014 10:38:10.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HJDJH (JDJHHJID, JDJHID, XMID, JDMC, JHKSSJ, JHJSSJ, SJKSSJ, SJJSSJ, GZNRJBZ, CZR, CZLX, CZSJ) values ('20140228103902000016', '20140228103902000015', '20140227141121765048', '服务实施', to_date('01-02-2014', 'dd-mm-yyyy'), to_date('31-10-2014', 'dd-mm-yyyy'), null, null, '对合同的服务进行履约实施。' || chr(13) || '' || chr(10) || '每个月的服务报告', '217', '0', to_timestamp('28-02-2014 10:39:02.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into HJDJH (JDJHHJID, JDJHID, XMID, JDMC, JHKSSJ, JHJSSJ, SJKSSJ, SJJSSJ, GZNRJBZ, CZR, CZLX, CZSJ) values ('20140228103945265018', '20140228103945250017', '20140227141121765048', '验收阶段', to_date('01-10-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '完成验收工作。' || chr(13) || '' || chr(10) || '客户签收验收单。', '217', '0', to_timestamp('28-02-2014 10:39:45.000000', 'dd-mm-yyyy hh24:mi:ss.ff')); commit; prompt 5 records loaded prompt Loading HKPJH... 
insert into HKPJH (KPJHHJID, KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT, CZR, CZSJ, CZLX) values ('20140227143658015058', '20140227143657968057', '20140227141121765048', '20140227143136859054', '0002', 60, to_date('01-07-2014', 'dd-mm-yyyy'), '1', '1', to_date('27-02-2014 14:36:58', 'dd-mm-yyyy hh24:mi:ss'), '0'); insert into HKPJH (KPJHHJID, KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT, CZR, CZSJ, CZLX) values ('20140227143658046060', '20140227143658031059', '20140227141121765048', '20140227143136859054', '0002', 30, to_date('01-09-2014', 'dd-mm-yyyy'), '1', '1', to_date('27-02-2014 14:36:58', 'dd-mm-yyyy hh24:mi:ss'), '0'); insert into HKPJH (KPJHHJID, KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT, CZR, CZSJ, CZLX) values ('20140227143658046062', '20140227143658046061', '20140227141121765048', '20140227143136859054', '0002', 10, to_date('31-12-2014', 'dd-mm-yyyy'), '1', '1', to_date('27-02-2014 14:36:58', 'dd-mm-yyyy hh24:mi:ss'), '0'); insert into HKPJH (KPJHHJID, KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT, CZR, CZSJ, CZLX) values ('20140227143658046064', '20140227143658046063', '20140227141121765048', '20140227143136859054', '0002', 0, null, '0', '1', to_date('27-02-2014 14:36:58', 'dd-mm-yyyy hh24:mi:ss'), '0'); insert into HKPJH (KPJHHJID, KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT, CZR, CZSJ, CZLX) values ('20140228111534500023', '20140228111534484022', '20140227141121765048', '20140228103510578010', '0001', 60, to_date('01-06-2014', 'dd-mm-yyyy'), '1', '217', to_date('28-02-2014 11:15:34', 'dd-mm-yyyy hh24:mi:ss'), '0'); insert into HKPJH (KPJHHJID, KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT, CZR, CZSJ, CZLX) values ('20140228111534515025', '20140228111534515024', '20140227141121765048', '20140228103510578010', '0001', 30, to_date('01-11-2014', 'dd-mm-yyyy'), '1', '217', to_date('28-02-2014 11:15:34', 'dd-mm-yyyy hh24:mi:ss'), '0'); insert into HKPJH (KPJHHJID, KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT, CZR, CZSJ, CZLX) values ('20140228111534515027', '20140228111534515026', '20140227141121765048', '20140228103510578010', '0001', 10, to_date('31-12-2014', 'dd-mm-yyyy'), '1', '217', to_date('28-02-2014 11:15:34', 'dd-mm-yyyy hh24:mi:ss'), '0'); insert into HKPJH (KPJHHJID, KPJHID, XMID, HTID, KPLX, BL, JHSJ, ZT, CZR, CZSJ, CZLX) values ('20140228111534515029', '20140228111534515028', '20140227141121765048', '20140228103510578010', '0001', 0, null, '0', '217', to_date('28-02-2014 11:15:34', 'dd-mm-yyyy hh24:mi:ss'), '0'); commit; prompt 8 records loaded prompt Loading HXMJBXX... 
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140228095642906008', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '57', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('28-02-2014 09:56:42.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140228095709328009', '20140227141147015050', 'B34635140007', '国家电网客户服务部中心信息系统运维', '客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '5', '王旭东', '11111111111', 5, 12, 100, 20, '0', '0001', '国家电网客户服务部中心信息系统运维', '1', to_timestamp('28-02-2014 09:57:09.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227141121781049', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 5, 12, 100, 20, '0', '0001', '国家电网客户服务部中心信息系统运维', '217', to_timestamp('27-02-2014 14:11:21.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227141147015051', '20140227141147015050', 'B34635140007', '国家电网客户服务部中心信息系统运维', '客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 5, 12, 100, 20, '0', '0001', '国家电网客户服务部中心信息系统运维', '217', to_timestamp('27-02-2014 14:11:47.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227141229531053', '20140227141229531052', 'B34635140007', '国家电网客户服务部中心信息系统运维', '客服运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 5, 12, 100, 20, '0', '0001', '国家电网客户服务部中心信息系统运维', '217', to_timestamp('27-02-2014 14:12:29.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227143513703056', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维', '1', to_timestamp('27-02-2014 14:35:13.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227160122687087', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('27-02-2014 16:01:22.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227160133906088', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('27-02-2014 16:01:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227163944875094', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('27-02-2014 16:39:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227164325234096', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('27-02-2014 16:43:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227164332781097', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('27-02-2014 16:43:32.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227164400437098', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('27-02-2014 16:44:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140227172611625099', '20140227141229531052', 'B34635140007', '国家电网客户服务部中心信息系统运维', '客服运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '1', '王旭东', '11111111111', 5, 12, 100, 20, '0', '0001', '国家电网客户服务部中心信息系统运维', '1', to_timestamp('27-02-2014 17:26:11.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140228092059187000', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '7', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('28-02-2014 09:20:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140228092101640001', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '7', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('28-02-2014 09:21:01.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140228092103562002', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '7', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('28-02-2014 09:21:03.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140228094948281005', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '7', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('28-02-2014 09:49:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140228095039843006', '20140227141121765048', 'B34635140007', '国家电网客户服务部中心信息系统运维', '国网客服中心运维', '0006', '20140227113402953012', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '7', '王旭东', '11111111111', 29, 12, 31.9, 8, '0', '0001', '国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()国家电网客户服务部中心信息系统运维,。、;~!@#¥%……&*()', '1', to_timestamp('28-02-2014 09:50:39.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
insert into HXMJBXX (XMHJID, XMID, XMBH, XMMC, XMJC, XMLX, ZRBM, XMJL, CPX, ZCPX, KSRQ, JSRQ, XSFZR, SQGCS, KHMC, KHFZR, LXFS, YJRS, GQ, RGCB, QTJJFY, SFXCP, XMJB, XMJS, CZR, CZSJ, CZLX) values ('20140228095632218007', '20140227141229531052', 'B34635140007', '国家电网客户服务部中心信息系统运维', '客服运维', '0006', '20140227113319687007', '231', '0004', '18', to_date('01-01-2014', 'dd-mm-yyyy'), to_date('31-12-2014', 'dd-mm-yyyy'), null, null, '5', '王旭东', '11111111111', 5, 12, 100, 20, '0', '0001', '国家电网客户服务部中心信息系统运维', '1', to_timestamp('28-02-2014 09:56:32.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1');
commit;
prompt 19 records loaded
prompt Loading PMISUSER...
prompt Table is empty
prompt Loading SBGX...
insert into SBGX (BGXID, BGXMC, GLBM, ZT) values ('0003', '技术支持', '电力事业部', '0');
insert into SBGX (BGXID, BGXMC, GLBM, ZT) values ('0001', '参与项目', '电力事业部', '1');
insert into SBGX (BGXID, BGXMC, GLBM, ZT) values ('0002', '综合事务', '电力事业部', '0');
insert into SBGX (BGXID, BGXMC, GLBM, ZT) values ('0004', '营销服务', '电力事业部', '0');
commit;
prompt 4 records loaded
prompt Loading SBM...
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113212859000', '综合办公室', null, null, '1', null, null, '0001');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113221250001', '人资部', null, null, '1', null, null, '0002');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113229453002', '财资部', null, null, '1', null, null, '0003');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113240546003', '物资部', null, null, '1', null, null, '0004');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113250843004', '项目管理办公室', null, null, '1', null, '19', '0005');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113302125005', '电力事业部', null, null, '1', null, null, '0006');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113311562006', '系统集成部', null, '0001', '1', null, null, '0007');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113319687007', '运维事业部', null, null, '1', null, '193', '0008');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113326906008', 'ERP事业部', null, null, '1', null, null, '0009');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113335859009', '软件工程部', null, null, '1', null, null, '0010');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113344906010', '研发部', null, null, '1', null, null, '0011');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113354843011', '网通事业部', null, null, '1', null, null, '0012');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113402953012', '产品制造部', null, null, '1', null, '263', '0013');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113412656013', 'GIS事业部', null, null, '1', null, null, '0014');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113420078014', '瑞远公司', null, null, '1', null, null, '0015');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113428000015', '营销部', null, null, '1', null, null, '0016');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227113435921016', '拓展事业部', null, null, '1', null, null, '0017');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227123210234018', '北京信息技术项目部', null, null, '1', null, null, '0018');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131119156025', '文秘分部', null, null, '1', null, null, '00010001');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131134140026', '党群分部', null, null, '1', null, null, '00010002');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131147453027', '物业分部', null, null, '1', null, null, '00010003');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131223625028', '综合管理中心', null, null, '1', null, null, '00160001');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131236750029', '销售一中心', null, null, '1', null, null, '00160002');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131249890030', '销售二中心', null, null, '1', null, null, '00160003');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131305625031', '销售三中心', null, null, '1', null, null, '00160004');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131320562032', '技术支持中心', null, null, '1', null, null, '00160005');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131348671033', '设备服务中心', null, null, '1', null, null, '00090001');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131359390034', '财务服务中心', null, null, '1', null, null, '00090002');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131418625035', '技术服务中心', null, null, '1', null, null, '00090003');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131451968036', '物资服务中心', null, null, '1', null, null, '00090004');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131508250037', '人资服务中心', null, null, '1', null, null, '00090005');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131542593038', '项目服务中心', null, null, '1', null, null, '00090006');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131628015039', '集成服务中心', null, null, '1', null, null, '00070001');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('20140227131650015040', '产品方案中心', null, null, '1', null, null, '00070002');
insert into SBM (BMID, BMMC, BMJC, BMLX, ZT, BZ, BMZR, BMBH) values ('1', '其它', '其它', '1', '1', null, null, '9999');
commit;
prompt 35 records loaded
prompt Loading SCD...
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('20140224151845687000', '0002', '报工管理', 'CDJ', null, 1, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('20140226234056859002', '00020004', '统计报工', 'CDX', '/bui/bggl/tjbg.do?doAction=toTjbgList', 4, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('20140224173512656011', '00020002', '我的报工', 'CDX', '/bui/bggl/wdbg.do?doAction=toWdbgList', 2, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('20140224173532484012', '00020003', '审核报工', 'CDX', '/bui/bggl/shbg.do?doAction=toShbgList', 3, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('20140216101935403002', '99990005', '代码库管理', 'CDX', '/sys/xtgl/dmk.do?doAction=toDmkList', 5, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('1', '9999', '系统管理', 'CDJ', null, 3, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('2', '99990001', '部门管理', 'CDX', '/sys/xtgl/bm.do?doAction=toBmList', 1, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('4', '99990003', '角色管理', 'CDX', '/sys/xtgl/js.do?doAction=toJsList', 3, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('3', '99990002', '用户管理', 'CDX', '/sys/xtgl/xtyh.do?doAction=toXtyhList', 2, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('5', '99990004', '菜单管理', 'CDX', '/sys/xtgl/cd.do?doAction=toCdListWrapper', 4, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('20140216153051753004', '0001', '项目计划管理', 'CDJ', null, 2, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('20140216153310872006', '00010001', '项目信息管理', 'CDX', '/bui/xmqdgl/xmjbxx.do?doAction=toXmjbxxList', 1, null, '1');
insert into SCD (CDID, CDBH, CDMC, CDLX, FWLJ, PX, BZ, ZT) values ('20140224155939864000', '99990006', '历史项目维护', 'CDX', '/bui/xmqdgl/xmjbxx.do?doAction=toLsxmUpdate', 6, null, '1');
commit;
prompt 13 records loaded
prompt Loading SJS...
insert into SJS (JSID, JSMC, JSMS, ZT) values ('20140227135811984041', '配置管理员', '配置管理', '1');
insert into SJS (JSID, JSMC, JSMS, ZT) values ('20140227135825375042', '职员', null, '1');
insert into SJS (JSID, JSMC, JSMS, ZT) values ('1', '管理员角色', null, '1');
commit;
prompt 3 records loaded
prompt Loading SCD_JS...
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140312105720334008', '20140227135811984041', '20140224151845687000');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140312105720350009', '20140227135811984041', '20140224173532484012');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140312105720350010', '20140227135811984041', '20140216153051753004');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140312105720350011', '20140227135811984041', '20140216153310872006');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140312105720350012', '20140227135811984041', '1');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140312105720366013', '20140227135811984041', '3');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912546028', '1', '20140224151845687000');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912546029', '1', '20140224173532484012');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912546030', '1', '20140216153051753004');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912546031', '1', '20140216153310872006');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912546032', '1', '1');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912546033', '1', '2');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912562034', '1', '3');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912562035', '1', '4');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912562036', '1', '5');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912562037', '1', '20140216101935403002');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303120912562038', '1', '20140224155939864000');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303121214796000', '20140227135825375042', '20140224151845687000');
insert into SCD_JS (CD_JSID, JSID, CDID) values ('20140303121214796001', '20140227135825375042', '20140224173532484012');
commit;
prompt 19 records loaded
prompt Loading SDMK...
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140216101636275000', '0001', 'BMLX', '事业部', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140216101715491001', '0002', 'BMLX', '营销部', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140216102425145011', '0003', 'BMLX', '采购部', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140216102432347012', '0004', 'BMLX', '财务部', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140216102437128013', '0005', 'BMLX', '其它', '给超级管理员用', '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115409822000', '0001', 'XMLX', '软件开发', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115419747001', '0002', 'XMLX', '系统集成', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115430601002', '0003', 'XMLX', '系统实施', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115440009003', '0004', 'XMLX', '信息运维', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115448831004', '0005', 'XMLX', '软件评测', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115458504005', '0006', 'XMLX', '产品制造', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115511019006', '0007', 'XMLX', '简单销售', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115520508007', '0008', 'XMLX', '信息监理', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115531882008', '0009', 'XMLX', '科技研究', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115540947009', '0010', 'XMLX', '技术服务', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115552628010', '0001', 'CPX', '信息管理', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115601850011', '0002', 'CPX', '企业信息化', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115609733012', '0003', 'CPX', '信息安全', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115618116013', '0004', 'CPX', '信息系统集成及运维', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115626109014', '0005', 'CPX', '信息通信综合监管', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115634298015', '0006', 'CPX', '通信系统及通信设备', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115643205016', '0007', 'CPX', '通信运营管理及系统集成', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115656013017', '0001', 'XMJB', 'A级', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115706768018', '0002', 'XMJB', 'B级', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115719238019', '0003', 'XMJB', 'C级', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140217115725714020', '0004', 'XMJB', 'D级', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140225160251375000', '0001', 'BGLX', '参与项目', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140225160521625001', '0002', 'BGLX', '综合事务', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140225160531015002', '0003', 'BGLX', '技术支持', null, '1');
insert into SDMK (DMID, DMBH, DMFL, DMMC, DMMX, ZT) values ('20140225160541203003', '0004', 'BGLX', '营销服务', null, '1');
commit;
prompt 30 records loaded
prompt Loading SFILTER...
insert into SFILTER (FILTERID, URI) values ('1', '/sys/login.do');
commit;
prompt 1 records loaded
prompt Loading SGW...
prompt Table is empty
prompt Loading SJDJD...
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('1', '0001', '立项实施');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('2', '0001', '项目上线');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('3', '0001', '项目验收');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('4', '0001', '项目结项');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('5', '0002', '立项实施');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('6', '0002', '项目上线');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('7', '0002', '项目验收');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('8', '0002', '项目结项');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('9', '0003', '立项实施');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('10', '0003', '测试完成');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('11', '0003', '服务确认');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('12', '0003', '项目结项');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('13', '0004', '立项实施');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('14', '0004', '项目上线');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('15', '0004', '项目验收');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('16', '0004', '项目结项');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('17', '0005', '立项实施');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('18', '0005', '项目上线');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('19', '0005', '项目验收');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('20', '0005', '项目结项');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('21', '0006', '立项实施');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('22', '0007', '立项实施');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('23', '0007', '生产完成');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('24', '0007', '完全供货');
insert into SJDJD (JDJDID, XMLX, JDJDMC) values ('25', '0007', '项目结项');
commit;
prompt 25 records loaded
prompt Loading SKHXX...
insert into SKHXX (KHID, KHMC) values ('3', '滁州滁源物资销售有限公司');
insert into SKHXX (KHID, KHMC) values ('5', '安徽省送变电工程公司');
insert into SKHXX (KHID, KHMC) values ('8', '安徽继远电网技术有限责任公司');
insert into SKHXX (KHID, KHMC) values ('13', '福建省电力物资有限公司');
insert into SKHXX (KHID, KHMC) values ('24', '安徽电力贵池供电有限责任公司');
insert into SKHXX (KHID, KHMC) values ('28', '安徽明生电力发展投资集团有限公司');
insert into SKHXX (KHID, KHMC) values ('57', '安徽省电力公司(物流服务中心)');
insert into SKHXX (KHID, KHMC) values ('94', '安徽省电力公司宣城供电公司');
commit;
prompt 8 records loaded
prompt Loading SWBDW...
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('-1', null, null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('1', '上海博辕信息技术服务有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('2', '合肥正恩信息科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('3', '安徽金谷传媒科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('4', '合肥盛鹏软件有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('5', '合肥瑞智电力电子有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('6', '北京柯莱特科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('7', '合肥安风德信息技术有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('8', '武汉纬创纬尊', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('9', '国网信通亿力科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('10', '合肥格瑞特信息科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('11', '合肥达力信息科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('12', '合肥华宇随身软件技术有限公司(合肥华泓)', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('13', '沈阳鸿雨青山', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('14', '合肥永华网络科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('15', '合肥智同', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('16', '合肥和顺信息科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('17', '四川华雁信息产业股份有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('18', '合肥华宜信息技术有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('19', '合肥智畅信息科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('20', '合肥易格网络', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('21', '合肥朗珺信息科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('22', '合肥英泽', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('23', '南京异构科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('24', '武汉鼎元', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('25', '上海宏力达信息技术有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('26', '合肥智合科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('27', '上海新炬网络技术有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('28', '中科软科技股份有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('29', '上海格蒂电力科技股份有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('30', '合肥微卓信息科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('31', '江西双源电力高新技术有限责任公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('32', '安徽谷德', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('33', '安徽飞骏科技', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('34', '杭州天元', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('35', '合肥迪知', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('36', '博雅软件股份有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('37', '山东鲁能软件技术有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('38', '福建畅云安鼎信息科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('39', '合肥龙旭网络', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('40', 'IBM', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('41', '黑龙江傲立信息产业有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('42', '安徽玑成', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('43', '安徽鸿诚', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('44', '广州佳众联科技有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('45', '上海远博', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('46', '普元信息技术股份有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('47', '联迪恒星(南京)信息系统有限公司', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('48', '杭州南悦', null, null, null);
insert into SWBDW (WBDWID, WBDWMC, LXR, LXDH, LXDZ) values ('49', '陕西电力信通公司', null, null, null);
commit;
prompt 50 records loaded
prompt Loading SXTYH...
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('42', '20140227113435921016', '-1', '-1', '徐亮', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('43', '20140227113435921016', '-1', '-1', '叶祥龙', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('44', '20140227113435921016', '-1', '-1', '吴巍', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('45', '20140227113435921016', '-1', '-1', '陶涛', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('46', '20140227113435921016', '-1', '-1', '龚涛', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('47', '20140227113435921016', '-1', '-1', '王泽华', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('48', '20140227113435921016', '-1', '-1', '徐凯', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('49', '20140227113435921016', '-1', '-1', '叶玉兰', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('50', '20140227113435921016', '-1', '-1', '陈澍', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('51', '20140227123210234018', '-1', '-1', '陈瑞祥', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('52', '20140227123210234018', '-1', '-1', '孙镇', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('53', '20140227123210234018', '-1', '-1', '曹冬芳', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('54', '20140227123210234018', '-1', '-1', '任新星', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('55', '20140227123210234018', '-1', '-1', '柳绢花', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('56', '20140227123210234018', '-1', '-1', '舒雯', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('57', '20140227123210234018', '-1', '-1', '袁园', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('58', '20140227113326906008', '-1', '-1', '谢科军', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('59', '20140227113326906008', '-1', '-1', '徐海青', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('60', '20140227113326906008', '-1', '-1', '陈子浩', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('61', '20140227113326906008', '-1', '-1', '金琴', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('62', '20140227113335859009', '-1', '-1', '刘才华', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('63', '20140227113335859009', '-1', '-1', '王慧', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('64', '20140227113335859009', '-1', '-1', '郑磊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('65', '20140227113335859009', '-1', '-1', '胡传胜', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('66', '20140227113335859009', '-1', '-1', '乔雄', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('67', '20140227113335859009', '-1', '-1', '李郭闫', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('68', '20140227113335859009', '-1', '-1', '张传军', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('69', '20140227113335859009', '-1', '-1', '汪少成', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('70', '20140227113335859009', '-1', '-1', '赵志国', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('71', '20140227113335859009', '-1', '-1', '尚守卫', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('72', '20140227113335859009', '-1', '-1', '李志', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('73', '20140227113335859009', '-1', '-1', '程琳', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('74', '20140227113335859009', '-1', '-1', '慈龙生', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('75', '20140227113335859009', '-1', '-1', '陈圣刚', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('76', '20140227113335859009', '-1', '-1', '李昂', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('77', '20140227113335859009', '-1', '-1', '邢栋', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('78', '20140227113335859009', '-1', '-1', '杨德胜', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('79', '20140227113335859009', '-1', '-1', '洪薇', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('80', '20140227113335859009', '-1', '-1', '陈磊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('81', '20140227113335859009', '-1', '-1', '吴红侠', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('86', '20140227113335859009', '-1', '-1', '林胜', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('87', '20140227113335859009', '-1', '-1', '郑斌', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('88', '20140227113344906010', '-1', '-1', '陈峰', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('89', '20140227113344906010', '-1', '-1', '范叶平', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('90', '20140227113344906010', '-1', '-1', '贾林', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('91', '20140227113344906010', '-1', '-1', '孔伟伟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('92', '20140227113344906010', '-1', '-1', '张华强', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('93', '20140227113344906010', '-1', '-1', '袁睿智', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('94', '20140227113344906010', '-1', '-1', '郭瑞祥', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('95', '20140227113344906010', '-1', '-1', '姚庆生', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('96', '20140227113344906010', '-1', '-1', '周盼', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('97', '20140227113344906010', '-1', '-1', '徐远远', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('98', '20140227113344906010', '-1', '-1', '凌武凤', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('99', '20140227113302125005', '-1', '-1', '周逞', '96E79218965EB72C92A549DD5A330112', '1', '部门主任', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('100', '20140227113302125005', '-1', '-1', '汪春燕', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('101', '20140227113302125005', '-1', '-1', '鲍娟娟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('102', '20140227113302125005', '-1', '-1', '吴金淦', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('103', '20140227113302125005', '-1', '-1', '吴立刚', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('104', '20140227113302125005', '-1', '-1', '杨凡', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('105', '20140227113302125005', '-1', '-1', '高杨', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('106', '20140227113302125005', '-1', '-1', '李磊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('107', '20140227113302125005', '-1', '-1', '李闯', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('108', '20140227113302125005', '-1', '-1', '沈放', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('109', '20140227113302125005', '-1', '-1', '丁波', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('110', '20140227113302125005', '-1', '-1', '汪君毅', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('111', '20140227113302125005', '-1', '-1', '万祥虎', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('112', '20140227113302125005', '-1', '-1', '郭刚刚', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('113', '20140227113302125005', '-1', '-1', '董飞', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('114', '20140227113302125005', '-1', '-1', '周伟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('115', '20140227113302125005', '-1', '-1', '牛志', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('116', '20140227113302125005', '-1', '-1', '郭振', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('117', '20140227113302125005', '-1', '-1', '王超', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('118', '20140227113302125005', '-1', '-1', '高超', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('119', '20140227113302125005', '-1', '-1', '施乃恩', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('120', '20140227113302125005', '-1', '-1', '张冠军', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('121', '20140227113302125005', '-1', '-1', '王维佳', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('122', '20140227113302125005', '-1', '-1', '宋飞', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('123', '20140227113302125005', '-1', '-1', '章玉龙', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('124', '20140227113302125005', '-1', '-1', '冯锐涛', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('125', '20140227113302125005', '-1', '-1', '曹斯彤', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('126', '20140227113302125005', '-1', '-1', '刘祥', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('127', '20140227113302125005', '-1', '-1', '徐殿洋', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('128', '20140227113302125005', '-1', '-1', '秦婷', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('129', '20140227113302125005', '-1', '-1', '余丽', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('130', '20140227113302125005', '-1', '-1', '刘啸', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('131', '20140227113302125005', '-1', '-1', '廖祥苏', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('132', '20140227113302125005', '-1', '-1', '夏丽丽', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('133', '20140227113302125005', '-1', '-1', '李杨月', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('134', '20140227113311562006', '-1', '-1', '杜林', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('135', '20140227113311562006', '-1', '-1', '秦浩', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('136', '20140227113311562006', '-1', '-1', '陈真', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('137', '20140227113311562006', '-1', '-1', '陶俊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('138', '20140227113311562006', '-1', '-1', '张倩', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('139', '20140227113311562006', '-1', '-1', '娄盼盼', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('140', '20140227113311562006', '-1', '-1', '张锐', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('141', '20140227113311562006', '-1', '-1', '张盈', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('142', '20140227113311562006', '-1', '-1', '薛伟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('143', '20140227113311562006', '-1', '-1', '陈绪宝', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('144', '20140227113311562006', '-1', '-1', '刘传宝', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('145', '20140227113311562006', '-1', '-1', '胡铮', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
commit;
prompt 100 records committed...
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('146', '20140227113311562006', '-1', '-1', '唐春亮', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('147', '20140227113311562006', '-1', '-1', '倪良稳', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('148', '20140227113311562006', '-1', '-1', '戴航', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('149', '20140227113311562006', '-1', '-1', '蒲强', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('150', '20140227113311562006', '-1', '-1', '藤凤墨', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('151', '20140227113311562006', '-1', '-1', '黄犟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('152', '20140227113311562006', '-1', '-1', '储世华', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('153', '20140227113311562006', '-1', '-1', '张顺仕', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('154', '20140227113311562006', '-1', '-1', '郑彤', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('155', '20140227113311562006', '-1', '-1', '边亮', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('156', '20140227113311562006', '-1', '-1', '乔瑜瑜', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('157', '20140227113311562006', '-1', '-1', '韦思炜', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('158', '20140227113311562006', '-1', '-1', '刘云', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('159', '20140227113311562006', '-1', '-1', '吴杰', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('160', '20140227113311562006', '-1', '-1', '杨春', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('161', '20140227113311562006', '-1', '-1', '孟岩', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('162', '20140227113354843011', '-1', '-1', '王光发', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('163', '20140227113354843011', '-1', '-1', '石锋', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('164', '20140227113354843011', '-1', '-1', '黄逸飞', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('165', '20140227113354843011', '-1', '-1', '沈晓霞', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('166', '20140227113354843011', '-1', '-1', '解广州', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('167', '20140227113354843011', '-1', '-1', '郝令', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('168', '20140227113354843011', '-1', '-1', '张勇', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('169', '20140227113354843011', '-1', '-1', '程亚安', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('20140227115137078017', '20140227113402953012', null, '-1', 'zhangsan', '96E79218965EB72C92A549DD5A330112', '1', null, '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('170', '20140227113354843011', '-1', '-1', '李剑', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('171', '20140227113354843011', '-1', '-1', '陈慧丽', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('172', '20140227113354843011', '-1', '-1', '叶志远', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('173', '20140227113354843011', '-1', '-1', '陈是同', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('174', '20140227113354843011', '-1', '-1', '李志浩', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('175', '20140227113354843011', '-1', '-1', '许先明', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('176', '20140227113354843011', '-1', '-1', '汪玉成', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('177', '20140227113354843011', '-1', '-1', '徐彬', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('178', '20140227113354843011', '-1', '-1', '陈军', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('179', '20140227113354843011', '-1', '-1', '胡恩', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('180', '20140227113354843011', '-1', '-1', '徐逸', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('181', '20140227113354843011', '-1', '-1', '杨阳', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('182', '20140227113354843011', '-1', '-1', '蔡东云', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('183', '20140227113354843011', '-1', '-1', '王红全', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('184', '20140227113354843011', '-1', '-1', '王忠培', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('185', '20140227113354843011', '-1', '-1', '刘智威', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('186', '20140227113354843011', '-1', '-1', '邹海峰', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('187', '20140227113354843011', '-1', '-1', '王卫华', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('188', '20140227113354843011', '-1', '-1', '唐登辉', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('189', '20140227113354843011', '-1', '-1', '陈海钟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('190', '20140227113354843011', '-1', '-1', '汤大伟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('191', '20140227113354843011', '-1', '-1', '李频', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('192', '20140227113354843011', '-1', '-1', '刘宝新', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('193', '20140227113319687007', '-1', '-1', '窦国贤', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('194', '20140227113319687007', '-1', '-1', '刘江', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('195', '20140227113319687007', '-1', '-1', '顾昊旻', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('196', '20140227113319687007', '-1', '-1', '唐莹莹', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('197', '20140227113319687007', '-1', '-1', '匡祯友', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('198', '20140227113319687007', '-1', '-1', '李金凤', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('199', '20140227113319687007', '-1', '-1', '仇晨春', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('200', '20140227113319687007', '-1', '-1', '张有明', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('201', '20140227113319687007', '-1', '-1', '许翔', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('202', '20140227113319687007', '-1', '-1', '华岸俊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
insert into SXTYH (YHID, 
BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('203', '20140227113319687007', '-1', '-1', '杨敏', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('204', '20140227113319687007', '-1', '-1', '陈华', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('205', '20140227113319687007', '-1', '-1', '耿盼盼', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('206', '20140227113319687007', '-1', '-1', '潘艳如', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('207', '20140227113319687007', '-1', '-1', '俞长亮', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('208', '20140227113319687007', '-1', '-1', '李彬', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('209', '20140227113319687007', '-1', '-1', '徐天', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('210', '20140227113319687007', '-1', '-1', '李冬', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('211', '20140227113319687007', '-1', '-1', '万磊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('212', '20140227113319687007', '-1', '-1', '费建章', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('213', '20140227113319687007', '-1', '-1', '余延春', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('214', '20140227113319687007', '-1', '-1', '何文金', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('215', '20140227113319687007', '-1', '-1', '华亮', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('216', '20140227113319687007', '-1', '-1', '程玉亭', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('217', '20140227113319687007', '-1', '-1', '谭贵玲', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('218', '20140227113319687007', '-1', '-1', '俞洋', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('219', '20140227113319687007', '-1', '-1', '吴陶', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('220', '20140227113319687007', '-1', '-1', '许牧晨', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('221', '20140227113319687007', '-1', '-1', '宋静', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('222', '20140227113319687007', 
'-1', '-1', '宋跃明', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('223', '20140227113319687007', '-1', '-1', '陆宏波', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('224', '20140227113319687007', '-1', '-1', '王翌', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('225', '20140227113319687007', '-1', '-1', '谢斌', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('226', '20140227113319687007', '-1', '-1', '袁以友', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('227', '20140227113319687007', '-1', '-1', '吴祥', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('228', '20140227113319687007', '-1', '-1', '张琼尹', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('229', '20140227113319687007', '-1', '-1', '孙磊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('230', '20140227113319687007', '-1', '-1', '宋晓波', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('231', '20140227113319687007', '-1', '-1', '吴刚', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('232', '20140227113319687007', '-1', '-1', '李源泽', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('233', '20140227113319687007', '-1', '-1', '潘学良', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('234', '20140227113319687007', '-1', '-1', '李竞', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('235', '20140227113319687007', '-1', '-1', '陈颢', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('236', '20140227113412656013', '-1', '-1', '魏永', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('237', '20140227113412656013', '-1', '-1', '章敏燕', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('238', '20140227113412656013', '-1', '-1', '刘发年', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('239', '20140227113412656013', '-1', '-1', '程亮亮', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('240', '20140227113412656013', '-1', '-1', '刘涛', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('241', '20140227113412656013', '-1', '-1', '苏坤', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into 
SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('242', '20140227113412656013', '-1', '-1', '梁磊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('243', '20140227113412656013', '-1', '-1', '陶俊2', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('244', '20140227113412656013', '-1', '-1', '章兵', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); commit; prompt 200 records committed... insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('245', '20140227113412656013', '-1', '-1', '李杉懋', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('246', '20140227113412656013', '-1', '-1', '郑春着', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('247', '20140227113412656013', '-1', '-1', '郑飞翔', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('248', '20140227113412656013', '-1', '-1', '王甲甲', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('249', '20140227113412656013', '-1', '-1', '杨彬彬', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('250', '20140227113412656013', '-1', '-1', '李皓', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('251', '20140227113412656013', '-1', '-1', '张为兵', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('252', '20140227113412656013', '-1', '-1', '王舒', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('253', '20140227113412656013', '-1', '-1', '陈彪', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('254', '20140227113412656013', '-1', '-1', '宫玉洁', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('255', '20140227113412656013', '-1', '-1', '隋超', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('256', '20140227113412656013', '-1', '-1', '戴军', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('257', '20140227113412656013', '-1', '-1', '尹淑兰', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('258', '20140227113412656013', '-1', '-1', '马景涛', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('259', '20140227113412656013', '-1', '-1', '范海波', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('260', '20140227113412656013', '-1', '-1', '张利益', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, 
ZT, BZ, FBID) values ('261', '20140227113412656013', '-1', '-1', '石周', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('262', '20140227113412656013', '-1', '-1', '杨磊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('263', '20140227113402953012', '-1', '-1', '程周育', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('264', '20140227113402953012', '-1', '-1', '张磊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('265', '20140227113402953012', '-1', '-1', '王超2', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('266', '20140227113402953012', '-1', '-1', '宋云', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('267', '20140227113402953012', '-1', '-1', '王鹿军', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('268', '20140227113402953012', '-1', '-1', '孙明明', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('269', '20140227113402953012', '-1', '-1', '陶鹏', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('270', '20140227113420078014', '-1', '-1', '夏同飞', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('271', '20140227113420078014', '-1', '-1', '赖伶', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('272', '20140227113420078014', '-1', '-1', '朱海清', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('273', '20140227113420078014', '-1', '-1', '尉双梅', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('274', '20140227113420078014', '-1', '-1', '方丽萍', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('275', '20140227113420078014', '-1', '-1', '郭德红', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('276', '20140227113420078014', '-1', '-1', '杨金凤', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('277', '20140227113420078014', '-1', '-1', '李红燕', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('278', '20140227113420078014', '-1', '-1', '陈勇', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('279', '20140227113420078014', '-1', '-1', '张灿', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('280', '20140227113420078014', '-1', '-1', '张一楠', 
'96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('281', '20140227113420078014', '-1', '-1', '骆海艳', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('282', '20140227113420078014', '-1', '-1', '吴琦', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('283', '20140227113420078014', '-1', '-1', '叶旭', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('284', '20140227113420078014', '-1', '-1', '颜薇薇', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('285', '20140227113420078014', '-1', '-1', '董天宇', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('286', '20140227113420078014', '-1', '-1', '管马舟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('20140227130455515019', '20140227113212859000', null, '-1', '王佩光', '96E79218965EB72C92A549DD5A330112', '1', null, '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('20140227130508390020', '20140227113212859000', null, '-1', '李玉', '96E79218965EB72C92A549DD5A330112', '1', null, '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('20140227130522375021', '20140227113212859000', null, '-1', '周健', '96E79218965EB72C92A549DD5A330112', '1', null, '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('20140227130809125022', '20140227113428000015', null, '-1', '陈瑞祥1', '96E79218965EB72C92A549DD5A330112', '1', null, '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('20140227130824468023', '20140227113428000015', null, '-1', '吴涛', '96E79218965EB72C92A549DD5A330112', '1', null, '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('20140227130834828024', '20140227113428000015', null, '-1', '曹静', '96E79218965EB72C92A549DD5A330112', '1', null, '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('20140228114002500040', '20140227113335859009', null, '13', 'asd', '96E79218965EB72C92A549DD5A330112', '1', null, '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('82', '20140227113335859009', '-1', '-1', '束道胜', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('83', '20140227113335859009', '-1', '-1', '龚卫国', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('84', '20140227113335859009', '-1', '-1', '张平', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('85', '20140227113335859009', '-1', '-1', '马陈锋', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('1', '1', null, null, 'admin', '1A1DC91C907325C69271DDF0C944BC72', '1', null, null); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('2', '20140227113221250001', '-1', 
'-1', '钟美兰', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('3', '20140227113221250001', '-1', '-1', '郑娟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('4', '20140227113221250001', '-1', '-1', '刘红', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('5', '20140227113221250001', '-1', '-1', '李洁', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('6', '20140227113221250001', '-1', '-1', '张捷', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('7', '20140227113221250001', '-1', '-1', '王倩', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('8', '20140227113229453002', '-1', '-1', '张洁', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('9', '20140227113229453002', '-1', '-1', '周俊强', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('10', '20140227113229453002', '-1', '-1', '陈朝霞', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('11', '20140227113229453002', '-1', '-1', '胡琪', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('12', '20140227113229453002', '-1', '-1', '潘文静', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('13', '20140227113229453002', '-1', '-1', '黄奕', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('14', '20140227113229453002', '-1', '-1', '陈秀伟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('15', '20140227113229453002', '-1', '-1', '沈艳', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('16', '20140227113229453002', '-1', '-1', '王路路', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('17', '20140227113229453002', '-1', '-1', '李华超', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('18', '20140227113250843004', '-1', '-1', '程周育02', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('19', '20140227113250843004', '-1', '-1', '章红琴', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('20', '20140227113250843004', '-1', '-1', '戴聿雯', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('21', '20140227113250843004', '-1', '-1', '王文清', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, 
WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('22', '20140227113250843004', '-1', '-1', '林君', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('23', '20140227113250843004', '-1', '-1', '刘淑霞', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('24', '20140227113250843004', '-1', '-1', '穆宇浩', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('25', '20140227113250843004', '-1', '-1', '卓国栋', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('26', '20140227113240546003', '-1', '-1', '陈红梅', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('27', '20140227113240546003', '-1', '-1', '杨劲松', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('28', '20140227113240546003', '-1', '-1', '张雪燕', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('29', '20140227113240546003', '-1', '-1', '沈国汉', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('30', '20140227113240546003', '-1', '-1', '尹明', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('31', '20140227113240546003', '-1', '-1', '孙嫣', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('32', '20140227113240546003', '-1', '-1', '江世鹏', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('33', '20140227113240546003', '-1', '-1', '江磊', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('34', '20140227113240546003', '-1', '-1', '项玲', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('35', '20140227113240546003', '-1', '-1', '黄海燕', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('36', '20140227113435921016', '-1', '-1', '杨栋枢', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('37', '20140227113435921016', '-1', '-1', '赵伟', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('38', '20140227113435921016', '-1', '-1', '李霞', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('39', '20140227113435921016', '-1', '-1', '管放鸣', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('40', '20140227113435921016', '-1', '-1', '安克', '96E79218965EB72C92A549DD5A330112', '1', '无。', '-1'); insert into SXTYH (YHID, BMID, GWID, WBDWID, YHXM, DLKL, ZT, BZ, FBID) values ('41', '20140227113435921016', '-1', '-1', '王颖', 
'96E79218965EB72C92A549DD5A330112', '1', '无。', '-1');
commit;
prompt 294 records loaded
prompt Loading SXTYH_JS...
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('20140227140051187047', '217', '20140227135811984041');
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('1', '1', '1');
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('20140303172640365000', '269', '20140227135825375042');
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('20140303172646928001', '268', '20140227135825375042');
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('20140303172703131002', '266', '20140227135811984041');
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('20140303172803115007', '267', '20140227135825375042');
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('20140303172744584004', '263', '1');
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('20140303172749881005', '264', '20140227135825375042');
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('20140303172754444006', '265', '20140227135825375042');
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('20140304100308953001', '231', '20140227135825375042');
insert into SXTYH_JS (YH_JSID, YHID, JSID) values ('20140304100445140002', '213', '20140227135825375042');
commit;
prompt 11 records loaded
prompt Loading SYJFK...
insert into SYJFK (YJFKID, FBR, LYRQ, YJNR, HFSJ, HFNR, ZT, BZ) values ('20140307173056156000', '266', to_timestamp('07-03-2014 17:30:56.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '请修改界面123', to_timestamp('07-03-2014 17:31:24.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '好的', '1', null);
insert into SYJFK (YJFKID, FBR, LYRQ, YJNR, HFSJ, HFNR, ZT, BZ) values ('20140307173436687001', '268', to_timestamp('07-03-2014 17:34:36.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '好22222', to_timestamp('09-03-2014 10:08:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '好的', '1', null);
insert into SYJFK (YJFKID, FBR, LYRQ, YJNR, HFSJ, HFNR, ZT, BZ) values ('20140307164941781010', '269', to_timestamp('07-03-2014 16:49:41.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '请修改登录界面', to_timestamp('07-03-2014 16:58:09.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '好的', '1', null);
commit;
prompt 3 records loaded
prompt Loading SZCPX...
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('1', '0001', '电网生产管理');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('2', '0001', '电网调度管理');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('3', '0001', '电网营销管理');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('4', '0001', '电网规划管理');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('5', '0001', '电网建设管理');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('6', '0001', '协同办公管理');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('7', '0001', '综合管理');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('8', '0001', '多业务应用集成及集成服务');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('9', '0001', '智能分析与决策');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('10', '0001', '实时数据库及应用');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('11', '0001', '空间地理信息平台及应用');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('12', '0001', '其他信息管理系统');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('13', '0002', '企业资源计划(ERP)');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('14', '0002', '企业信息化咨询服务');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('15', '0003', '信息安全设备及接入系统');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('16', '0003', '信息安全咨询服务');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('17', '0004', '国网信息化及网络设备集招');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('18', '0004', '信息系统运维');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('19', '0004', '数据(容灾)中心建设');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('20', '0004', '云计算应用');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('21', '0004', '其他信息系统集成及服务');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('22', '0005', '信息通信综合监管及咨询服务');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('23', '0005', '综合网管系统');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('24', '0006', '传输网设备');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('25', '0006', '业务网设备');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('26', '0006', '支撑网设备');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('27', '0006', '终端通信接入网设备');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('28', '0007', '通信网络运营和维护');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('29', '0007', '应急指挥及通信系统');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('30', '0007', '视频监控系统');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('31', '0007', '其他通信系统集成');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('32', '0008', '输电环节感知系统');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('33', '0008', '变电环节感知系统');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('34', '0008', '配电环节感知系统');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('35', '0009', '安全类芯片');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('36', '0009', '通信类芯片');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('37', '0009', '控制类芯片');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('38', '0009', '射频标签类芯片');
insert into SZCPX (ZCPXID, CPX, ZCPXMC) values ('39', '0009', '时钟类芯片');
commit;
prompt 39 records loaded
prompt Enabling foreign key constraints for BCGJH...
alter table BCGJH enable constraint FK_BCGJH_FK_CGYQ_H_BXMJBXX;
prompt Enabling foreign key constraints for BHTXX...
alter table BHTXX enable constraint FK_BHTXX_FK_HTXX_X_BXMJBXX;
prompt Enabling foreign key constraints for BHKJH...
alter table BHKJH enable constraint FK_BHKJH_FK_HKJH_H_BHTXX;
alter table BHKJH enable constraint FK_BHKJH_FK_HKYQ_H_BXMJBXX;
prompt Enabling foreign key constraints for BJDJH...
alter table BJDJH enable constraint FK_BJDJH_FK_JDYQ_H_BXMJBXX;
prompt Enabling foreign key constraints for BKPJH...
alter table BKPJH enable constraint FK_BKPJH_FK_KPJH_H_BHTXX;
alter table BKPJH enable constraint FK_BKPJH_FK_KPYQ_H_BXMJBXX;
prompt Enabling foreign key constraints for BLWWBCGJH...
alter table BLWWBCGJH enable constraint FK_BLWWBCGJ_REFERENCE_BXMJBXX;
prompt Enabling foreign key constraints for SCD_JS...
alter table SCD_JS enable constraint FK_SCD_JS_FK_CDJS_J_SJS;
alter table SCD_JS enable constraint FK_SCD_JS_FK_YHJS_J_SCD;
prompt Enabling foreign key constraints for SXTYH...
alter table SXTYH enable constraint FK_SXTYH_FK_YH_BMI_SBM;
alter table SXTYH enable constraint FK_SXTYH_FK_YH_WBD_SWBDW;
prompt Enabling foreign key constraints for SXTYH_JS...
alter table SXTYH_JS enable constraint FK_SXTYH_JS_FK_YHJS_J_SJS;
alter table SXTYH_JS enable constraint FK_SXTYH_JS_FK_YHJS_Y_SXTYH;
prompt Enabling triggers for BBG...
alter table BBG enable all triggers;
prompt Enabling triggers for BBGSHTHJL...
alter table BBGSHTHJL enable all triggers;
prompt Enabling triggers for BXMJBXX...
alter table BXMJBXX enable all triggers;
prompt Enabling triggers for BCGJH...
alter table BCGJH enable all triggers;
prompt Enabling triggers for BHTXX...
alter table BHTXX enable all triggers;
prompt Enabling triggers for BHKJH...
alter table BHKJH enable all triggers;
prompt Enabling triggers for BJDJH...
alter table BJDJH enable all triggers;
prompt Enabling triggers for BKPJH...
alter table BKPJH enable all triggers;
prompt Enabling triggers for BLSXMCB...
alter table BLSXMCB enable all triggers;
prompt Enabling triggers for BLWWBCGJH...
alter table BLWWBCGJH enable all triggers;
prompt Enabling triggers for BRY...
alter table BRY enable all triggers;
prompt Enabling triggers for BZDBB...
alter table BZDBB enable all triggers;
prompt Enabling triggers for HCGJH...
alter table HCGJH enable all triggers;
prompt Enabling triggers for HHKJH...
alter table HHKJH enable all triggers;
prompt Enabling triggers for HHTXX...
alter table HHTXX enable all triggers;
prompt Enabling triggers for HJDJH...
alter table HJDJH enable all triggers;
prompt Enabling triggers for HKPJH...
alter table HKPJH enable all triggers;
prompt Enabling triggers for HXMJBXX...
alter table HXMJBXX enable all triggers;
prompt Enabling triggers for PMISUSER...
alter table PMISUSER enable all triggers;
prompt Enabling triggers for SBGX...
alter table SBGX enable all triggers;
prompt Enabling triggers for SBM...
alter table SBM enable all triggers;
prompt Enabling triggers for SCD...
alter table SCD enable all triggers;
prompt Enabling triggers for SJS...
alter table SJS enable all triggers;
prompt Enabling triggers for SCD_JS...
alter table SCD_JS enable all triggers;
prompt Enabling triggers for SDMK...
alter table SDMK enable all triggers;
prompt Enabling triggers for SFILTER...
alter table SFILTER enable all triggers;
prompt Enabling triggers for SGW...
alter table SGW enable all triggers;
prompt Enabling triggers for SJDJD...
alter table SJDJD enable all triggers;
prompt Enabling triggers for SKHXX...
alter table SKHXX enable all triggers;
prompt Enabling triggers for SWBDW...
alter table SWBDW enable all triggers;
prompt Enabling triggers for SXTYH...
alter table SXTYH enable all triggers;
prompt Enabling triggers for SXTYH_JS...
alter table SXTYH_JS enable all triggers;
prompt Enabling triggers for SYJFK...
alter table SYJFK enable all triggers;
prompt Enabling triggers for SZCPX...
alter table SZCPX enable all triggers;
set feedback on
set define on
prompt Done.
<file_sep>/DataServer/src/com/project/action/LoginAction.java
package com.project.action;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.constant.Constant;
import com.opensymphony.xwork2.ActionSupport;
import com.project.po.Student;
import com.project.po.Teacher;
import com.project.service.StudentService;
import com.project.service.TeacherService;

public class LoginAction extends ActionSupport {

    private static final long serialVersionUID = 1L;

    /** 0 = student login, anything else = teacher login */
    private int tag = 0;
    private TeacherService teacherService;
    private StudentService studentService;
    private String name;
    private String password;

    public int getTag() {
        return tag;
    }

    public void setTag(int tag) {
        this.tag = tag;
    }

    public TeacherService getTeacherService() {
        return teacherService;
    }

    public void setTeacherService(TeacherService teacherService) {
        this.teacherService = teacherService;
    }

    public StudentService getStudentService() {
        return studentService;
    }

    public void setStudentService(StudentService studentService) {
        this.studentService = studentService;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public void login() {
        System.out.println(tag + "...." + name + "...." + password);
        int data = -1;
        // NOTE: building HQL by concatenating user input is vulnerable to injection;
        // parameterized queries would be safer.
        if (tag == 0) {
            String hql = "from Student obj where obj.name = '" + name + "' and obj.password = '" + password + "'";
            List<Student> list = this.studentService.getStudentsByHql(hql);
            if (list.size() > 0) {
                data = list.get(0).getId();
            } else {
                data = -1;
            }
        } else {
            String hql = "from Teacher obj where obj.name = '" + name + "' and obj.password = '" + password + "'";
            List<Teacher> list = this.teacherService.getTeachersByHql(hql);
            if (list.size() > 0) {
                data = list.get(0).getId();
            } else {
                data = -1;
            }
        }
        Constant.flush(data);
    }
}
<file_sep>/20140304/PMIS/src/com/jiyuan/pmis/project/SelectProjectsSearchFragment.java
package com.jiyuan.pmis.project;

import java.util.ArrayList;
import java.util.List;

import com.jiyuan.pmis.MainApplication;
import com.jiyuan.pmis.R;
import com.jiyuan.pmis.search.Pinyin4jUtil;
import com.jiyuan.pmis.search.SimpleSearchAdapter;
import com.jiyuan.pmis.sqlite.ProjectInfo;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.v4.app.Fragment;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;

@SuppressLint("DefaultLocale")
public class SelectProjectsSearchFragment extends Fragment implements OnEditorActionListener {

    private Context context;
    private MainApplication app;
    private Activity activity;
    ListView mListView;
    SimpleSearchAdapter mAdapter;
    EditText mtxt;
    List<ProjectInfo> mAllData;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        /**
         * Inflate the layout for this fragment
         */
        this.context = this.getActivity();
        this.activity = this.getActivity();
        app = (MainApplication) this.getActivity().getApplication();
        View v = inflater.inflate(R.layout.select_projects_search_fragment, container, false);
        this.mListView = (ListView) v.findViewById(R.id.select_projects_search_listView);
        this.mtxt = (EditText) v.findViewById(R.id.edittext_select_projects_search_project_name);
        mAdapter = new SimpleSearchAdapter((Activity) this.context);
        mtxt.addTextChangedListener(new TextWatcher() {
            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                // filter the list as the user types; restore the default list when the box is empty
                if (0 != mtxt.getText().length()) {
                    String spnId = mtxt.getText().toString();
                    setSearchResult(spnId);
                } else {
                    setData();
                }
            }
        });
        pd = ProgressDialog.show(context, "数据加载", "数据加载中,请稍后。。。。。。");
        new Thread() {
            @Override
            public void run() {
                // load the full project list off the UI thread, then notify the handler
                mAllData = app.getAllProjectInfos();
                handler.sendEmptyMessage(0);
            }
        }.start();
        return v;
    }

    private ProgressDialog pd;

    private Handler handler = new Handler() {
        @Override
        public void handleMessage(Message mes) {
            setData();
            pd.dismiss();
        }
    };

    public void setData() {
        if (mAllData == null)
            mAllData = new ArrayList<ProjectInfo>();
        mAdapter = new SimpleSearchAdapter((Activity) this.context);
        // show at most the first 30 projects by default
        for (int i = 0; i < mAllData.size() && i < 30; i++) {
            mAdapter.addItem(mAllData.get(i));
        }
        mListView.setOnItemClickListener(onItemClickListener);
        mListView.setAdapter(mAdapter);
    }

    private OnItemClickListener onItemClickListener = new OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
            SimpleSearchAdapter adapter = (SimpleSearchAdapter) arg0.getAdapter();
            ProjectInfo item = (ProjectInfo) adapter.getItem(arg2);
            Intent it = new Intent();
            it.putExtra("xmid", item.getXmid());
            it.putExtra("xmjc", item.getXmjc());
            activity.setResult(Activity.RESULT_OK, it);
            activity.finish();
        }
    };

    @SuppressLint("DefaultLocale")
    public void setSearchResult(String str) {
        mAdapter = new SimpleSearchAdapter((Activity) this.context);
        // match either the project short name or its pinyin initials, case-insensitively
        for (ProjectInfo temp : mAllData) {
            if (temp.getXmjc().toLowerCase().contains(str.toLowerCase())
                    || Pinyin4jUtil.converterToFirstSpell(temp.getXmjc()).toLowerCase().contains(str.toLowerCase())) {
                mAdapter.addItem(temp);
            }
        }
        mListView.setAdapter(mAdapter);
    }

    @Override
    public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
        return false;
    }
}
<file_sep>/DataClient/src/com/stracture/Course.java
package com.stracture;

public class Course {

    private Integer id;          // primary key, auto-increment
    private Teacher teacher;     // teacher (foreign key)
    private ClassRoom classRoom; // classroom (foreign key)
    private String week;         // day of the week
    private String lesson;       // lesson period of the day
    private String content;

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public Teacher getTeacher() {
        return teacher;
    }

    public void setTeacher(Teacher teacher) {
        this.teacher = teacher;
    }

    public ClassRoom getClassRoom() {
        return classRoom;
    }

    public void setClassRoom(ClassRoom classRoom) {
        this.classRoom = classRoom;
    }

    public String getWeek() {
        return week;
    }

    public void setWeek(String week) {
        this.week = week;
    }

    public String getLesson() {
        return lesson;
    }

    public void setLesson(String lesson) {
        this.lesson = lesson;
    }
}
<file_sep>/PMIS/src/com/jiyuan/pmis/TabHostActivity.java
package com.jiyuan.pmis;

import java.util.ArrayList;
import java.util.List;

import org.ksoap2.serialization.PropertyInfo;

import com.google.gson.Gson;
import com.jiyuan.pmis.constant.Constant;
import com.jiyuan.pmis.exception.PmisException;
import com.jiyuan.pmis.fragment.FragmentPage1;
import com.jiyuan.pmis.fragment.FragmentPage2;
import com.jiyuan.pmis.fragment.FragmentPage3;
import com.jiyuan.pmis.fragment.FragmentPage4;
import com.jiyuan.pmis.fragment.FragmentPage5;
import com.jiyuan.pmis.soap.Soap;
import com.jiyuan.pmis.structure.Department;
import com.jiyuan.pmis.structure.ReportType;
import com.jiyuan.pmis.structure.User;

import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentTabHost;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.TabHost.TabSpec;
import android.widget.Toast;

public class TabHostActivity extends FragmentActivity {

    private FragmentTabHost mTabHost;
    private MainApplication app;
    private User user;
    private LayoutInflater layoutInflater;

    @SuppressWarnings("rawtypes")
    private Class fragmentArray[] = { FragmentPage1.class, FragmentPage2.class, FragmentPage3.class,
            FragmentPage4.class, FragmentPage5.class };

    private int mImageViewArray[] = { R.drawable.tab_item1, R.drawable.tab_item2, R.drawable.tab_item3,
            R.drawable.tab_item4, R.drawable.tab_item5 };

    private String mTextviewArray[] = { "新建报工", "我的报工", "审核报工", "设置", "更多" };

    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main_tab_layout);
        this.app = (MainApplication) this.getApplication();
        this.user = app.getUser();
        initView();
        try {
            initDate();
        } catch (PmisException e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_SHORT).show();
        }
    }

    private void initView() {
        layoutInflater = LayoutInflater.from(this);
        mTabHost = (FragmentTabHost) findViewById(android.R.id.tabhost);
        mTabHost.setup(this, getSupportFragmentManager(), R.id.realtabcontent);
        int count = fragmentArray.length;
        for (int i = 0; i < count; i++) {
            TabSpec tabSpec = mTabHost.newTabSpec(mTextviewArray[i]).setIndicator(getTabItemView(i));
            mTabHost.addTab(tabSpec, fragmentArray[i], null);
            mTabHost.getTabWidget().getChildAt(i).setBackgroundResource(R.drawable.selector_tab_background);
        }
    }

    private View getTabItemView(int index) {
        View view = layoutInflater.inflate(R.layout.tab_item_view, null);
        ImageView imageView = (ImageView) view.findViewById(R.id.imageview);
        imageView.setImageResource(mImageViewArray[index]);
        TextView textView = (TextView) view.findViewById(R.id.textview);
        textView.setText(mTextviewArray[index]);
        TextView count = (TextView) view.findViewById(R.id.count);
        // hide the count badge except on tabs 1 and 2 ("我的报工" / "审核报工")
        if (index != 1 && index != 2) {
            count.setVisibility(View.GONE);
        }
        return view;
    }

    private void initDate() throws PmisException {
        try {
            this.getReportTypes();
            this.getDepartments();
        } catch (Exception e) {
            throw new PmisException("获取数据失败!");
        }
    }

    private void getReportTypes() throws Exception {
        final String METHOD_NAME = "getReportTypes";
        Soap soap = new Soap(Constant.report_namespace, METHOD_NAME);
        String ret = null;
        ret = soap.getResponse(Constant.report_url, Constant.report_url + "/" + METHOD_NAME);
        Gson gson = new Gson();
        ReportType[] reportTypes = gson.fromJson(ret, ReportType[].class);
        app.setReportTypes(reportTypes);
    }

    private void getDepartments() throws Exception {
        final String METHOD_NAME = "getDepartments";
        Soap soap = new Soap(Constant.department_namespace, METHOD_NAME);
        String ret = null;
        ret = soap.getResponse(Constant.department_url, Constant.department_url + "/" + METHOD_NAME);
        Gson gson = new Gson();
        Department[] departments = gson.fromJson(ret, Department[].class);
        app.setDepartments(departments);
    }

    private String getReportCount(String zt) throws Exception {
        final String METHOD_NAME = "getReportCount";
        Soap soap = new Soap(Constant.report_namespace, METHOD_NAME);
        List<PropertyInfo> args = new ArrayList<PropertyInfo>();
        PropertyInfo arg0 = new PropertyInfo();
        arg0.setName("yhid");
        arg0.setValue(user.yhid);
        arg0.setType(String.class);
        PropertyInfo arg1 = new PropertyInfo();
        arg1.setName("zt");
        arg1.setValue(zt);
        arg1.setType(String.class);
        args.add(arg0);
        args.add(arg1);
        soap.setPropertys(args);
        String ret = soap.getResponse(Constant.report_url, Constant.report_url + "/" + METHOD_NAME);
        return ret;
    }
}
<file_sep>/DataServer/src/com/project/action/TeacherAction.java
package com.project.action;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.constant.Constant;
import com.opensymphony.xwork2.ActionSupport;
import com.project.po.Teacher;
import com.project.service.TeacherService;

public class TeacherAction extends ActionSupport {

    private static final long serialVersionUID = 7482287939244643007L;
    private TeacherService teacherService;
    private List<Teacher> list;
    private Teacher teacher;

    public Teacher getTeacher() {
        return teacher;
    }

    public void setTeacher(Teacher teacher) {
        this.teacher = teacher;
    }

    public TeacherService getTeacherService() {
        return teacherService;
    }

    public void setTeacherService(TeacherService teacherService) {
        this.teacherService = teacherService;
    }

    public List<Teacher> getList() {
        return list;
    }

    public void setList(List<Teacher> list) {
        this.list = list;
    }

    public void addTeacherAction() {
        // refuse to add a teacher whose name already exists
        String hql = "from Teacher teacher where teacher.name = '" + teacher.getName() + "'";
        if (this.teacherService.getTeachersByHql(hql).size() > 0) {
            Constant.flush("-1");
        } else {
            this.teacherService.saveTeacher(teacher);
            Constant.flush("1");
        }
    }
}
<file_sep>/JYPMIS0310/PMIS/src/com/jiyuan/pmis/structure/ServerInfo.java
package com.jiyuan.pmis.structure;

public class ServerInfo {
    public static final String server_addr_key = "server_addr";
    public static final String server_port_key = "server_port";
    public String server_addr;
    public String server_port;
}
<file_sep>/Android1219/src/com/example/android1219/Provider/TestProviderActivity.java
package com.example.android1219.provider;

import com.example.android1219.R;

import android.app.Activity;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.Toast;

public class TestProviderActivity extends Activity {

    private Context context;
    private Button test_provider, test_updateProvider, test_deleteProvider, test_insertProvider;
    private ContentResolver contentResolver;

    public TestProviderActivity() {
        context = this;
    }

    @Override
    public void onCreate(Bundle b) {
        super.onCreate(b);
        this.setContentView(R.layout.provider_activity);
        this.test_provider = (Button) this.findViewById(R.id.test_provider);
        this.test_provider.setOnClickListener(Test_Provider);
        this.contentResolver = this.getContentResolver();
        this.test_deleteProvider = (Button) this.findViewById(R.id.test_deleteProvider);
        this.test_insertProvider = (Button) this.findViewById(R.id.test_insertProvider);
        this.test_updateProvider = (Button) this.findViewById(R.id.test_updateProvider);
        this.test_deleteProvider.setOnClickListener(this.Test_DeleteProvider);
        this.test_insertProvider.setOnClickListener(this.Test_InsertProvider);
        this.test_updateProvider.setOnClickListener(this.Test_UpdateProvider);
    }

    private OnClickListener Test_DeleteProvider = new OnClickListener() {
        @Override
        public void onClick(View v) {
            // delete the note with id 1 through the content provider
            contentResolver.delete(Uri.parse("content://" + NotesContentProvider.AUTHORITY + "/" + Note.table_name + "/1"), "", null);
        }
    };

    private OnClickListener Test_InsertProvider = new OnClickListener() {
        @Override
        public void onClick(View v) {
            ContentValues values = new ContentValues();
            // values.put(Note.Notes.CONTENT_TYPE, "type");
            values.put(Note.TEXT, "Text");
            values.put(Note.TITLE, "title");
            contentResolver.insert(Uri.parse("content://" + NotesContentProvider.AUTHORITY + "/" + Note.table_name), values);
        }
    };

    private OnClickListener Test_UpdateProvider = new OnClickListener() {
        @Override
        public void onClick(View v) {
            ContentValues values = new ContentValues();
            values.put(Note.TEXT, "update text");
            values.put(Note.TITLE, "update title");
            contentResolver.update(Uri.parse("content://" + NotesContentProvider.AUTHORITY + "/" + Note.table_name), values, null, null);
        }
    };

    private OnClickListener Test_Provider = new OnClickListener() {
        @Override
        public void onClick(View v) {
            Cursor cursor = contentResolver.query(Uri.parse("content://" + NotesContentProvider.AUTHORITY + "/" + Note.table_name),
                    new String[] { Note.TITLE, Note.TEXT }, null, null, null);
            String content = "";
            if (cursor.moveToFirst()) {
                do {
                    content += cursor.getString(0) + "--" + cursor.getString(1) + ";";
                } while (cursor.moveToNext());
            }
            if (content.length() == 0) {
                Toast.makeText(context, "there is no record!", Toast.LENGTH_LONG).show();
            } else
                Toast.makeText(context, content, Toast.LENGTH_LONG).show();
            cursor.close();
        }
    };
}
<file_sep>/PMIS/src/com/jiyuan/pmis/structure/Item.java
package com.jiyuan.pmis.structure;

public class Item {
    public int imageRid;
    public String firstLineText;
    public String secondLineText;
    public String count;
    public boolean showCheckbox;
    public String key;
    public boolean isChecked = false;
}
<file_sep>/JYPMIS0310/JYPMIS/src/com/jypmis/vo/BbgSortVO.java
package com.jypmis.vo;

import java.util.ArrayList;
import java.util.List;

public class BbgSortVO {
    /**
     * Project name
     */
    public String xmmc;
    /**
     * Project id
     */
    public String xmid;
    /**
     * Project short name
     */
    public String xmjc;
    /**
     * Work reports filed under this project
     */
    public List<BbgVO> list = new ArrayList<BbgVO>();
}
<file_sep>/PMIS/src/com/jiyuan/pmis/fragment/FragmentPage1.java
package com.jiyuan.pmis.fragment;

import java.util.ArrayList;
import java.util.List;

import org.ksoap2.serialization.PropertyInfo;

import com.calendar.DateLayout;
import com.google.gson.Gson;
import com.jiyuan.pmis.MainApplication;
import com.jiyuan.pmis.R;
import com.jiyuan.pmis.adapter.SimpleSpinnerAdapter;
import com.jiyuan.pmis.constant.Constant;
import com.jiyuan.pmis.constant.MLocation;
import com.jiyuan.pmis.reports.SelectProjectsActivity;
import com.jiyuan.pmis.soap.Soap;
import com.jiyuan.pmis.sqlite.DatabaseHandler;
import com.jiyuan.pmis.sqlite.ProjectInfo;
import com.jiyuan.pmis.structure.Report;
import com.jiyuan.pmis.structure.ReportType;
import com.jiyuan.pmis.structure.SpinnerItem;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.Button; import android.widget.EditText; import android.widget.Spinner; import android.widget.TextView; import android.widget.Toast; public class FragmentPage1 extends Fragment { private Context context; private Activity activity; private Spinner spinner_add_reports_reports_option; private MainApplication app; static final int DATE_PICKER_ID = 1111; private TextView textview_add_report_date, textview_add_report_project; private EditText edittext_add_report_content, edittext_add_report_working_time, edittext_add_report_position; private Button button_add_report_submit; private Report report; private boolean inProject = false; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View v = inflater.inflate(R.layout.fragment_1, null); this.context = this.getActivity(); this.activity = this.getActivity(); report = new Report(); app = (MainApplication) this.activity.getApplication(); this.initData(v); return v; } public void selectDate(TextView v) { new DateLayout(this.context,this.getView(),v); } public void selectProjects(View v) { // Toast.makeText(this, "this is a test", Toast.LENGTH_SHORT).show(); Intent it = new Intent(context, SelectProjectsActivity.class); startActivityForResult(it, Constant.REQUEST_CODE); } private void initData(View v) { MLocation.getCNBylocation(this.context); String location = MLocation.cityName; this.textview_add_report_date = (TextView) v.findViewById(R.id.textview_add_report_date); this.spinner_add_reports_reports_option = (Spinner) v .findViewById(R.id.spinner_add_reports_reports_option); this.textview_add_report_project = (TextView) v .findViewById(R.id.textView_add_page_project); this.edittext_add_report_content = (EditText) v .findViewById(R.id.edittext_add_report_content); this.edittext_add_report_position = (EditText) v .findViewById(R.id.edittext_add_report_position); this.edittext_add_report_working_time = (EditText) v .findViewById(R.id.edittext_add_report_working_time); this.button_add_report_submit = (Button)v.findViewById(R.id.button_add_report_submit); ReportType[] types = app.getReportTypes(); List<SpinnerItem> values = new ArrayList<SpinnerItem>(); for (int i = 0; i < types.length; i++) { SpinnerItem item = new SpinnerItem(); item.key = types[i].bgxid; item.value = types[i].bgxmc; item.zt = types[i].zt; values.add(item); } SimpleSpinnerAdapter adapter = new SimpleSpinnerAdapter(this.context, R.layout.spinner_item, values); // adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); this.spinner_add_reports_reports_option.setAdapter(adapter); textview_add_report_date.setText(Constant.getCurrentDataString("yyyy-MM-dd")); DatabaseHandler db = new DatabaseHandler(this.context); ProjectInfo info = db.getLastProjectInfo(); if (info != null) { report.xmid = info.getXmid(); this.textview_add_report_project.setText(info.getXmjc()); } this.textview_add_report_date.setOnClickListener(selectDate_listener); this.button_add_report_submit.setOnClickListener(submit_listener); this.spinner_add_reports_reports_option .setOnItemSelectedListener(onItemSelectedListener); this.edittext_add_report_position.setText(location); } /** * 提交 * * @param v */ public void done() { Float f = null; try { f = Float.valueOf(this.edittext_add_report_working_time.getText() .toString()); } catch (Exception e) { 
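			// Float.valueOf throws NumberFormatException on empty or non-numeric
			// input, so bad hours are reported to the user via the Toast below
			// instead of crashing the fragment.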
Toast.makeText(this.context, "工作小时为小于24的数字!", Toast.LENGTH_SHORT).show(); return; } if (f <= 0 || f > 24) { Toast.makeText(this.context, "工作小时为小于24的数字!", Toast.LENGTH_SHORT).show(); return; } if (!inProject) report.xmid = "-1"; report.bgxid = ((SpinnerItem) this.spinner_add_reports_reports_option .getSelectedItem()).key; report.gznr = this.edittext_add_report_content.getText().toString(); report.gzxs = this.edittext_add_report_working_time.getText() .toString(); report.gzdd = this.edittext_add_report_position.getText().toString(); report.gzrq = this.textview_add_report_date.getText().toString(); report.zt = "-1"; Gson gson = new Gson(); final String METHOD_NAME = "saveReport"; Soap soap = new Soap(Constant.report_namespace, METHOD_NAME); List<PropertyInfo> args = new ArrayList<PropertyInfo>(); PropertyInfo arg0 = new PropertyInfo(); arg0.setName("yhid"); arg0.setValue(app.getUser().yhid); arg0.setType(String.class); PropertyInfo arg1 = new PropertyInfo(); arg1.setName("reportStr"); arg1.setValue(gson.toJson(report)); arg1.setType(String.class); Log.v("pmis", gson.toJson(report)); args.add(arg0); args.add(arg1); soap.setPropertys(args); String ret = ""; try { ret = soap.getResponse(Constant.report_url, Constant.report_url + "/" + METHOD_NAME); } catch (Exception e) { // TODO Auto-generated catch block Toast.makeText(this.context, "提交失败!", Toast.LENGTH_SHORT).show(); return; } if (ret.equals(Constant.SUCCESS)) { Toast.makeText(this.context, "提交成功!", Toast.LENGTH_SHORT).show(); } else { Toast.makeText(this.context, "提交失败!", Toast.LENGTH_SHORT).show(); return; } this.activity.finish(); } private Spinner.OnItemSelectedListener onItemSelectedListener = new Spinner.OnItemSelectedListener() { @Override public void onItemSelected(AdapterView<?> arg0, View arg1, int arg2, long arg3) { // TODO Auto-generated method stub SimpleSpinnerAdapter adapter = (SimpleSpinnerAdapter) arg0 .getAdapter(); SpinnerItem item = adapter.getItem(arg2); if (item.zt.equals("0")) { inProject = false; textview_add_report_project.setTextColor(Color.GRAY); } else { inProject = true; textview_add_report_project.setTextColor(Color.BLACK); } textview_add_report_project.setClickable(inProject); // Toast.makeText(context, String.valueOf(arg2), // Toast.LENGTH_SHORT).show(); } @Override public void onNothingSelected(AdapterView<?> arg0) { // TODO Auto-generated method stub // Toast.makeText(context, "wu", Toast.LENGTH_SHORT).show(); SimpleSpinnerAdapter adapter = (SimpleSpinnerAdapter) arg0 .getAdapter(); SpinnerItem item = adapter.getItem(0); if (item.zt.equals("0")) { inProject = false; textview_add_report_project.setTextColor(Color.GRAY); } else { inProject = true; textview_add_report_project.setTextColor(Color.BLACK); } textview_add_report_project.setClickable(inProject); } }; private OnClickListener selectDate_listener = new OnClickListener(){ @Override public void onClick(View arg0) { // TODO Auto-generated method stub selectDate((TextView)arg0); } }; private OnClickListener submit_listener = new OnClickListener(){ @Override public void onClick(View v) { // TODO Auto-generated method stub done(); } }; } <file_sep>/DataServer/src/com/project/dao/impl/ClassRoomDAOImpl.java package com.project.dao.impl; import java.util.List; import org.hibernate.Query; import org.springframework.orm.hibernate3.support.HibernateDaoSupport; import com.project.dao.ClassRoomDAO; import com.project.po.ClassRoom; public class ClassRoomDAOImpl extends HibernateDaoSupport implements ClassRoomDAO{ public void deleteClassRoom(ClassRoom arg0) { // TODO 
Auto-generated method stub this.getHibernateTemplate().delete(arg0); } public void saveClassRoom(ClassRoom arg0) { // TODO Auto-generated method stub this.getHibernateTemplate().save(arg0); } public void updateClassRoom(ClassRoom arg0) { // TODO Auto-generated method stub this.getHibernateTemplate().update(arg0); } @Override public ClassRoom getClassRoomById(Integer arg0) { // TODO Auto-generated method stub return this.getHibernateTemplate().get(ClassRoom.class, arg0); } @SuppressWarnings("unchecked") @Override public List<ClassRoom> getClassRoomsByHql(String hql) { // TODO Auto-generated method stub return this.getHibernateTemplate().find(hql); } @SuppressWarnings("rawtypes") @Override public List getList(String hql) { // TODO Auto-generated method stub Query query=this.getSession().createSQLQuery(hql); return query.list(); } } <file_sep>/Android1219/src/com/example/android1219/Service/SecondActivity.java package com.example.android1219.service; import com.example.android1219.R; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.view.View; import android.widget.Button; import android.widget.Toast; public class SecondActivity extends Activity { private Context context; private Intent iService; public SecondActivity() { // TODO Auto-generated constructor stub context = this; } @Override public void onCreate(Bundle b){ super.onCreate(b); this.setContentView(R.layout.second_activity); Button btn = (Button)this.findViewById(R.id.button2); iService = new Intent(context,BackIntentService.class); btn.setOnClickListener(new Button.OnClickListener(){ @Override public void onClick(View v) { // TODO Auto-generated method stub Toast.makeText(context, "this is a test", Toast.LENGTH_LONG).show(); Intent it = new Intent(); it.putExtra("ret", "2"); setResult(RESULT_OK,it); finish(); } }); Button next = (Button)this.findViewById(R.id.stop); next.setOnClickListener(new Button.OnClickListener(){ @Override public void onClick(View v) { // TODO Auto-generated method stub //Intent service = new Intent(context,BackIntentService.class); iService.setData(Uri.parse("0")); context.startService(iService); } }); } } <file_sep>/20140304/PMIS/src/com/jiyuan/pmis/sqlite/RecentProjectInfo.java package com.jiyuan.pmis.sqlite; public class RecentProjectInfo { final static String key_id = "id"; final static String table_name = "rencentprojectinfo"; final static String key_xmmc = "xmmc"; final static String key_xmjc = "xmjc"; final static String key_xmid = "xmid"; final static String key_identifier = "identifier"; final static String key_yhid = "yhid"; final static String key_sj = "sj"; private int id; private String xmmc; private String xmjc; private String xmid; private String identifier; private String yhid; private String sj; public String getYhid() { return yhid; } public void setYhid(String yhid) { this.yhid = yhid; } public String getSj() { return sj; } public void setSj(String sj) { this.sj = sj; } public RecentProjectInfo(int id, String xmid, String xmjc, String xmmc, String identifier,String yhid,String sj) { // TODO Auto-generated constructor stub this.id = id; this.xmid = xmid; this.xmjc = xmjc; this.xmmc = xmmc; this.identifier = identifier; this.sj = sj; this.yhid = yhid; } public RecentProjectInfo() { } public int getId() { return id; } public void setId(int id) { this.id = id; } public String getXmmc() { return xmmc; } public void setXmmc(String xmmc) { this.xmmc = xmmc; } public String getXmjc() { return 
xmjc; } public void setXmjc(String xmjc) { this.xmjc = xmjc; } public String getXmid() { return xmid; } public void setXmid(String xmid) { this.xmid = xmid; } public String getIdentifier() { return identifier; } public void setIdentifier(String identifier) { this.identifier = identifier; } } <file_sep>/JYPMIS0310/JYPMIS/src/com/jypmis/vo/BxmjbxxVO.java package com.jypmis.vo; public class BxmjbxxVO { public String xmid; public String xmbx; public String xmmc; public String xmjc; public String xmlx; public String zrbm; public String xmjl; public String cpx; public String zcpx; public String ksrq; public String jsrq; public String xsfzr; public String sqgcs; public String khmc; public String khfzr; public String lxfs; public String yjrs; public String gq; public String rgcb; public String qtjjfy; public String sfxcp; public String xmjb; public String xmjs; /* * 同步标示 */ public String identifier; } <file_sep>/JYPMIS0310/JYPMIS/src/com/jypmis/func/report/IReport.java package com.jypmis.func.report; //Generated by MyEclipse import java.util.List; import com.jypmis.po.Bbg; import com.jypmis.po.Sbgx; public interface IReport { /** * 新增报工,手机端函数 * @param userid * @param reportStr报工信息json格式的字符串 * @return */ public String saveReport(String userid, String reportStr); /*** * 删除报工,手机端函数 * @param reportid * @return */ public String deleteReport(String reportid); /*** * 修改报工,手机端函数 * * @param workdate * @param reporttype * @param reportproject * @param reportplace * @param workhour * @param workcontent * @return */ public String updateReport(String yhid,String reportStr,String type); /*** * 获取报工内容,手机端函数 * * @param reportid * @return */ public String showReport(String bgid); /** * 查询报工条数,手机函数 * 用户在登陆主页上显示提示数字 * @param yhid * @param zt * @return */ public int getReportCount(String yhid,String zt); /*** * 查询报工,手机端函数 * 根据输入的搜索条件查询报工 */ public String getReports(String reportSearchFieldStr); /** * 报工类型,手机端函数 * @return */ public String getReportTypes(); /** * 查询报工项,为报工类型提供列表 * @return */ public List<Sbgx> findReportTypes(); /** * 查询报工,根据报工id:bgid * @param bgid * @return */ public List<Bbg> findReportByID(String bgid); /** * 查询报工,根据搜索条件进行查询 * @param reportSearchFieldStr 搜索条件 * @return */ public List<Bbg> findReportBySearchStr(String reportSearchFieldStr); }<file_sep>/DataClient/src/com/dataclient/TabHostActivity.java package com.dataclient; import com.dataclient.fragment.FragmentPage1; import com.dataclient.fragment.FragmentPage2; import com.dataclient.fragment.FragmentPage3; import com.example.dataclient.R; import android.content.Context; import android.content.Intent; import android.graphics.Color; import android.os.Bundle; import android.os.Handler; import android.os.IBinder; import android.os.Message; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentTabHost; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.inputmethod.InputMethodManager; import android.widget.EditText; import android.widget.ImageView; import android.widget.TabHost; import android.widget.TextView; import android.widget.TabHost.TabSpec; import android.widget.Toast; public class TabHostActivity extends FragmentActivity{ private FragmentTabHost mTabHost; private MainApplication app; private Context context; private LayoutInflater layoutInflater; @SuppressWarnings("rawtypes") private Class fragmentArray[] = {FragmentPage1.class,FragmentPage2.class}; private int mImageViewArray[] = {R.drawable.tab_item1,R.drawable.tab_item2}; private String 
mTextviewArray[] = {"课程表", "增加课程"}; private String mCount[] = {"","","",""}; private int mShowCount[] = {View.GONE,View.GONE}; public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main_tab_layout); this.app = (MainApplication) this.getApplication(); if (this.app.getTag()==1){ fragmentArray[1] = FragmentPage3.class; } this.context = this; initView(); } @Override protected void onResume(){ super.onResume(); //this.setCount(); } private void initView(){ layoutInflater = LayoutInflater.from(this); mTabHost = (FragmentTabHost)findViewById(android.R.id.tabhost); mTabHost.setup(this, getSupportFragmentManager(), R.id.realtabcontent); int count = fragmentArray.length; for(int i=0;i<count;i++){ TabSpec tabSpec = mTabHost.newTabSpec(mTextviewArray[i]).setIndicator(getTabItemView(i)); mTabHost.addTab(tabSpec, fragmentArray[i], null); mTabHost.getTabWidget().getChildAt(i).setBackgroundResource(R.drawable.selector_tab_background); } } private View getTabItemView(int index){ View view = layoutInflater.inflate(R.layout.tab_item_view, null); ImageView imageView = (ImageView) view.findViewById(R.id.imageview); imageView.setImageResource(mImageViewArray[index]); TextView textView = (TextView) view.findViewById(R.id.textview); textView.setText(mTextviewArray[index]); TextView count = (TextView)view.findViewById(R.id.count); count.setVisibility(mShowCount[index]); count.setText(mCount[index]); return view; } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); } @Override public boolean dispatchTouchEvent(MotionEvent ev) { if (ev.getAction() == MotionEvent.ACTION_DOWN) { // 获得当前得到焦点的View,一般情况下就是EditText(特殊情况就是轨迹求或者实体案件会移动焦点) View v = getCurrentFocus(); if (isShouldHideInput(v, ev)) { hideSoftInput(v.getWindowToken()); } } return super.dispatchTouchEvent(ev); } /** * 根据EditText所在坐标和用户点击的坐标相对比,来判断是否隐藏键盘,因为当用户点击EditText时没必要隐藏 * * @param v * @param event * @return */ private boolean isShouldHideInput(View v, MotionEvent event) { if (v != null && (v instanceof EditText)) { int[] l = { 0, 0 }; v.getLocationInWindow(l); int left = l[0], top = l[1], bottom = top + v.getHeight(), right = left + v.getWidth(); if (event.getX() > left && event.getX() < right && event.getY() > top && event.getY() < bottom) { // 点击EditText的事件,忽略它。 return false; } else { return true; } } // 如果焦点不是EditText则忽略,这个发生在视图刚绘制完,第一个焦点不在EditView上,和用户用轨迹球选择其他的焦点 return false; } /** * 多种隐藏软件盘方法的其中一种 * * @param token */ private void hideSoftInput(IBinder token) { if (token != null) { InputMethodManager im = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE); im.hideSoftInputFromWindow(token, InputMethodManager.HIDE_NOT_ALWAYS); } } } <file_sep>/Android1219/src/com/example/android1219/httpclient/TestHttpClientActivity.java package com.example.android1219.httpclient; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.List; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.DefaultHttpClient; import 
org.apache.http.message.BasicNameValuePair; import com.example.android1219.R; import android.app.Activity; import android.app.ProgressDialog; import android.content.Context; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.Toast; public class TestHttpClientActivity extends Activity { private Context context; private Button test_get; @Override protected void onCreate(Bundle b) { super.onCreate(b); context = this; this.setContentView(R.layout.test_httpclient_activity); this.test_get = (Button) this.findViewById(R.id.test_get); this.test_get.setOnClickListener(Test_Get); } private OnClickListener Test_Get = new OnClickListener() { @Override public void onClick(View v) { // TODO Auto-generated method stub String content = getContentFromURL("http://www.baidu.com"); // String content = "testTimeout("http://www.baidu.com"); Toast.makeText(context, content, Toast.LENGTH_SHORT).show(); } }; public void post(View v) { String ret = this.getData(); Toast.makeText(context, ret, Toast.LENGTH_SHORT).show(); } private String getContentFromURL(String url) { HttpClient httpClient = new DefaultHttpClient(); HttpGet httpGet = new HttpGet(url); HttpResponse response; InputStream in; try { response = httpClient.execute(httpGet); if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { HttpEntity entity = response.getEntity(); in = entity.getContent(); BufferedReader reader = new BufferedReader( new InputStreamReader(in)); StringBuilder sb = new StringBuilder(); String line = null; while ((line = reader.readLine()) != null) { sb.append(line + "\n"); } return sb.toString(); } } catch (ClientProtocolException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } return "-1"; } private ProgressDialog pd; private Handler handler = new Handler(){ @Override public void handleMessage(Message mes){ Toast.makeText(context, mes.getData().getString("mes"), Toast.LENGTH_LONG).show(); pd.dismiss(); } }; public void AnscPost(View v) { pd = ProgressDialog.show(context, "数据加载中", "请稍后。。。。。。"); new Thread() { @Override public void run() { String ret = getData(); Bundle b = new Bundle(); b.putString("mes", ret); Message mes = new Message(); mes.setData(b); handler.sendMessage(mes); } }.start(); }; private String getData() { // 创建请求对象 HttpPost post; // 创建客户端对象 HttpClient client; // 创建发送请求的对象 HttpResponse response; // 创建接收返回数据的对象 HttpEntity entity; // 创建流对象 InputStream is; UrlEncodedFormEntity urlEntity; { post = new HttpPost("http://192.168.1.101:8000/SSHProject/getCostomizeHqlData.action"); client = new DefaultHttpClient(); // 参数设置 List<BasicNameValuePair> pairs = new ArrayList<BasicNameValuePair>(); pairs.add(new BasicNameValuePair("name", "llllllllll")); try { // 用UrlEncodedFormEntity来封装List对象 urlEntity = new UrlEncodedFormEntity(pairs); // 设置使用的Entity post.setEntity(urlEntity); try { // 客户端开始向指定的网址发送请求 response = client.execute(post); // 获得请求的Entity if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { entity = response.getEntity(); is = entity.getContent(); // 下面是读取数据的过程 BufferedReader br = new BufferedReader( new InputStreamReader(is)); String line = null; StringBuffer sb = new StringBuffer(); while ((line = br.readLine()) != null) { sb.append(line); } System.out.println(sb.toString()); //Toast.makeText(context, sb.toString(), // Toast.LENGTH_SHORT).show(); return 
sb.toString(); } } catch (ClientProtocolException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } catch (UnsupportedEncodingException e) { // TODO Auto-generated catch block e.printStackTrace(); } } return "error"; } } <file_sep>/DataServer/src/com/project/service/impl/CollectCourseServiceImpl.java package com.project.service.impl; import java.util.List; import com.project.dao.CollectCourseDAO; import com.project.po.CollectCourse; import com.project.service.CollectCourseService; public class CollectCourseServiceImpl implements CollectCourseService { private CollectCourseDAO collectCourseDAO; public void setCollectCourseDAO(CollectCourseDAO collectCourseDAO) { this.collectCourseDAO = collectCourseDAO; } @Override public void saveCollectCourse(CollectCourse arg0) { // TODO Auto-generated method stub collectCourseDAO.saveCollectCourse(arg0); } @Override public void deleteCollectCourse(CollectCourse arg0) { // TODO Auto-generated method stub collectCourseDAO.deleteCollectCourse(arg0); } @Override public void updateCollectCourse(CollectCourse arg0) { // TODO Auto-generated method stub collectCourseDAO.updateCollectCourse(arg0); } @Override public CollectCourse getCollectCourseById(Integer arg0) { // TODO Auto-generated method stub return collectCourseDAO.getCollectCourseById(arg0); } @Override public List<CollectCourse> getCollectCoursesByHql(String hql) { // TODO Auto-generated method stub return collectCourseDAO.getCollectCoursesByHql(hql); } @SuppressWarnings("rawtypes") @Override public List getList(String hql) { // TODO Auto-generated method stub return this.collectCourseDAO.getList(hql); } } <file_sep>/DataClient/src/com/dataclient/fragment/FragmentPage2.java package com.dataclient.fragment; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import com.adapter.SimpleSpinnerAdapter; import com.constant.Constant; import com.dataclient.MainApplication; import com.example.dataclient.R; import com.google.gson.Gson; import com.stracture.Course; import com.stracture.SpinnerItem; import android.app.ProgressDialog; import android.content.Context; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.AdapterView.OnItemSelectedListener; import android.widget.Button; import android.widget.Spinner; import android.widget.TextView; import android.widget.Toast; public class FragmentPage2 extends Fragment { private Context context; private MainApplication app; // private SimpleSpinnerAdapter spinnerApdapter; private Button button_collect_submit; private Spinner spinner_collect_course; private TextView textview_collect_week, textview_collect_lesson, textview_collect_teacher; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View v = inflater.inflate(R.layout.collect, null); this.context = this.getActivity(); this.app = (MainApplication) this.getActivity().getApplication(); this.spinner_collect_course = (Spinner) v .findViewById(R.id.spinner_collect_course); this.spinner_collect_course.setOnItemSelectedListener(item_listener); this.textview_collect_week = (TextView) v .findViewById(R.id.textview_collect_week); this.textview_collect_lesson = (TextView) 
v .findViewById(R.id.textview_collect_lesson); this.textview_collect_teacher = (TextView) v .findViewById(R.id.textview_collect_teacher); this.button_collect_submit = (Button) v .findViewById(R.id.button_collect_submit); this.button_collect_submit.setOnClickListener(listener); this.initData(); return v; } private void initData() { pd = ProgressDialog.show(context, "初始化数据", "请稍后。。。。。。"); new Thread(new Runnable() { @Override public void run() { // TODO Auto-generated method stub String localhost = Constant.addr + ":" + Constant.port; // spandTimeMethod();// 耗时的方法 String url = "http://" + localhost + "/DataServer/listCourses.action"; String ret = Constant.getData(url, null); Message mes = Message.obtain(); Bundle b = new Bundle(); b.putInt("identify", 1); b.putString("data", ret); mes.setData(b); handler.sendMessage(mes); } }).start(); } private Course[] list; private ProgressDialog pd; private Handler handler = new Handler() { @Override public void handleMessage(Message mes) { if (mes.getData().getInt("identify") == 1) { String ret = mes.getData().getString("data"); list = new Gson().fromJson(ret, Course[].class); if (list.length != 0) { List<SpinnerItem> items = new ArrayList<SpinnerItem>(); for (int i = 0; i < list.length; i++) { SpinnerItem item = new SpinnerItem(); item.key = String.valueOf(list[i].getId()); item.value = list[i].getContent(); items.add(item); } SimpleSpinnerAdapter adapter = new SimpleSpinnerAdapter( context, R.layout.spinner_item, items); spinner_collect_course.setAdapter(adapter); setView(list[0]); } } else if (mes.getData().getInt("identify") == 2) { if(mes.getData().getString("data")!=null){ Toast.makeText(context, "提交成功!", Toast.LENGTH_LONG).show(); } else Toast.makeText(context, "提交失败!", Toast.LENGTH_LONG).show(); } pd.dismiss(); } }; private void setView(Course course){ switch (Integer.valueOf(course.getWeek())) { case 1: textview_collect_week.setText("星期一"); break; case 2: textview_collect_week.setText("星期二"); break; case 3: textview_collect_week.setText("星期三"); break; case 4: textview_collect_week.setText("星期四"); break; case 5: textview_collect_week.setText("星期五"); break; } switch (Integer.valueOf(course.getLesson())) { case 1: textview_collect_lesson.setText("第一节课"); break; case 2: textview_collect_lesson.setText("第二节课"); break; case 3: textview_collect_lesson.setText("第三节课"); break; case 4: textview_collect_lesson.setText("第四节课"); break; case 5: textview_collect_lesson.setText("第五节课"); break; case 6: textview_collect_lesson.setText("第六节课"); break; case 7: textview_collect_lesson.setText("第七节课"); break; case 8: textview_collect_lesson.setText("第八节课"); break; } textview_collect_teacher.setText(course.getTeacher() .getName()); } private OnItemSelectedListener item_listener = new OnItemSelectedListener(){ @Override public void onItemSelected(AdapterView<?> arg0, View arg1, int arg2, long arg3) { // TODO Auto-generated method stub setView(list[arg2]); } @Override public void onNothingSelected(AdapterView<?> arg0) { // TODO Auto-generated method stub } }; private OnClickListener listener = new OnClickListener() { @Override public void onClick(View v) { // TODO Auto-generated method stub pd = ProgressDialog.show(context, "正在提交", "请稍后。。。。。。"); new Thread(new Runnable() { @Override public void run() { // TODO Auto-generated method stub String localhost = Constant.addr + ":" + Constant.port; // spandTimeMethod();// 耗时的方法 String url = "http://" + localhost + "/DataServer/addCollectCourse.action"; Map<String, String> pars = new HashMap<String, String>(); 
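					// The parameter keys below use Struts2-style nested property
					// paths: on the server, "collectCourse.student.id" is bound by
					// the params interceptor to collectCourse.getStudent().setId(...),
					// so no manual JSON encoding is needed for the request.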
pars.put("collectCourse.student.id", app.getUserId()); pars.put("collectCourse.course.id", ((SpinnerItem) spinner_collect_course .getSelectedItem()).key); String ret = Constant.getData(url, pars); Message mes = Message.obtain(); Bundle b = new Bundle(); b.putInt("identify", 2); b.putString("data", ret); mes.setData(b); handler.sendMessage(mes); } }).start(); } }; }<file_sep>/PMIS/src/com/jiyuan/pmis/structure/SpinnerItem.java package com.jiyuan.pmis.structure; import android.graphics.Color; public class SpinnerItem { public String key; public String value; public String zt; public int color = Color.BLACK; } <file_sep>/JYPMIS0310/JYPMIS/src/com/jypmis/po/Bbg.java package com.jypmis.po; import java.sql.Date; public class Bbg { // Fields private String bgid; private String bglx; private String xmid; private String xmjc;// 新增的属性,数据库中无 private String gzrq; private Float gzxs; private String gzdd; private String gznr; private String bgr; private String bgsj; private String zt; private String shr; private String shsj; private String shxx; // Constructors /** default constructor */ public Bbg() { } /** minimal constructor */ public Bbg(String bgid) { this.bgid = bgid; } /** full constructor */ public Bbg(String bgid, String bglx, String xmid, String gzrq, Float gzxs, String gzdd, String gznr, String bgr, String bgsj, String zt, String shr, String shsj, String shxx) { this.bgid = bgid; this.bglx = bglx; this.xmid = xmid; this.gzrq = gzrq; this.gzxs = gzxs; this.gzdd = gzdd; this.gznr = gznr; this.bgr = bgr; this.bgsj = bgsj; this.zt = zt; this.shr = shr; this.shsj = shsj; this.shxx = shxx; } // Property accessors public String getBgid() { return this.bgid; } public void setBgid(String bgid) { this.bgid = bgid; } public String getBglx() { return this.bglx; } public void setBglx(String bglx) { this.bglx = bglx; } public String getXmid() { return this.xmid; } public void setXmid(String xmid) { this.xmid = xmid; } public String getGzrq() { return this.gzrq; } public void setGzrq(String gzrq) { this.gzrq = gzrq; } public Float getGzxs() { return this.gzxs; } public void setGzxs(Float gzxs) { this.gzxs = gzxs; } public String getGzdd() { return this.gzdd; } public void setGzdd(String gzdd) { this.gzdd = gzdd; } public String getGznr() { return this.gznr; } public void setGznr(String gznr) { this.gznr = gznr; } public String getBgr() { return this.bgr; } public void setBgr(String bgr) { this.bgr = bgr; } public String getBgsj() { return this.bgsj; } public void setBgsj(String bgsj) { this.bgsj = bgsj; } public String getZt() { return this.zt; } public void setZt(String zt) { this.zt = zt; } public String getShr() { return this.shr; } public void setShr(String shr) { this.shr = shr; } public String getShsj() { return this.shsj; } public void setShsj(String shsj) { this.shsj = shsj; } public String getShxx() { return this.shxx; } public void setShxx(String shxx) { this.shxx = shxx; } public String getXmjc() { return xmjc; } public void setXmjc(String xmjc) { this.xmjc = xmjc; } } <file_sep>/DataClient/src/com/dataclient/CourseActivity.java package com.dataclient; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import com.adapter.SimpleSpinnerAdapter; import com.constant.Constant; import com.example.dataclient.R; import com.google.gson.Gson; import com.stracture.ClassRoom; import com.stracture.Course; import com.stracture.SpinnerItem; import android.app.Activity; import android.app.ProgressDialog; import android.content.Context; import android.os.Bundle; 
import android.os.Handler; import android.os.IBinder; import android.os.Message; import android.view.MotionEvent; import android.view.View; import android.view.View.OnClickListener; import android.view.inputmethod.InputMethodManager; import android.widget.Button; import android.widget.EditText; import android.widget.Spinner; import android.widget.Toast; public class CourseActivity extends Activity { private Context context; private Spinner spinner_course_classRoon,spinner_course_week,spinner_course_lesson; private EditText editText_course_content; private Button button_course_submit; private MainApplication app; private Course sourse; @Override public void onCreate(Bundle b) { super.onCreate(b); this.setContentView(R.layout.course); this.spinner_course_classRoon = (Spinner)this.findViewById(R.id.spinner_course_classRoom); this.spinner_course_lesson = (Spinner)this.findViewById(R.id.spinner_course_lesson); this.spinner_course_week = (Spinner)this.findViewById(R.id.spinner_course_week); this.editText_course_content = (EditText)this.findViewById(R.id.edittext_course_content); this.button_course_submit = (Button)this.findViewById(R.id.button_course_submit); this.button_course_submit.setOnClickListener(listener); this.context = this; this.app = (MainApplication)this.getApplication(); sourse = new Course(); sourse.setId(Integer.valueOf(this.getIntent().getStringExtra("id"))); this.initData(); } private void initData(){ pd = ProgressDialog.show(context, "获取数据中", "请稍后。。。。。。"); new Thread(new Runnable(){ @Override public void run() { // TODO Auto-generated method stub String localhost = Constant.addr+":"+Constant.port; // spandTimeMethod();// 耗时的方法 String url = "http://"+localhost+"/DataServer/showCourse.action"; Map<String,String> pars = new HashMap<String,String>(); pars.put("course.id", String.valueOf(sourse.getId())); String ret = Constant.getData(url, pars); Message mes = Message.obtain(); Bundle b = new Bundle(); b.putString("data", ret); b.putInt("identify", 1); mes.setData(b); handler.sendMessage(mes); } }).start(); ClassRoom[] types = app.getClassRooms(); List<SpinnerItem> values = new ArrayList<SpinnerItem>(); for (int i = 0; i < types.length; i++) { SpinnerItem item = new SpinnerItem(); item.key = String.valueOf(types[i].getId()); item.value = types[i].getName(); values.add(item); } SimpleSpinnerAdapter adapter = new SimpleSpinnerAdapter(context, R.layout.spinner_item, values); // adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); spinner_course_classRoon.setAdapter(adapter); List<SpinnerItem> weeks = new ArrayList<SpinnerItem>(); SpinnerItem item1 = new SpinnerItem(); item1.key = "1"; item1.value = "星期一"; weeks.add(item1); SpinnerItem item2 = new SpinnerItem(); item2.key = "2"; item2.value = "星期二"; weeks.add(item2); SpinnerItem item3 = new SpinnerItem(); item3.key = "3"; item3.value = "星期三"; weeks.add(item3); SpinnerItem item4 = new SpinnerItem(); item4.key = "4"; item4.value = "星期四"; weeks.add(item4); SpinnerItem item5 = new SpinnerItem(); item5.key = "5"; item5.value = "星期五"; weeks.add(item5); SimpleSpinnerAdapter adapter2 = new SimpleSpinnerAdapter(context, R.layout.spinner_item, weeks); // adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); spinner_course_week.setAdapter(adapter2); List<SpinnerItem> lessons = new ArrayList<SpinnerItem>(); SpinnerItem lesson1 = new SpinnerItem(); lesson1.key = "1"; lesson1.value = "第一节课"; lessons.add(lesson1); SpinnerItem lesson2 = new SpinnerItem(); lesson2.key = "2"; lesson2.value = "第二节课"; 
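		// The fixed week/lesson spinner entries built here and below could
		// equally be produced by a loop over an array of labels, e.g. (a sketch,
		// not wired in -- the names array is hypothetical):
		//   String[] names = { "第一节课", "第二节课", /* ... */ "第八节课" };
		//   for (int i = 0; i < names.length; i++) {
		//       SpinnerItem it = new SpinnerItem();
		//       it.key = String.valueOf(i + 1);
		//       it.value = names[i];
		//       lessons.add(it);
		//   }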
lessons.add(lesson2); SpinnerItem lesson3 = new SpinnerItem(); lesson3.key = "3"; lesson3.value = "第三节课"; lessons.add(lesson3); SpinnerItem lesson4 = new SpinnerItem(); lesson4.key = "4"; lesson4.value = "第四节课"; lessons.add(lesson4); SpinnerItem lesson5 = new SpinnerItem(); lesson5.key = "5"; lesson5.value = "第五节课"; lessons.add(lesson5); SpinnerItem lesson6 = new SpinnerItem(); lesson6.key = "6"; lesson6.value = "第六节课"; lessons.add(lesson6); SpinnerItem lesson7 = new SpinnerItem(); lesson7.key = "7"; lesson7.value = "第七节课"; lessons.add(lesson7); SpinnerItem lesson8 = new SpinnerItem(); lesson8.key = "8"; lesson8.value = "第八节课"; lessons.add(lesson8); SimpleSpinnerAdapter adapter3 = new SimpleSpinnerAdapter(context, R.layout.spinner_item, lessons); // adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); spinner_course_lesson.setAdapter(adapter3); } private OnClickListener listener = new OnClickListener(){ @Override public void onClick(View v) { // TODO Auto-generated method stub final String classRoomid = ((SpinnerItem)spinner_course_classRoon.getSelectedItem()).key; final String week = ((SpinnerItem)spinner_course_week.getSelectedItem()).key; final String lesson = ((SpinnerItem)spinner_course_lesson.getSelectedItem()).key; final String content = editText_course_content.getText().toString(); if (content.length()==0) { Toast.makeText(context, "内容不能为空!", Toast.LENGTH_LONG).show(); return; } //Toast.makeText(context, classRoomid+"..."+week+"..."+lesson+"..."+content, Toast.LENGTH_LONG).show(); pd = ProgressDialog.show(context, "提交中", "请稍后。。。。。。"); new Thread(new Runnable(){ @Override public void run() { // TODO Auto-generated method stub String localhost = Constant.addr+":"+Constant.port; // spandTimeMethod();// 耗时的方法 String url = "http://"+localhost+"/DataServer/updateCourse.action"; Map<String,String> pars = new HashMap<String,String>(); pars.put("course.classRoom.id", classRoomid); pars.put("course.teacher.id", app.getUserId()); pars.put("course.week", week); pars.put("course.lesson", lesson); pars.put("course.content", content); pars.put("course.id", String.valueOf(sourse.getId())); String ret = Constant.getData(url, pars); Message mes = Message.obtain(); Bundle b = new Bundle(); b.putString("data", ret); mes.setData(b); handler.sendMessage(mes); } }).start(); } }; private ProgressDialog pd; private Handler handler = new Handler(){ @Override public void handleMessage(Message mes){ if (mes.getData().getInt("identify")==1){ String ret = mes.getData().getString("data"); Course course = new Gson().fromJson(ret, Course.class); //ClassRoom[] types = app.getClassRooms(); int count = spinner_course_classRoon.getAdapter().getCount(); for(int i=0;i<count;i++){ if(course.getClassRoom().getId()==Integer.valueOf(((SpinnerItem)spinner_course_classRoon.getAdapter().getItem(i)).key)){ spinner_course_classRoon.setSelection(i); } } spinner_course_week.setSelection(Integer.valueOf(course.getWeek())-1); spinner_course_lesson.setSelection(Integer.valueOf(course.getLesson())-1); editText_course_content.setText(course.getContent()); pd.dismiss(); return; } if (mes.getData().getString("data")!=null&&mes.getData().getString("data").equals("1")){ Toast.makeText(context, "提交成功!", Toast.LENGTH_LONG).show(); finish(); } else Toast.makeText(context, "提交失败!", Toast.LENGTH_LONG).show(); pd.dismiss(); } }; /** * 监听touch事件,关闭软键盘 */ @Override public boolean dispatchTouchEvent(MotionEvent ev) { if (ev.getAction() == MotionEvent.ACTION_DOWN) { // 获得当前得到焦点的View,一般情况下就是EditText(特殊情况就是轨迹求或者实体案件会移动焦点) 
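		// (I.e. grab the view that currently holds focus -- normally an
		// EditText -- and hide the soft keyboard when the touch falls outside
		// of its bounds.)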
View v = getCurrentFocus(); if (isShouldHideInput(v, ev)) { hideSoftInput(v.getWindowToken()); } } return super.dispatchTouchEvent(ev); } /** * 根据EditText所在坐标和用户点击的坐标相对比,来判断是否隐藏键盘,因为当用户点击EditText时没必要隐藏 * * @param v * @param event * @return */ private boolean isShouldHideInput(View v, MotionEvent event) { if (v != null && (v instanceof EditText)) { int[] l = { 0, 0 }; v.getLocationInWindow(l); int left = l[0], top = l[1], bottom = top + v.getHeight(), right = left + v.getWidth(); if (event.getX() > left && event.getX() < right && event.getY() > top && event.getY() < bottom) { // 点击EditText的事件,忽略它。 return false; } else { return true; } } // 如果焦点不是EditText则忽略,这个发生在视图刚绘制完,第一个焦点不在EditView上,和用户用轨迹球选择其他的焦点 return false; } /** * 多种隐藏软件盘方法的其中一种 * * @param token */ private void hideSoftInput(IBinder token) { if (token != null) { InputMethodManager im = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE); im.hideSoftInputFromWindow(token, InputMethodManager.HIDE_NOT_ALWAYS); } } } <file_sep>/20140304/PMIS/src/com/jiyuan/pmis/project/SelectProjectsActivity.java package com.jiyuan.pmis.project; import com.jiyuan.pmis.R; import android.content.Context; import android.os.Bundle; import android.os.IBinder; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.view.MotionEvent; import android.view.View; import android.view.inputmethod.InputMethodManager; import android.widget.EditText; import android.widget.RadioButton; import android.widget.RadioGroup; public class SelectProjectsActivity extends FragmentActivity{ private RadioGroup radioGroup; private Fragment selectProjectsDepartmentFragment; private Fragment selectProjectsRecentFragment; private Fragment selectProjectsSearchFragment; @Override protected void onCreate(Bundle b){ super.onCreate(b); this.setContentView(R.layout.activity_select_projects); this.selectProjectsDepartmentFragment = new SelectProjectsDepartmentFragment(); //this.selectProjectsDepartmentFragment.setRetainInstance(false); this.selectProjectsRecentFragment = new SelectProjectsRecentFragment(); //this.selectProjectsRecentFragment.setRetainInstance(false); this.selectProjectsSearchFragment = new SelectProjectsSearchFragment(); //this.selectProjectsSearchFragment.setRetainInstance(false); radioGroup = (RadioGroup)this.findViewById(R.id.radiogroup_select_projects); radioGroup.setOnCheckedChangeListener(listener); RadioButton rb = (RadioButton)this.findViewById(R.id.radiobutton_recent_projects); rb.setChecked(true); } public void back(View v){ //Intent it = new Intent(); //this.setResult(Activity.RESULT_OK,it); this.finish(); } private RadioGroup.OnCheckedChangeListener listener = new RadioGroup.OnCheckedChangeListener(){ @Override public void onCheckedChanged(RadioGroup group, int checkedId) { // TODO Auto-generated method stub Fragment fr; if (checkedId==R.id.radiobutton_department_projects){ fr = selectProjectsDepartmentFragment; } else if (checkedId==R.id.radiobutton_recent_projects){ fr = selectProjectsRecentFragment; } else fr = selectProjectsSearchFragment; selectFrag(fr); } }; public void selectFrag(Fragment fr) { FragmentManager fm = getSupportFragmentManager(); FragmentTransaction fragmentTransaction = fm.beginTransaction(); fragmentTransaction.replace(R.id.fragment_place, fr); fragmentTransaction.commit(); } @Override public boolean dispatchTouchEvent(MotionEvent ev) { if (ev.getAction() == MotionEvent.ACTION_DOWN) 
{ // 获得当前得到焦点的View,一般情况下就是EditText(特殊情况就是轨迹求或者实体案件会移动焦点) View v = getCurrentFocus(); if (isShouldHideInput(v, ev)) { hideSoftInput(v.getWindowToken()); } } return super.dispatchTouchEvent(ev); } /** * 根据EditText所在坐标和用户点击的坐标相对比,来判断是否隐藏键盘,因为当用户点击EditText时没必要隐藏 * * @param v * @param event * @return */ private boolean isShouldHideInput(View v, MotionEvent event) { if (v != null && (v instanceof EditText)) { int[] l = { 0, 0 }; v.getLocationInWindow(l); int left = l[0], top = l[1], bottom = top + v.getHeight(), right = left + v.getWidth(); if (event.getX() > left && event.getX() < right && event.getY() > top && event.getY() < bottom) { // 点击EditText的事件,忽略它。 return false; } else { return true; } } // 如果焦点不是EditText则忽略,这个发生在视图刚绘制完,第一个焦点不在EditView上,和用户用轨迹球选择其他的焦点 return false; } /** * 多种隐藏软件盘方法的其中一种 * * @param token */ private void hideSoftInput(IBinder token) { if (token != null) { InputMethodManager im = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE); im.hideSoftInputFromWindow(token, InputMethodManager.HIDE_NOT_ALWAYS); } } } <file_sep>/JYPMIS0310/JYPMIS/src/com/jypmis/func/login/LoginImpl.java package com.jypmis.func.login; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import com.google.gson.Gson; import com.jypmis.dbconn.OracleDBCONN; import com.jypmis.exception.MyRuntimeException; import com.jypmis.po.Sxtyh; import com.jypmis.vo.SxtyhVO; import com.jypmis.ztools.MD5Util; //Generated by MyEclipse public class LoginImpl implements ILogin { private static final String QUERYUSERBASICINFO = "select yhid,bmid from sxtyh where yhxm=? and dlkl=?"; private static final String QUERYUSERNAMEBYUSERID = "select yhxm from sxtyh where yhid=?"; private static final String QUERYSHRBYDEPID="select bmzr from sbm a,sxtyh b where a.bmid=b.bmid and yhid=?"; private static final String QUERYPROJECTMANAGERBYXMID="select xmjl from bxmjbxx where xmid=?"; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; public String login(String username, String password) { // TODO Auto-generated method stub //System.out.println("客户端传来: " + username + "," + password); List<Sxtyh> listsxtyh = new ArrayList<Sxtyh>(); listsxtyh = findUserBasicInfo(username, password); Sxtyh sxtyh = new Sxtyh(); if (listsxtyh == null || listsxtyh.size() == 0) { //System.out.println("登陆失败"); return "-1"; } else { Iterator<Sxtyh> itsxtyh = listsxtyh.iterator(); while (itsxtyh.hasNext()) { sxtyh = itsxtyh.next(); // System.out.println("用户ID:" + sxtyh.getYhid() + "\t部门ID:" // + sxtyh.getBmid()+new Gson().toJson(sxtyh)); } //System.out.println("登录成功"); SxtyhVO sxtyhvo = new SxtyhVO(); sxtyhvo.yhid = sxtyh.getYhid(); sxtyhvo.bmid = sxtyh.getBmid(); sxtyhvo.shqx = sxtyh.getShqx(); Gson gson = new Gson(); String jsonString = gson.toJson(sxtyhvo); //System.out.println(jsonString); return jsonString; } } /** * 查询用户基本信息,根据username和password * @param username * @param password * @return */ public List<Sxtyh> findUserBasicInfo(String username, String password) { // TODO Auto-generated method stub List<Sxtyh> listsxtyh = new ArrayList<Sxtyh>(); conn = OracleDBCONN.getInstance().getOracleDBCONN(); String dlkl = MD5Util.MD5(password);// 使用MD5对密码加密后,在数据库中查询 Sxtyh sxtyh = new Sxtyh(); try { ps = conn.prepareStatement(QUERYUSERBASICINFO); ps.setString(1, username); ps.setString(2, dlkl); rs = ps.executeQuery(); while (rs.next()) { sxtyh.setYhid(rs.getString("yhid")); 
sxtyh.setBmid(rs.getString("bmid")); sxtyh.setShqx("1"); listsxtyh.add(sxtyh); } } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); throw new MyRuntimeException("查询用户信息失败"); } finally { OracleDBCONN.getInstance().close(conn, ps, rs);// 关闭资源 } return listsxtyh; } /** * 根据用户id查询用户名 * @param userid * @return */ public String findUsernameByUserid(String userid) { // TODO Auto-generated method stub String username=null; conn = OracleDBCONN.getInstance().getOracleDBCONN(); try { ps = conn.prepareStatement(QUERYUSERNAMEBYUSERID); ps.setString(1, userid); rs = ps.executeQuery(); while(rs.next()) { username=rs.getString("yhxm").toString(); } } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); throw new MyRuntimeException("查询姓名失败"); } finally { OracleDBCONN.getInstance().close(conn, ps, rs);// 关闭资源 } return username; } /** * 查询部门主任作为报工时的审核人 * @param userid * @return */ public String findDepManagerbyUserid(String userid) { // TODO Auto-generated method stub //System.out.println("查询部门主任"); String bmzr=null; conn = OracleDBCONN.getInstance().getOracleDBCONN(); try { ps = conn.prepareStatement(QUERYSHRBYDEPID); ps.setString(1,userid); rs = ps.executeQuery(); while(rs.next()) { bmzr=rs.getString("bmzr").toString(); } } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); throw new MyRuntimeException("查询部门主任失败"); } finally { OracleDBCONN.getInstance().close(conn, ps, rs);// 关闭资源 } return bmzr; } /** * 根据项目id查询项目经理 * @param xmid * @return */ public String findProjectManagerByXMID(String xmid) { // TODO Auto-generated method stub String xmjl=null; conn = OracleDBCONN.getInstance().getOracleDBCONN(); try { ps = conn.prepareStatement(QUERYPROJECTMANAGERBYXMID); ps.setString(1, xmid); rs = ps.executeQuery(); while(rs.next()) { xmjl=rs.getString("xmjl").toString(); } } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); throw new MyRuntimeException("查询姓名失败"); } finally { OracleDBCONN.getInstance().close(conn, ps, rs);// 关闭资源 } return xmjl; } //函数测试 public static void main(String []args) { LoginImpl log=new LoginImpl(); System.out.println(log.findUsernameByUserid("2")); System.out.println(log.findDepManagerbyUserid("2")); System.out.println(log.findProjectManagerByXMID("20140217155529928002")); } }<file_sep>/DataServer/src/com/project/service/PhoneService.java package com.project.service; import java.util.List; import com.project.po.Phone; public interface PhoneService { public void savePhone(Phone arg0); public void deletePhone(Phone arg0); public void updatePhone(Phone arg0); public Phone getPhoneById(Integer arg0); public List<Phone> getPhonesByHql(String hql); @SuppressWarnings("rawtypes") public List getList(String hql); } <file_sep>/PMIS/src/com/jiyuan/pmis/fragment/FragmentPage3.java package com.jiyuan.pmis.fragment; import java.util.ArrayList; import java.util.List; import org.ksoap2.serialization.PropertyInfo; import com.google.gson.Gson; import com.jiyuan.pmis.MainApplication; import com.jiyuan.pmis.R; import com.jiyuan.pmis.adapter.SimpleBaseExpandableListAdapter; import com.jiyuan.pmis.constant.Constant; import com.jiyuan.pmis.exception.PmisException; import com.jiyuan.pmis.reports.ReviewReportDetailsActivity; import com.jiyuan.pmis.soap.Soap; import com.jiyuan.pmis.structure.ExpandListItem; import com.jiyuan.pmis.structure.Item; import com.jiyuan.pmis.structure.Project; import com.jiyuan.pmis.structure.Report; import com.jiyuan.pmis.structure.ReportSearchField; import 
com.jiyuan.pmis.structure.ReportSort; import com.jiyuan.pmis.structure.ReportType; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.support.v4.app.Fragment; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Button; import android.widget.ExpandableListView; import android.widget.Toast; public class FragmentPage3 extends Fragment implements OnClickListener{ private ExpandableListView review_reports_listView; //private Button button_review_reports_select_all,button_review_reports_pass,button_review_reports_refuse; private Context context; private Activity activity; private MainApplication app; private Project project; private boolean selectedAll = false; private SimpleBaseExpandableListAdapter expandableadapter; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container,Bundle savedInstanceState) { View v = inflater.inflate(R.layout.fragment_3, null); this.context = this.getActivity(); this.activity = this.getActivity(); this.app = (MainApplication) this.activity.getApplication(); this.review_reports_listView = (ExpandableListView) v .findViewById(R.id.review_reports_listView); this.review_reports_listView.setGroupIndicator(null); this.initData(v); return v; } public void search() { ReportSearchField r = this.getReportSearchField(); List<ExpandListItem> values = listReports(r); this.expandableadapter.setValues(values); this.expandableadapter.notifyDataSetChanged(); } public void pass(View v){ boolean hadChecked = false; //SeparatedListAdapter adapter = (SeparatedListAdapter) this.review_reports_listView.getAdapter(); int count = expandableadapter.getGroupCount(); for(int i=0;i<count;i++){ List<Item> items = expandableadapter.getGroup(i).items; for(int j=0;j<items.size();j++){ //Toast.makeText(this, i+"", Toast.LENGTH_SHORT).show(); Item item = items.get(j); if(item.isChecked){ hadChecked = true; try { Report report = this.showReport(item.key); report.shxx = "无"; report.zt = "1"; this.updateReport(app.getUser().yhid, report,item.firstLineText); //this.search(v); } catch (PmisException e) { // TODO Auto-generated catch block Toast.makeText(this.context, e.getMessage(), Toast.LENGTH_SHORT).show(); } } } } if (!hadChecked) Toast.makeText(this.context, "请选择报工!", Toast.LENGTH_SHORT).show(); else this.search(); } public void refuse(View v){ boolean hadChecked = false; int count = expandableadapter.getGroupCount(); for(int i=0;i<count;i++){ List<Item> items = expandableadapter.getGroup(i).items; for(int j=0;j<items.size();j++){ //Toast.makeText(this, i+"", Toast.LENGTH_SHORT).show(); Item item = items.get(j); if(item.isChecked){ hadChecked = true; try { Report report = this.showReport(item.key); report.shxx = "无"; report.zt = "-1"; this.updateReport(app.getUser().yhid, report,item.firstLineText); //this.search(v); } catch (PmisException e) { // TODO Auto-generated catch block Toast.makeText(this.context, e.getMessage(), Toast.LENGTH_SHORT).show(); } } } } if (!hadChecked) Toast.makeText(this.context, "请选择报工!", Toast.LENGTH_SHORT).show(); else this.search(); } private void selectAll(View v){ int count = expandableadapter.getGroupCount(); for(int i=0;i<count;i++){ List<Item> items = expandableadapter.getGroup(i).items; for(int j=0;j<items.size();j++){ //Toast.makeText(this, i+"", Toast.LENGTH_SHORT).show(); Item item = items.get(j); if(selectedAll){ item.isChecked = 
false;
					((Button) v).setText("全选");
				} else {
					item.isChecked = true;
					((Button) v).setText("取消全选");
				}
			}
		}
		expandableadapter.notifyDataSetChanged();
		selectedAll = !selectedAll;
	}

	private List<ExpandListItem> listReports(ReportSearchField r) {
		List<ReportSort> sorts = new ArrayList<ReportSort>();
		List<ExpandListItem> values = new ArrayList<ExpandListItem>();
		try {
			sorts = this.getReports(r);
		} catch (PmisException e) {
			Toast.makeText(this.context, e.getMessage(), Toast.LENGTH_SHORT).show();
			return values;
		}
		for (int i = 0; i < sorts.size(); i++) {
			ExpandListItem expandListItem = new ExpandListItem();
			List<Report> reports = sorts.get(i).list;
			List<Item> items = new ArrayList<Item>();
			expandListItem.title = sorts.get(i).title;
			expandListItem.count = sorts.get(i).count;
			for (int j = 0; j < reports.size(); j++) {
				Item item = new Item();
				item.key = reports.get(j).bgid;
				item.firstLineText = reports.get(j).gzrq + " " + reports.get(j).bgr + " " + reports.get(j).gzxs + "小时";
				item.secondLineText = reports.get(j).gznr;
				item.showCheckbox = true;
				items.add(item);
			}
			expandListItem.items = items;
			values.add(expandListItem);
		}
		return values;
	}

	private List<ReportSort> getReports(ReportSearchField r) throws PmisException {
		Report[] reports = new Report[]{};
		final String METHOD_NAME = "getReports";
		Soap soap = new Soap(Constant.report_namespace, METHOD_NAME);
		List<PropertyInfo> args = new ArrayList<PropertyInfo>();
		PropertyInfo arg0 = new PropertyInfo();
		arg0.setName("reportSearchFieldStr");
		arg0.setValue(new Gson().toJson(r));
		args.add(arg0);
		soap.setPropertys(args);
		String ret = "";
		try {
			ret = soap.getResponse(Constant.report_url, Constant.report_url + "/" + METHOD_NAME);
		} catch (Exception e) {
			throw new PmisException("获取报工列表失败!");
		}
		try {
			reports = new Gson().fromJson(ret, Report[].class);
		} catch (Exception e) {
			throw new PmisException("当前没有报工!");
		}
		List<Report> listReports = new ArrayList<Report>();
		for (int i = 0; i < reports.length; i++) {
			listReports.add(reports[i]);
		}
		// Group the flat report array by project abbreviation (xmjc).
		List<ReportSort> sorts = new ArrayList<ReportSort>();
		while (listReports.size() > 0) {
			List<Report> list = new ArrayList<Report>();
			Report b = listReports.get(0);
			list.add(b);
			listReports.remove(0);
			int i = 0;
			while (listReports.size() != i) {
				if (b.xmjc.equals(listReports.get(i).xmjc)) {
					list.add(listReports.get(i));
					listReports.remove(i);
					i--;
				}
				i++;
			}
			ReportSort sort = new ReportSort();
			sort.title = b.xmjc;
			sort.list = list;
			sort.count = list.size();
			if (b.xmid.equals("-1")) {
				// Reports without a project are regrouped by report type instead.
				ReportType[] types = app.getReportTypes();
				for (int j = 0; j < types.length; j++) {
					ReportSort typeSort = new ReportSort();
					typeSort.title = types[j].bgxmc;
					List<Report> typeList = new ArrayList<Report>();
					for (int k = 0; k < list.size(); k++) {
						if (types[j].bgxid.equals(list.get(k).bgxid)) {
							typeList.add(list.get(k));
						}
					}
					typeSort.list = typeList;
					typeSort.count = typeList.size();
					// Presumed fix: the original code built typeSort but never
					// added it, so non-project ("-1") reports silently vanished
					// from the list. Empty type groups are skipped.
					if (typeSort.count > 0) {
						sorts.add(typeSort);
					}
				}
			} else {
				sorts.add(sort);
			}
		}
		Log.e("pmis.....", new Gson().toJson(sorts));
		return sorts;
	}

	private void initData(View v) {
		project = new Project();
		project.xmid = "-1";
		project.xmjc = "全部";
		List<ExpandListItem> values = this.listReports(this.getReportSearchField());
		expandableadapter = new SimpleBaseExpandableListAdapter(this.context, values);
		this.review_reports_listView.setAdapter(expandableadapter);
		//this.review_reports_listView.setOnItemClickListener(item_listener);
		this.review_reports_listView.setOnGroupCollapseListener(onGroupCollapseListener);
		this.review_reports_listView.setOnGroupExpandListener(onGroupExpandListener);
		this.review_reports_listView.setOnChildClickListener(onChildClickListener);
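		// The expand/collapse listeners registered above simply call
		// notifyDataSetChanged() so the group indicator drawn by the adapter
		// stays in sync with each group's expanded state.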
//expandableadapter.notifyDataSetChanged(); } private ReportSearchField getReportSearchField(){ ReportSearchField r = new ReportSearchField(); r.xmid = project.xmid; r.xzdy = "1"; r.xzeq = "1"; r.xzxy = "1"; r.kssj = Constant.getBeforeCurrentDataString("yyyy-MM-dd", 2); r.jssj = Constant.getCurrentDataString("yyyy-MM-dd"); r.type = "1"; r.yhid = app.getUser().yhid; return r; } private void updateReport(String yhid,Report report,String firstLine) throws PmisException{ final String METHOD_NAME = "updateReport"; Soap soap = new Soap(Constant.report_namespace,METHOD_NAME); List<PropertyInfo> args = new ArrayList<PropertyInfo>(); PropertyInfo arg0 = new PropertyInfo(); arg0.setName("yhid"); arg0.setValue(yhid); arg0.setType(String.class); PropertyInfo arg1 = new PropertyInfo(); arg1.setName("reportStr"); arg1.setValue(new Gson().toJson(report)); arg1.setType(String.class); args.add(arg0); args.add(arg1); PropertyInfo arg2 = new PropertyInfo(); arg2.setName("type"); arg2.setValue("1"); arg2.setType(String.class); args.add(arg2); soap.setPropertys(args); String ret = ""; try { ret = soap.getResponse(Constant.report_url, Constant.report_url+"/"+METHOD_NAME); } catch (Exception e) { // TODO Auto-generated catch block throw new PmisException("更新"+firstLine+"失败!"); } if(ret.equals("1")){ //Toast.makeText(this, "更新"+firstLine+"成功!", Toast.LENGTH_SHORT).show(); }else throw new PmisException("更新"+firstLine+"失败!"); } /** * 调用soap * @param bgid * @return * @throws PmisException */ private Report showReport(String bgid) throws PmisException{ final String METHOD_NAME = "showReport"; Soap soap = new Soap(Constant.report_namespace,METHOD_NAME); List<PropertyInfo> args = new ArrayList<PropertyInfo>(); PropertyInfo arg0 = new PropertyInfo(); arg0.setName("bgid"); arg0.setValue(bgid); arg0.setType(String.class); args.add(arg0); soap.setPropertys(args); String ret = ""; try { ret = soap.getResponse(Constant.report_url, Constant.report_url+"/"+METHOD_NAME); return new Gson().fromJson(ret, Report.class); } catch (Exception e) { // TODO Auto-generated catch block //e.printStackTrace(); throw new PmisException("获取报工失败!"); } } private ExpandableListView.OnGroupExpandListener onGroupExpandListener = new ExpandableListView.OnGroupExpandListener(){ @Override public void onGroupExpand(int groupPosition) { // TODO Auto-generated method stub expandableadapter.notifyDataSetChanged(); } }; private ExpandableListView.OnGroupCollapseListener onGroupCollapseListener = new ExpandableListView.OnGroupCollapseListener(){ @Override public void onGroupCollapse(int groupPosition) { // TODO Auto-generated method stub expandableadapter.notifyDataSetChanged(); } }; private ExpandableListView.OnChildClickListener onChildClickListener = new ExpandableListView.OnChildClickListener(){ @Override public boolean onChildClick(ExpandableListView parent, View v, int groupPosition, int childPosition, long id) { // TODO Auto-generated method stub Intent it = new Intent(context,ReviewReportDetailsActivity.class); it.putExtra("bgid", ((Item)expandableadapter.getChild(groupPosition, childPosition)).key); startActivity(it); return false; } }; @Override public void onClick(View v) { // TODO Auto-generated method stub if (v.getId()==R.id.button_review_reports_select_all){ this.selectAll(v); }else if(v.getId()==R.id.button_review_reports_pass){ this.pass(v); }else if(v.getId()==R.id.button_review_reports_refuse){ this.refuse(v); } } }<file_sep>/Android1219/src/com/example/android1219/Provider/Note.java package com.example.android1219.provider; import 
<file_sep>/Android1219/src/com/example/android1219/Provider/Note.java
package com.example.android1219.provider;

import android.net.Uri;

public class Note {
	public static final String table_name = "note";
	public static final String NOTE_ID = "_id";
	public static final String TITLE = "title";
	public static final String TEXT = "text";
	public static final Uri CONTENT_URI = Uri.parse("content://" + NotesContentProvider.AUTHORITY + "/" + table_name);
	public static final String CONTENT_TYPE = "vnd.android.cursor.dir/vnd.jwei512.notes";
}
<file_sep>/20140304/版本控制.sql
prompt PL/SQL Developer import file
prompt Created on 2014年3月19日 by Administrator
set feedback off
set define off
prompt Loading BZDBB...
insert into BZDBB (ZDBBID, PTLX, BBH, WJDX)
values (1, 'iphone', '1.1.1.051021_beta', '0.556');
insert into BZDBB (ZDBBID, PTLX, BBH, WJDX)
values (2, 'android', '1.1.1.051021_beta', '0.774');
commit;
prompt 2 records loaded
set feedback on
set define on
prompt Done.
<file_sep>/20140304/JYPMIS/src/com/jypmis/ztools/BHGenerator.java
package com.jypmis.ztools;

import java.text.NumberFormat;
import java.util.Calendar;
import java.util.TimeZone;

public class BHGenerator {

	/**
	 * Serial number used when generating ordinary ids; consumed by getBh().
	 */
	public static int serialNumber = 0;

	/**
	 * Lock object used by getBh() to keep serial-number generation thread safe.
	 */
	private static final Object synObj = new Object();

	/**
	 * @FunName: getNumberFormat
	 * @Description : Returns a number formatter fixed to the given digit count.
	 * @param bit : the required number of digits
	 * @return NumberFormat: the configured formatter
	 */
	public static NumberFormat getNumberFormat(int bit) {
		NumberFormat formatter = NumberFormat.getNumberInstance();
		formatter.setMinimumIntegerDigits(bit);
		formatter.setMaximumIntegerDigits(bit);
		formatter.setGroupingUsed(false);
		return formatter;
	}

	/**
	 * @FunName: getCurrentTimeString
	 * @Description : Returns the current time as a digit string with millisecond
	 * precision: a 4-digit year, 2 digits each for month, day, hour, minute and
	 * second, and 3 digits for the millisecond.
	 * @return String: the current time string
	 */
	public static String getCurrentTimeString() {
		NumberFormat formatter2 = getNumberFormat(2);
		NumberFormat formatter3 = getNumberFormat(3);
		NumberFormat formatter4 = getNumberFormat(4);
		Calendar Cld = Calendar.getInstance(TimeZone.getTimeZone("Asia/Shanghai"));
		StringBuffer sb = new StringBuffer();
		sb.append(formatter4.format(Cld.get(Calendar.YEAR)));
		sb.append(formatter2.format(Cld.get(Calendar.MONTH) + 1));
		sb.append(formatter2.format(Cld.get(Calendar.DATE)));
		sb.append(formatter2.format(Cld.get(Calendar.HOUR_OF_DAY)));
		sb.append(formatter2.format(Cld.get(Calendar.MINUTE)));
		sb.append(formatter2.format(Cld.get(Calendar.SECOND)));
		sb.append(formatter3.format(Cld.get(Calendar.MILLISECOND)));
		return sb.toString();
	}

	/**
	 * @FunName: getBh
	 * @Description : Generates ids for all tables other than the consultation,
	 * application and acceptance tables (see the database documentation).
	 * @return String: the freshly generated id
	 */
	public static String getBh() {
		long temp;
		synchronized (synObj) {
			temp = serialNumber++;
			if (serialNumber == 1000) { // the serial cycles through 0-999
				serialNumber = 0;
			}
		}
		return getCurrentTimeString() + getNumberFormat(3).format(temp);
	}

	@SuppressWarnings("static-access")
	public static void main(String[] args) {
		BHGenerator getbh = new BHGenerator();
		System.out.println(getbh.getBh());
		// System.out.println(getbh.string2timestamp("2014-02-10 21:15:22 123"));
		// System.out.println(getbh.string2timestamp(System.currentTimeMillis()));
	}
}
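
// Illustrative note (not part of the original file): getBh() concatenates the
// 17-digit timestamp from getCurrentTimeString() with a 3-digit serial, so
// every id is exactly 20 characters, e.g. "20140319213015123007". Ids stay
// unique as long as fewer than 1000 are requested within one millisecond.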
<file_sep>/Android1219/src/com/example/android1219/sql/SqlActivity.java
package com.example.android1219.sql;

import java.util.List;

import com.example.android1219.R;

import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.widget.Toast;

public class SqlActivity extends Activity {

	private DBHelper dbhelper;

	@Override
	protected void onCreate(Bundle b) {
		super.onCreate(b);
		this.setContentView(R.layout.sql_activity);
		this.dbhelper = new DBHelper(this);
	}

	public void insert(View v) {
		Student student = new Student();
		student.setName("zhangsan");
		student.setScore("100");
		this.dbhelper.insert(student);
	}

	public void show(View v) {
		Student student = this.dbhelper.show(1);
		if (student != null) {
			Toast.makeText(this, "name:" + student.getName() + "--score:" + student.getScore(), Toast.LENGTH_SHORT).show();
		} else
			Toast.makeText(this, "there is no record!", Toast.LENGTH_SHORT).show();
	}

	public void clear(View v) {
		this.dbhelper.clear();
	}

	public void update(View v) {
	}

	public void delete(View v) {
	}

	public void getRecords(View v) {
		List<Object> list = this.dbhelper.getCustomizeObject();
		String content = "";
		for (int i = 0; i < list.size(); i++) {
			Student student = (Student) list.get(i);
			content += "name:" + student.getName() + "--";
		}
		if (content.length() == 0) {
			Toast.makeText(this, "there is no record!", Toast.LENGTH_SHORT).show();
		} else {
			Toast.makeText(this, content, Toast.LENGTH_SHORT).show();
		}
	}
}
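
// Assumed DBHelper contract (the helper lives elsewhere in this project;
// the signatures below are inferred from the calls above, not verified):
//   void insert(Student s);            // insert one row
//   Student show(int id);              // look up by id, null when absent
//   void clear();                      // delete all rows
//   List<Object> getCustomizeObject(); // load all rows as Student objects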
<file_sep>/PMIS2/src/com/jiyuan/pmis/adapter/SimpleBaseExpandableListAdapter.java
package com.jiyuan.pmis.adapter;

import java.util.List;

import com.jiyuan.pmis.R;
import com.jiyuan.pmis.structure.ExpandListItem;
import com.jiyuan.pmis.structure.Item;

import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseExpandableListAdapter;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.CompoundButton.OnCheckedChangeListener;

public class SimpleBaseExpandableListAdapter extends BaseExpandableListAdapter {

	private class GroupViewHolder {
		public TextView textview_group_view_title;
		public ImageView imageview_group_view_icon;
	}

	private class ChildViewHolder {
		public TextView firstLine;
		public TextView secondLine;
		public ImageView icon;
		public TextView count;
		public CheckBox checkBox;
	}

	private List<ExpandListItem> list;
	private Context context;
	private LayoutInflater inflater;

	public SimpleBaseExpandableListAdapter(Context context, List<ExpandListItem> expandListItem) {
		this.context = context;
		this.list = expandListItem;
		inflater = (LayoutInflater) this.context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
	}

	public List<ExpandListItem> getValues() {
		return this.list;
	}

	public void setValues(List<ExpandListItem> values) {
		this.list = values;
	}

	@Override
	public Item getChild(int groupPosition, int childPosition) {
		return list.get(groupPosition).items.get(childPosition);
	}

	@Override
	public long getChildId(int groupPosition, int childPosition) {
		return childPosition;
	}

	@Override
	public View getChildView(int groupPosition, int childPosition, boolean isLastChild, View convertView, ViewGroup parent) {
		ChildViewHolder holder;
		final Item item = list.get(groupPosition).items.get(childPosition);
		if (convertView == null) {
			convertView = inflater.inflate(R.layout.list_item, parent, false);
			holder = new ChildViewHolder();
			holder.firstLine = (TextView) convertView.findViewById(R.id.firstLine);
			holder.secondLine = (TextView) convertView.findViewById(R.id.secondLine);
			holder.icon = (ImageView) convertView.findViewById(R.id.icon);
			holder.checkBox = (CheckBox) convertView.findViewById(R.id.checkbox);
			holder.count = (TextView) convertView.findViewById(R.id.count);
			convertView.setTag(holder);
		} else {
			holder = (ChildViewHolder) convertView.getTag();
		}
		holder.icon.setVisibility(View.GONE);
		holder.firstLine.setText(item.firstLineText);
		holder.secondLine.setText(item.secondLineText);
		if (item.showCheckbox)
			holder.checkBox.setVisibility(View.VISIBLE);
		else
			holder.checkBox.setVisibility(View.INVISIBLE);
		holder.checkBox.setChecked(item.isChecked);
		holder.count.setVisibility(View.GONE);
		// Recycled rows get a fresh listener so the checkbox writes its state
		// back to the Item it currently displays.
		holder.checkBox.setOnCheckedChangeListener(new OnCheckedChangeListener() {
			@Override
			public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
				item.isChecked = isChecked;
			}
		});
		return convertView;
	}

	@Override
	public int getChildrenCount(int groupPosition) {
		return list.get(groupPosition).items.size();
	}

	@Override
	public ExpandListItem getGroup(int groupPosition) {
		return list.get(groupPosition);
	}

	@Override
	public int getGroupCount() {
		return list.size();
	}

	@Override
	public long getGroupId(int groupPosition) {
		return groupPosition;
	}

	@Override
	public View getGroupView(int groupPosition, boolean isExpanded, View convertView, ViewGroup parent) {
		ExpandListItem item = list.get(groupPosition);
		//item.isExpanded = isExpanded;
		GroupViewHolder holder;
		if (convertView == null) {
			holder = new GroupViewHolder();
			convertView = inflater.inflate(R.layout.group_view, parent, false);
			holder.textview_group_view_title = (TextView) convertView.findViewById(R.id.textview_group_view_title);
			holder.imageview_group_view_icon = (ImageView) convertView.findViewById(R.id.imageview_group_view_icon);
			convertView.setTag(holder);
		} else {
			holder = (GroupViewHolder) convertView.getTag();
		}
		holder.textview_group_view_title.setText(item.title);
		if (isExpanded) {
			holder.imageview_group_view_icon.setImageResource(R.drawable.arrow_up);
		} else {
			holder.imageview_group_view_icon.setImageResource(R.drawable.arrow_down);
		}
		return convertView;
	}

	@Override
	public boolean hasStableIds() {
		return true;
	}

	@Override
	public boolean isChildSelectable(int groupPosition, int childPosition) {
		return true;
	}
}
<file_sep>/PMIS2/src/com/jiyuan/pmis/sqlite/UserInfo.java
package com.jiyuan.pmis.sqlite;

public class UserInfo {

	final static String key_id = "id";
	final static String table_name = "userinfo";
	final static String key_username = "username";
	final static String key_password = "password";

	private int id;
	private String username;
	private String password;

	public UserInfo() {
	}

	public UserInfo(int id, String username, String password) {
		this.id = id;
		this.username = username;
		this.password = password;
	}

	public UserInfo(String username, String password) {
		this.username = username;
		this.password = password;
	}

	public int getId() {
		return id;
	}

	public void setId(int id) {
		this.id = id;
	}

	public String getUsername() {
		return username;
	}

	public void setUsername(String username) {
		this.username = username;
	}

	public String getPassword() {
		return password;
	}

	public void setPassword(String password) {
		this.password = password;
	}
}
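
// Implied sqlite schema for this bean (an assumption inferred from the key_*
// constants above, not taken from the project's actual DDL):
//   CREATE TABLE userinfo (id INTEGER PRIMARY KEY, username TEXT, password TEXT);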
<file_sep>/20140304/PMIS/src/com/jiyuan/update/UpdateManager.java
package com.jiyuan.update;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import org.ksoap2.serialization.PropertyInfo;

import com.google.gson.Gson;
import com.jiyuan.pmis.constant.Constant;
import com.jiyuan.pmis.soap.Soap;
import com.jiyuan.pmis.structure.Version;

import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager.NameNotFoundException;
import android.net.Uri;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;

public class UpdateManager {

	private String curVersion;
	private String newVersion;
	private int curVersionCode;
	private int newVersionCode;
	private String updateInfo;
	private UpdateCallback callback;
	private Context ctx;
	private int progress;
	private Boolean hasNewVersion;
	private Boolean canceled;

	public static final String UPDATE_DOWNURL = "http://192.168.3.11:8080/pmis/download/PMIS.apk";
	//public static final String UPDATE_CHECKURL = "http://www.www.baidu.com/test_update/update_version.txt";
	public static final String UPDATE_APKNAME = "pmis.apk";
	//public static final String UPDATE_VERJSON = "ver.txt";
	public static final String UPDATE_SAVENAME = "pmis.apk";

	private static final int UPDATE_CHECKCOMPLETED = 1;
	private static final int UPDATE_DOWNLOADING = 2;
	private static final int UPDATE_DOWNLOAD_ERROR = 3;
	private static final int UPDATE_DOWNLOAD_COMPLETED = 4;
	private static final int UPDATE_DOWNLOAD_CANCELED = 5;

	private String savefolder = "/mnt/sdcard/pmis/";

	public UpdateManager(Context context, UpdateCallback updateCallback) {
		ctx = context;
		callback = updateCallback;
		//savefolder = context.getFilesDir();
		canceled = false;
		getCurVersion();
	}

	public String getNewVersionName() {
		return newVersion;
	}

	public String getUpdateInfo() {
		return updateInfo;
	}

	private void getCurVersion() {
		try {
			PackageInfo pInfo = ctx.getPackageManager().getPackageInfo(ctx.getPackageName(), 0);
			curVersion = pInfo.versionName;
			curVersionCode = pInfo.versionCode;
		} catch (NameNotFoundException e) {
			Log.e("update", e.getMessage());
			curVersion = "1.1.1000";
			curVersionCode = 111000;
		}
	}

	public void checkUpdate() {
		hasNewVersion = false;
		new Thread() {
			@Override
			public void run() {
				/* legacy HTTP/JSON version check, superseded by the SOAP call below:
				String verjson = NetHelper.httpStringGet(UPDATE_CHECKURL);
				JSONArray array = new JSONArray(verjson);
				if (array.length() > 0) {
					JSONObject obj = array.getJSONObject(0);
					newVersionCode = Integer.parseInt(obj.getString("verCode"));
					newVersion = obj.getString("verName");
					updateInfo = "";
					if (newVersionCode > curVersionCode) {
						hasNewVersion = true;
					}
				}
				*/
				Looper.prepare();
				Soap soap = new Soap(Constant.login_namespace, "getVersion");
				List<PropertyInfo> args = new ArrayList<PropertyInfo>();
				PropertyInfo arg0 = new PropertyInfo();
				arg0.setName("pingtai");
				arg0.setValue("android");
				arg0.setType(String.class);
				args.add(arg0);
				soap.setPropertys(args);
				Version version = null;
				try {
					String ret = soap.getResponse(Constant.login_url, Constant.login_url + "/getVersion");
					version = new Gson().fromJson(ret, Version.class);
				} catch (Exception e) {
					newVersionCode = -1;
					newVersion = "";
					updateInfo = "";
					e.printStackTrace();
				}
				if (version != null) {
					if (version.zdbbid != null && version.bbh != null && version.wjdx != null) {
						try {
							newVersionCode = Integer.valueOf(version.zdbbid);
						} catch (Exception e) {
							newVersionCode = -1;
						}
						if (curVersionCode < newVersionCode) {
							newVersion = version.bbh;
							updateInfo = "";
							hasNewVersion = true;
							versionInfo = new Version();
							versionInfo.bbh = newVersion;
							versionInfo.wjdx = version.wjdx;
						}
					}
				}
				updateHandler.sendEmptyMessage(UPDATE_CHECKCOMPLETED);
			}
		}.start();
	}
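
	// Version-check contract: getCurVersion() reads versionCode/versionName
	// from PackageInfo; checkUpdate() compares that code with the zdbbid
	// field returned by the getVersion SOAP call and flips hasNewVersion only
	// when the server's code is strictly greater. The result reaches the UI
	// through UpdateCallback.checkUpdateCompleted(...).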
	public void update() {
		Intent intent = new Intent(Intent.ACTION_VIEW);
		intent.setDataAndType(
				Uri.fromFile(new File(savefolder, UPDATE_SAVENAME)),
				"application/vnd.android.package-archive");
		ctx.startActivity(intent);
	}

	public void downloadPackage() {
		new Thread() {
			@Override
			public void run() {
				try {
					URL url = new URL(UPDATE_DOWNURL);
					HttpURLConnection conn = (HttpURLConnection) url.openConnection();
					conn.connect();
					int length = conn.getContentLength();
					InputStream is = conn.getInputStream();
					File ApkFile = new File(savefolder, UPDATE_SAVENAME);
					File folder = new File(savefolder);
					if (!folder.exists())
						folder.mkdirs();
					if (ApkFile.exists()) {
						ApkFile.delete();
					}
					FileOutputStream fos = new FileOutputStream(ApkFile);
					int count = 0;
					byte buf[] = new byte[512];
					do {
						int numread = is.read(buf);
						// read() returns -1 at end of stream; checking before
						// updating count keeps the byte total and the reported
						// progress exact instead of adding the -1 sentinel.
						if (numread <= 0) {
							updateHandler.sendEmptyMessage(UPDATE_DOWNLOAD_COMPLETED);
							break;
						}
						count += numread;
						progress = (int) (((float) count / length) * 100);
						updateHandler.sendMessage(updateHandler.obtainMessage(UPDATE_DOWNLOADING));
						fos.write(buf, 0, numread);
					} while (!canceled);
					if (canceled) {
						updateHandler.sendEmptyMessage(UPDATE_DOWNLOAD_CANCELED);
					}
					fos.close();
					is.close();
				} catch (MalformedURLException e) {
					e.printStackTrace();
					updateHandler.sendMessage(updateHandler.obtainMessage(UPDATE_DOWNLOAD_ERROR, e.getMessage()));
				} catch (IOException e) {
					e.printStackTrace();
					updateHandler.sendMessage(updateHandler.obtainMessage(UPDATE_DOWNLOAD_ERROR, e.getMessage()));
				}
			}
		}.start();
	}

	public void cancelDownload() {
		canceled = true;
	}

	Handler updateHandler = new Handler() {
		@Override
		public void handleMessage(Message msg) {
			switch (msg.what) {
			case UPDATE_CHECKCOMPLETED:
				callback.checkUpdateCompleted(hasNewVersion, versionInfo);
				break;
			case UPDATE_DOWNLOADING:
				callback.downloadProgressChanged(progress);
				break;
			case UPDATE_DOWNLOAD_ERROR:
				callback.downloadCompleted(false, msg.obj.toString());
				break;
			case UPDATE_DOWNLOAD_COMPLETED:
				callback.downloadCompleted(true, "");
				break;
			case UPDATE_DOWNLOAD_CANCELED:
				callback.downloadCanceled();
				break;
			default:
				break;
			}
		}
	};

	private Version versionInfo;

	public interface UpdateCallback {
		public void checkUpdateCompleted(Boolean hasUpdate, Version version);

		public void downloadProgressChanged(int progress);

		public void downloadCanceled();

		public void downloadCompleted(Boolean success, CharSequence errorMsg);
	}
}
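
// Illustrative wiring (an assumption; the host Activity is not shown in this
// dump): the caller implements UpdateCallback and drives the manager as
//   UpdateManager um = new UpdateManager(activity, callback);
//   um.checkUpdate();      // async; reports via checkUpdateCompleted(...)
//   um.downloadPackage();  // streams the APK, ticking downloadProgressChanged
//   um.update();           // hands the saved APK to the package installer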
<file_sep>/DataServer/src/com/constant/Constant.java
package com.constant;

import java.io.IOException;
import java.io.PrintWriter;

import javax.servlet.http.HttpServletResponse;

import org.apache.struts2.ServletActionContext;

import com.google.gson.Gson;

public class Constant {

	public static void flush(Object dataMap) {
		HttpServletResponse response = ServletActionContext.getResponse();
		response.setContentType("text/html;charset=utf-8");
		PrintWriter out;
		try {
			out = response.getWriter();
			Gson gson = new Gson();
			String jsonString = gson.toJson(dataMap);
			out.println(jsonString);
			out.flush();
			out.close();
			System.out.println(jsonString);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
<file_sep>/20140304/1.sql
prompt PL/SQL Developer import file
prompt Created on 2014年2月24日 by Administrator
set feedback off
set define off
prompt Disabling triggers for JY_ALARM...
alter table JY_ALARM disable all triggers;
prompt Disabling triggers for JY_CABINET...
alter table JY_CABINET disable all triggers;
prompt Disabling triggers for JY_DEVICE...
alter table JY_DEVICE disable all triggers;
prompt Disabling triggers for JY_CONSTANT...
alter table JY_CONSTANT disable all triggers;
prompt Disabling triggers for JY_ALARM_TYPE...
alter table JY_ALARM_TYPE disable all triggers;
prompt Disabling triggers for JY_ALARM_TYPE_COLLECT...
alter table JY_ALARM_TYPE_COLLECT disable all triggers;
prompt Disabling triggers for JY_LINE...
alter table JY_LINE disable all triggers;
prompt Disabling triggers for JY_USER_GROUP...
alter table JY_USER_GROUP disable all triggers;
prompt Disabling triggers for JY_CABINET_HISTORY...
alter table JY_CABINET_HISTORY disable all triggers;
prompt Disabling triggers for JY_DETECTOR...
alter table JY_DETECTOR disable all triggers;
prompt Disabling triggers for JY_HISTORY...
alter table JY_HISTORY disable all triggers;
prompt Disabling triggers for JY_HISTORY_CHART...
alter table JY_HISTORY_CHART disable all triggers;
prompt Disabling triggers for JY_HISTORY_MONTH_CHART...
alter table JY_HISTORY_MONTH_CHART disable all triggers;
prompt Disabling triggers for JY_KEY_GENERATOR...
alter table JY_KEY_GENERATOR disable all triggers;
prompt Disabling triggers for JY_USER...
alter table JY_USER disable all triggers;
prompt Disabling foreign key constraints for JY_ALARM...
alter table JY_ALARM disable constraint FK11047F41807F4191;
prompt Disabling foreign key constraints for JY_CABINET...
alter table JY_CABINET disable constraint FK38F2EB484CDFBDA3;
alter table JY_CABINET disable constraint FK38F2EB485F790DD;
alter table JY_CABINET disable constraint FK38F2EB48A286E91;
alter table JY_CABINET disable constraint FK38F2EB48A2C603DB;
alter table JY_CABINET disable constraint FK38F2EB48AF055F4C;
alter table JY_CABINET disable constraint FK38F2EB48D53330F2;
prompt Disabling foreign key constraints for JY_DEVICE...
alter table JY_DEVICE disable constraint FK4AEBCB464CDFBDA3;
alter table JY_DEVICE disable constraint FK4AEBCB46D2758917;
prompt Disabling foreign key constraints for JY_ALARM_TYPE...
alter table JY_ALARM_TYPE disable constraint FK1D4F2978A288F1CD;
prompt Disabling foreign key constraints for JY_ALARM_TYPE_COLLECT...
alter table JY_ALARM_TYPE_COLLECT disable constraint FK995139639C859685;
alter table JY_ALARM_TYPE_COLLECT disable constraint FK995139639C860AE4;
alter table JY_ALARM_TYPE_COLLECT disable constraint FK995139639C867F43;
alter table JY_ALARM_TYPE_COLLECT disable constraint FK995139639C86F3A2;
prompt Disabling foreign key constraints for JY_CABINET_HISTORY...
alter table JY_CABINET_HISTORY disable constraint FK52CBFABDD2758917;
prompt Disabling foreign key constraints for JY_DETECTOR...
alter table JY_DETECTOR disable constraint FK3B8448B6807F4191;
alter table JY_DETECTOR disable constraint FK3B8448B6AF65B483;
prompt Disabling foreign key constraints for JY_HISTORY...
alter table JY_HISTORY disable constraint FKFE90D9A4AC8401D1; alter table JY_HISTORY disable constraint FKFE90D9A4D40CE0B1; prompt Disabling foreign key constraints for JY_HISTORY_CHART... alter table JY_HISTORY_CHART disable constraint FK86084983AC8401D1; prompt Disabling foreign key constraints for JY_HISTORY_MONTH_CHART... alter table JY_HISTORY_MONTH_CHART disable constraint FK9BE05F44AC8401D1; prompt Disabling foreign key constraints for JY_USER... alter table JY_USER disable constraint FKBB4090BBAC020558; prompt Deleting JY_USER... delete from JY_USER; commit; prompt Deleting JY_KEY_GENERATOR... delete from JY_KEY_GENERATOR; commit; prompt Deleting JY_HISTORY_MONTH_CHART... delete from JY_HISTORY_MONTH_CHART; commit; prompt Deleting JY_HISTORY_CHART... delete from JY_HISTORY_CHART; commit; prompt Deleting JY_HISTORY... delete from JY_HISTORY; commit; prompt Deleting JY_DETECTOR... delete from JY_DETECTOR; commit; prompt Deleting JY_CABINET_HISTORY... delete from JY_CABINET_HISTORY; commit; prompt Deleting JY_USER_GROUP... delete from JY_USER_GROUP; commit; prompt Deleting JY_LINE... delete from JY_LINE; commit; prompt Deleting JY_ALARM_TYPE_COLLECT... delete from JY_ALARM_TYPE_COLLECT; commit; prompt Deleting JY_ALARM_TYPE... delete from JY_ALARM_TYPE; commit; prompt Deleting JY_CONSTANT... delete from JY_CONSTANT; commit; prompt Deleting JY_DEVICE... delete from JY_DEVICE; commit; prompt Deleting JY_CABINET... delete from JY_CABINET; commit; prompt Deleting JY_ALARM... delete from JY_ALARM; commit; prompt Loading JY_ALARM... insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('1770118834540', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device4', '3', 15); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('1770223089018', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device5', '3', 12); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('1770295560075', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device6', '3', 10); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('1811655363008', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device1', '3', 4); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('1811729352533', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device2', '3', 10); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23324522699384', '离线', null, null, '0', to_timestamp('17-01-2014 15:04:11.631000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '0', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23405851302545', '离线', null, null, '0', to_timestamp('17-01-2014 15:05:32.985000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values 
('23416083375203', '离线', null, null, '0', to_timestamp('17-01-2014 15:05:43.203000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '0', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23467028963345', '离线', null, null, '0', to_timestamp('17-01-2014 15:06:34.168000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('5230700389330', '离线', null, null, '0', to_timestamp('24-01-2014 09:59:13.038000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('5576497909661', '离线', null, null, '0', to_timestamp('24-01-2014 10:04:58.881000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23404079830599', '离线', null, null, '0', to_timestamp('27-01-2014 14:57:52.037000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23414268347937', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:02.207000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23414429919373', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:02.363000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23424690736130', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:12.638000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23424768436064', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:12.720000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23434908451404', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:22.865000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23434989584053', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:22.945000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23445134250717', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:33.089000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23445237660541', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:33.196000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23455419868875', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:43.373000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values 
('23455498515226', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:43.458000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23465643670493', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:53.603000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23465754736883', '离线', null, null, '0', to_timestamp('27-01-2014 14:58:53.703000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23475897078778', '离线', null, null, '0', to_timestamp('27-01-2014 14:59:03.854000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23475973010370', '离线', null, null, '0', to_timestamp('27-01-2014 14:59:03.934000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23486097541233', '离线', null, null, '0', to_timestamp('27-01-2014 14:59:14.055000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23486160319363', '离线', null, null, '0', to_timestamp('27-01-2014 14:59:14.125000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25378145242277', '离线', null, null, '0', to_timestamp('26-01-2014 15:30:45.523000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('11013687828246', 'A相C相温度高于设定值(15.0℃)<br>', null, null, '0', to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device7', '3', 2); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('11014436595132', 'A相温度高于设定值(15.0℃)<br>三相之间温差超出设定值(25.0℃)<br>', null, null, '0', to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device8', '3', 2); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('11014547370591', 'A相B相C相温度高于设定值(15.0℃)<br>', null, null, '0', to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device9', '3', 2); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('12238744173552', '离线', null, null, '0', to_timestamp('21-01-2014 11:55:36.284000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('26633078234290', '温度无法解析', null, null, '0', to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device5', '6', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('26633108022445', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device6', '3', 8); insert 
into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('27837264271929', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device5', '3', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('27855908625740', '温度无法解析', null, null, '0', to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device5', '6', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('27875151794826', '温度无法解析', null, null, '0', to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device5', '6', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('27881632689505', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device5', '3', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('22942405893043', '离线', null, null, '0', to_timestamp('07-02-2014 14:55:11.727000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('2265079943366', '离线', null, null, '0', to_timestamp('21-01-2014 09:09:30.272000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '0', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('2265285885249', '离线', null, null, '0', to_timestamp('21-01-2014 09:09:30.506000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('2316533606654', '离线', null, null, '0', to_timestamp('21-01-2014 09:10:21.777000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('2436998728734', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device3', '3', 4); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('2438367160566', '离线', null, null, '0', to_timestamp('21-01-2014 09:12:23.603000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('4796912714285', '离线', null, null, '0', to_timestamp('22-01-2014 09:52:31.479000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '0', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('4797122799361', '离线', null, null, '0', to_timestamp('22-01-2014 09:52:31.775000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('3113846457270', '离线', null, null, '0', to_timestamp('26-01-2014 09:19:41.437000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25486042863651', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', 
to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device4', '3', 3); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25486115595450', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device5', '3', 3); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25565445680057', '终端重复(号码:18765432123:18765434567)', null, null, '0', to_timestamp('26-01-2014 15:33:52.836000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '3', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25570035162910', '离线', null, null, '0', to_timestamp('26-01-2014 15:33:57.426000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25666136000317', '终端重复(号码:18766565656:18765432345)', null, null, '0', to_timestamp('26-01-2014 15:35:33.523000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '3', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25671023743431', '离线', null, null, '0', to_timestamp('26-01-2014 15:35:38.423000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25673258717771', 'A相C相温度高于设定值(15.0℃)<br>三相之间温差超出设定值(25.0℃)<br>三相与环境温差超出设定值(35.0℃)', null, null, '0', to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device7', '3', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25673323233900', 'A相B相温度高于设定值(15.0℃)<br>三相之间温差超出设定值(25.0℃)<br>三相与环境温差超出设定值(35.0℃)', null, null, '0', to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device8', '3', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25673388641116', 'A相B相温度高于设定值(15.0℃)<br>三相之间温差超出设定值(25.0℃)<br>三相与环境温差超出设定值(35.0℃)', null, null, '0', to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device9', '3', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25852700596988', '离线', null, null, '0', to_timestamp('26-01-2014 15:38:40.101000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('25983942726797', '离线', null, null, '0', to_timestamp('26-01-2014 15:40:51.345000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('5087341272978', 'C相:15分钟内温度变化超过设定值(15.0℃)<br>', null, null, '0', to_timestamp('21-01-2014 09:56:32.695000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device1', '4', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('5331785916484', '离线', null, null, '0', to_timestamp('21-01-2014 10:00:37.064000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '0', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values 
('12006488969574', '离线', null, null, '0', to_timestamp('21-01-2014 11:51:43.958000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('7908894205107', '离线', null, null, '0', to_timestamp('24-01-2014 10:43:51.298000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('9089195189254', '离线', null, null, '0', to_timestamp('24-01-2014 11:03:31.569000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '0', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('9221687098537', '离线', null, null, '0', to_timestamp('24-01-2014 11:05:44.105000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '0', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('9231884524715', '离线', null, null, '0', to_timestamp('24-01-2014 11:05:54.306000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '0', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('26058188010843', '间隔3故障', null, null, '0', to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device6', '6', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('28646001530995', 'C相:15分钟内温度变化超过设定值(15.0℃)<br>', null, null, '0', to_timestamp('24-01-2014 16:29:28.867000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device4', '4', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('28646243497952', 'A相:15分钟内温度变化超过设定值(15.0℃)<br>', null, null, '0', to_timestamp('24-01-2014 16:29:29.117000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device5', '4', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('12954532509951', '离线', null, null, '0', to_timestamp('26-01-2014 12:03:42.526000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('12998157709464', '三相之间温差超出设定值(15.0℃)<br>', null, null, '0', to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), '0', 'Device1', '3', 3); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('22405244787006', '离线', null, null, '0', to_timestamp('26-01-2014 14:41:13.754000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('22828108534579', '离线', null, null, '0', to_timestamp('26-01-2014 14:48:16.631000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('22848627971387', '离线', null, null, '0', to_timestamp('26-01-2014 14:48:37.122000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23007366311287', '离线', null, null, '0', to_timestamp('26-01-2014 14:51:15.886000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, 
NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23149871574591', '离线', null, null, '0', to_timestamp('27-01-2014 14:53:37.824000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23149962521691', '离线', null, null, '0', to_timestamp('27-01-2014 14:53:37.894000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23363425209597', '离线', null, null, '0', to_timestamp('27-01-2014 14:57:11.380000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('23403987595729', '离线', null, null, '0', to_timestamp('27-01-2014 14:57:51.936000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('26034460956600', '离线', null, null, '0', to_timestamp('26-01-2014 15:41:41.857000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device7', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('26405259440190', '故障', null, null, '0', to_timestamp('26-01-2014 15:47:52.678000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '2', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('26408531572012', '离线', null, null, '0', to_timestamp('26-01-2014 15:47:55.948000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('26451837744072', '故障', null, null, '0', to_timestamp('26-01-2014 15:48:39.260000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '2', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('26459145372125', '离线', null, null, '0', to_timestamp('26-01-2014 15:48:46.570000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('29225714682142', '离线', null, null, '0', to_timestamp('26-01-2014 16:34:53.221000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('30620161617842', '离线', null, null, '0', to_timestamp('26-01-2014 16:58:07.718000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('18594736003084', '离线', null, null, '0', to_timestamp('07-02-2014 13:42:43.891000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('18605082768018', '离线', null, null, '0', to_timestamp('07-02-2014 13:42:54.249000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('19131924090718', '离线', null, null, '0', to_timestamp('07-02-2014 13:51:41.104000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, 
ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('19142277981226', '离线', null, null, '0', to_timestamp('07-02-2014 13:51:51.446000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('19331018605736', '离线', null, null, '0', to_timestamp('07-02-2014 13:55:00.180000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('19341457859882', '离线', null, null, '0', to_timestamp('07-02-2014 13:55:10.633000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('19546667793651', '离线', null, null, '0', to_timestamp('07-02-2014 13:58:35.848000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('19597922696749', '离线', null, null, '0', to_timestamp('07-02-2014 13:59:27.107000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('19608249076004', '离线', null, null, '0', to_timestamp('07-02-2014 13:59:37.438000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('19835613152212', '离线', null, null, '0', to_timestamp('07-02-2014 14:03:24.812000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('19846054813230', '离线', null, null, '0', to_timestamp('07-02-2014 14:03:35.243000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('20538958729328', '离线', null, null, '0', to_timestamp('07-02-2014 14:15:08.181000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('20868930946778', '离线', null, null, '0', to_timestamp('07-02-2014 14:20:38.170000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('20879076425103', '离线', null, null, '0', to_timestamp('07-02-2014 14:20:48.301000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); commit; prompt 100 records committed... 
insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('21439327616603', '离线', null, null, '0', to_timestamp('07-02-2014 14:30:08.577000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('21449431983108', '离线', null, null, '0', to_timestamp('07-02-2014 14:30:18.688000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('22211022137064', '离线', null, null, '0', to_timestamp('07-02-2014 14:43:00.315000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('22211157941941', '离线', null, null, '0', to_timestamp('07-02-2014 14:43:00.452000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device1', '1', 1); insert into JY_ALARM (ID, ALARM_TEXT, NOTE, REPAIR_USER, STATUS, C_DATE, ISCABINET, DEVICE_ID, TYPE, TIMES) values ('24449218628671', '离线', null, null, '0', to_timestamp('07-02-2014 15:20:18.536000', 'dd-mm-yyyy hh24:mi:ss.ff'), '1', 'Device4', '1', 1); commit; prompt 105 records loaded prompt Loading JY_CABINET... insert into JY_CABINET (CAB_ID, LINE_ID, ALARM_ID, CONSTANT_CABTYPE_ID, CONSTANT_POWERLEVEL_ID, ALARM_TYPE_COLLECT_ID, USER_GROUP_ID, CAB_NUMBER, SIM_NUMBER, SIM_S_NUMBER, TAG, STATUS, CREATE_TIME, DETECT_TIME, NOTE) values ('Cab1', 'Line1', '24449218628671', 1, 7, '#00120140117090003', 1, '#001', null, null, 1, 1, to_timestamp('17-01-2014 09:00:03.631000', 'dd-mm-yyyy hh24:mi:ss.ff'), to_timestamp('07-02-2014 15:20:18.536000', 'dd-mm-yyyy hh24:mi:ss.ff'), null); insert into JY_CABINET (CAB_ID, LINE_ID, ALARM_ID, CONSTANT_CABTYPE_ID, CONSTANT_POWERLEVEL_ID, ALARM_TYPE_COLLECT_ID, USER_GROUP_ID, CAB_NUMBER, SIM_NUMBER, SIM_S_NUMBER, TAG, STATUS, CREATE_TIME, DETECT_TIME, NOTE) values ('Cab2', 'Line1', '22211157941941', 1, 7, '#00220140117090016', 1, '#002', null, null, 1, 1, to_timestamp('17-01-2014 09:00:16.735000', 'dd-mm-yyyy hh24:mi:ss.ff'), to_timestamp('07-02-2014 14:43:00.452000', 'dd-mm-yyyy hh24:mi:ss.ff'), null); insert into JY_CABINET (CAB_ID, LINE_ID, ALARM_ID, CONSTANT_CABTYPE_ID, CONSTANT_POWERLEVEL_ID, ALARM_TYPE_COLLECT_ID, USER_GROUP_ID, CAB_NUMBER, SIM_NUMBER, SIM_S_NUMBER, TAG, STATUS, CREATE_TIME, DETECT_TIME, NOTE) values ('Cab3', 'Line1', '26034460956600', 1, 7, '-1', 1, '#003', null, null, 1, 1, to_timestamp('21-01-2014 11:17:09.980000', 'dd-mm-yyyy hh24:mi:ss.ff'), to_timestamp('26-01-2014 15:41:41.857000', 'dd-mm-yyyy hh24:mi:ss.ff'), null); commit; prompt 3 records loaded prompt Loading JY_DEVICE... 
insert into JY_DEVICE (DEVICE_ID, CAB_ID, ALARM_ID, DEVICE_NUMBER, NAME, STATUS, NOTE, TAG, POSITION_NUMBER, CREATE_TIME) values ('Device1', 'Cab2', '12998157709464', '001', '间隔1', null, null, 1, 1, to_timestamp('17-01-2014 09:00:31.010000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into JY_DEVICE (DEVICE_ID, CAB_ID, ALARM_ID, DEVICE_NUMBER, NAME, STATUS, NOTE, TAG, POSITION_NUMBER, CREATE_TIME) values ('Device2', 'Cab2', '1811729352533', '002', '间隔2', null, null, 1, 2, to_timestamp('17-01-2014 09:00:39.200000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into JY_DEVICE (DEVICE_ID, CAB_ID, ALARM_ID, DEVICE_NUMBER, NAME, STATUS, NOTE, TAG, POSITION_NUMBER, CREATE_TIME) values ('Device3', 'Cab2', '2436998728734', '003', '间隔3', null, null, 1, 3, to_timestamp('17-01-2014 09:00:48.232000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into JY_DEVICE (DEVICE_ID, CAB_ID, ALARM_ID, DEVICE_NUMBER, NAME, STATUS, NOTE, TAG, POSITION_NUMBER, CREATE_TIME) values ('Device4', 'Cab1', '25486042863651', '004', '间隔1', null, null, 1, 1, to_timestamp('17-01-2014 09:00:57.463000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into JY_DEVICE (DEVICE_ID, CAB_ID, ALARM_ID, DEVICE_NUMBER, NAME, STATUS, NOTE, TAG, POSITION_NUMBER, CREATE_TIME) values ('Device5', 'Cab1', '25486115595450', '005', '间隔2', null, null, 1, 2, to_timestamp('17-01-2014 09:01:05.325000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into JY_DEVICE (DEVICE_ID, CAB_ID, ALARM_ID, DEVICE_NUMBER, NAME, STATUS, NOTE, TAG, POSITION_NUMBER, CREATE_TIME) values ('Device6', 'Cab1', '26633108022445', '006', '间隔3', null, null, 1, 3, to_timestamp('17-01-2014 09:01:13.063000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into JY_DEVICE (DEVICE_ID, CAB_ID, ALARM_ID, DEVICE_NUMBER, NAME, STATUS, NOTE, TAG, POSITION_NUMBER, CREATE_TIME) values ('Device7', 'Cab3', '25673258717771', '011', '间隔1', null, null, 1, 1, to_timestamp('21-01-2014 11:17:32.519000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into JY_DEVICE (DEVICE_ID, CAB_ID, ALARM_ID, DEVICE_NUMBER, NAME, STATUS, NOTE, TAG, POSITION_NUMBER, CREATE_TIME) values ('Device8', 'Cab3', '25673323233900', '022', '间隔2', null, null, 1, 2, to_timestamp('21-01-2014 11:17:55.879000', 'dd-mm-yyyy hh24:mi:ss.ff')); insert into JY_DEVICE (DEVICE_ID, CAB_ID, ALARM_ID, DEVICE_NUMBER, NAME, STATUS, NOTE, TAG, POSITION_NUMBER, CREATE_TIME) values ('Device9', 'Cab3', '25673388641116', '033', '间隔3', null, null, 1, 3, to_timestamp('21-01-2014 11:18:03.534000', 'dd-mm-yyyy hh24:mi:ss.ff')); commit; prompt 9 records loaded prompt Loading JY_CONSTANT... 
insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (1, 'CabType', '1000', '环网柜', '10'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (2, 'CabType', '1001', '分段柜', '2'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (3, 'CabType', '1002', '高分箱', '3'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (4, 'CabType', '1003', '变电柜', '4'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (5, 'CabType', '1004', '配电', '5'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (6, 'CabType', '1005', '其他', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (7, 'PowerLevel', '1002', '10KV', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (8, 'PowerLevel', '1002', '0.4KV', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (9, 'PowerLevel', '1003', '35KV', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (10, 'PowerLevel', '1004', '66KV', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (11, 'PowerLevel', '1005', '110KV', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (12, 'PowerLevel', '1006', '220KV', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (13, 'PowerLevel', '1007', '其他', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (14, 'AlarmType', '1000', '报警温度超出设定值(T1℃)', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (15, 'AlarmType', '1001', '三相之间温差超出设定值(T2℃)', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (16, 'AlarmType', '1002', '三相与环境温差超出设定值(T3℃)', '6'); insert into JY_CONSTANT (ID, TYPE, KEY, VALUE, SUB_VALUE) values (17, 'AlarmType', '1003', '特定间隔(T4m)时间内温度变化超过设定值(T5℃)', '6'); commit; prompt 17 records loaded prompt Loading JY_ALARM_TYPE... insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('#001201401170900031000', 14, 75, 0, null); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('#001201401170900031001', 15, 15, 1, null); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('#001201401170900031002', 16, 15, 0, null); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('#001201401170900031003', 17, 15, 1, 15); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('#002201401170900161000', 14, 75, 0, null); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('#002201401170900161001', 15, 15, 1, null); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('#002201401170900161002', 16, 15, 0, null); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('#002201401170900161003', 17, 15, 1, 15); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('-11000', 14, 15, 1, null); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('-11001', 15, 25, 1, null); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('-11002', 16, 35, 1, null); insert into JY_ALARM_TYPE (ID, ALARM_TYPE_ID, VALUE, ENABLE, SUBVALUE) values ('-11003', 17, 10, 1, 15); commit; prompt 12 records loaded prompt Loading JY_ALARM_TYPE_COLLECT... 
insert into JY_ALARM_TYPE_COLLECT (ID, ALARM_TYPE1_ID, ALARM_TYPE2_ID, ALARM_TYPE3_ID, ALARM_TYPE4_ID)
values ('#00120140117090003', '#001201401170900031000', '#001201401170900031001', '#001201401170900031002', '#001201401170900031003');
insert into JY_ALARM_TYPE_COLLECT (ID, ALARM_TYPE1_ID, ALARM_TYPE2_ID, ALARM_TYPE3_ID, ALARM_TYPE4_ID)
values ('#00220140117090016', '#002201401170900161000', '#002201401170900161001', '#002201401170900161002', '#002201401170900161003');
insert into JY_ALARM_TYPE_COLLECT (ID, ALARM_TYPE1_ID, ALARM_TYPE2_ID, ALARM_TYPE3_ID, ALARM_TYPE4_ID)
values ('-1', '-11000', '-11001', '-11002', '-11003');
commit;
prompt 3 records loaded
prompt Loading JY_LINE...
insert into JY_LINE (LINE_ID, NAME, TAG)
values ('Line1', '线路1', 1);
commit;
prompt 1 records loaded
prompt Loading JY_USER_GROUP...
insert into JY_USER_GROUP (ID, GROUP_NAME, LEADER_NAME, NOTE)
values (1, '--', '--', null);
commit;
prompt 1 records loaded
prompt Loading JY_CABINET_HISTORY...
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('1770036197823', to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('1811598353735', to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('24088227866712', to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('24098885179326', to_timestamp('24-01-2014 15:13:41.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('5324505963658', to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('5356637695587', to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('5408157576679', to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('2505402368239', to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('4687788012801', to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('4715563810030', to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('5407512457866', to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('11013621711286', to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab3');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('26633006302919', to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('27837067639138', to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('27855873052286', to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('27875123089657', to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('27881597967001', to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('20070623739923', to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('2436768227198', to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('2572747919078', to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('13104971915810', to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('25486000462722', to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('25621385625164', to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('25673215585054', to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab3');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('12181541409163', to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab3');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('8114630358875', to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('8124847286397', to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('8554436231188', to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('8632242839325', to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('9220570302428', to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('9226373464820', to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('22489901636081', to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('26057735261320', to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('12998098418041', to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab2');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('22278632400495', to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
insert into JY_CABINET_HISTORY (ID, C_DATE, CAB_ID)
values ('23761992850777', to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Cab1');
commit;
prompt 36 records loaded
prompt Loading JY_DETECTOR...
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector1', 'Device1', 381, 'A相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector2', 'Device1', 382, 'B相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector3', 'Device1', 383, 'C相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector4', 'Device1', 384, '环境', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector5', 'Device2', 385, 'A相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector6', 'Device2', 386, 'B相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector7', 'Device2', 387, 'C相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector8', 'Device2', 388, '环境', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector9', 'Device3', 389, 'A相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector10', 'Device3', 390, 'B相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector11', 'Device3', 391, 'C相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector12', 'Device3', 392, '环境', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector13', 'Device4', 417, 'A相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector14', 'Device4', 418, 'B相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector15', 'Device4', 419, 'C相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector16', 'Device4', 420, '环境', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector17', 'Device5', 421, 'A相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector18', 'Device5', 422, 'B相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector19', 'Device5', 423, 'C相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector20', 'Device5', 424, '环境', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector21', 'Device6', 425, 'A相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector22', 'Device6', 426, 'B相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector23', 'Device6', 427, 'C相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector24', 'Device6', 428, '环境', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector25', 'Device7', 393, 'A相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector26', 'Device7', 394, 'B相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector27', 'Device7', 395, 'C相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector28', 'Device7', 396, '环境', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector29', 'Device8', 397, 'A相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector30', 'Device8', 398, 'B相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector31', 'Device8', 399, 'C相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector32', 'Device8', 400, '环境', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector33', 'Device9', 401, 'A相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector34', 'Device9', 402, 'B相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector35', 'Device9', 403, 'C相', '℃', 1);
insert into JY_DETECTOR (DETECTOR_ID, DEVICE_ID, HISTORY_ID, NAME, UNIT, TAG)
values ('Detector36', 'Device9', 404, '环境', '℃', 1);
commit;
prompt 36 records loaded
prompt Loading JY_HISTORY...
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (1, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '1770036197823', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (2, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '1770036197823', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (3, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '1770036197823', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (4, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '1770036197823', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (5, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '1770036197823', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (6, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '1770036197823', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (7, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '1770036197823', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (8, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '1770036197823', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (9, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '1770036197823', 135.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (10, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '1770036197823', 11.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (11, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '1770036197823', 20.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (12, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '1770036197823', 43.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (13, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '1811598353735', 23.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (14, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '1811598353735', 3.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (15, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '1811598353735', 24);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (16, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '1811598353735', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (17, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '1811598353735', 135.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (18, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '1811598353735', 11.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (19, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '1811598353735', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (20, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '1811598353735', 43.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (21, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '1811598353735', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (22, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '1811598353735', 1.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (23, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '1811598353735', .4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (24, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '1811598353735', 3.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (253, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '24088227866712', 223.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (254, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '24088227866712', 11.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (255, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '24088227866712', 121);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (256, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '24088227866712', 11.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (257, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '24088227866712', 115.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (258, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '24088227866712', 111);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (259, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '24088227866712', 10.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (260, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '24088227866712', 141.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (261, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '24088227866712', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (262, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '24088227866712', 110.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (263, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '24088227866712', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (264, to_timestamp('24-01-2014 15:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '24088227866712', 110.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (265, to_timestamp('24-01-2014 15:13:41.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '24098885179326', 223.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (266, to_timestamp('24-01-2014 15:13:41.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '24098885179326', 11.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (267, to_timestamp('24-01-2014 15:13:41.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '24098885179326', 121);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (268, to_timestamp('24-01-2014 15:13:41.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '24098885179326', 11.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (269, to_timestamp('24-01-2014 15:13:41.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '24098885179326', 115.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (270, to_timestamp('24-01-2014 15:13:41.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '24098885179326', 111);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (271, to_timestamp('24-01-2014 15:13:41.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '24098885179326', 10.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (272, to_timestamp('24-01-2014 15:13:41.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '24098885179326', 141.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (25, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '5324505963658', 3.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (26, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '5324505963658', 3.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (27, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '5324505963658', 4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (28, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '5324505963658', .3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (29, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '5324505963658', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (30, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '5324505963658', 1.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (31, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '5324505963658', .4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (32, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '5324505963658', 3.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (33, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '5324505963658', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (34, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '5324505963658', 1.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (35, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '5324505963658', .4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (36, to_timestamp('17-01-2014 10:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '5324505963658', 3.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (37, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '5356637695587', 23.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (38, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '5356637695587', 33.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (39, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '5356637695587', 34);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (40, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '5356637695587', 30.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (41, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '5356637695587', 35.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (42, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '5356637695587', 31.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (43, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '5356637695587', 30.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (44, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '5356637695587', 33.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (45, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '5356637695587', 35.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (46, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '5356637695587', 31.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (47, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '5356637695587', 30.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (48, to_timestamp('17-01-2014 09:04:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '5356637695587', 33.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (49, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '5408157576679', 23.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (50, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '5408157576679', 33.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (51, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '5408157576679', 34);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (52, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '5408157576679', 30.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (53, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '5408157576679', 35.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (54, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '5408157576679', 31.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (55, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '5408157576679', 30.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (56, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '5408157576679', 33.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (57, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '5408157576679', 35.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (58, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '5408157576679', 31.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (59, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '5408157576679', 30.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (60, to_timestamp('17-01-2014 10:05:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '5408157576679', 33.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (85, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '2505402368239', 23.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (86, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '2505402368239', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (87, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '2505402368239', 24);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (88, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '2505402368239', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (89, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '2505402368239', 35.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (90, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '2505402368239', 11.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (91, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '2505402368239', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (92, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '2505402368239', 43.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (93, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '2505402368239', 15.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (94, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '2505402368239', 21.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (95, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '2505402368239', 20.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (96, to_timestamp('21-01-2014 09:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '2505402368239', 23.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (109, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '4687788012801', 63.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (110, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '4687788012801', 53.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (111, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '4687788012801', 44);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (112, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '4687788012801', 30.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (113, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '4687788012801', 65.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (114, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '4687788012801', 51.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (115, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '4687788012801', 40.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (116, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '4687788012801', 43.2);
commit;
prompt 100 records committed...
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (117, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '4687788012801', 35.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (118, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '4687788012801', 41.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (119, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '4687788012801', 50.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (120, to_timestamp('21-01-2014 09:49:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '4687788012801', 63.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (121, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '4715563810030', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (122, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '4715563810030', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (123, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '4715563810030', 14);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (124, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '4715563810030', 30.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (125, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '4715563810030', 65.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (126, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '4715563810030', 51.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (127, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '4715563810030', 40.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (128, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '4715563810030', 43.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (129, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '4715563810030', 35.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (130, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '4715563810030', 41.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (131, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '4715563810030', 50.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (132, to_timestamp('21-01-2014 09:50:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '4715563810030', 63.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (133, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '5407512457866', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (134, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '5407512457866', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (135, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '5407512457866', 14);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (136, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '5407512457866', 30.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (137, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '5407512457866', 65.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (138, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '5407512457866', 51.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (139, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '5407512457866', 40.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (140, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '5407512457866', 43.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (141, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '5407512457866', 35.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (142, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '5407512457866', 41.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (143, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '5407512457866', 50.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (144, to_timestamp('21-01-2014 10:01:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '5407512457866', 63.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (145, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector25', '11013621711286', 23.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (146, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector26', '11013621711286', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (147, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector27', '11013621711286', 24);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (148, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector28', '11013621711286', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (149, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector29', '11013621711286', 35.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (150, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector30', '11013621711286', 11.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (151, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector31', '11013621711286', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (152, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector32', '11013621711286', 43.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (153, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector33', '11013621711286', 15.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (154, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector34', '11013621711286', 21.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (155, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector35', '11013621711286', 30.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (156, to_timestamp('21-01-2014 11:35:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector36', '11013621711286', 13.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (285, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '26633006302919', 223.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (286, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '26633006302919', 11.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (287, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '26633006302919', 121);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (288, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '26633006302919', 11.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (289, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '26633006302919', null);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (290, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '26633006302919', 111);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (291, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '26633006302919', 10.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (292, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '26633006302919', 141.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (293, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '26633006302919', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (294, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '26633006302919', 110.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (295, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '26633006302919', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (296, to_timestamp('24-01-2014 15:55:55.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '26633006302919', 222.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (297, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '27837067639138', 3.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (298, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '27837067639138', 11.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (299, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '27837067639138', 121);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (300, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '27837067639138', 11.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (301, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '27837067639138', 0);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (302, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '27837067639138', 111);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (303, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '27837067639138', 10.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (304, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '27837067639138', 141.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (305, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '27837067639138', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (306, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '27837067639138', 110.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (307, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '27837067639138', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (308, to_timestamp('24-01-2014 16:15:59.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '27837067639138', 222.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (309, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '27855873052286', 3.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (310, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '27855873052286', 11.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (311, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '27855873052286', 121);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (312, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '27855873052286', 11.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (313, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '27855873052286', null);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (314, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '27855873052286', 111);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (315, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '27855873052286', 10.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (316, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '27855873052286', 141.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (317, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '27855873052286', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (318, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '27855873052286', 110.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (319, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '27855873052286', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (320, to_timestamp('24-01-2014 16:16:18.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '27855873052286', 222.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (321, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '27875123089657', 3.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (322, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '27875123089657', .5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (323, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '27875123089657', 1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (324, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '27875123089657', 11.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (325, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '27875123089657', null);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (326, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '27875123089657', 111);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (327, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '27875123089657', 10.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (328, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '27875123089657', 141.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (329, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '27875123089657', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (330, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '27875123089657', 110.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (331, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '27875123089657', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (332, to_timestamp('24-01-2014 16:16:37.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '27875123089657', 222.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (333, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '27881597967001', 3.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (334, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '27881597967001', .5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (335, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '27881597967001', 1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (336, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '27881597967001', 11.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (337, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '27881597967001', 222.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (338, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '27881597967001', 111);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (339, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '27881597967001', 10.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (340, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '27881597967001', 141.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (341, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '27881597967001', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (342, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '27881597967001', 110.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (343, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '27881597967001', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE)
values (344, to_timestamp('24-01-2014 16:16:44.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '27881597967001', 222.2);
commit;
prompt 200 records committed...
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (61, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '20070623739923', 123.5); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (62, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '20070623739923', 13.5); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (63, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '20070623739923', 124); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (64, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '20070623739923', 10.3); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (65, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '20070623739923', 235.6); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (66, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '20070623739923', 111.1); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (67, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '20070623739923', 10.4); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (68, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '20070623739923', 143.2); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (69, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '20070623739923', 35.6); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (70, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '20070623739923', 11.1); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (71, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '20070623739923', 10.4); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (72, to_timestamp('20-01-2014 14:13:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '20070623739923', 43.2); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (73, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '2436768227198', 123.5); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (74, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '2436768227198', 13.5); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (75, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '2436768227198', 124); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (76, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '2436768227198', 10.3); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (77, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '2436768227198', 235.6); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (78, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '2436768227198', 
111.1); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (79, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '2436768227198', 10.4); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (80, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '2436768227198', 143.2); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (81, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '2436768227198', 235.6); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (82, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '2436768227198', 111.1); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (83, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '2436768227198', 10.4); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (84, to_timestamp('21-01-2014 01:11:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '2436768227198', 143.2); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (97, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '2572747919078', 63.5); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (98, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '2572747919078', 53.5); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (99, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '2572747919078', 44); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (100, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '2572747919078', 30.3); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (101, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '2572747919078', 65.6); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (102, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '2572747919078', 51.1); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (103, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '2572747919078', 40.4); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (104, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '2572747919078', 43.2); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (105, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '2572747919078', 35.6); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (106, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '2572747919078', 41.1); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (107, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '2572747919078', 50.4); insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (108, to_timestamp('21-01-2014 09:14:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '2572747919078', 63.2); 
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (357, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '13104971915810', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (358, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '13104971915810', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (359, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '13104971915810', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (360, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '13104971915810', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (361, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '13104971915810', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (362, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '13104971915810', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (363, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '13104971915810', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (364, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '13104971915810', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (365, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '13104971915810', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (366, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '13104971915810', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (367, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '13104971915810', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (368, to_timestamp('26-01-2014 12:06:12.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '13104971915810', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (369, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '25486000462722', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (370, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '25486000462722', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (371, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '25486000462722', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (372, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '25486000462722', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (373, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '25486000462722', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (374, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '25486000462722', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (375, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '25486000462722', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (376, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '25486000462722', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (377, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '25486000462722', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (378, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '25486000462722', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (379, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '25486000462722', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (380, to_timestamp('26-01-2014 15:32:33.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '25486000462722', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (381, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '25621385625164', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (382, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '25621385625164', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (383, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '25621385625164', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (384, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '25621385625164', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (385, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '25621385625164', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (386, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '25621385625164', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (387, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '25621385625164', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (388, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '25621385625164', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (389, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '25621385625164', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (390, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '25621385625164', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (391, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '25621385625164', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (392, to_timestamp('26-01-2014 15:34:48.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '25621385625164', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (393, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector25', '25673215585054', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (394, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector26', '25673215585054', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (395, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector27', '25673215585054', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (396, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector28', '25673215585054', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (397, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector29', '25673215585054', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (398, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector30', '25673215585054', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (399, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector31', '25673215585054', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (400, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector32', '25673215585054', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (401, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector33', '25673215585054', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (402, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector34', '25673215585054', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (403, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector35', '25673215585054', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (404, to_timestamp('26-01-2014 15:35:40.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector36', '25673215585054', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (157, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector25', '12181541409163', 23.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (158, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector26', '12181541409163', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (159, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector27', '12181541409163', 24);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (160, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector28', '12181541409163', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (161, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector29', '12181541409163', 35.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (162, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector30', '12181541409163', 11.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (163, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector31', '12181541409163', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (164, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector32', '12181541409163', 43.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (165, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector33', '12181541409163', 15.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (166, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector34', '12181541409163', 21.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (167, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector35', '12181541409163', 30.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (168, to_timestamp('21-01-2014 11:54:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector36', '12181541409163', 13.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (169, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '8114630358875', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (170, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '8114630358875', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (171, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '8114630358875', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (172, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '8114630358875', 10.3);
commit;
prompt 300 records committed...
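-- Editorial note (not part of the original export): the script commits in
-- batches and reports progress through SQL*Plus "prompt" commands, so a long
-- load can be monitored and resumed from the last committed batch if it is
-- interrupted.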
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (173, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '8114630358875', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (174, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '8114630358875', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (175, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '8114630358875', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (176, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '8114630358875', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (177, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '8114630358875', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (178, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '8114630358875', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (179, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '8114630358875', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (180, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '8114630358875', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (181, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '8124847286397', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (182, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '8124847286397', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (183, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '8124847286397', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (184, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '8124847286397', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (185, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '8124847286397', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (186, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '8124847286397', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (187, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '8124847286397', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (188, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '8124847286397', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (189, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '8124847286397', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (190, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '8124847286397', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (191, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '8124847286397', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (192, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '8124847286397', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (193, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '8554436231188', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (194, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '8554436231188', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (195, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '8554436231188', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (196, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '8554436231188', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (197, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '8554436231188', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (198, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '8554436231188', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (199, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '8554436231188', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (200, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '8554436231188', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (201, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '8554436231188', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (202, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '8554436231188', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (203, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '8554436231188', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (204, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '8554436231188', 103.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (205, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '8632242839325', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (206, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '8632242839325', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (207, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '8632242839325', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (208, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '8632242839325', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (209, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '8632242839325', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (210, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '8632242839325', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (211, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '8632242839325', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (212, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '8632242839325', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (213, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '8632242839325', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (214, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '8632242839325', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (215, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '8632242839325', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (216, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '8632242839325', 103.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (217, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '9220570302428', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (218, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '9220570302428', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (219, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '9220570302428', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (220, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '9220570302428', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (221, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '9220570302428', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (222, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '9220570302428', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (223, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '9220570302428', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (224, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '9220570302428', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (225, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '9220570302428', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (226, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '9220570302428', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (227, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '9220570302428', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (228, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '9220570302428', 103.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (229, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '9226373464820', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (230, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '9226373464820', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (231, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '9226373464820', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (232, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '9226373464820', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (233, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '9226373464820', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (234, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '9226373464820', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (235, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '9226373464820', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (236, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '9226373464820', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (237, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '9226373464820', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (238, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '9226373464820', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (239, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '9226373464820', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (240, to_timestamp('24-01-2014 10:46:30.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '9226373464820', 103.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (241, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '22489901636081', 223.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (242, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '22489901636081', 11.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (243, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '22489901636081', 121);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (244, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '22489901636081', 11.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (245, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '22489901636081', 115.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (246, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '22489901636081', 111);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (247, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '22489901636081', 10.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (248, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '22489901636081', 141.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (249, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '22489901636081', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (250, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '22489901636081', 110.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (251, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '22489901636081', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (252, to_timestamp('24-01-2014 14:46:52.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '22489901636081', 100.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (273, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '26057735261320', 223.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (274, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '26057735261320', 11.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (275, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '26057735261320', 121);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (276, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '26057735261320', 11.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (277, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '26057735261320', 115.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (278, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '26057735261320', 111);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (279, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '26057735261320', 10.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (280, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '26057735261320', 141.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (281, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '26057735261320', 5.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (282, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '26057735261320', 110.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (283, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '26057735261320', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (284, to_timestamp('24-01-2014 15:46:20.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '26057735261320', null);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (345, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', '12998098418041', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (346, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', '12998098418041', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (347, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', '12998098418041', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (348, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', '12998098418041', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (349, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', '12998098418041', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (350, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', '12998098418041', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (351, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', '12998098418041', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (352, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', '12998098418041', 143.2);
commit;
prompt 400 records committed...
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (353, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', '12998098418041', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (354, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', '12998098418041', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (355, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', '12998098418041', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (356, to_timestamp('26-01-2014 12:04:25.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', '12998098418041', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (405, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '22278632400495', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (406, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '22278632400495', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (407, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '22278632400495', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (408, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '22278632400495', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (409, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '22278632400495', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (410, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '22278632400495', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (411, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '22278632400495', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (412, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '22278632400495', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (413, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '22278632400495', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (414, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '22278632400495', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (415, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '22278632400495', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (416, to_timestamp('07-02-2014 14:44:07.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '22278632400495', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (417, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', '23761992850777', 123.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (418, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', '23761992850777', 13.5);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (419, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', '23761992850777', 124);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (420, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', '23761992850777', 10.3);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (421, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', '23761992850777', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (422, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', '23761992850777', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (423, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', '23761992850777', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (424, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', '23761992850777', 143.2);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (425, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', '23761992850777', 235.6);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (426, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', '23761992850777', 111.1);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (427, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', '23761992850777', 10.4);
insert into JY_HISTORY (ID, C_DATE, DETECTOR_ID, CABINETHISTORY_ID, VALUE) values (428, to_timestamp('07-02-2014 15:08:51.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', '23761992850777', 143.2);
commit;
prompt 428 records loaded
prompt Loading JY_HISTORY_CHART...
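-- A minimal verification sketch (an addition, not part of the original dump):
-- after the JY_HISTORY load above, the row count should match the total the
-- prompt messages report. Uncomment to check:
-- select count(*) from JY_HISTORY;  -- expected: 428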
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (25, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', 23.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (26, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', 3.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (27, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', 24);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (28, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', 10.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (29, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', 135.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (30, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', 11.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (31, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', 10.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (32, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', 43.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (33, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', 5.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (34, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', 1.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (35, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', .4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (36, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', 3.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (37, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 123.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (38, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 13.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (39, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 124);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (40, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', 10.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (41, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 235.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (42, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 111.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (43, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', 10.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (44, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 143.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (45, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 135.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (46, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 11.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (47, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', 20.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (48, to_timestamp('17-01-2014 10:00:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 43.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (145, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', 23.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (146, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', 33.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (147, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', 34);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (148, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', 30.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (149, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', 35.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (150, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', 31.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (151, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', 30.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (152, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', 33.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (153, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', 35.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (154, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', 31.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (155, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', 30.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (156, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', 33.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (157, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 3.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (158, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 3.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (159, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (160, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', .3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (161, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 5.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (162, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 1.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (163, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', .4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (164, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 3.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (165, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 5.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (166, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 1.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (167, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', .4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (168, to_timestamp('17-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 3.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (289, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 3.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (290, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', .5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (291, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (292, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', 11.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (293, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 222.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (294, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 111);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (295, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', 10.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (296, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 141.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (297, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 5.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (298, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 110.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (299, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', 10.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (300, to_timestamp('24-01-2014 17:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 222.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (1, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', 23.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (2, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', 3.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (3, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', 24);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (4, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', 10.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (5, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', 135.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (6, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', 11.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (7, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', 10.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (8, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', 43.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (9, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', 5.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (10, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', 1.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (11, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', .4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (12, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', 3.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (13, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 123.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (14, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 13.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (15, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 124);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (16, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', 10.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (17, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 235.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (18, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 111.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (19, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', 10.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (20, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 143.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (21, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 135.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (22, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 11.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (23, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', 20.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (24, to_timestamp('17-01-2014 09:30:00.027000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 43.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (73, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', 23.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (74, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', 33.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (75, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', 34);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (76, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', 30.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (77, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', 35.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (78, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', 31.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (79, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', 30.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (80, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', 33.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (81, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', 35.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (82, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', 31.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (83, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', 30.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (84, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', 33.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (85, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 3.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (86, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 3.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (87, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (88, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', .3);
commit;
prompt 100 records committed...
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (89, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 5.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (90, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 1.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (91, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', .4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (92, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 3.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (93, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 5.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (94, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 1.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (95, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', .4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (96, to_timestamp('17-01-2014 11:00:00.045000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 3.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (97, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', 23.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (98, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', 33.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (99, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', 34);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (100, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', 30.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (101, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', 35.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (102, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', 31.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (103, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', 30.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (104, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', 33.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (105, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', 35.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (106, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', 31.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (107, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', 30.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (108, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', 33.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (109, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 3.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (110, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 3.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (111, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (112, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', .3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (113, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 5.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (114, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 1.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (115, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', .4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (116, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 3.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (117, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 5.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (118, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 1.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (119, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', .4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (120, to_timestamp('17-01-2014 11:30:00.046000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 3.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (169, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 123.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (170, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 13.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (171, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 124);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (172, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', 10.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (173, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 235.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (174, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 111.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (175, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', 10.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (176, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 143.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (177, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 35.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (178, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 11.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (179, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', 10.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (180, to_timestamp('20-01-2014 15:00:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 43.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (181, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 123.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (182, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 13.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (183, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 124);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (184, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', 10.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (185, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 235.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (186, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 111.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (187, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', 10.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (188, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 143.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (189, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 35.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (190, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 11.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (191, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', 10.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (192, to_timestamp('20-01-2014 15:30:00.001000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 43.2);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (205, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 123.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (206, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 13.5);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (207, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 124);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (208, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', 10.3);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (209, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 235.6);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (210, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 111.1);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (211, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', 10.4);
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (212,
to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 143.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (213, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 35.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (214, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 11.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (215, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', 10.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (216, to_timestamp('20-01-2014 16:30:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 43.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (217, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 123.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (218, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 13.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (219, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 124); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (220, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', 10.3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (221, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 235.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (222, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 111.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (223, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', 10.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (224, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 143.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (225, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 35.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (226, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 11.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (227, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', 10.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (228, to_timestamp('20-01-2014 17:00:00.004000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 43.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (241, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', 13.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (242, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', 13.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (243, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', 14); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (244, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', 30.3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (245, to_timestamp('21-01-2014 
11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', 65.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (246, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', 51.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (247, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', 40.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (248, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', 43.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (249, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', 35.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (250, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', 41.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (251, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', 50.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (252, to_timestamp('21-01-2014 11:30:00.002000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', 63.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (253, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', 13.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (254, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', 13.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (255, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', 14); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (256, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', 30.3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (257, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', 65.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (258, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', 51.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (259, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', 40.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (260, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', 43.2); commit; prompt 200 records committed... 
insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (261, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', 35.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (262, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', 41.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (263, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', 50.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (264, to_timestamp('21-01-2014 12:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', 63.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (277, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 223.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (278, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 11.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (279, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 121); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (280, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', 11.3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (281, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', null); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (282, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 111); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (283, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', 10.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (284, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 141.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (285, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 5.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (286, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 110.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (287, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', 10.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (288, to_timestamp('24-01-2014 16:00:00.000000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 222.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (49, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', 23.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (50, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', 33.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (51, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', 34); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (52, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', 30.3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (53, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', 35.6); insert into JY_HISTORY_CHART (ID, 
C_DATE, DETECTOR_ID, VALUE) values (54, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', 31.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (55, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', 30.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (56, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', 33.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (57, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', 35.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (58, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', 31.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (59, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', 30.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (60, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', 33.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (61, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 3.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (62, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 3.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (63, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (64, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', .3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (65, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 5.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (66, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 1.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (67, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', .4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (68, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 3.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (69, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 5.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (70, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 1.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (71, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', .4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (72, to_timestamp('17-01-2014 10:30:00.030000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 3.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (121, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', 23.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (122, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', 33.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (123, to_timestamp('17-01-2014 
12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', 34); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (124, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', 30.3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (125, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', 35.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (126, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', 31.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (127, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', 30.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (128, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', 33.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (129, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', 35.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (130, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', 31.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (131, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', 30.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (132, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', 33.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (133, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 3.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (134, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 3.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (135, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (136, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', .3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (137, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 5.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (138, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 1.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (139, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', .4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (140, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 3.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (141, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 5.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (142, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 1.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (143, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', .4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (144, to_timestamp('17-01-2014 12:00:00.013000', 'dd-mm-yyyy hh24:mi:ss.ff'), 
'Detector24', 3.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (193, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 123.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (194, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 13.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (195, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 124); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (196, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', 10.3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (197, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 235.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (198, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 111.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (199, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', 10.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (200, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 143.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (201, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 35.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (202, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 11.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (203, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', 10.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (204, to_timestamp('20-01-2014 16:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 43.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (229, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector1', 13.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (230, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector2', 13.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (231, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector3', 14); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (232, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector4', 30.3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (233, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector5', 65.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (234, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector6', 51.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (235, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector7', 40.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (236, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector8', 43.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (237, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector9', 35.6); insert into 
JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (238, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector10', 41.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (239, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector11', 50.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (240, to_timestamp('21-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector12', 63.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (265, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector13', 123.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (266, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector14', 13.5); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (267, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector15', 124); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (268, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector16', 10.3); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (269, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector17', 235.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (270, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector18', 111.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (271, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector19', 10.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (272, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector20', 143.2); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (273, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector21', 235.6); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (274, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector22', 111.1); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (275, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector23', 10.4); insert into JY_HISTORY_CHART (ID, C_DATE, DETECTOR_ID, VALUE) values (276, to_timestamp('24-01-2014 11:00:00.003000', 'dd-mm-yyyy hh24:mi:ss.ff'), 'Detector24', 103.2); commit; prompt 300 records loaded prompt Loading JY_HISTORY_MONTH_CHART... prompt Table is empty prompt Loading JY_KEY_GENERATOR... insert into JY_KEY_GENERATOR (ID, USER_ID, LINE_ID, DEVICE_ID, DETECTOR_ID, CAB_ID) values (1, 3, 2, 10, 37, 4); commit; prompt 1 records loaded prompt Loading JY_USER... insert into JY_USER (USER_ID, ISFIRSTLOGIN, USERNAME, PASSWORD, CONTACT, COMPANY, JOB_LEVEL, USER_LEVEL, CAN_RECMES, GROUP_ID) values ('YH1', 0, 'admin', '<PASSWORD>', null, null, null, 'super_admin', null, null); insert into JY_USER (USER_ID, ISFIRSTLOGIN, USERNAME, PASSWORD, CONTACT, COMPANY, JOB_LEVEL, USER_LEVEL, CAN_RECMES, GROUP_ID) values ('YH2', 1, '--', '<PASSWORD>', null, null, null, 'com_admin', null, 1); commit; prompt 2 records loaded prompt Enabling foreign key constraints for JY_ALARM... alter table JY_ALARM enable constraint FK11047F41807F4191; prompt Enabling foreign key constraints for JY_CABINET... 
alter table JY_CABINET enable constraint FK38F2EB484CDFBDA3;
alter table JY_CABINET enable constraint FK38F2EB485F790DD;
alter table JY_CABINET enable constraint FK38F2EB48A286E91;
alter table JY_CABINET enable constraint FK38F2EB48A2C603DB;
alter table JY_CABINET enable constraint FK38F2EB48AF055F4C;
alter table JY_CABINET enable constraint FK38F2EB48D53330F2;
prompt Enabling foreign key constraints for JY_DEVICE...
alter table JY_DEVICE enable constraint FK4AEBCB464CDFBDA3;
alter table JY_DEVICE enable constraint FK4AEBCB46D2758917;
prompt Enabling foreign key constraints for JY_ALARM_TYPE...
alter table JY_ALARM_TYPE enable constraint FK1D4F2978A288F1CD;
prompt Enabling foreign key constraints for JY_ALARM_TYPE_COLLECT...
alter table JY_ALARM_TYPE_COLLECT enable constraint FK995139639C859685;
alter table JY_ALARM_TYPE_COLLECT enable constraint FK995139639C860AE4;
alter table JY_ALARM_TYPE_COLLECT enable constraint FK995139639C867F43;
alter table JY_ALARM_TYPE_COLLECT enable constraint FK995139639C86F3A2;
prompt Enabling foreign key constraints for JY_CABINET_HISTORY...
alter table JY_CABINET_HISTORY enable constraint FK52CBFABDD2758917;
prompt Enabling foreign key constraints for JY_DETECTOR...
alter table JY_DETECTOR enable constraint FK3B8448B6807F4191;
alter table JY_DETECTOR enable constraint FK3B8448B6AF65B483;
prompt Enabling foreign key constraints for JY_HISTORY...
alter table JY_HISTORY enable constraint FKFE90D9A4AC8401D1;
alter table JY_HISTORY enable constraint FKFE90D9A4D40CE0B1;
prompt Enabling foreign key constraints for JY_HISTORY_CHART...
alter table JY_HISTORY_CHART enable constraint FK86084983AC8401D1;
prompt Enabling foreign key constraints for JY_HISTORY_MONTH_CHART...
alter table JY_HISTORY_MONTH_CHART enable constraint FK9BE05F44AC8401D1;
prompt Enabling foreign key constraints for JY_USER...
alter table JY_USER enable constraint FKBB4090BBAC020558;
prompt Enabling triggers for JY_ALARM...
alter table JY_ALARM enable all triggers;
prompt Enabling triggers for JY_CABINET...
alter table JY_CABINET enable all triggers;
prompt Enabling triggers for JY_DEVICE...
alter table JY_DEVICE enable all triggers;
prompt Enabling triggers for JY_CONSTANT...
alter table JY_CONSTANT enable all triggers;
prompt Enabling triggers for JY_ALARM_TYPE...
alter table JY_ALARM_TYPE enable all triggers;
prompt Enabling triggers for JY_ALARM_TYPE_COLLECT...
alter table JY_ALARM_TYPE_COLLECT enable all triggers;
prompt Enabling triggers for JY_LINE...
alter table JY_LINE enable all triggers;
prompt Enabling triggers for JY_USER_GROUP...
alter table JY_USER_GROUP enable all triggers;
prompt Enabling triggers for JY_CABINET_HISTORY...
alter table JY_CABINET_HISTORY enable all triggers;
prompt Enabling triggers for JY_DETECTOR...
alter table JY_DETECTOR enable all triggers;
prompt Enabling triggers for JY_HISTORY...
alter table JY_HISTORY enable all triggers;
prompt Enabling triggers for JY_HISTORY_CHART...
alter table JY_HISTORY_CHART enable all triggers;
prompt Enabling triggers for JY_HISTORY_MONTH_CHART...
alter table JY_HISTORY_MONTH_CHART enable all triggers;
prompt Enabling triggers for JY_KEY_GENERATOR...
alter table JY_KEY_GENERATOR enable all triggers;
prompt Enabling triggers for JY_USER...
alter table JY_USER enable all triggers;
set feedback on
set define on
prompt Done.
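-- Illustrative sketch, not part of the original dump: once the data above is
-- loaded, a detector's reading history can be pulled back with an ordinary
-- range query over JY_HISTORY_CHART (columns ID, C_DATE, DETECTOR_ID, VALUE).
-- The detector name and date range below are example values.
--
--   select c_date, value
--     from jy_history_chart
--    where detector_id = 'Detector13'
--      and c_date between to_date('17-01-2014', 'dd-mm-yyyy')
--                     and to_date('25-01-2014', 'dd-mm-yyyy')
--    order by c_date;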
<file_sep>/JYPMIS0310/PMIS/src/com/jiyuan/pmis/structure/Department.java
package com.jiyuan.pmis.structure;

public class Department {

    /**
     * Department ID
     */
    public String bmid;

    /**
     * Department name
     */
    public String bmmc;

    /**
     * Department abbreviation
     */
    public String bmjc;

    /**
     * Department type
     */
    public String bmlx;

    /**
     * Status
     */
    public String zt;

    /**
     * Remarks
     */
    public String bz;
}
<file_sep>/20140304/PMIS/src/com/jiyuan/pmis/fragment/FragmentPage3.java
package com.jiyuan.pmis.fragment;

import java.util.ArrayList;
import java.util.List;

import org.ksoap2.serialization.PropertyInfo;

import com.google.gson.Gson;
import com.jiyuan.pmis.MainApplication;
import com.jiyuan.pmis.R;
import com.jiyuan.pmis.TabHostActivity;
import com.jiyuan.pmis.adapter.SimpleBaseExpandableListAdapter;
import com.jiyuan.pmis.constant.Constant;
import com.jiyuan.pmis.exception.PmisException;
import com.jiyuan.pmis.reports.ReviewReportDetailsActivity;
import com.jiyuan.pmis.soap.Soap;
import com.jiyuan.pmis.structure.ExpandListItem;
import com.jiyuan.pmis.structure.Item;
import com.jiyuan.pmis.structure.Project;
import com.jiyuan.pmis.structure.Report;
import com.jiyuan.pmis.structure.ReportSearchField;
import com.jiyuan.pmis.structure.ReportSort;
import com.jiyuan.pmis.structure.ReportType;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ExpandableListView;
import android.widget.Toast;

public class FragmentPage3 extends Fragment {

    private ExpandableListView review_reports_listView;
    private Button button_review_reports_select_all, button_review_reports_pass;
    private Context context;
    private TabHostActivity activity;
    private MainApplication app;
    private Project project;
    private boolean selectedAll = false;
    private SimpleBaseExpandableListAdapter expandableadapter;
    private boolean isFirst = false;
    private View v = null;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Reuse the already inflated view when the fragment is re-attached.
        if (v != null) {
            ((ViewGroup) v.getParent()).removeView(v);
            return v;
        }
        v = inflater.inflate(R.layout.fragment_3, null);
        this.context = this.getActivity();
        this.activity = (TabHostActivity) this.getActivity();
        this.app = (MainApplication) this.activity.getApplication();
        this.review_reports_listView = (ExpandableListView) v.findViewById(R.id.review_reports_listView);
        this.review_reports_listView.setGroupIndicator(null);
        this.initData(v);
        this.isFirst = true;
        return v;
    }

    @Override
    public void onResume() {
        super.onResume();
        if (!isFirst) {
            this.search();
        }
        this.isFirst = false;
    }

    private void search() {
        ReportSearchField r = this.getReportSearchField();
        List<ExpandListItem> values = listReports(r);
        this.expandableadapter.setValues(values);
        this.expandableadapter.notifyDataSetChanged();
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == Constant.REQUEST_CODE) {
            if (resultCode == Activity.RESULT_OK) {
                this.isFirst = true;
            }
            if (resultCode == Activity.RESULT_CANCELED) {
                // Write your code if there's no result
            }
        }
    }

    // Approves every checked report in the list.
    private void pass(View v) {
        boolean hadChecked = false;
        //SeparatedListAdapter adapter = (SeparatedListAdapter) this.review_reports_listView.getAdapter();
        int count = expandableadapter.getGroupCount();
        for (int i = 0; i < count; i++) {
            List<Item> items = expandableadapter.getGroup(i).items;
            for (int j = 0; j < items.size(); j++) {
                //Toast.makeText(this, i+"", Toast.LENGTH_SHORT).show();
                Item item = items.get(j);
                if (item.isChecked) {
                    hadChecked = true;
                    try {
                        // TODO: pending revision
                        //Report report = this.showReport(item.key);
                        Report report = new Report();
                        report.bgid = item.key;
                        report.shxx = "";
                        report.zt = "1";
                        this.updateReport(app.getUser().yhid, report, item.firstLineText);
                        //this.search(v);
                    } catch (PmisException e) {
                        Toast.makeText(this.context, e.getMessage(), Toast.LENGTH_SHORT).show();
                    }
                }
            }
        }
        if (!hadChecked)
            Toast.makeText(this.context, "请选择报工!", Toast.LENGTH_SHORT).show();
        else
            this.search();
        this.activity.setCount();
    }

    // Toggles between selecting and deselecting all list items.
    private void selectAll(View v) {
        int count = expandableadapter.getGroupCount();
        for (int i = 0; i < count; i++) {
            List<Item> items = expandableadapter.getGroup(i).items;
            for (int j = 0; j < items.size(); j++) {
                //Toast.makeText(this, i+"", Toast.LENGTH_SHORT).show();
                Item item = items.get(j);
                if (selectedAll) {
                    expandableadapter.getGroup(i).isChecked = false;
                    item.isChecked = false;
                    ((Button) v).setText("全选");
                } else {
                    expandableadapter.getGroup(i).isChecked = true;
                    item.isChecked = true;
                    ((Button) v).setText("反选");
                }
            }
        }
        expandableadapter.notifyDataSetChanged();
        selectedAll = !selectedAll;
    }

    // Converts the grouped report list into the adapter's display model.
    private List<ExpandListItem> listReports(ReportSearchField r) {
        List<ReportSort> sorts = new ArrayList<ReportSort>();
        List<ExpandListItem> values = new ArrayList<ExpandListItem>();
        try {
            sorts = this.getReports(r);
        } catch (PmisException e) {
            Toast.makeText(this.context, e.getMessage(), Toast.LENGTH_SHORT).show();
            return values;
        }
        for (int i = 0; i < sorts.size(); i++) {
            ExpandListItem expandListItem = new ExpandListItem();
            List<Report> reports = sorts.get(i).list;
            List<Item> items = new ArrayList<Item>();
            expandListItem.title = sorts.get(i).title;
            expandListItem.count = sorts.get(i).count;
            expandListItem.showCheckBox = true;
            for (int j = 0; j < reports.size(); j++) {
                Item item = new Item();
                item.key = reports.get(j).bgid;
                item.firstLineText = reports.get(j).gzrq.substring(5) + " " + reports.get(j).gzxs + "小时" + " " + reports.get(j).bgr;
                item.secondLineText = reports.get(j).gznr;
                item.showCheckbox = true;
                try {
                    // Highlight entries that report more than eight hours.
                    if (Float.valueOf(reports.get(j).gzxs) > 8) {
                        item.showRed = true;
                    }
                } catch (Exception e) {
                    // gzxs was not numeric; leave showRed unset.
                }
                items.add(item);
            }
            expandListItem.items = items;
            values.add(expandListItem);
        }
        return values;
    }

    // Fetches reports from the SOAP service and groups them by project.
    private List<ReportSort> getReports(ReportSearchField r) throws PmisException {
        Report[] reports = new Report[]{};
        final String METHOD_NAME = "getReports";
        Soap soap = new Soap(Constant.report_namespace, METHOD_NAME);
        List<PropertyInfo> args = new ArrayList<PropertyInfo>();
        PropertyInfo arg0 = new PropertyInfo();
        arg0.setName("reportSearchFieldStr");
        arg0.setValue(new Gson().toJson(r));
        args.add(arg0);
        soap.setPropertys(args);
        String ret = "";
        try {
            ret = soap.getResponse(Constant.report_url, Constant.report_url + "/" + METHOD_NAME);
        } catch (Exception e) {
            throw new PmisException("获取报工列表失败!");
        }
        try {
            reports = new Gson().fromJson(ret, Report[].class);
        } catch (Exception e) {
            throw new PmisException("当前没有报工!");
        }
        List<Report> listReports = new ArrayList<Report>();
        for (int i = 0; i < reports.length; i++) {
            listReports.add(reports[i]);
        }
        List<ReportSort> sorts = new ArrayList<ReportSort>();
        while (listReports.size() > 0) {
            // Pull out all reports that share the first report's project.
            List<Report> list = new ArrayList<Report>();
            Report b = listReports.get(0);
            list.add(b);
            listReports.remove(0);
            int i = 0;
            while (listReports.size() != i) {
                if (b.xmjc.equals((listReports).get(i).xmjc)) {
                    list.add((listReports).get(i));
                    listReports.remove(i);
                    i--;
                }
                i++;
            }
            ReportSort sort = new ReportSort();
            sort.title = b.xmjc;
            sort.list = list;
            sort.count = list.size();
            if (b.xmjc.equals("--")) {
                // Reports without a project are regrouped by report type.
                ReportType[] types = app.getReportTypes();
                for (int j = 0; j < types.length; j++) {
                    ReportSort typeSort = new ReportSort();
                    typeSort.title = types[j].bgxmc;
                    List<Report> typeList = new ArrayList<Report>();
                    for (int k = 0; k < list.size(); k++) {
                        if (types[j].bgxid.equals(list.get(k).bgxid)) {
                            typeList.add(list.get(k));
                        }
                    }
                    typeSort.list = typeList;
                    typeSort.count = typeList.size();
                    if (typeSort.count > 0)
                        //sorts.add(typeSort);
                        sorts.add(0, typeSort);
                }
            } else
                sorts.add(sort);
        }
        Log.e("pmis.....", new Gson().toJson(sorts));
        return sorts;
    }

    private void initData(View v) {
        project = new Project();
        project.xmid = "-1";
        project.xmjc = "全部";
        List<ExpandListItem> values = this.listReports(this.getReportSearchField());
        expandableadapter = new SimpleBaseExpandableListAdapter(this.context, values);
        this.review_reports_listView.setAdapter(expandableadapter);
        //this.review_reports_listView.setOnItemClickListener(item_listener);
        //this.review_reports_listView.setOnGroupCollapseListener(onGroupCollapseListener);
        //this.review_reports_listView.setOnGroupExpandListener(onGroupExpandListener);
        this.review_reports_listView.setOnChildClickListener(onChildClickListener);
        this.button_review_reports_pass = (Button) v.findViewById(R.id.button_review_reports_pass);
        this.button_review_reports_select_all = (Button) v.findViewById(R.id.button_review_reports_select_all);
        this.button_review_reports_pass.setOnClickListener(pass_listener);
        this.button_review_reports_select_all.setOnClickListener(select_all_listener);
        //expandableadapter.notifyDataSetChanged();
    }

    private ReportSearchField getReportSearchField() {
        ReportSearchField r = new ReportSearchField();
        r.xmid = project.xmid;
        r.xzdy = "1";
        r.xzeq = "1";
        r.xzxy = "1";
        r.kssj = Constant.getBeforeCurrentDataString("yyyy-MM-dd", 2);
        r.jssj = Constant.getCurrentDataString("yyyy-MM-dd");
        r.type = "1";
        r.yhid = app.getUser().yhid;
        r.bgxid = "-1";
        return r;
    }

    private void updateReport(String yhid, Report report, String firstLine) throws PmisException {
        final String METHOD_NAME = "updateReport";
        Soap soap = new Soap(Constant.report_namespace, METHOD_NAME);
        List<PropertyInfo> args = new ArrayList<PropertyInfo>();
        PropertyInfo arg0 = new PropertyInfo();
        arg0.setName("yhid");
        arg0.setValue(yhid);
        arg0.setType(String.class);
        PropertyInfo arg1 = new PropertyInfo();
        arg1.setName("reportStr");
        arg1.setValue(new Gson().toJson(report));
        arg1.setType(String.class);
        args.add(arg0);
        args.add(arg1);
        PropertyInfo arg2 = new PropertyInfo();
        arg2.setName("type");
        arg2.setValue("1");
        arg2.setType(String.class);
        args.add(arg2);
        soap.setPropertys(args);
        String ret = "";
        try {
            ret = soap.getResponse(Constant.report_url, Constant.report_url + "/" + METHOD_NAME);
        } catch (Exception e) {
            throw new PmisException("更新" + firstLine + "失败!");
        }
        if (ret.equals("1")) {
            //Toast.makeText(this, "更新"+firstLine+"成功!", Toast.LENGTH_SHORT).show();
        } else
            throw new PmisException("更新" + firstLine + "失败!");
    }

    /**
     * Invokes the SOAP service to fetch a single report.
     *
     * @param bgid report ID
     * @return the matching report
     * @throws PmisException
     */
    private Report showReport(String bgid) throws PmisException {
        final String METHOD_NAME = "showReport";
        Soap soap = new Soap(Constant.report_namespace, METHOD_NAME);
        List<PropertyInfo> args = new ArrayList<PropertyInfo>();
        PropertyInfo arg0 = new PropertyInfo();
        arg0.setName("bgid");
        arg0.setValue(bgid);
        arg0.setType(String.class);
        args.add(arg0);
        soap.setPropertys(args);
        String ret = "";
        try {
            ret = soap.getResponse(Constant.report_url, Constant.report_url + "/" + METHOD_NAME);
            return new Gson().fromJson(ret, Report.class);
        } catch (Exception e) {
            //e.printStackTrace();
            throw new PmisException("获取报工失败!");
        }
    }

    private ExpandableListView.OnChildClickListener onChildClickListener = new ExpandableListView.OnChildClickListener() {
        @Override
        public boolean onChildClick(ExpandableListView parent, View v, int groupPosition, int childPosition, long id) {
            Intent it = new Intent(context, ReviewReportDetailsActivity.class);
            it.putExtra("bgid", ((Item) expandableadapter.getChild(groupPosition, childPosition)).key);
            startActivityForResult(it, Constant.REQUEST_CODE);
            return false;
        }
    };

    private OnClickListener select_all_listener = new OnClickListener() {
        @Override
        public void onClick(View v) {
            selectAll(v);
        }
    };

    private OnClickListener pass_listener = new OnClickListener() {
        @Override
        public void onClick(View v) {
            pass(v);
        }
    };
}
<file_sep>/DataServer/src/com/project/service/impl/CourseServiceImpl.java
package com.project.service.impl;

import java.util.List;

import com.project.dao.CourseDAO;
import com.project.po.Course;
import com.project.service.CourseService;

public class CourseServiceImpl implements CourseService {

    private CourseDAO courseDAO;

    public void setCourseDAO(CourseDAO courseDAO) {
        this.courseDAO = courseDAO;
    }

    @Override
    public void saveCourse(Course arg0) {
        courseDAO.saveCourse(arg0);
    }

    @Override
    public void deleteCourse(Course arg0) {
        courseDAO.deleteCourse(arg0);
    }

    @Override
    public void updateCourse(Course arg0) {
        courseDAO.updateCourse(arg0);
    }

    @Override
    public Course getCourseById(Integer arg0) {
        return courseDAO.getCourseById(arg0);
    }

    @Override
    public List<Course> getCoursesByHql(String hql) {
        return courseDAO.getCoursesByHql(hql);
    }

    @SuppressWarnings("rawtypes")
    @Override
    public List getList(String hql) {
        return this.courseDAO.getList(hql);
    }
}
<file_sep>/JYPMIS0310/PMIS/src/com/jiyuan/pmis/fragment/FragmentPage2.java
package com.jiyuan.pmis.fragment;

import java.util.ArrayList;
import java.util.List;

import org.ksoap2.serialization.PropertyInfo;

import com.google.gson.Gson;
import com.jiyuan.pmis.MainApplication;
import com.jiyuan.pmis.R;
import com.jiyuan.pmis.TabHostActivity;
import com.jiyuan.pmis.adapter.SimpleBaseExpandableListAdapter;
import com.jiyuan.pmis.constant.Constant;
import com.jiyuan.pmis.exception.PmisException;
import com.jiyuan.pmis.reports.MyReportDetailsActivity;
import com.jiyuan.pmis.soap.Soap;
import com.jiyuan.pmis.structure.ExpandListItem;
import com.jiyuan.pmis.structure.Item;
import com.jiyuan.pmis.structure.Project;
import com.jiyuan.pmis.structure.Report;
import com.jiyuan.pmis.structure.ReportSearchField;
import com.jiyuan.pmis.structure.ReportSort;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ExpandableListView;
import android.widget.Toast;

public class FragmentPage2 extends
Fragment{ private ExpandableListView my_reports_listView; private Button button_my_reports_delete,button_my_reports_more; private Context context; private Project project; private MainApplication app; private SimpleBaseExpandableListAdapter expandableadapter; private TabHostActivity activity; private int day = 7; private boolean isFirst = false; private View v = null; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container,Bundle savedInstanceState) { if (v!=null){ ((ViewGroup)v.getParent()).removeView(v); return v; } v = inflater.inflate(R.layout.fragment_2, null); this.context = this.getActivity(); this.activity = (TabHostActivity) this.getActivity(); this.my_reports_listView = (ExpandableListView)v.findViewById(R.id.my_reports_listView); this.button_my_reports_delete = (Button)v.findViewById(R.id.button_my_reports_delete); this.button_my_reports_more = (Button)v.findViewById(R.id.button_my_reports_more); this.button_my_reports_delete.setOnClickListener(onClickListener); this.my_reports_listView.setGroupIndicator(null); this.button_my_reports_more.setOnClickListener(moreOnClickListener); isFirst = true; this.initData(v); return v; } @Override public void onResume(){ super.onResume(); if(!isFirst){ this.search(day); } this.isFirst = false; } public void search(int day){ ReportSearchField r = this.getReportSearchField(day); List<ExpandListItem> values = listReports(r); this.expandableadapter.setValues(values); this.expandableadapter.notifyDataSetChanged(); } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == Constant.REQUEST_CODE) { if (resultCode == Activity.RESULT_OK) { this.isFirst = true; } if (resultCode == Activity.RESULT_CANCELED) { // Write your code if there's no result } } } private ReportSearchField getReportSearchField(int day){ ReportSearchField r = new ReportSearchField(); r.xmid = project.xmid; r.kssj = Constant.getBeforeCurrentDataString("yyyy-MM-dd",day); r.jssj = Constant.getCurrentDataString("yyyy-MM-dd"); r.xzwtg = "1"; r.xzdsh = "1"; r.xzysh = "1"; r.type = "0"; r.yhid = app.getUser().yhid; r.bgxid = "-1"; return r; } public void delete(){ //this.my_reports_listView. 
boolean hadChecked = false; int count = expandableadapter.getGroupCount(); for(int i=0;i<count;i++){ List<Item> items = expandableadapter.getGroup(i).items; for(int j=0;j<items.size();j++){ //Toast.makeText(this, i+"", Toast.LENGTH_SHORT).show(); Item item = items.get(j); if(item.isChecked){ hadChecked = true; //Toast.makeText(this, "删除!"+item.key+"......"+item.firstLineText, Toast.LENGTH_SHORT).show(); try { this.deleteReport(item.key); } catch (PmisException e) { // TODO Auto-generated catch block Toast.makeText(this.context, e.getMessage(), Toast.LENGTH_SHORT).show(); } } } } if (!hadChecked){ Toast.makeText(this.context, "请选择报工!", Toast.LENGTH_SHORT).show(); } else this.search(day); this.activity.setCount(); } private List<ExpandListItem> listReports(ReportSearchField r){ List<ReportSort> sorts = new ArrayList<ReportSort>(); List<ExpandListItem> values = new ArrayList<ExpandListItem>(); try { sorts = this.getReports(r); } catch (PmisException e) { Toast.makeText(this.context, e.getMessage(), Toast.LENGTH_SHORT).show(); return values; } for (int i=0;i<sorts.size();i++){ ExpandListItem expandListItem = new ExpandListItem(); List<Report> reports = sorts.get(i).list; List<Item> items = new ArrayList<Item>(); expandListItem.title = sorts.get(i).title; expandListItem.count = sorts.get(i).count; if (sorts.get(i).title.equals("未通过")){ expandListItem.bgColor = Color.parseColor("#ff8974"); //expandListItem.bgImageId = R.drawable.wtg; }else if (sorts.get(i).title.equals("待审核")){ expandListItem.bgColor = Color.parseColor("#009bd9"); //expandListItem.bgImageId = R.drawable.dsh; }else{ expandListItem.bgColor = Color.parseColor("#8ec156"); //expandListItem.bgImageId = R.drawable.ytg; } for(int j=0;j<reports.size();j++){ Item item = new Item(); item.key = reports.get(j).bgid; if (!reports.get(j).xmjc.equals("--")) item.firstLineText = reports.get(j).gzrq.substring(5)+" "+reports.get(j).gzxs+"小时"+" "+reports.get(j).xmjc; else item.firstLineText = reports.get(j).gzrq.substring(5)+" "+reports.get(j).gzxs+"小时"+" "+app.getReportType(reports.get(j).bgxid); item.secondLineText = reports.get(j).gznr; if (reports.get(j).zt.equals("0")) item.showCheckbox = true; else item.showCheckbox = false; items.add(item); } expandListItem.items = items; values.add(expandListItem); } return values; } private List<ReportSort> getReports(ReportSearchField r) throws PmisException{ Report[] reports = new Report[]{}; final String METHOD_NAME = "getReports"; Soap soap = new Soap(Constant.report_namespace,METHOD_NAME); List<PropertyInfo> args = new ArrayList<PropertyInfo>(); PropertyInfo arg0 = new PropertyInfo(); arg0.setName("reportSearchFieldStr"); arg0.setValue(new Gson().toJson(r)); args.add(arg0); soap.setPropertys(args); String ret = ""; try { ret = soap.getResponse(Constant.report_url, Constant.report_url+"/"+METHOD_NAME); } catch (Exception e) { // TODO Auto-generated catch block throw new PmisException("获取报工列表失败!"); } try{ reports = new Gson().fromJson(ret, Report[].class); }catch(Exception e){ throw new PmisException("当前没有报工!"); } List<Report> listReports = new ArrayList<Report>(); for (int i=0;i<reports.length;i++){ listReports.add(reports[i]); } List<ReportSort> sorts = new ArrayList<ReportSort>(); ReportSort sort1 = new ReportSort(); sort1.title = "未通过"; ReportSort sort2 = new ReportSort(); sort2.title = "待审核"; ReportSort sort3 = new ReportSort(); sort3.title = "已审核"; while(listReports.size()>0){ List<Report> list = new ArrayList<Report>(); Report b = listReports.get(0); list.add(b); listReports.remove(0); int i = 0; 
        while (listReports.size() != i) {
            // pull every report with the same status (zt) as b into this group;
            // i-- compensates for remove() shifting the remaining elements left
            if (b.zt.equals(listReports.get(i).zt)) {
                list.add(listReports.get(i));
                listReports.remove(i);
                i--;
            }
            i++;
        }
        if (b.zt.equals("-1")) {       // rejected ("未通过")
            sort1.list = list;
            sort1.count = list.size();
        } else if (b.zt.equals("1")) { // approved ("已审核")
            sort3.list = list;
            sort3.count = list.size();
        } else {                       // pending review ("待审核")
            sort2.list = list;
            sort2.count = list.size();
        }
    }
    sorts.add(sort1);
    sorts.add(sort2);
    sorts.add(sort3);
    return sorts;
}

private void deleteReport(String bgid) throws PmisException {
    final String METHOD_NAME = "deleteReport";
    Soap soap = new Soap(Constant.report_namespace, METHOD_NAME);
    List<PropertyInfo> args = new ArrayList<PropertyInfo>();
    PropertyInfo arg0 = new PropertyInfo();
    arg0.setName("bgid");
    arg0.setValue(bgid);
    args.add(arg0);
    soap.setPropertys(args);
    String ret = "";
    try {
        ret = soap.getResponse(Constant.report_url, Constant.report_url + "/" + METHOD_NAME);
    } catch (Exception e) {
        throw new PmisException("删除报工失败!"); // "Failed to delete the report"
    }
    if (ret.equals("1")) {
        //Toast.makeText(this, "删除报工成功!", Toast.LENGTH_SHORT).show(); // "Report deleted"
    } else {
        throw new PmisException("删除报工失败!"); // "Failed to delete the report"
    }
}

private void initData(View v) {
    app = (MainApplication) this.activity.getApplication();
    project = new Project();
    project.xmid = "-1";
    project.xmjc = "全部"; // "All projects"
    List<ExpandListItem> values = this.listReports(this.getReportSearchField(day));
    expandableadapter = new SimpleBaseExpandableListAdapter(this.context, values);
    this.my_reports_listView.setAdapter(expandableadapter);
    this.my_reports_listView.setOnChildClickListener(onChildClickListener);
}

private ExpandableListView.OnChildClickListener onChildClickListener = new ExpandableListView.OnChildClickListener() {
    @Override
    public boolean onChildClick(ExpandableListView parent, View v, int groupPosition, int childPosition, long id) {
        // open the detail screen for the tapped report
        Intent it = new Intent(context, MyReportDetailsActivity.class);
        it.putExtra("bgid", ((Item) expandableadapter.getChild(groupPosition, childPosition)).key);
        startActivityForResult(it, Constant.REQUEST_CODE);
        return false;
    }
};

private Button.OnClickListener onClickListener = new Button.OnClickListener() {
    @Override
    public void onClick(View v) {
        delete();
    }
};

private Button.OnClickListener moreOnClickListener = new Button.OnClickListener() {
    @Override
    public void onClick(View v) {
        // extend the search window by another week
        day = day + 7;
        search(day);
    }
};
}<file_sep>/20140304/PMIS/src/com/jiyuan/pmis/structure/Report.java
package com.jiyuan.pmis.structure;

public class Report {
    /** Report type id */
    public String bgxid;
    /** Report id */
    public String bgid;
    /** Project id */
    public String xmid;
    /** Project short name */
    public String xmjc;
    /** Work date */
    public String gzrq;
    /** Hours worked */
    public String gzxs;
    /** Work location */
    public String gzdd;
    /** Work description */
    public String gznr;
    /** Reporter */
    public String bgr;
    /** Time the report was filed */
    public String bgsj;
    /** Status */
    public String zt;
    /** Reviewer */
    public String shr;
    /** Review time */
    public String shsj;
    /** Review comments */
    public String shxx;
    /** Actual location */
    public String zswz;
}
<file_sep>/20140304/PMIS/src/com/jiyuan/pmis/setting/ServerConfigeActivity.java
package com.jiyuan.pmis.setting;

import com.jiyuan.pmis.R;
import com.jiyuan.pmis.constant.Constant;
import com.jiyuan.pmis.structure.ServerInfo;

import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.os.IBinder;
import android.view.MotionEvent;
import android.view.View;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.Toast;

public class ServerConfigeActivity extends Activity {

    private EditText edittext_server_addr, edittext_server_port;

    @Override
    protected void onCreate(Bundle b) {
        super.onCreate(b);
        this.setContentView(R.layout.activity_server_config);
        this.edittext_server_addr = (EditText) this.findViewById(R.id.edittext_server_addr);
        this.edittext_server_port = (EditText) this.findViewById(R.id.edittext_server_port);
        ServerInfo info = Constant.getServerInfo(this);
        this.edittext_server_addr.setText(info.server_addr);
        this.edittext_server_port.setText(info.server_port);
        //this.edittext_server_addr.setInputType(EditorInfo.t);
        this.edittext_server_port.setInputType(EditorInfo.TYPE_CLASS_PHONE);
    }

    public void back(View v) {
        this.finish();
    }

    public void submit(View v) {
        if (this.edittext_server_addr.getText().toString().length() == 0
                || this.edittext_server_port.getText().toString().length() == 0) {
            Toast.makeText(this, "请填写服务器和端口号!", Toast.LENGTH_LONG).show(); // "Please enter the server address and port"
            return;
        }
        //Constant.configServer(this.edittext_server_addr.getText().toString(), this.edittext_server_port.getText().toString());
        ServerInfo info = new ServerInfo();
        info.server_addr = this.edittext_server_addr.getText().toString();
        info.server_port = this.edittext_server_port.getText().toString();
        Constant.saveServerInfo(info, this);
        //Intent it = new Intent(this, MainActivity.class);
        //it.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        //startActivity(it);
        this.finish();
    }

    @Override
    public boolean dispatchTouchEvent(MotionEvent ev) {
        if (ev.getAction() == MotionEvent.ACTION_DOWN) {
            // Get the view that currently has focus; normally this is an EditText
            // (a trackball or hardware keys can move focus elsewhere in special cases)
            View v = getCurrentFocus();
            if (isShouldHideInput(v, ev)) {
                hideSoftInput(v.getWindowToken());
            }
        }
        return super.dispatchTouchEvent(ev);
    }

    /**
     * Compare the EditText's on-screen bounds with the tap position to decide whether
     * to hide the keyboard; there is no need to hide it when the user taps the
     * EditText itself.
     *
     * @param v
     * @param event
     * @return
     */
    private boolean isShouldHideInput(View v, MotionEvent event) {
        if (v != null && (v instanceof EditText)) {
            int[] l = { 0, 0 };
            v.getLocationInWindow(l);
            int left = l[0], top = l[1], bottom = top + v.getHeight(), right = left + v.getWidth();
            if (event.getX() > left && event.getX() < right
                    && event.getY() > top && event.getY() < bottom) {
                // the tap landed on the EditText itself; ignore it
                return false;
            } else {
                return true;
            }
        }
        // If focus is not on an EditText, ignore the event. This happens right after the
        // view is drawn, when initial focus is not on an EditText, or when the user has
        // moved focus with a trackball.
        return false;
    }

    /**
     * One of several ways to hide the soft keyboard.
     *
     * @param token
     */
    private void hideSoftInput(IBinder token) {
        if (token != null) {
            InputMethodManager im = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
            im.hideSoftInputFromWindow(token, InputMethodManager.HIDE_NOT_ALWAYS);
        }
    }
}
<file_sep>/20140304/JYPMIS/src/com/jypmis/po/Sbgx.java
package com.jypmis.po;

/**
 * Report type.
 * The report type chosen inside a report is selected from this table.
 */
public class Sbgx {

    // Fields
    private String bgxid;
    private String bgxmc;
    private String glbm;
    private String zt;

    // Constructors

    /** default constructor */
    public Sbgx() {
    }

    /** minimal constructor */
    public Sbgx(String bgxid) {
        this.bgxid = bgxid;
    }

    /** full constructor */
    public Sbgx(String bgxid, String bgxmc, String glbm, String zt) {
        this.bgxid = bgxid;
        this.bgxmc = bgxmc;
        this.glbm = glbm;
        this.zt = zt;
    }

    public String getBgxid() { return this.bgxid; }

    public void setBgxid(String bgxid) { this.bgxid = bgxid; }

    public String getBgxmc() { return this.bgxmc; }

    public void setBgxmc(String bgxmc) { this.bgxmc = bgxmc; }

    public String getGlbm() { return this.glbm; }

    public void setGlbm(String glbm) { this.glbm = glbm; }

    public String getZt() { return this.zt; }

    public void setZt(String zt) { this.zt = zt; }
}
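Sbgx above is the lookup table behind the app.getReportType(bgxid) call used by the fragment earlier when a report has no project attached; conceptually it is just an id-to-name map. A minimal Python sketch of that lookup (the sample rows are hypothetical; only the field names come from the class above):

# Hypothetical in-memory mirror of the Sbgx table: bgxid -> bgxmc
report_types = {
    "1": "Meeting",   # sample rows for illustration only
    "2": "Training",
}

def get_report_type(bgxid):
    """Resolve a report-type id to its display name, falling back to '--'."""
    return report_types.get(bgxid, "--")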
<file_sep>/PMIS2/src/com/jiyuan/pmis/structure/ReportSort.java
package com.jiyuan.pmis.structure;

import java.util.ArrayList;
import java.util.List;

public class ReportSort {
    /** Project name */
    public String xmmc;
    /** Project id */
    public String xmid;
    /** Project short name */
    public String xmjc;
    /** The reports filed under this project */
    public List<Report> list = new ArrayList<Report>();
}
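The getReports method earlier builds its three ReportSort buckets by repeatedly splicing same-status items out of the list with manual index bookkeeping. The grouping itself is simple; here is the same idea as a Python sketch (illustrative only, not part of the repo — the status codes are the ones the Java code compares against):

from collections import defaultdict

def group_reports_by_status(reports):
    """Group report dicts by their zt status field, like getReports does."""
    buckets = defaultdict(list)
    for report in reports:
        buckets[report["zt"]].append(report)
    # "-1" = rejected, "0" = pending review, "1" = approved
    return [buckets["-1"], buckets["0"], buckets["1"]]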
xiaojue52/Android
<repo_name>createvibe/conga-api-example<file_sep>/app-api/src/api-bundle/lib/services/AbstractService.js 'use strict'; // third party libs var Q = require('q'); // local libs var InvalidArgumentError = require('../error/InvalidArgumentError'), HttpError = require('../error/http/HttpError'), AccessDeniedError = require('../error/http/AccessDeniedError'), NotFoundError = require('../error/http/NotFoundError'), ValidationError = require('../error/http/ValidationError'), ConflictError = require('../error/http/ConflictError'); /** * AbstractService for common business logic * @param {Container} container The service container * @abstract * @constructor */ function AbstractService(container) { this.container = container; } AbstractService.prototype = { /** * The service container * @type {Container} */ container: null, /** * Get a service from the container * @param {string} key The name of the service to get * @returns {*|null} */ get: function get(key) { if (this.container && this.container.has(key)) { return this.container.get(key); } else { console.trace(); console.error('Unable to load service, "' + key + '".'); return null; } }, /** * Get a parameter from the service container * @param {string} key The name of the parameter to get * @returns {*} */ getParameter: function getParameter(key) { if (this.container) { return this.container.getParameter(key); } return null; }, /** * Create and return an invalid argument error * @param {string} message The error message * @returns {InvalidArgumentError} */ createInvalidArgumentError: function createInvalidArgumentError(message) { return new InvalidArgumentError(message); }, /** * Create and return a generic http error (500) * @param {string|Array<string>} errors The error array * @param {string} message The error message * @returns {HttpError} */ createHttpError: function createHttpError(errors, message) { return new HttpError(errors, message); }, /** * Create and return an access denied error (401) * @param {string} message The error message * @returns {AccessDeniedError} */ createAccessDeniedError: function createAccessDeniedError(message) { return new AccessDeniedError(message); }, /** * Create and return a resource conflict error * @param {string} message The error message * @returns {ConflictError} */ createConflictError: function createConflictError(message) { return new ConflictError(message); }, /** * Create and return a not found error (404) * @param {string} message The error message * @returns {NotFoundError} */ createNotFoundError: function createNotFoundError(message) { return new NotFoundError(message); }, /** * Create and return a validation error (400) * @param {string|Array<string>} errors The error array * @param {string} message The error message * @returns {ValidationError} */ createValidationError: function createValidationError(errors, message) { return new ValidationError(errors, message); }, /** * Ensure that a good manager is passed in for reuse * @param {Manager|false|null|undefined} manager The manager to ensure * @param {string} [type] The expected manager type * @returns {Promise} */ ensureManager: function ensureManager(manager, type) { var self = this, deferred = Q.defer(); if (!type) { type = 'mongodb.default'; } if (!manager || !manager.definition || manager.definition.managerName !== type) { self.createManager(type, function(manager) { if (manager) { deferred.resolve({ manager: manager, isNew: true }); } else { deferred.reject(new Error('Unable to create manager for ' + type)); } }); } else { deferred.resolve({ 
manager: manager, isNew: false }); } return deferred.promise; }, /** * Create a Bass manager * @param {string} [type] The manager name, defaults to 'mongodb.default' * @param {Function} [cb] The callback function * @returns {Manager} The Bass manager */ createManager: function createManager(type, cb) { if (!type) { type = 'mongodb.default'; } var manager = this.container.get('bass').createSession().getManager(type); if (typeof cb === 'function') { cb(manager); } return manager; } }; AbstractService.prototype.constructor = AbstractService; module.exports = AbstractService; <file_sep>/app-api/src/api-bundle/lib/listeners/PreControllerListener.js 'use strict'; /** * Format bytes to string * @param {number} bytes Bytes to format * @param {number} [decimals] Decimal places * @returns {string} */ function formatBytes(bytes, decimals) { if (!bytes) { return '0 Byte'; } var k = 1000; // or 1024 for binary var dm = decimals + 1 || 3; var i = Math.floor(Math.log(bytes) / Math.log(k)); return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + formatBytes.sizes[i]; } formatBytes.sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']; /** * The PreControllerListeners fires before controllers are executed * @constructor */ function PreControllerListener() { // empty } PreControllerListener.prototype = { /** * Respond to the pre-controller event * @param {Object} event The event object, contains request, response and container * @param {Function} next The callback MUST be called to continue execution of the events * @returns {void} */ onPreController: function(event, next) { var mem, req = event.request, str = req.protocol + '://' + req.get('host') + req.originalUrl, env = event.container.getParameter('kernel.environment'); if (env === 'development') { mem = process.memoryUsage(); str = '[' + formatBytes(mem.heapUsed) + '] ' + str; } // debug access log event.container.get('logger').debug(str); next(); } }; PreControllerListener.prototype.constructor = PreControllerListener; module.exports = PreControllerListener;<file_sep>/app-api/src/api-bundle/lib/dependency-injection/configuration.js module.exports = { getName: function(){ return 'api'; }, validate: function(config){ } };<file_sep>/app-api/src/api-bundle/lib/services/UserService.js 'use strict'; // core modules var crypto = require('crypto'); // third party libs var Q = require('q'); // local libs var AbstractService = require('./AbstractService'); /** * The UserService handles business logic for user operations * @constructor */ function UserService() { AbstractService.apply(this, arguments); } // UserService extends AbstractService Conga.inherits(UserService, AbstractService, { /** * Get a user from storage by id * @param {string|number} userId The user id to fetch * @param {Manager} [manager] The bass manager if you already have one * @returns {Promise} */ getUserById: function getUserById(userId, manager) { var self = this, deferred = Q.defer(); this.ensureManager(manager).then(function (ensure) { // get the user repository var repo = ensure.manager.getRepository('User'); // validate the id if (!repo.validateIdField(userId)) { deferred.reject(self.createNotFoundError('Could not find user by id ' + userId)); return; } // find the user by id repo.find(userId) .then(deferred.resolve) .fail(deferred.reject); }).fail(deferred.reject); return deferred.promise; }, /** * Get users by criteria * @param {Object} criteria The criteria to search by * @param {Manager} [manager] The bass manager if you already have one * @returns {*|promise} 
*/ getUsersByCriteria: function getUsersByCriteria(criteria, manager) { var deferred = Q.defer(); this.ensureManager(manager).then(function (ensure) { // find the user by criteria ensure.manager.getRepository('User') .findBy(criteria) .then(deferred.resolve) .fail(deferred.reject); }).fail(deferred.reject); return deferred.promise; }, /** * Get one user by criteria * @see UserService.getUsersByCriteria */ getOneUserByCriteria: function getOneUserByCriteria(criteria, manager) { var deferred = Q.defer(); this.getUsersByCriteria(criteria, manager).then(function(users) { if (users) { deferred.resolve(users[0]); } else { deferred.resolve(null); } }).fail(deferred.reject); return deferred.promise; }, /** * Prepare a user for request * @param {User} user The User document to prepare * @param {Object} data Request data for user * @returns {User} * @throws ValidationError */ prepareUserForRequest: function prepareUserForRequest(user, data) { // sanitize data if ('createdAt' in data) { delete data.createdAt; } if ('updatedAt' in data) { delete data.updatedAt; } if ('version' in data) { delete data.version; } // deserialize the user data into the User document we just created this.get('rest.manager').deserialize(user, data); // validate the (new) data inside the user object var errors = this.get('validator').validate(user); if (errors && errors.length !== 0) { throw this.createValidationError(errors, 'Invalid User Data Provided'); } // return the hydrated user return user; }, /** * Create a new User for a request, performs rest deserialization and validation * @param {Object} data The data to create the user with (the request body) * @param {Manager} [manager] The bass manager if you already have one * @returns {Promise} */ createUserForRequest: function createUserForRequest(data, manager) { var self = this, deferred = Q.defer(); this.ensureManager(manager).then(function(ensure) { // create a new empty User document var user = ensure.manager.createDocument('User'); // prepare the new document for the request Q.fcall(self.prepareUserForRequest.bind(self, user, data)).then(function(user) { // save the new user object self.createUser(user, ensure.manager) .then(deferred.resolve) .fail(deferred.reject); }, deferred.reject); }).fail(deferred.reject); return deferred.promise; }, /** * Update a user by id for a request, performs rest deserialization and validation * @param {string|number} userId The user id to update * @param {Object} data The data to update with * @param {Manager} [manager] The bass manager if you already have one * @returns {Promise} */ updateUserForRequest: function updateUserForRequest(userId, data, manager) { var self = this, deferred = Q.defer(); this.ensureManager(manager).then(function (ensure) { self.getUserById(userId, ensure.manager).then(function (user) { // make sure we have a user if (!user) { deferred.reject(self.createNotFoundError('Could not find user by id, ' + userId)); return; } // prepare the user document for the request Q.fcall(self.prepareUserForRequest.bind(self, user, data)).then(function (user) { // update the document self.updateUser(user, ensure.manager) .then(deferred.resolve) .fail(deferred.reject); }, deferred.reject); }).fail(deferred.reject); }).fail(deferred.reject); return deferred.promise; }, /** * Create a new user (does not perform validation) * @param {User} user The User document to create * @param {Manager} [manager] The bass manager if you already have one * @returns {Promise} */ createUser: function createUser(user, manager) { var self = this, 
deferred = Q.defer(); // encrypt the password before we save the document this.encryptUserPassword(user); this.ensureManager(manager).then(function (ensure) { // see if the user exists by email address self.getUsersByCriteria({email: user.email}, ensure.manager).then(function(users) { // if the user already exists, don't continue if (users && users.length !== 0) { deferred.reject(self.createConflictError('Email address already exists: "' + user.email + '".')); return; } // save the new User document ensure.manager.persist(user); ensure.manager.flush(user).then(function () { // success, resolve with the new User document deferred.resolve(user); }).fail(deferred.reject); }); }).fail(deferred.reject); return deferred.promise; }, /** * Update a user (does not perform validation) * @param {User} user The User document to update * @param {Manager} [manager] The bass manager if you already have one * @returns {Promise} */ updateUser: function updateUser(user, manager) { var deferred = Q.defer(); this.ensureManager(manager).then(function (ensure) { // save the new User document ensure.manager.persist(user); ensure.manager.flush(user).then(function () { // success, resolve with the User document deferred.resolve(user); }).fail(deferred.reject); }).fail(deferred.reject); return deferred.promise; }, /** * Delete a user by id * @param {string|number} userId The user id to delete * @param {Manager} [manager] The bass manager if you already have one * @returns {Promise} */ deleteUserById: function deleteUserById(userId, manager) { var self = this, deferred = Q.defer(); this.ensureManager(manager).then(function(ensure) { // fetch the user self.getUserById(userId).then(function(user) { // make sure we found a user if (!user) { deferred.reject(self.createNotFoundError('Could not find user by id, ' + userId)); return; } // remove the user ensure.manager.remove(user); ensure.manager.flush(user).then(function() { // success deferred.resolve(); }).fail(deferred.reject); }); }).fail(deferred.reject); return deferred.promise; }, /** * Encrypt a user password * @param {string} password The password to encrypt * @param {string} salt The salt stored with the user, used for encryption * @returns {string} */ encryptPassword: function encryptPassword(password, salt) { return crypto.createCipher('blowfish', password + salt).final('hex'); }, /** * Generate a unique salt for a user password * @returns {string} */ generatePasswordSalt: function generatePasswordSalt() { var rand = Math.floor(Math.random() * 10000); return crypto.createHash('sha1').update(Date.now().toString() + rand).digest('hex'); }, /** * Encrypt a password for a User document * @param {User} user The User document * @param {string} [password] The plain text password to encrypt, if not provided, user.password is used * @returns {UserController} self */ encryptUserPassword: function encryptUserPassword(user, password) { user.salt = this.generatePasswordSalt(); user.password = this.encryptPassword(password || user.password, user.salt); return this; }, /** * See if a given plain-text password is a valid password for the given User document * @param {User} user The User document * @param {string} password The <PASSWORD> * @returns {boolean} */ isUserPassword: function isUserPassword(user, password) { return user.password === this.encryptPassword(password, user.salt); } }); module.exports = UserService; <file_sep>/app-api/src/api-bundle/lib/error/http/ConflictError.js 'use strict'; // local libs var HttpError = require('./HttpError'); /** * Error class for 
all resource conflict errors (ie. document already exists) * * @Rest:Object * * @constructor */ function ConflictError() { HttpError.apply(this, arguments); } Conga.inherits(ConflictError, HttpError, { /** * {@inheritdoc} */ message: 'Resource Conflict', /** * {@inheritdoc} * @Rest:SerializeMethod */ toJSON: function toJSON() { return HttpError.prototype.toJSON.apply(this, arguments); } }); module.exports = ConflictError;<file_sep>/app-api/src/api-bundle/lib/controller/AbstractController.js 'use strict'; // local libs var AbstractService = require('./../services/AbstractService'); /** * The AbstractController provides common functionality for all controllers * @abstract * @constructor */ function AbstractController() { // empty } AbstractController.prototype = { /** * @see AbstractService.prototype.get */ get: function get() { return AbstractService.prototype.get.apply(this, arguments); }, /** * @see AbstractService.createInvalidArgumentError */ createInvalidArgumentError: function createInvalidArgumentError() { return AbstractService.prototype.createInvalidArgumentError.apply(this, arguments); }, /** * @see AbstractService.createHttpError */ createHttpError: function createHttpError() { return AbstractService.prototype.createHttpError.apply(this, arguments); }, /** * @see AbstractService.createAccessDeniedError */ createAccessDeniedError: function createAccessDeniedError() { return AbstractService.prototype.createAccessDeniedError.apply(this, arguments); }, /** * @see AbstractService.createNotFoundError */ createNotFoundError: function createNotFoundError() { return AbstractService.prototype.createNotFoundError.apply(this, arguments); }, /** * @see AbstractService.createValidationError */ createValidationError: function createValidationError() { return AbstractService.prototype.createValidationError.apply(this, arguments); }, /** * Send an error response back to the client * @param {Error} err * @param res */ sendErrorResponse: function sendErrorResponse(err, res) { var json = err.toJSON ? 
err.toJSON() : {error: err.message};

        switch (err.name) {
            case 'ValidationError' :
                res.BAD_REQUEST(json);
                break;
            case 'InvalidArgumentError' :
                res.BAD_REQUEST(json);
                break;
            case 'AccessDeniedError' :
                res.UNAUTHORIZED(json);
                break;
            case 'NotFoundError' :
                res.NOT_FOUND(json);
                break;
            case 'ConflictError' :
                res.CONFLICT(json);
                break;
            default :
            case 'HttpError' :
                if (!err.stack) {
                    console.trace();
                }
                this.get('logger').error(err.stack || err);
                res.INTERNAL_SERVER_ERROR(json);
                break;
        }
    },

    /**
     * Create an error callback for a request
     * @param {Object} res The express response object
     * @param {string} [message] The default message
     * @returns {(function(this:AbstractController))|Function}
     */
    createErrorCallback: function createErrorCallback(res, message) {
        return (function(err) {
            if (typeof err === 'string') {
                err = this.createHttpError(err);
            } else if (!err) {
                err = this.createHttpError(message || 'Internal Server Error');
            }
            this.sendErrorResponse(err, res);
        }.bind(this));
    }
};

AbstractController.prototype.constructor = AbstractController;

module.exports = AbstractController;<file_sep>/app-api/src/api-bundle/lib/error/InvalidArgumentError.js
'use strict';

/**
 * Error class for all invalid argument errors
 *
 * @Rest:Object
 *
 * @param {string} message The error message
 * @constructor
 */
function InvalidArgumentError(message) {
    Error.apply(this, arguments);
    // Error.apply does not reliably copy these onto the subclass instance,
    // and sendErrorResponse switches on err.name, so set both explicitly
    this.message = message;
    this.name = this.constructor.name;
}

Conga.inherits(InvalidArgumentError, Error, {

    /**
     * {@inheritdoc}
     * @Rest:SerializeMethod
     */
    toJSON: function toJSON() {
        return {error: this.message};
    }
});

module.exports = InvalidArgumentError;<file_sep>/app-api/src/api-bundle/lib/services/EmailService.js
'use strict';

// third party libs
var nodemailer = require('nodemailer');
var Q = require('q');

// local libs
var AbstractService = require('./AbstractService');

/**
 * The EmailService helps with sending emails from twig templates
 * @extends AbstractService
 * @constructor
 */
function EmailService() {
    AbstractService.apply(this, arguments);
    this.transport = nodemailer.createTransport(
        this.getParameter('email.transport.type'),
        {
            service: this.getParameter('email.transport.config.service'),
            auth: {
                user: this.getParameter('email.transport.config.auth.user'),
                pass: this.getParameter('email.transport.config.auth.pass')
            }
        }
    );
}

// EmailService extends AbstractService
Conga.inherits(EmailService, AbstractService, {

    /**
     * The nodemailer transport
     * @type {Object}
     */
    transport: null,

    /**
     * Render an email template
     * @param {string} template The template path
     * @param {Object} context The data to render the template with
     * @returns {Promise}
     */
    renderEmailTemplate: function renderEmailTemplate(template, context) {
        var deferred = Q.defer();
        try {
            this.get('twig').twig({
                path: template,
                async: true,
                rethrow: false,
                load: function (template) {
                    deferred.resolve(template.render(context));
                },
                error: function (err) {
                    deferred.reject(err);
                }
            });
        } catch (e) {
            deferred.reject(e);
        }
        return deferred.promise;
    },

    /**
     * Send an email
     * @param {Object} options Object containing at least "to", "subject", "template" to send the email
     * @returns {Promise}
     */
    sendEmail: function sendEmail(options) {
        var self = this,
            deferred = Q.defer();

        // validate the options
        if (!(options instanceof Object)) {
            deferred.reject(self.createInvalidArgumentError('Expecting options to be an object.'));
            return deferred.promise;
        }
        if (!options.to) {
            deferred.reject(self.createInvalidArgumentError('Expecting the "to" property to be set on options.'));
            return deferred.promise;
        }
        if (!options.subject) {
            deferred.reject(self.createInvalidArgumentError('Expecting the "subject" property to be set
on options.')); return deferred.promise; } if (!options.template) { deferred.reject(self.createInvalidArgumentError('Expecting the "template" property to be set on options.')); return deferred.promise; } if (!options.from) { options.from = self.getParameter('email.from'); } // render the email template with the context object self.renderEmailTemplate(options.template, options.context || {}).then(function(html) { // send the email after the template is rendered self.transport.sendMail({ from: options.from, to: options.to, subject: options.subject, html: html }, function(err, response) { if (err) { // reject with an error deferred.reject(err); } else { // resolve with the mailer response deferred.resolve(response); } }); }).fail(deferred.reject); return deferred.promise; } }); module.exports = EmailService;<file_sep>/app-api/src/api-bundle/lib/controller/UserController.js 'use strict'; // local libs var AbstractController = require('./AbstractController'); /** * @Route("/users") * * @extends AbstractController * @constructor */ function UserController() { AbstractController.apply(this, arguments); } // UserController extends AbstractController Conga.inherits(UserController, AbstractController, { /** * @Route("/", name="user.create", methods=["POST"]) */ createUser: function(req, res) { var self = this; // create a callback for error responses var errorCallback = this.createErrorCallback(res, 'Unable to create user.'); // create a new user with the user service - req.body is the HTTP POST body this.get('user.service').createUserForRequest(req.body).then(function (user) { // send a welcome email (background operation) self.get('email.service').sendEmail({ to: user.email, subject: self.getParameter('email.template.welcome.subject'), template: self.getParameter('email.template.welcome'), context: {user: user} }).fail(console.error); // success callback, return the user object res.return(user); }).fail(errorCallback); }, /** * @Route("/:id", name="user.get", methods=["GET"]) */ getUser: function(req, res) { var self = this, userId = req.params.id; // create a callback for error responses var errorCallback = this.createErrorCallback(res, 'Unable to fetch user'); // get the user by id from the user service this.get('user.service').getUserById(userId).then(function(user) { // success callback if (!user) { errorCallback(self.createNotFoundError('Could not find user by id ' + userId)); return; } res.return(user); }).fail(errorCallback); }, /** * @Route("/:id", name="user.update", methods=["PUT"]) */ updateUser: function(req, res) { var userId = req.params.id; // create a callback for error responses var errorCallback = this.createErrorCallback(res, 'Unable to update user with id ' + userId); // get the user by id from the user service this.get('user.service').updateUserForRequest(userId, req.body).then(function (user) { // success callback res.return(user); }).fail(errorCallback); }, /** * @Route("/:id", name="user.delete", methods=["DELETE"]) */ deleteUser: function(req, res) { var userId = req.params.id; // create a callback for error responses var errorCallback = this.createErrorCallback(res, 'Unable to delete user'); this.get('user.service').deleteUserById(userId).then(function() { // success callback res.OK(); }).fail(errorCallback); }, /** * @Route("/login", name="user.login", methods=["POST"]) */ loginUser: function(req, res) { var email = req.body.email; var password = <PASSWORD>; var userService = this.get('user.service'); var accessDeniedError = this.createAccessDeniedError('Email or 
password is invalid.'); // create a callback for error responses var errorCallback = this.createErrorCallback(res, 'Unable to login'); userService.getOneUserByCriteria({email: email}).then(function (user) { if (!user) { // invalid email errorCallback(accessDeniedError); return; } if (!userService.isUserPassword(user, password)) { // invalid password errorCallback(accessDeniedError); return; } // return the user res.return(user); }).fail(errorCallback); } }); module.exports = UserController;<file_sep>/app-api/src/api-bundle/lib/error/http/ValidationError.js 'use strict'; // local libs var HttpError = require('./HttpError'); /** * Error class for all request validation errors * * @Rest:Object * * @constructor */ function ValidationError() { HttpError.apply(this, arguments); } Conga.inherits(ValidationError, HttpError, { /** * {@inheritdoc} */ message: 'Validation Error', /** * {@inheritdoc} * @Rest:SerializeMethod */ toJSON: function toJSON() { return HttpError.prototype.toJSON.apply(this, arguments); } }); module.exports = ValidationError;<file_sep>/app-api/src/api-bundle/lib/error/http/HttpError.js 'use strict'; /** * The HttpError class used for all HTTP error types * * @Rest:Object * * @param {string|Array<string>} errors Error message(s) * @param {string} [msg] The main error message * @constructor */ function HttpError(errors, msg) { if (arguments.length === 1) { if (errors instanceof Array) { this.errors = errors; if (errors.length !== 0) { this.message = errors[0]; } } else { this.errors = [errors]; this.message = errors; } } else { if (!(errors instanceof Array)) { errors = [errors]; } this.errors = errors; this.message = msg; } Error.call(this, this.message); this.name = this.constructor.name; } Conga.inherits(HttpError, Error, { /** * Array of errors * @type {Array<string>} */ errors: null, /** * The main error message * @type {string} */ message: 'HTTP Error', /** * Serialize this error * * @Rest:SerializeMethod * * @returns {Object} */ toJSON: function toJSON() { return { message: this.message, errors: this.errors }; } }); module.exports = HttpError;
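One pattern worth calling out in the service layer above: UserService never stores or compares plain-text passwords — isUserPassword regenerates the stored value from the submitted password plus the per-user salt and compares the results. The same recompute-and-compare flow in a minimal Python sketch (different primitives than the Node code, which pipes password + salt through a blowfish cipher; shown only to illustrate the flow):

import hashlib
import os

def generate_salt():
    # analogous to generatePasswordSalt() above
    return hashlib.sha1(os.urandom(16)).hexdigest()

def encrypt_password(password, salt):
    # stand-in for encryptPassword(); primitive chosen for this sketch only
    return hashlib.sha256((password + salt).encode()).hexdigest()

def is_user_password(user, password):
    # recompute with the stored salt and compare, like isUserPassword()
    return user["password"] == encrypt_password(password, user["salt"])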
<repo_name>tiagovalenca/Mars_Mercury<file_sep>/Interface/main.py
import sys

from PyQt5.QtWidgets import QApplication

from recorder import RecorderWindow

app = QApplication(sys.argv)
recorder = RecorderWindow()
sys.exit(app.exec_())
<file_sep>/Interface/README.md
# Code used in the interface.

Here is the code for the interface, built with PyQt5, that lets a parent record a 20-second message, listen to that message, and send it to their child through Firebase.

The recorded message is created as the file message.wav.

The JSON file is needed to grant the code administrative rights to Firebase.
<file_sep>/Interface/ui_recorder.py
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'mainwindow.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!

from PyQt5 import QtCore, QtGui, QtWidgets


class Ui_Recorder(object):
    def setupUi(self, MainWindow):
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(381, 280)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setGeometry(QtCore.QRect(0, 120, 181, 81))
        self.pushButton.setStyleSheet("QPushButton {\n"
" background-color: rgb(73, 220, 107);\n"
" color: white; \n"
" border: 1px solid gray;\n"
"}\n"
"\n"
"QPushButton:pressed {\n"
" background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1,\n"
" stop: 0 #dadbde, stop: 1 #f6f7fa);\n"
"}")
        self.pushButton.setObjectName("pushButton")
        self.pushButton_3 = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_3.setGeometry(QtCore.QRect(0, 200, 181, 81))
        self.pushButton_3.setStyleSheet("QPushButton {\n"
" background-color: rgb(73, 220, 107);\n"
" color: white; \n"
" border: 1px solid gray;\n"
"}\n"
"\n"
"QPushButton:pressed {\n"
" background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1,\n"
" stop: 0 #dadbde, stop: 1 #f6f7fa);\n"
"}")
        self.pushButton_3.setObjectName("pushButton_3")
        self.pushButton_4 = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_4.setGeometry(QtCore.QRect(180, 200, 201, 81))
        self.pushButton_4.setStyleSheet("QPushButton {\n"
" background-color: rgb(73, 220, 107);\n"
" color: white; \n"
" border: 1px solid gray;\n"
"}\n"
"\n"
"QPushButton:pressed {\n"
" background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1,\n"
" stop: 0 #dadbde, stop: 1 #f6f7fa);\n"
"}")
        self.pushButton_4.setObjectName("pushButton_4")
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(0, 0, 381, 121))
        self.label.setStyleSheet("QLabel {\n"
" qproperty-alignment: \'AlignVCenter | AlignCenter\';\n"
" border: 1px solid gray;\n"
" font: 16pt \"MS Shell Dlg 2\";\n"
" background-color : rgb(170, 0, 255);\n"
"}")
        self.label.setObjectName("label")
        self.recordBar = QtWidgets.QProgressBar(self.centralwidget)
        self.recordBar.setGeometry(QtCore.QRect(180, 120, 141, 31))
        self.recordBar.setStyleSheet("QProgressBar {\n"
" border: 2px solid grey;\n"
" border-radius: 5px;\n"
"}\n"
"\n"
"QProgressBar::chunk {\n"
" background-color: #05B8CC;\n"
" width: 20px;\n"
"}\n"
"\n"
"QProgressBar {\n"
" border: 2px solid grey;\n"
" border-radius: 5px;\n"
" text-align: center;\n"
"}")
        self.recordBar.setProperty("value", 0)
        self.recordBar.setObjectName("recordBar")
        self.playBar = QtWidgets.QProgressBar(self.centralwidget)
        self.playBar.setGeometry(QtCore.QRect(180, 150, 141, 21))
        self.playBar.setStyleSheet("QProgressBar {\n"
" border: 2px solid grey;\n"
" border-radius: 5px;\n"
"}\n"
"\n"
"QProgressBar::chunk {\n"
" background-color: #05B8CC;\n"
" width: 20px;\n"
"}\n"
"\n"
"QProgressBar {\n"
" border: 2px solid grey;\n"
" border-radius: 5px;\n"
" text-align: center;\n"
"}")
        self.playBar.setProperty("value", 0)
        self.playBar.setObjectName("playBar")
        self.uploadBar = QtWidgets.QProgressBar(self.centralwidget)
        self.uploadBar.setGeometry(QtCore.QRect(180, 170, 141, 31))
        self.uploadBar.setStyleSheet("QProgressBar {\n"
" border: 2px solid grey;\n"
" border-radius: 5px;\n"
"}\n"
"\n"
"QProgressBar::chunk {\n"
" background-color: #05B8CC;\n"
" width: 20px;\n"
"}\n"
"\n"
"QProgressBar {\n"
" border: 2px solid grey;\n"
" border-radius: 5px;\n"
" text-align: center;\n"
"}")
        self.uploadBar.setProperty("value", 0)
        self.uploadBar.setObjectName("uploadBar")
        self.label_2 = QtWidgets.QLabel(self.centralwidget)
        self.label_2.setGeometry(QtCore.QRect(320, 120, 61, 31))
        self.label_2.setStyleSheet("QLabel {\n"
" qproperty-alignment: \'AlignVCenter | AlignCenter\';\n"
" border: 1px solid gray;\n"
" background-color : white;\n"
"}")
        self.label_2.setObjectName("label_2")
        self.label_3 = QtWidgets.QLabel(self.centralwidget)
        self.label_3.setGeometry(QtCore.QRect(320, 170, 61, 31))
        self.label_3.setStyleSheet("QLabel {\n"
" qproperty-alignment: \'AlignVCenter | AlignCenter\';\n"
" border: 1px solid gray;\n"
" background-color : white;\n"
"}")
        self.label_3.setObjectName("label_3")
        self.label_4 = QtWidgets.QLabel(self.centralwidget)
        self.label_4.setGeometry(QtCore.QRect(320, 150, 61, 21))
        self.label_4.setStyleSheet("QLabel {\n"
" qproperty-alignment: \'AlignVCenter | AlignCenter\';\n"
" border: 1px solid gray;\n"
" background-color : white;\n"
"}")
        self.label_4.setObjectName("label_4")
        MainWindow.setCentralWidget(self.centralwidget)

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.pushButton.setText(_translate("MainWindow", "Record"))
        self.pushButton_3.setText(_translate("MainWindow", "Play Message"))
        self.pushButton_4.setText(_translate("MainWindow", "Send Message"))
        self.label.setText(_translate("MainWindow", "A Message To Mars"))
        self.label_2.setText(_translate("MainWindow", "Gravando"))
        self.label_3.setText(_translate("MainWindow", "Upload"))
        self.label_4.setText(_translate("MainWindow", "Play"))


if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    MainWindow = QtWidgets.QMainWindow()
    ui = Ui_Recorder()
    ui.setupUi(MainWindow)
    MainWindow.show()
    sys.exit(app.exec_())
<file_sep>/README.md
# Mars_Mercury

Repository for the Python and Arduino code used in developing the ARC startup's project for MARS.

A few external Python libraries are required for the code to run. You can install them with "pip install" or through the links provided.
These are:

1 - PyQT5 - https://pypi.org/project/PyQt5/

2 - Pyaudio - https://pypi.org/project/PyAudio/

3 - PySerial - https://pypi.org/project/pyserial/

4 - PyGame - https://pypi.org/project/Pygame/

5 - Pyrebase - https://github.com/thisbejim/Pyrebase

6 - playsound - https://pypi.org/project/playsound/
<file_sep>/Interface/recorder.py
import wave

import pyaudio
import pyrebase
from PyQt5 import QtWidgets
from playsound import playsound

from ui_recorder import Ui_Recorder


class RecorderWindow(QtWidgets.QMainWindow, Ui_Recorder):
    def __init__(self, parent=None):
        super(RecorderWindow, self).__init__(parent)
        self.setupUi(self)
        self.show()
        self.pushButton.clicked.connect(self.record_message)
        self.pushButton_3.clicked.connect(self.play_message)
        self.pushButton_4.clicked.connect(self.upload_message)

    def record_message(self):
        FORMAT = pyaudio.paInt16
        CHANNELS = 2
        RATE = 44100
        CHUNK = 1024
        RECORD_SECONDS = 20
        val = 0

        audio = pyaudio.PyAudio()
        stream = audio.open(format=FORMAT, channels=CHANNELS, rate=RATE,
                            input=True, frames_per_buffer=CHUNK)
        frames = []
        # note: this loop blocks the Qt event loop for the full 20 seconds
        for i in range(0, int(RATE / CHUNK * RECORD_SECONDS)):
            data = stream.read(CHUNK)
            frames.append(data)
            # the per-chunk increments add up to roughly 100 over the recording
            val += (0.006 * RECORD_SECONDS)
            self.recordBar.setValue(val)
        stream.stop_stream()
        stream.close()
        audio.terminate()

        WAVE_OUTPUT_FILENAME = "message.wav"
        waveFile = wave.open(WAVE_OUTPUT_FILENAME, 'wb')
        waveFile.setnchannels(CHANNELS)
        waveFile.setsampwidth(audio.get_sample_size(FORMAT))
        waveFile.setframerate(RATE)
        waveFile.writeframes(b''.join(frames))
        waveFile.close()

    def play_message(self):
        playsound("message.wav")
        self.playBar.setValue(100)

    def upload_message(self):
        self.uploadBar.setValue(0)
        config = {
            "apiKey": "<KEY>",
            "authDomain": "mars-8c17e.firebaseapp.com",
            "databaseURL": "https://mars-8c17e.firebaseio.com",
            "storageBucket": "mars-8c17e.appspot.com",
        }
        firebase = pyrebase.initialize_app(config)
        storage = firebase.storage()
        database = firebase.database()
        data = {"Message": "sent"}
        storage.child("message.wav").put("message.wav")
        database.child().push(data)
        self.uploadBar.setValue(100)
<file_sep>/Inside Mars/Mars_robot.ino
#include <SoftwareSerial.h>
#include <Servo.h>

int led = 9;
SoftwareSerial BTSerial(10, 11); // RX and TX
Servo myServo;
int servopos, pos = 0, switchPin = 12, switchState = 0;
char btState = '0';

void setup() {
    myServo.attach(9);
    Serial.begin(9600);
    BTSerial.begin(38400);
    myServo.write(0);
    pinMode(switchPin, INPUT);
}

void loop() {
    if (BTSerial.available()) {
        btState = BTSerial.read();
        Serial.print(btState);
        // 'A' from the base: wave the leaf until the robot is undocked
        while (btState == 'A') {
            servopos = servopos + 10;
            delay(75);
            Serial.println(servopos);
            myServo.write(servopos);
            if (servopos == 60) {
                while (servopos > 0) {
                    servopos = servopos - 10;
                    delay(75);
                    Serial.println(servopos);
                    myServo.write(servopos);
                }
            }
            switchState = digitalRead(switchPin);
            if (switchState == LOW) {
                // reed switch triggered: acknowledge with 'a' and stop waving
                btState = '0';
                BTSerial.write('a');
                servopos = 0;
                myServo.write(servopos);
                delay(500);
            }
        }
    }
}
<file_sep>/Inside Mars/README.md
# Code used in the base and inside MARS to control the received messages.

The base code lets the child listen to the audio messages, pause and resume a message whenever they want, and hear it again at the click of a button.
The Arduino code inside MARS controls the leaf on the robot's head and starts the message when the robot is moved away from the base (by means of a reed switch).
<file_sep>/Inside Mars/Mars.py
import pyrebase
import pygame
import serial
import time

config = {
    "apiKey": "<KEY>",
    "authDomain": "mars-8c17e.firebaseapp.com",
    "databaseURL": "https://mars-8c17e.firebaseio.com",
    "storageBucket": "mars-8c17e.appspot.com",
    "serviceAccount": "mars-8c17e-firebase-adminsdk-vxm71-3ca4b4127e.json"
}

firebase = pyrebase.initialize_app(config)
storage = firebase.storage()
db = firebase.database()

paused = False
pygame.mixer.init()

try:
    arduino = serial.Serial('COM6', 57600)
except serial.SerialException:
    # the rest of the script assumes the base is connected on COM6
    print("Failed to connect on COM6")


def getMessage():
    message = db.child().get()
    if message.val() is not None:
        storage.child("message.wav").download("message.wav")
        db.child().remove()
        arduino.write(b'A')  # tell the base a new message arrived
        pygame.mixer.music.load('mensagem.mp3')
        pygame.mixer.music.play()
        pygame.mixer.music.set_volume(1)
        time.sleep(5)
        # wait for the base to answer b'4' (robot undocked, intro finished)
        while arduino.read() != b'4':
            pass
        playMessage()


def playMessage():
    pygame.mixer.music.load("message.wav")
    pygame.mixer.music.play()
    pygame.mixer.music.set_volume(1)


def pause():
    pygame.mixer.music.pause()


def unpause():
    pygame.mixer.music.unpause()


while 1:
    getMessage()
    if arduino.in_waiting:  # in_waiting is a byte count, not a boolean
        x = arduino.read()
        if x == b'1':
            playMessage()
        elif x == b'2':
            pause()
        elif x == b'3':
            unpause()
<file_sep>/Inside Mars/Mars_base.ino
#include <SoftwareSerial.h>

SoftwareSerial BTSerial(10, 11); // RX, TX
char ligar; // command byte received from the PC
#define LED 5

int tempo = 10;
int brilho = 128;
int button1Pin = 7;
int button2Pin = 8;
int button3Pin = 9;
int button1State = 0;
int button2State = 0;
int button3State = 0;

void setup() {
    pinMode(button1Pin, INPUT);
    pinMode(button2Pin, INPUT);
    pinMode(button3Pin, INPUT);
    pinMode(LED, OUTPUT);
    BTSerial.begin(38400);
    Serial.begin(57600);
}

void loop() {
    button1State = digitalRead(button1Pin);
    button2State = digitalRead(button2Pin);
    button3State = digitalRead(button3Pin);
    // the three buttons map to play / pause / resume on the PC side
    if (button1State == HIGH) {
        Serial.print("1");
        delay(1500);
    }
    if (button2State == HIGH) {
        Serial.print("2");
        delay(1500);
    }
    if (button3State == HIGH) {
        Serial.print("3");
        delay(1500);
    }
    if (Serial.available() > 0) {
        ligar = Serial.read();
        if (ligar == 'A') {
            BTSerial.write(ligar);
            // pulse the LED until the robot acknowledges with 'a'
            while (true) {
                for (int i = 0; i < brilho; i++) {
                    analogWrite(LED, i);
                    delay(tempo);
                }
                for (int i = brilho; i > 0; i--) {
                    analogWrite(LED, i);
                    delay(tempo);
                }
                if (BTSerial.available() > 0) {
                    if (BTSerial.read() == 'a') {
                        Serial.write('4');
                        digitalWrite(LED, LOW);
                        break;
                    }
                }
            }
        }
    }
}
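For reference, the three programs above agree on a small single-byte serial protocol; collected in one place as a Python sketch (the byte values come straight from the sources, the constant names are mine):

# PC (Mars.py) -> base -> robot
NEW_MESSAGE = b'A'   # a new message was downloaded; the base relays 'A' to the robot

# robot -> base, then base -> PC
ROBOT_ACK = b'a'     # reed switch triggered: the robot stops waving the leaf
INTRO_DONE = b'4'    # the base's answer to the PC; Mars.py then plays message.wav

# base buttons -> PC
PLAY, PAUSE, RESUME = b'1', b'2', b'3'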
<repo_name>benjewman/React-Practice-Code-Challenge<file_sep>/sushi-saga-client/src/containers/SushiContainer.js import React, { Fragment } from 'react' import MoreButton from '../components/MoreButton' import Sushi from '../components/Sushi' const SushiContainer = (props) => { let sushiComponentArray = props.allSushi.map(sushi => { return <Sushi details={sushi} eaten={props.eaten}/> }) return ( <Fragment> <div className="belt"> { /* Render Sushi components here! */ sushiComponentArray.slice(props.sliceStart, props.sliceEnd) } <MoreButton handleMoreSushi={props.handleMoreSushi}/> </div> </Fragment> ) } export default SushiContainer
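SushiContainer above pages the belt by mapping every sushi to a component first and then rendering only a slice window between sliceStart and sliceEnd. The windowing itself is just list slicing, as in this Python sketch (illustrative only):

def belt_window(items, slice_start, slice_end):
    """Return the items currently visible on the belt, like the slice above."""
    return items[slice_start:slice_end]

all_sushi = ["maguro", "sake", "ebi", "unagi", "tamago"]  # hypothetical data
print(belt_window(all_sushi, 0, 4))   # first page
print(belt_window(all_sushi, 4, 8))   # after MoreButton advances the window

Note that slicing the raw list before mapping would avoid building components that are never rendered; the component maps first and slices after.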
<repo_name>ecgaebler/Practice<file_sep>/LeetCode/0059.py class Solution: def generateMatrix(self, n: int) -> List[List[int]]: result = [ [[] for _ in range(n)] for _ in range(n) ] y_start, y_end = 0, n x_start, x_end = 0, n counter = 1 while y_start < y_end and x_start < x_end: for x in range(x_start, x_end): #top side result[y_start][x] = counter counter += 1 for y in range(y_start + 1, y_end): #right side result[y][x_end - 1] = counter counter += 1 for x in range(x_end - 2, x_start - 1, -1): #bottom side result[y_end - 1][x] = counter counter += 1 for y in range(y_end - 2, y_start, -1): #left side result[y][x_start] = counter counter += 1 y_start += 1 y_end -= 1 x_start += 1 x_end -= 1 return result <file_sep>/LeetCode/1342.cpp class Solution { public: int numberOfSteps (int num) { int num_steps = 0; if(num < 0) { return -1; } while (num > 0) { if (num%2 == 0) { num = num/2; } else { num--; } num_steps++; } return num_steps; } };<file_sep>/LeetCode/0561.py def arrayPairSum(nums): nums.sort() min_sum = 0 for i in range(len(nums)//2): min_sum += nums[2*i] return min_sum #The idea behind this algorithm is that if we "use up" as many of the #smallest values as we can by putting them in pairs with other smallest #numbers. For example, when counting the min values in the pairs, any pair #that includes the smallest value will contribute that smallest value. The #second smallest value will also dominate its pair, unless it is paired #with the single smallest value. If you pair it that way, the third #smallest value will get counted while the second smallest won't. <file_sep>/LeetCode/0200.py from collections import deque class Solution: def numIslands(self, grid: List[List[str]]) -> int: #catch edge case with empty arrays if len(grid) == 0 or len(grid[0]) == 0: return 0 height = len(grid) width = len(grid[0]) #function for flooding an island and erasing all land tiles in it def flood(grid, x, y): if y in range(len(grid)) and x in range(len(grid[0])) and grid[y][x] == "1": grid[y][x] = "0" flood(grid, x-1, y) flood(grid, x+1, y) flood(grid, x, y-1) flood(grid, x, y+1) #loop through all elements in the grid, flooding islands as they are found islands = 0 for y in range(height): for x in range(width): if grid[y][x] == "1": islands += 1 flood(grid, x, y) return islands <file_sep>/AlgoExpert/spiral_traverse.py def spiralTraverse(matrix): result = [] if not matrix or not matrix[0]: return result startX, endX, startY, endY = 0, len(matrix[0]) - 1, 0, len(matrix) - 1 while startX <= endX and startY <= endY: for x in range(startX, endX + 1): result.append(matrix[startY][x]) for y in range(startY + 1, endY + 1): result.append(matrix[y][endX]) for x in reversed(range(startX, endX)): if startY == endY: break result.append(matrix[endY][x]) for y in reversed(range(startY + 1, endY)): if startX == endX: break result.append(matrix[y][startX]) startY += 1 endX -= 1 startX += 1 endY -= 1 return result <file_sep>/LeetCode/0003.py class Solution: def lengthOfLongestSubstring(self, s: str) -> int: left, right = 0, 0 #keep two pointers, one at the beginning of the window, one at the end longest = 0 #length of longest string active_chars = set() while right < len(s): if left == right: #if window size is 0 active_chars.add(s[right]) #add new element right += 1 #expand window to include new element elif s[right] in active_chars: #if next char is already in substring active_chars.remove(s[left]) #remove leftmost char left += 1 #shrink window else: #next char isn't in current string active_chars.add(s[right]) #add next 
char right += 1 #expand window current_size = right - left if current_size > longest: longest = current_size return longest <file_sep>/LeetCode/DEV_0695.py from collections import deque def maxAreaOfIsland(grid): def find_neighbors(x, y, matrix): """ Find viable immediate neighbors of a cell """ result = [] if x > 0 and matrix[y][x - 1] == 1: #check left square result.append((x - 1, y)) if x < len(matrix[0]) - 1 and matrix[y][x + 1] == 1: #right square result.append((x + 1, y)) if y > 0 and matrix[y - 1][x] == 1: #square above result.append((x, y - 1)) if y < len(matrix) - 1 and matrix[y + 1][x] == 1: #square below result.append((x, y + 1)) return result def dfs(x, y, matrix): """ Flood fill an island and return size of island """ area = 0 queue = deque() queue.append((x, y)) while queue: new_x, new_y = queue.popleft() matrix[new_y][new_x] = 2 #mark cell as visited area += 1 for neighbor in find_neighbors(new_x, new_y, matrix): queue.append(neighbor) return area max_area = 0 for y in range(len(grid)): for x in range(len(grid[0])): if grid[y][x] == 1: max_area = max(max_area, dfs(x, y, grid)) for row in grid: print(row) return max_area test1 = [[0,0,1,0,0,0,0,1,0,0,0,0,0], [0,0,0,0,0,0,0,1,1,1,0,0,0], [0,1,1,0,1,0,0,0,0,0,0,0,0], [0,1,0,0,1,1,0,0,1,0,1,0,0], [0,1,0,0,1,1,0,0,1,1,1,0,0], [0,0,0,0,0,0,0,0,0,0,1,0,0],[0,0,0,0,0,0,0,1,1,1,0,0,0],[0,0,0,0,0,0,0,1,1,0,0,0,0]] test2 = [[1,1,0,0,0],[1,1,0,0,0],[0,0,0,1,1],[0,0,0,1,1]] print(maxAreaOfIsland(test1)) print("\n\n") print(maxAreaOfIsland(test2)) <file_sep>/LeetCode/0217.py def containsDuplicate(nums): values = set() for num in nums: if num in values: return True values.add(num) return False <file_sep>/AlgoExpert/bst_construction.py # Do not edit the class below except for # the insert, contains, and remove methods. # Feel free to add new properties and methods # to the class. 
class BST:
    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None

    def insert(self, value):
        current_node = self
        while True:
            if value < current_node.value:
                if current_node.left is None:
                    current_node.left = BST(value)
                    break
                else:
                    current_node = current_node.left
            else:
                if current_node.right is None:
                    current_node.right = BST(value)
                    break  # only stop once the new node has been attached
                else:
                    current_node = current_node.right
        return self

    def contains(self, value):
        current = self
        while current is not None:
            if current.value > value:
                current = current.left
            elif current.value < value:
                current = current.right
            else:
                return True
        return False

    def remove(self, value, parent = None):
        current = self
        while current is not None:
            if value < current.value:
                parent = current
                current = current.left
            elif value > current.value:
                parent = current
                current = current.right
            else: #value matches current node's value
                if current.left is not None and current.right is not None:
                    #current node has two children
                    current.value = current.left.largest_value()
                    current.left.remove(current.value, current)
                elif parent is None:
                    if current.right is not None:
                        #current only has right child
                        current.value = current.right.value
                        current.left = current.right.left
                        current.right = current.right.right
                    elif current.left is not None:
                        #current node only has left child
                        current.value = current.left.value
                        current.right = current.left.right
                        current.left = current.left.left
                    else: #single-node tree
                        pass
                elif parent.left == current:
                    #promote whichever child exists (None covers the leaf case)
                    parent.left = current.left if current.left is not None else current.right
                else:
                    parent.right = current.left if current.left is not None else current.right
                break
        return self

    def largest_value(self):
        current = self
        while current.right is not None:
            current = current.right
        return current.value
<file_sep>/LeetCode/0344.py
def reverseString(s):
    """ Reverse a list of chars in place. Do not return anything. """
    left_idx, right_idx = 0, len(s) - 1
    while right_idx > left_idx:
        s[left_idx], s[right_idx] = s[right_idx], s[left_idx]
        left_idx += 1
        right_idx -= 1
<file_sep>/LeetCode/0205.py
class Solution:
    def isIsomorphic(self, s: str, t: str) -> bool:
        s_map = {} #maps a unique value for each letter
        s_pattern = [] #describes the pattern of letters relevant for testing isomorphism
        s_num = 0 #counter for distinguishing between groups of different letters
        t_map, t_pattern, t_num = {}, [], 0 #same as above, but for t
        for letter in s:
            if letter not in s_map: #if letter hasn't been identified before
                s_map[letter] = s_num #add it to the map
                s_num += 1 #increase counter so next new letter gets a unique "ID"
            s_pattern.append(s_map[letter])
        for letter in t:
            if letter not in t_map:
                t_map[letter] = t_num
                t_num += 1
            t_pattern.append(t_map[letter])
        return s_pattern == t_pattern
<file_sep>/LeetCode/0034.py
class Solution:
    def searchRange(self, nums: List[int], target: int) -> List[int]:
        if not nums:
            return [-1, -1]

        def first_between(array, target, l, r):
            """ Find the first instance of a value between index l and r. """
            while l + 1 < r:
                mid = l + (r - l) // 2
                if array[mid] < target:
                    l = mid
                else:
                    r = mid
            #post-processing
            if array[l] == target:
                return l
            elif array[r] == target:
                return r
            return -1

        def last_between(array, target, l, r):
            """ Find the last instance of a value between index l and r. """
            while l + 1 < r:
                mid = l + (r - l) // 2
                if array[mid] <= target:
                    l = mid
                else:
                    r = mid
            #post-processing
            if array[r] == target:
                return r
            elif array[l] == target:
                return l
            return -1

        #binary search until you find the target, then split from there
        start, end = 0, len(nums) - 1
        while start + 1 < end:
            mid = start + (end - start) // 2
            if nums[mid] < target:
                start = mid
            elif nums[mid] > target:
                end = mid
            else:
                #if nums[mid] == target, split binary search from here to find both edges.
                break

        first_target = first_between(nums, target, start, end)
        last_target = last_between(nums, target, start, end)
        return [first_target, last_target]<file_sep>/LeetCode/0349.py
class Solution:
    def intersection(self, nums1: List[int], nums2: List[int]) -> List[int]:
        other_nums = set()
        if len(nums1) <= len(nums2):
            nums = nums1
            other_nums = set(nums2)
        else:
            nums = nums2
            other_nums = set(nums1)
        result = set()
        for num in nums:
            if num in other_nums:
                result.add(num)
        return list(result)
<file_sep>/LeetCode/0389.py
def findTheDifference(s, t):
    s_sum = 0
    for char in s:
        s_sum += ord(char)
    t_sum = 0
    for char in t:
        t_sum += ord(char)
    return chr(t_sum - s_sum)

#TEST CODE
tests = [("abcd","abcde"), ("","y"), ("a","aa"), ("ae","eaa")]
for test in tests:
    print('input strings: "' + test[0] + '" & "' + test[1] + '"')
    print("    added char: '" + findTheDifference(test[0], test[1]) + "'")
<file_sep>/LeetCode/0290.py
class Solution:
    def wordPattern(self, pattern: str, str: str) -> bool:
        words = str.split()
        #words cannot fit pattern if their lengths don't match
        if len(words) != len(pattern) or len(words) == len(pattern) == 0:
            return False
        matched = {} #dictionary matching word to pattern char
        used_chars = set() #set of chars encountered so far
        for i in range(len(words)):
            #check if current word already matches a char other than the current char
            if words[i] in matched:
                if matched[words[i]] != pattern[i]:
                    return False
            #current word has not been seen before. Return False if current char has.
elif pattern[i] in used_chars: return False else: matched[words[i]] = pattern[i] used_chars.add(pattern[i]) return True <file_sep>/LeetCode/0709.py def toLowerCase(s): charlist = [] for char in s: if char.isupper(): charlist.append(char.lower()) else: charlist.append(char) return ''.join(charlist) #TESTING ''' tests = [ "", "acdc", "c4s", "C6G", "4G" ] for test in tests: print(toLowerCase(test)) ''' <file_sep>/AlgoExpert/monotonic_array.py def isMonotonic(array): if len(array) < 2: return True direction = 0 for i in range(len(array) - 1): if array[i] < array[i+1]: #positive slope if direction == 0: direction = 1 elif direction < 0: return False elif array[i] > array[i+1]: #negative slope if direction == 0: direction = -1 elif direction > 0: return False return True<file_sep>/LeetCode/0697.py def findShortestSubArray(nums): if len(nums) == 0: return 0 most_frequent = [] max_freq = 0 frequencies = {} endpoints = {} for i, num in enumerate(nums): if num not in frequencies: frequencies[num] = 1 endpoints[num] = [i, i] else: frequencies[num] += 1 endpoints[num][1] = i if frequencies[num] > max_freq: max_freq = frequencies[num] most_frequent = [num] elif frequencies[num] == max_freq: most_frequent.append(num) min_size = len(nums) for num in most_frequent: span = endpoints[num][1] - endpoints[num][0] + 1 if span < min_size: min_size = span return min_size #TESTING ''' tests = [ [1,2,2,3,1], #should be 2 [1,2,2,3,1,4,2],#should be 6 [9], #should be 1 [], #should be 0 [1,2], #should be 1 [1,2,1,0] #should be 3 ] for test in tests: print(findShortestSubArray(test)) ''' <file_sep>/AlgoExpert/balanced_brackets.py """ Write a function that takes in a string made up of brackets (such as "(", "[", and "{") and other optional characters. The function should return wether the string is balanced with regards to brackets. example: "([])(){}(())()()" should return True because it's balanced. """ def balancedBrackets(string): brackets = [] #stack of unresolved brackets matches = {'(':')', '[':']', '{':'}', '<':'>'} for char in string: if char in matches.keys(): #char is an open bracket brackets.append(char) elif char in matches.values(): #char is a close bracket if not brackets: return False if matches[brackets.pop()] != char: return False return not brackets #TEST CODE BELOW #these should return False: test0 = "(" test1 = "]" test2 = "[{]}" test3 = "[([)]]" test4 = "{]" test5 = "[)" #these should return True: test6 = "" test7 = "a" test8 = "()" test9 = "([])" testA = "([]{})" testB = "[[]]" testC = "a[bc(d)e{f}]gh" testD = "()[]{}" testE = "([{}])" testF = "([])(){}(())()()" tests = [test0, test1, test2, test3, test4, test5, test6, test7, test8, test9, testA, testB, testC, testD, testE, testF] for test in tests: print(balancedBrackets(test)) <file_sep>/LeetCode/0110.py # Definition for a binary tree node. 
# class TreeNode: # def __init__(self, val=0, left=None, right=None): # self.val = val # self.left = left # self.right = right from collections import deque class Solution: def isBalanced(self, root: TreeNode) -> bool: if not root: return True def height(node): if not node: return (0) left_height = height(node.left) right_height = height(node.right) if left_height < 0 or right_height < 0: return -1 if abs(left_height - right_height) > 1: return -1 return 1 + max(left_height, right_height) height = height(root) return height > 0 ''' TEST CASES USED ON LEETCODE [3,9,20,null,null,15,7] [1,2,2,3,3,null,null,4,4] [1,null,null] [] [1,null,2,null,3] [1,2,3,4,5,6,null,8] ''' <file_sep>/LeetCode/0069.cpp class Solution { public: int mySqrt(int x) { //sanitize input before using log2 if(x <= 0) { return 0; } //Make initial guess by right-shifting x by half its binary digit length double guess = 1.0 * (x >> static_cast<int>(log2(x))); //Use Newton's method for improving guess float error = 0.5; while ( abs(pow(guess, 2) - x) > error ) { guess = guess - (pow(guess, 2) - x) / (2 * guess); } return static_cast<int>(guess); } };<file_sep>/LeetCode/0515.py # Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None from collections import deque class Solution: def largestValues(self, root: TreeNode) -> List[int]: if not root or root.val == None: return [] result = [] queue = deque() queue.append((root, 0)) while queue: node, row = queue.popleft() if row + 1 > len(result): result.append(node.val) else: result[row] = max(result[row], node.val) if node.right: queue.append((node.right, row + 1)) if node.left: queue.append((node.left, row + 1)) return result<file_sep>/AlgoExpert/invert_binary_tree.py def invertBinaryTree(tree): def recurse_inverse(root): if root is None: return root.left, root.right = recurse_inverse(root.right), recurse_inverse(root.left) return root return recurse_inverse(tree) <file_sep>/LeetCode/0394.py class Solution: def decodeString(self, s: str) -> str: """ decode a string """ def decode_chars(char_list): """ decode a list of chars """ result = [] digits = set(str(_) for _ in range(10)) count_list = [] bracket_num = 0 #+1 for open brackets, -1 for close brackets temp = [] for char in char_list: if char == "[": if bracket_num > 0: #we still have unclosed brackets temp.append(char) #include bracket in temp string bracket_num += 1 elif char == "]": bracket_num -= 1 if bracket_num > 0: #brackets still not closed temp.append(char) #keep adding to substring else: #this bracket closes a substring decoded_substring = decode_chars(temp) #recurse on substring count = int(''.join(count_list)) count_list = [] #reset count_list temp = [] #reset temp substring result.extend(count*(decoded_substring)) else: if bracket_num > 0: temp.append(char) else: if char in digits: count_list.append(char) #build char list representing an int else: result.append(char) return result return ''.join(decode_chars(list(s))) <file_sep>/AlgoExpert/min_height_bst.py from collections import deque def minHeightBst(array): root_idx = len(array)//2 root = BST(array[root_idx]) #Use a queue to determine what order values are inserted queue = deque() queue.append((0, root_idx)) queue.append((root_idx + 1, len(array))) while queue: min_idx, max_idx = queue.popleft() if min_idx < max_idx: #insert midpoint of subtree as new node node_idx = (min_idx + max_idx)//2 root.insert(array[node_idx]) #enqueue new nodes left and right subtrees queue.append((min_idx, 
node_idx)) queue.append((node_idx + 1, max_idx)) return root class BST: def __init__(self, value): self.value = value self.left = None self.right = None def insert(self, value): if value < self.value: if self.left is None: self.left = BST(value) else: self.left.insert(value) else: if self.right is None: self.right = BST(value) else: self.right.insert(value) <file_sep>/AlgoExpert/longest_peak.py def longestPeak(array): longest_peak = 0 if len(array) < 3: return longest_peak #find peaks peaks = [] for i in range(1, len(array) - 1): if array[i - 1] < array[i] > array[i+1]: peaks.append(i) #determine size of peaks for peak_idx in peaks: peak_length = 1 left_idx, right_idx = peak_idx, peak_idx #count left slope while left_idx > 0 and array[left_idx - 1] < array[left_idx]: peak_length += 1 left_idx -= 1 #count right slope while right_idx < len(array) - 1 and array[right_idx + 1] < array[right_idx]: peak_length += 1 right_idx += 1 if peak_length > longest_peak: longest_peak = peak_length return longest_peak<file_sep>/LeetCode/0075.py class Solution: def sortColors(self, nums: List[int]) -> None: if len(nums) < 2: return low, unsorted, high = 0, 0, len(nums) - 1 while (high >= unsorted): if nums[unsorted] == 0: #ball is red temp = nums[low] nums[low] = nums[unsorted] nums[unsorted] = temp low += 1 #balls swapped unsorted += 1 #look at next elif nums[unsorted] == 1: #ball is white unsorted += 1 #already sorted, look at next. else: #ball must be blue temp = nums[unsorted] nums[unsorted] = nums[high] nums[high] = temp high -= 1 #one more blue ball at end<file_sep>/LeetCode/0520.py def detectCapitalUse(word): return (word.isupper() or word.islower() or (word[0].isupper() and word[1:].islower())) <file_sep>/LeetCode/0695.py from collections import deque class Solution: def maxAreaOfIsland(self, grid: List[List[int]]) -> int: def find_neighbors(x, y, matrix): """ Find viable immediate neighbors of a cell """ result = [] if x > 0 and matrix[y][x - 1] == 1: #check left square result.append((x - 1, y)) if x < len(matrix[0]) - 1 and matrix[y][x + 1] == 1: #right square result.append((x + 1, y)) if y > 0 and matrix[y - 1][x] == 1: #square above result.append((x, y - 1)) if y < len(matrix) - 1 and matrix[y + 1][x] == 1: #square below result.append((x, y + 1)) return result def dfs(x, y, matrix): """ Flood fill an island and return size of island """ area = 0 queue = deque() queue.append((x, y)) matrix[y][x] = 2 #mark first cell as visited while queue: new_x, new_y = queue.popleft() area += 1 for neighbor in find_neighbors(new_x, new_y, matrix): queue.append(neighbor) matrix[neighbor[1]][neighbor[0]] = 2 #mark cell as visited return area max_area = 0 for y in range(len(grid)): for x in range(len(grid[0])): if grid[y][x] == 1: max_area = max(max_area, dfs(x, y, grid)) return max_area<file_sep>/LeetCode/0747.py class Solution: def dominantIndex(self, nums: List[int]) -> int: largest_index = 0 if len(nums) == 1: return largest_index largest = float("-inf") second_largest = float("-inf") for i, num in enumerate(nums): if num > largest: second_largest = largest largest = num largest_index = i else: second_largest = max(second_largest, num) if largest >= 2*second_largest: return largest_index return -1 <file_sep>/LeetCode/0136.py ''' Given a non-empty array of integers nums, every element appears twice except for one. Find that single one. Follow up: Could you implement a solution with a linear runtime complexity and without using extra memory? 
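Hint: XOR works here because a ^ a == 0 and XOR is associative and commutative,
so every paired value cancels out and only the single number survives.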
''' def singleNumber(nums): for i in range(1, len(nums)): nums[0] ^= nums[i] return nums[0] ''' Constraints: 1 <= nums.length <= 3 * 104 -3 * 104 <= nums[i] <= 3 * 104 Each element in the array appears twice except for one element which appears only once. ''' #TEST CODE tests = ([2,2,1], [4,1,2,1,2], [-5], [0,1,7,1,0]) answers = (1, 4, -5, 7) for i, test in enumerate(tests): test_str = str(test) if singleNumber(test) == answers[i]: print("test PASS") else: print("test FAIL") print(" input: " + test_str) print(" expected output: " + str(answers[i])) print(" actual output: " + str(test[0])) <file_sep>/LeetCode/0554.py class Solution: def leastBricks(self, wall: List[List[int]]) -> int: num_rows = len(wall) min_bricks = num_rows #initialize with max possible number of bricks crossed edges_at = {} #dict to keep track of how many edges are at each position along the wall wall_width = 0 #If the wall has width 1, the line most cross every brick. if len(wall[0]) == 1 and wall[0][0] == 1: return num_rows #Count the number of edges at each relevant position along the wall. #We only need to record positions where there are edges. for row in wall: position = 0 for brick in row: position += brick if position not in edges_at: edges_at[position] = 0 edges_at[position] += 1 if wall_width == 0: wall_width = position for position in edges_at: if 0 < position < wall_width: #ignore the outside edges bricks_crossed = num_rows - edges_at[position] min_bricks = min(min_bricks, bricks_crossed) return min_bricks<file_sep>/AlgoExpert/three_number_sum.py def threeNumberSum(array, targetSum): sorted_array = sorted(array) triplets = [] for i in range(len(array) - 2): left = i + 1 right = len(array) - 1 while left < right: current_sum = sorted_array[i] + sorted_array[left] + sorted_array[right] if current_sum < targetSum: left += 1 elif current_sum > targetSum: right -= 1 else: triplets.append([sorted_array[i], sorted_array[left], sorted_array[right]]) left += 1 right -= 1 return triplets<file_sep>/LeetCode/0599.py class Solution: def findRestaurant(self, list1: List[str], list2: List[str]) -> List[str]: dict1 = {} for i, location in enumerate(list1): dict1[location] = i least_sum = float("inf") common = [] for i, location in enumerate(list2): if location in dict1: index_sum = i + dict1[location] if index_sum == least_sum: common.append(location) if index_sum < least_sum: least_sum = index_sum common = [location] return common<file_sep>/LeetCode/0537.py class Solution: def complexNumberMultiply(self, a: str, b: str) -> str: split_a = a.split("+") a_real, a_imag = int(split_a[0]), int(split_a[1][:-1]) split_b = b.split("+") b_real, b_imag = int(split_b[0]), int(split_b[1][:-1]) final_real = a_real * b_real - a_imag * b_imag final_imag = a_real * b_imag + a_imag * b_real return str(final_real) + "+" + str(final_imag) + "i"<file_sep>/LeetCode/0143.py # Definition for singly-linked list. # class ListNode: # def __init__(self, x): # self.val = x # self.next = None class Solution: def reorderList(self, head: ListNode) -> None: """ Do not return anything, modify head in-place instead. 
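Approach: collect the nodes into a list, then rewire the next pointers from both
ends inward (L0 -> Ln -> L1 -> Ln-1 -> ...), finally terminating at the middle.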
""" l = [] current_node = head while current_node != None: l.append(current_node) current_node = current_node.next list_len = len(l) if list_len <= 1: #list too small to need any changes return left_iter = 0 #iterator pointing to left side of list right_iter = list_len-1 #iterator pointing to right side while(left_iter < right_iter): l[left_iter].next = l[right_iter] left_iter += 1 l[right_iter].next = l[left_iter] right_iter -= 1 l[left_iter].next = None <file_sep>/LeetCode/0013.py class Solution: def romanToInt(self, s: str) -> int: values = {"I":1, "V":5, "X":10, "L":50, "C":100, "D":500, "M":1000} prefixes = {"I":["V","X"], "X":["L","C"], "C":["D","M"]} result = 0 prev_letter = "" for letter in reversed(s): if letter in prefixes and prev_letter in prefixes[letter]: result -= values[letter] else: result += values[letter] prev_letter = letter return result <file_sep>/LeetCode/0744.py def nextGreatestLetter(letters, target): l, r = 0, len(letters) - 1 while l + 1 < r: mid = l + (r - l) // 2 if ord(letters[mid]) <= ord(target): l = mid else: r = mid if ord(letters[r]) <= ord(target): return letters[0] if ord(letters[l]) > ord(target): return letters[0] if ord(letters[l]) <= ord(target): return letters[r] return letters[l] #TEST CODE tests = [ (["c","f","j"],"a","c"), (["c","f","j"],"c","f"), (["c","f","j"],"d","f"), (["c","f","j"],"g","j"), (["c","f","j"],"j","c"), (["c","f","j"],"k","c"), (["e","e","e","e","e","e","n","n","n","n"],"e","n"), (["e","e","e","e","e","e","n","n","n","n"],"n","e")] do_tests = True if do_tests: for letters, target, answer in tests: if nextGreatestLetter(letters, target) == answer: print("Test PASS") else: print("Test FAIL") <file_sep>/LeetCode/0204.py def countPrimes(n): primes = [] for i in range(2, n): could_be_prime = True for prime in primes: if (i%prime) == 0: #i is not prime could_be_prime = False break if could_be_prime: primes.append(i) return len(primes) <file_sep>/LeetCode/0448.py class Solution: def findDisappearedNumbers(self, nums: List[int]) -> List[int]: uncounted = set(_ for _ in range(1, len(nums) + 1)) for num in nums: uncounted.discard(num) return list(uncounted)<file_sep>/LeetCode/0436.java import java.util.*; class Solution { public int[] findRightInterval(int[][] intervals) { if(intervals.length == 0) { int[] result = {}; return result; } if(intervals.length == 1) { int[] result = {-1}; return(result); } HashMap<Integer, Integer> indexMap = new HashMap<>(); List<Integer> startIndices = new ArrayList<Integer>(); for(int i = 0; i < intervals.length; i++) { indexMap.put(intervals[i][0], i); startIndices.add(intervals[i][0]); } Collections.sort(startIndices); int[] result = new int[intervals.length]; for(int i = 0; i < intervals.length; i++) { int nextIdx = indexMap.get(startIndices.get(nextIndex(startIndices, intervals[i][1]))); if(intervals[nextIdx][0] >= intervals[i][1]) { result[i] = nextIdx; } else { result[i] = -1; } } return result; } private int nextIndex(List<Integer> array, int target) { int l = 0, r = array.size() - 1; int mid; while(l + 1 < r) { mid = l + (r - l) / 2; if(array.get(mid) < target){ l = mid; } else { r = mid; } } if(array.get(l) >= target) { return l; } else { return r; } } }<file_sep>/LeetCode/0485.py def findMaxConsecutiveOnes(nums): max_ones = 0 current_ones = 0 for value in nums: if value == 0: max_ones = max(max_ones, current_ones) current_ones = 0 else: current_ones += 1 return max(max_ones, current_ones) #TESTING CODE tests = [([1,0,1,1,0,1], 2), ([1], 1), ([0], 0), ([1,0], 1), ([0,1], 1), ([1,1,0,1], 2), 
([1,0,1,1], 2), ([0,0,0], 0)] for test in tests: print(findMaxConsecutiveOnes(test[0]) == test[1]) <file_sep>/LeetCode/0443.py class Solution: def compress(self, chars: List[str]) -> int: if not chars: return 0 char_count = 0 write_idx = 0 current_char = chars[0] for scan_idx in range(0, len(chars)): if chars[scan_idx] == current_char: char_count += 1 else: if char_count > 1: chars[write_idx] = current_char write_idx += 1 temp = str(char_count) for char in temp: chars[write_idx] = char write_idx += 1 else: chars[write_idx] = current_char write_idx += 1 char_count = 1 current_char = chars[scan_idx] if char_count > 1: chars[write_idx] = current_char write_idx += 1 temp = str(char_count) for char in temp: chars[write_idx] = char write_idx += 1 else: chars[write_idx] = current_char write_idx += 1 del chars[write_idx:] return len(chars)<file_sep>/LeetCode/0074.py class Solution: def searchMatrix(self, matrix: List[List[int]], target: int) -> bool: if len(matrix) == 0 or len(matrix[0]) == 0: return False #find row index l, r = 0, len(matrix) - 1 while l + 1 < r: mid = l + (r - l) // 2 if matrix[mid][0] > target: r = mid else: l = mid if matrix[r][0] <= target: row = r else: row = l #find element index l, r = 0, len(matrix[0]) - 1 while l + 1 < r: mid = l + (r - l) // 2 if matrix[row][mid] > target: r = mid else: l = mid return matrix[row][r] == target or matrix[row][l] == target<file_sep>/LeetCode/0098.py # Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution: def isValidBST(self, root: TreeNode) -> bool: def evalBST(node, min_val = float("-inf"), max_val = float("inf")): if not node: #empty branch is always balanced return True if node.val == None: return True #invalid BST if not min_val < node.val < max_val: return False #node's value must be strictly less than min_val and greater than max_val return evalBST(node.left, min_val, node.val) and evalBST(node.right, node.val, max_val) return evalBST(root) <file_sep>/AlgoExpert/validate_bst.py from collections import deque # This is an input class. Do not edit. class BST: def __init__(self, value): self.value = value self.left = None self.right = None def validateBst(tree): if tree is None: return True min_value = float("-inf") max_value = float("inf") queue = deque() queue.append((tree, min_value, max_value)) while queue: current, min_value, max_value = queue.popleft() if current is None: continue if current.value >= max_value or current.value < min_value: return False #if current.left is not None and current.left.value >= current.value: # return False #if current.right is not None and current.right.value < current.value: # return False queue.append((current.left, min_value, current.value)) queue.append((current.right, current.value, max_value)) return True ''' #here's a recursive solution: from collections import deque def validateBst(tree): if tree is None: return True min_value = float("-inf") max_value = float("inf") def validate_helper(node, min_val, max_val): if node is None: return True if node.value < min_val or node.value >= max_val: return False if node.left is not None: if not validate_helper(node.left, min_val, node.value): return False if node.right is not None: if not validate_helper(node.right, node.value, max_val): return False return True return validate_helper(tree, min_value, max_value) '''<file_sep>/Other/mock_technical_2020.04.02.py """ Given two series of keystrokes, determine if they result in the same output. 
The character "<" represents a backspace.

Example 1:
keys1 = "asd" => "asd"
keys2 = "asdd" => "asdd"
false

Example 2:
keys1 = "asd" => "asd"
keys2 = "asx<d" => "asd"
true
"""

def compare_keys(keys1, keys2):
    def next_char(keys, index):
        """
        Determines the index of the next char to process in keys.
        Returns -1 if nothing left to process.
        """
        backspaces = 0
        while index >= 0:
            if keys[index] == "<":
                backspaces += 1
            else:
                if backspaces == 0:
                    return index
                backspaces -= 1
            index -= 1
        return -1

    #initialize index 1 and 2 to the first valid index (if it exists)
    index1 = next_char(keys1, len(keys1) - 1)
    index2 = next_char(keys2, len(keys2) - 1)
    while index1 >= 0 and index2 >= 0:
        if keys1[index1] != keys2[index2]:
            return False
        index1 = next_char(keys1, index1 - 1)
        index2 = next_char(keys2, index2 - 1)
    return index1 == index2
<file_sep>/LeetCode/0409.py
class Solution:
    def longestPalindrome(self, s: str) -> int:
        max_length = 0
        unpaired = set()
        for char in s:
            if char in unpaired:
                unpaired.discard(char)
                max_length += 2
            else:
                unpaired.add(char)
        if len(unpaired) >= 1:
            max_length += 1
        return max_length
<file_sep>/LeetCode/0841.py
from collections import deque  # needed for the BFS queue below

class Solution:
    def canVisitAllRooms(self, rooms: List[List[int]]) -> bool:
        visited = set()
        queue = deque()
        queue.append(0)
        while queue:
            room = queue.popleft()
            visited.add(room)
            keys = rooms[room]
            for key in keys:
                if key not in visited:
                    queue.append(key)
        return len(visited) == len(rooms)
<file_sep>/AlgoExpert/permutations.py
"""
Write a function that takes in an array of unique integers and returns an
array of all permutations of those integers, in no particular order.

If the input is empty, the function should return an empty array.
"""
def getPermutations(array):
    if len(array) == 1:
        return [array]
    if len(array) == 0:
        return []
    result = []
    for i, num in enumerate(array):
        #add current num to beginning of all permutations that exclude it
        for perm in getPermutations(array[:i] + array[i+1:]):
            result.append([num] + perm)
    return result

#TEST CODE:
'''
test0 = [1,2,3]
test1 = []
test2 = [5]
tests = [test0, test1, test2]
for test in tests:
    print(f"{test} —> {getPermutations(test)}")
'''
<file_sep>/LeetCode/0448.cpp
class Solution {
public:
    vector<int> findDisappearedNumbers(vector<int>& nums) {
        //Keep track of elements e that have been found, by making value at index e negative
        for(int i = 0; i < nums.size(); i++) {
            int e = abs(nums[i]) - 1; //adjust value to prevent index out-of-bounds
            if(nums[e] > 0) {
                nums[e] = -nums[e];
            }
        }
        //search for positive values; their index corresponds to a value that didn't exist in nums
        vector<int> result;
        for(int i = 0; i < nums.size(); i++) {
            if(nums[i] > 0) {
                result.push_back(i + 1); //readjust value to original value before pushing
            }
        }
        return result;
    }
};<file_sep>/LeetCode/0017.py
class Solution:
    def letterCombinations(self, digits: str) -> List[str]:
        valid_digits = set(["2","3","4","5","6","7","8","9"])
        for digit in digits: #catch invalid digit edge case
            if digit not in valid_digits:
                return []
        if len(digits) == 0: #catch empty edge case
            return []
        results = [] #solutions list
        letters = {} #dictionary mapping digits onto possible letters
        letters["2"] = ["a","b","c"]
        letters["3"] = ["d","e","f"]
        letters["4"] = ["g","h","i"]
        letters["5"] = ["j","k","l"]
        letters["6"] = ["m","n","o"]
        letters["7"] = ["p","q","r","s"]
        letters["8"] = ["t","u","v"]
        letters["9"] = ["w","x","y","z"]

        def permutations(digit_index, substring):
            """find all letter combinations after substring, given digit_index"""
            nonlocal results
            nonlocal digits
            nonlocal letters
            if digit_index >= 
len(digits): #reached end of number results.append(substring) #add current string to results list return for letter in letters[digits[digit_index]]: permutations(digit_index + 1, substring + letter) permutations(0, "") return results <file_sep>/AlgoExpert/single_cycle_checker.py def hasSingleCycle(array): n = len(array) i = 0 numjumps = 0 while numjumps < n: if numjumps > 0 and i == 0: return False i = i + array[i] while i < 0: i += len(array) while i >= len(array): i -= len(array) numjumps += 1 return i == 0<file_sep>/AlgoExpert/number_of_ways_to_make_change.py def numberOfWaysToMakeChange(n, denoms): ways = [0 for i in range(n + 1)] ways[0] = 1 for denom in denoms: for target in range(1, n + 1): if denom <= target: ways[target] += ways[target - denom] return ways[n] <file_sep>/LeetCode/0367.py class Solution: def isPerfectSquare(self, num: int) -> bool: if num <= 0: return False def sqrt_estimate(square): estimate = square >> square.bit_length()//2 while abs(estimate**2 - square) > 1: estimate = (estimate + square/estimate)/2.0 return int(estimate) estimate = sqrt_estimate(num) return estimate**2 == num<file_sep>/LeetCode/0202.py class Solution: def isHappy(self, n: int) -> bool: def nextNum(num): """ returns the sum of the squares of the digits in num """ result = 0 while num > 0: result += (num % 10) ** 2 num = num // 10 return result tortoise, hare = n, nextNum(n) while tortoise != hare: tortoise = nextNum(tortoise) #advance tortoise one step hare = nextNum(nextNum(hare)) #advance hare two steps if tortoise == 1 or hare == 1: return True return hare == 1 <file_sep>/LeetCode/0733.py from collections import deque def floodFill(image, sr, sc, newColor): start_color = image[sr][sc] queue = deque() visited = set() queue.append( (sr, sc) ) while queue: cell_row, cell_col = queue.popleft() if (cell_row, cell_col) not in visited: neighbors = [] if cell_row > 0: neighbors.append( (cell_row - 1, cell_col) ) if cell_row < len(image) - 1: neighbors.append( (cell_row + 1, cell_col) ) if cell_col > 0: neighbors.append( (cell_row, cell_col - 1) ) if cell_col < len(image[0]) - 1: neighbors.append( (cell_row, cell_col + 1) ) visited.add( (cell_row, cell_col) ) for neighbor_row, neighbor_col in neighbors: if image[neighbor_row][neighbor_col] == start_color: queue.append( (neighbor_row, neighbor_col) ) image[cell_row][cell_col] = newColor visited.add( (cell_row, cell_col) ) return image <file_sep>/LeetCode/0133.py """ # Definition for a Node. class Node: def __init__(self, val, neighbors): self.val = val self.neighbors = neighbors """ from collections import deque class Solution: def cloneGraph(self, node: 'Node') -> 'Node': if not node: #edge case where starting node is null return None stack = deque([node]) #for keeping track of nodes to clone clone_dict = {} #map original nodes to cloned nodes cloned = set() #for noting which nodes have been cloned and recorded in clone_dict #traverse graph, cloning nodes as new neighborless nodes while stack: current_node = stack.popleft() cloned.add(current_node) for neighbor in current_node.neighbors: if neighbor not in cloned: #as long as we haven't cloned it yet... 
stack.append(neighbor) #add neighbor nodes to stack new_node = Node(current_node.val, []) #copy the value (mark neighbors later) clone_dict[current_node] = new_node #map original node to its clone #traverse graph a second time, adding neighborhood for original_node in clone_dict: cloned_node = clone_dict[original_node] #find node clone for neighbor in original_node.neighbors: cloned_neighbor = clone_dict[neighbor] #find neighbor clone cloned_node.neighbors.append(cloned_neighbor) #add neighbor clone to list return clone_dict[node] <file_sep>/LeetCode/0541.java class Solution { public String reverseStr(String s, int k) { int reverseCount = 0; int insertIdx = 0; StringBuilder str = new StringBuilder(); for(int i = 0, n = s.length(); i < n; i++) { if(reverseCount <= 0) { insertIdx = i; } str.insert(insertIdx, s.charAt(i)); reverseCount++; if(reverseCount >= k) { reverseCount = -k; } } return str.toString(); } }<file_sep>/LeetCode/0064.py class Solution: def minPathSum(self, grid: List[List[int]]) -> int: if len(grid) == 0 or len(grid[0]) == 0: return -1 for y in range(len(grid)): for x in range(len(grid[0])): if x > 0: if y > 0: grid[y][x] += min(grid[y-1][x], grid[y][x-1]) else: grid[y][x] += grid[y][x-1] else: if y > 0: grid[y][x] += grid[y-1][x] return grid[-1][-1]<file_sep>/LeetCode/0274.py class Solution: def hIndex(self, citations: List[int]) -> int: sorted_citations = sorted(citations, reverse=True) if not citations or sorted_citations[0] == 0: return 0 if len(citations) == 1 and citations[0] > 0: return 1 l, r = 0, len(citations) - 1 while l + 1 < r: mid = l + (r - l) // 2 if mid + 1 < sorted_citations[mid]: l = mid elif mid + 1 > sorted_citations[mid]: r = mid else: return mid + 1 if r + 1 > sorted_citations[r]: return r else: return r + 1<file_sep>/LeetCode/0539.py class Solution: def findMinDifference(self, timePoints: List[str]) -> int: def difference(time1, time2): hours1, minutes1 = time1.split(":") hours2, minutes2 = time2.split(":") time1 = int(hours1) * 60 + int(minutes1) time2 = int(hours2) * 60 + int(minutes2) return time2 - time1 min_difference = float("inf") timeline = sorted(timePoints) for i in range(1, len(timeline)): min_difference = min(min_difference, difference(timeline[i-1], timeline[i])) #also check time between beginning and end of timeline, due to wraparound. 
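# e.g. ["23:59", "00:00"]: difference() gives 0 - 1439 = -1439, and the +1440
# wraparound term turns that into the correct answer of 1 minute.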
min_difference = min(min_difference, difference(timeline[-1], timeline[0]) + 1440) return min_difference <file_sep>/LeetCode/0049.py class Solution: def groupAnagrams(self, strs: List[str]) -> List[List[str]]: anagrams = {} for word in strs: key = ''.join(sorted(word)) #alphabetize key to ensure all anagrams will produce the same key if key in anagrams: #check if this anagram already exists newlist = anagrams[key] #look up anagrams list for "key" newlist.append(word) #add the newest word to anagram list else: anagrams[key] = [word] #key did not exist; add key:word pair to dictionary result = [] for key in anagrams: result.append(anagrams[key]) return result<file_sep>/LeetCode/0497.py from random import randint from bisect import bisect_left class Solution: def __init__(self, rects: List[List[int]]): self.rects = rects self.weights = [] current_end = 0 for rect in rects: x1, y1, x2, y2 = rect width = 1 + x2 - x1 height = 1 + y2 - y1 current_end += width * height self.weights.append(current_end) def pick(self) -> List[int]: rand_number = randint(1,self.weights[-1]) x1, y1, x2, y2 = self.rects[bisect_left(self.weights, rand_number)] return [randint(x1, x2), randint(y1, y2)] # Your Solution object will be instantiated and called as such: # obj = Solution(rects) # param_1 = obj.pick()<file_sep>/LeetCode/0345.py def reverseVowels(s): stack = [] vowels = "aeiouAEIOU" for char in s: if char in vowels: stack.append(char) result = [] for char in s: if char in vowels: result.append(stack.pop()) else: result.append(char) return ''.join(result) #TEST CODE test_inputs = ["hello","leetcode","","o","x","oxa","xox","xaex"] test_expects = ["holle","leotcede","","o","x","axo","xox","xeax"] for i, test in enumerate(test_inputs): print(reverseVowels(test) == test_expects[i]) <file_sep>/AlgoExpert/remove_kth_node_from_end.py # This is an input class. Do not edit. 
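# Added note: the solution below makes two passes (one to measure the list,
# one to unlink the node at index length - k), giving O(n) time and O(1) space.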
class LinkedList:
    def __init__(self, value):
        self.value = value
        self.next = None


def removeKthNodeFromEnd(head, k):
    # Determine the length of the list
    length = 0
    node = head
    while node:
        length += 1
        node = node.next

    target_idx = length - k  #index of node to remove
    if target_idx == 0:
        second_node = head.next
        head.value = second_node.value
        head.next = second_node.next
        second_node.next = None
        return head

    #progress through list until you get to node with index == target_idx
    parent = None
    node = head
    node_idx = 0
    while node_idx < target_idx:
        parent = node
        node = node.next
        node_idx += 1
    parent.next = node.next  #make parent point past target node
    node.next = None
    return head

#TEST CODE BELOW:
'''
node0 = LinkedList(0)
node1 = LinkedList(1)
node0.next = node1
node2 = LinkedList(2)
node1.next = node2
node3 = LinkedList(3)
node2.next = node3
node4 = LinkedList(4)
node3.next = node4

head = removeKthNodeFromEnd(node0, 2)
while head:
    print(f"{head.value}-->")
    head = head.next
'''
<file_sep>/AlgoExpert/kadanes_algorithm.py
def kadanesAlgorithm(array):
    max_here = float("-inf")
    max_total = float("-inf")
    for i in range(len(array)):
        if array[i] > max_here + array[i]:
            max_here = array[i]
        else:
            max_here = max_here + array[i]
        max_total = max(max_total, max_here)
    return max_total
<file_sep>/LeetCode/0355.py
from collections import deque
from heapq import *

class Twitter:

    def __init__(self):
        self.following = {}  # Maps user ID to the set of users they follow
        self.next_tweet_id = 0  # Monotonic stamp used to order tweets by recency
        self.k = 10  # Included for extensibility if k is not constant
        self.tweets = {}  # Maps user ID to a deque of (stamp, tweet_id) pairs, newest first

    def postTweet(self, user_id: int, tweet_id: int) -> None:
        # Check if user exists yet. If not, subscribe them to self, then post tweet.
        if user_id not in self.following:
            self.following[user_id] = set([user_id])  # Subscribe user to self
        if user_id not in self.tweets:
            self.tweets[user_id] = deque()
        # Store the recency stamp together with the caller's tweet_id, so the feed
        # can be ordered by the stamp but still return the original tweet ids.
        self.tweets[user_id].appendleft((self.next_tweet_id, tweet_id))
        self.next_tweet_id += 1

    def getNewsFeed(self, user_id: int) -> List[int]:
        if user_id not in self.following:
            return []

        heap = []
        # Create a heap with format (-stamp, followee_id, tweet_index).
        # Negating the stamp effectively makes the heap a max heap.
        for followee in self.following[user_id]:
            if followee in self.tweets and self.tweets[followee]:  # Followee has tweets
                latest_stamp = self.tweets[followee][0][0]
                heappush(heap, (-latest_stamp, followee, 0))

        # Pop from the heap to find the most recent tweet, its poster, and that
        # tweet's index in the poster's list. Record the original tweet_id, then
        # push the poster's next tweet back onto the heap.
        result = []
        while len(result) < self.k and heap:
            neg_stamp, poster_id, tweet_idx = heappop(heap)
            result.append(self.tweets[poster_id][tweet_idx][1])
            # If there is at least one more tweet left in list, continue adding back onto heap.
            if tweet_idx < len(self.tweets[poster_id]) - 1:
                tweet_idx += 1
                heappush(heap, (-self.tweets[poster_id][tweet_idx][0], poster_id, tweet_idx))
        return result  # Already ordered most recent first, since max stamps pop first

    def follow(self, follower_id: int, followee_id: int) -> None:
        # If follower or followee doesn't exist yet, make them subscribe to self.
if follower_id not in self.following: self.following[follower_id] = set([follower_id]) if followee_id not in self.following: self.following[followee_id] = set([followee_id]) self.following[follower_id].add(followee_id) def unfollow(self, follower_id: int, followee_id: int) -> None: if follower_id in self.following and follower_id != followee_id: self.following[follower_id].discard(followee_id) # Your Twitter object will be instantiated and called as such: # obj = Twitter() # obj.postTweet(userId,tweetId) # param_2 = obj.getNewsFeed(userId) # obj.follow(followerId,followeeId) # obj.unfollow(followerId,followeeId)<file_sep>/LeetCode/0739.py class Solution: def dailyTemperatures(self, T: List[int]) -> List[int]: if not T: return [] result = [] for i in range(len(T)): if i >= len(T) - 1: result.append(0) break count = 0 for j in range(i+1, len(T)): if T[j] > T[i]: count = j - i break result.append(count) return result <file_sep>/Coderust/word_break_problem.py def can_segment_string(s, dictionary): calculated = {} #stores whether it is possible to segment starting at a given index def can_use(s, index, word): """ Returns true if given word matches string s, starting at given index. """ if len(word) > len(s) - index: return False for i in range(len(word)): if s[index + i] != word[i]: return False return True def can_segment(s, index, d, memo): """ Returns True if s[index:] can be segmented using dictionary 'd' and memoization 'memo' """ if index in memo: return memo[index] if len(s) - index == 0: return True for word in d: if can_use(s, index, word): #s starts with word #print("can use string:",word) #DEBUG if can_segment(s, index + len(word), dictionary, memo): #can segment string after subtracting word memo[index] = True #print("can segment", s[index+len(word):]) #DEBUG return True else: memo[index + len(word)] = False #Reaching this point means no valid segmentation was found memo[index] = False return False return can_segment(s, 0, dictionary, calculated) <file_sep>/LeetCode/0020.py def isValid(s): open_brackets = "([{" close_brackets = ")]}" unclosed_parens = [] def is_bracket_pair(a,b): if a == '(': return b == ')' if a == '[': return b == ']' if a == '{': return b == '}' return False for char in s: if char in close_brackets: if not unclosed_parens: return False if is_bracket_pair(unclosed_parens[-1], char): unclosed_parens.pop() else: return False elif char in open_brackets: unclosed_parens.append(char) else: return False if unclosed_parens: return False return True <file_sep>/LeetCode/0441.java import java.lang.Math; class Solution { public int arrangeCoins(int n) { return (int)((Math.sqrt(1 + 8 * (double)n) - 1) / 2.0); } }<file_sep>/LeetCode/0048.py class Solution: def rotate(self, matrix: List[List[int]]) -> None: temp = [] n = len(matrix) for ring in range(n // 2): for step in range(ring, n-1 - ring): #cell0 = matrix[ring][step] #cell1 = matrix[step][n-1 - ring] #cell2 = matrix[n-1 - ring][n-1 - step] #cell3 = matrix[n-1 - step][ring] temp = matrix[n-1 - step][ring] matrix[n-1 - step][ring] = matrix[n-1 - ring][n-1 - step] matrix[n-1 - ring][n-1 - step] = matrix[step][n-1 - ring] matrix[step][n-1 - ring] = matrix[ring][step] matrix[ring][step] = temp return <file_sep>/AlgoExpert/min_number_of_coins_for_change.py def minNumberOfCoinsForChange(n, denoms): min_coins = [float("inf") for _ in range(n + 1)] min_coins[0] = 0 for denom in denoms: for target in range(n + 1): if denom <= target: min_coins[target] = min(min_coins[target], 1 + min_coins[target - denom]) if min_coins[n] == 
float("inf"): return -1 return min_coins[n] <file_sep>/LeetCode/0202.cpp class Solution { public: bool isHappy(int n) { set<int> visited; int new_value; while (n != 1) { new_value = 0; while (n > 0) { new_value += pow((n % 10), 2); n = n / 10; } if (!visited.insert(new_value).second) { return false; } n = new_value; } return true; } };<file_sep>/LeetCode/0124.py # Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution: def maxPathSum(self, root: TreeNode) -> int: #returns the greatest sum of values possible for a path through a binary tree self.max_sum = float("-inf") def max_contribution(node): """returns the largest contribution a node can contribute to a path""" if not node: return 0 left_sum = max(0,max_contribution(node.left)) #left sum, or 0 if sum is negative right_sum = max(0,max_contribution(node.right)) #same with right subtree new_path_cost = node.val + max(0, left_sum) + max(0, right_sum) self.max_sum = max(new_path_cost, self.max_sum) #update if restarting at this node gives better sum return node.val + max(left_sum, right_sum) max_contribution(root) return self.max_sum<file_sep>/AlgoExpert/powerset.py """ Write a function that takes in an array of unique integers and returns its powerset. The powerset of a set S is the set of all subsets of S. """ def powerset(array): result = [[]] for i in range(len(array)): current_len = len(result) for j in range(current_len): #current_len is used because the length of result changes as we #append new elements to it, and we don't want an infinite loop result.append(result[j] + [array[i]]) return result #TESTING CODE: """ for i in range(4): print(f"{[_+1 for _ in range(i)]} —> {powerset([_+1 for _ in range(i)])}") """ <file_sep>/LeetCode/0219.py class Solution: def containsNearbyDuplicate(self, nums: List[int], k: int) -> bool: active = set() #numbers within sliding window left = 0 #index of left side of sliding window for right in range(len(nums)): #advance start of window to maintain max window size if right - left > k: active.discard(nums[left]) left += 1 #check for matches within active window if nums[right] in active: return True active.add(nums[right]) return False<file_sep>/Coderust/check_if_two_binary_trees_are_identical.py def are_identical(root1, root2): if root1 is None and root2 is None: return True if root1 is None or root2 is None: return False if root1.data != root2.data: return False return are_identical(root1.left, root2.left) and are_identical(root1.right, root2.right) <file_sep>/LeetCode/0093.py class Solution: def restoreIpAddresses(self, s: str) -> List[str]: def get_ips(dots, digits, calc_d): if (dots, digits) in calc_d: return calc_d[(dots, digits)] if dots >= len(digits): #too short to fit remaining dots return [] if dots == 0: if (len(digits) > 3 or int(digits) > 255 or (len(digits) > 1 and digits[0] == "0")): calc_d[(dots, digits)] = [] return [] calc_d[(dots, digits)] = [digits] return [digits] result = [] if len(digits) > 1: for ip in get_ips(dots - 1, digits[1:], calc_d): result.append(digits[:1] + "." + ip) if len(digits) > 2 and digits[0] != "0": for ip in get_ips(dots - 1, digits[2:], calc_d): result.append(digits[:2] + "." + ip) if len(digits) > 3 and digits[0] != "0" and int(digits[:3]) <= 255: for ip in get_ips(dots - 1, digits[3:], calc_d): result.append(digits[:3] + "." 
+ ip)
            calc_d[(dots, digits)] = result
            return result

        dp_ips = {}
        return get_ips(3, s, dp_ips)
<file_sep>/LeetCode/0096.py
class Solution:
    def numTrees(self, n: int) -> int:
        if n == 0:
            return 1

        # Memoize on the size of the value range: every range of the same
        # length produces the same number of structurally unique BSTs.
        memo = {}

        def unique_subtrees(min_num, max_num):
            size = max_num - min_num
            if size <= 1:
                return 1  # an empty range or a single value forms exactly one tree
            if size in memo:
                return memo[size]
            result = 0
            for num in range(min_num, max_num):
                left_subtrees = unique_subtrees(min_num, num)
                right_subtrees = unique_subtrees(num + 1, max_num)
                # Each left shape pairs with each right shape, so multiply.
                result += left_subtrees * right_subtrees
            memo[size] = result
            return result

        return unique_subtrees(1, n + 1)
        #also consider mathematical approach (these are the Catalan numbers)
<file_sep>/LeetCode/0022.py
class Solution:
    def generateParenthesis(self, n: int) -> List[str]:
        result = []

        def helper(result, current_string, open_parens, close_parens):
            if close_parens == 0:
                result.append(current_string)
            if open_parens > 0:
                helper(result, current_string + "(", open_parens - 1, close_parens)
            if close_parens > open_parens:
                helper(result, current_string + ")", open_parens, close_parens - 1)

        helper(result, "", n, n)
        return result
<file_sep>/LeetCode/0714.py
class Solution:
    def maxProfit(self, prices: List[int], fee: int) -> int:
        with_stock = 0
        without_stock = 0
        for day, price in enumerate(prices):
            if day == 0:
                with_stock = - price - fee
                without_stock = 0
            else:
                with_stock = max(with_stock, without_stock - price - fee)
                without_stock = max(without_stock, with_stock + price)
        # The fee is charged once per complete transaction; paying it at buy
        # time (instead of at both buy and sell) is equivalent.
        return without_stock
<file_sep>/Pramp/array_index_and_element_equality.py
def index_equals_value_search(arr):
    """ returns first index in arr that matches the value at that index, or -1 if impossible """
    if arr[0] > 0:  #because it's sorted and values are distinct ints, the index can never catch up
        return -1
    l, r = 0, len(arr) - 1
    while l + 1 < r:
        mid = l + (r - l) // 2
        if mid > arr[mid]:
            l = mid
        else:
            r = mid
    if l == arr[l]:
        return l
    elif r == arr[r]:
        return r
    else:
        return -1
<file_sep>/AlgoExpert/move_element_to_end.py
def moveElementToEnd(array, toMove):
    checkIndex, moveIndex = 0, len(array) - 1
    while checkIndex < moveIndex:
        if array[moveIndex] == toMove:
            moveIndex -= 1
        elif array[checkIndex] == toMove:
            array[checkIndex] = array[moveIndex]
            array[moveIndex] = toMove
            moveIndex -= 1
            checkIndex += 1
        else:
            checkIndex += 1
    return array
<file_sep>/LeetCode/0686.cpp
class Solution {
public:
    int repeatedStringMatch(string A, string B) {
        string build_str = "";
        int num_concats = 0;
        while (build_str.length() < B.length()) {
            build_str.append(A);
            num_concats++;
        }
        if (build_str.find(B) != std::string::npos) {
            return num_concats;
        }
        build_str.append(A);
        num_concats++;
        if (build_str.find(B) != std::string::npos) {
            return num_concats;
        } else {
            return -1;
        }
    }
};<file_sep>/AlgoExpert/smallest_difference.py
def smallestDifference(arrayOne, arrayTwo):
    arrayOne.sort()
    arrayTwo.sort()
    leastDiff, leastPair = float("inf"), [0, 0]
    idx1, idx2 = 0, 0
    while idx1 < len(arrayOne) and idx2 < len(arrayTwo):
        currentDiff = abs(arrayOne[idx1] - arrayTwo[idx2])
        if currentDiff == 0:
            return [arrayOne[idx1], arrayTwo[idx2]]
        else:
            if currentDiff < leastDiff:
                leastDiff = currentDiff
                leastPair = [idx1, idx2]
            if arrayOne[idx1] < arrayTwo[idx2]:
                idx1 += 1
            else:
                idx2 += 1
    return [arrayOne[leastPair[0]], arrayTwo[leastPair[1]]]
<file_sep>/AlgoExpert/max_subset_sum_no_adjacent.py
def maxSubsetSumNoAdjacent(array):
    if len(array) == 0:
        return 0
    elif len(array) < 3:
        return max(array)
    back2, back1 = array[0], max(array[1], array[0])
    for idx in range(2, len(array)):
        current_max = max(back1, array[idx] + 
back2) back2 = back1 back1 = current_max return current_max<file_sep>/AlgoExpert/youngest_common_ancestor.py class AncestralTree: def __init__(self, name): self.name = name self.ancestor = None def getYoungestCommonAncestor(topAncestor, descendantOne, descendantTwo): #find distance from descendantOne to root node = descendantOne distance1 = 0 while node.ancestor: node = node.ancestor distance1 += 1 #find distance from descendantTwo to root node = descendantTwo distance2 = 0 while node.ancestor: node = node.ancestor distance2 += 1 depth_difference = abs(distance1 - distance2) farther_node = descendantOne closer_node = descendantTwo if distance2 > distance1: farther_node, closer_node = closer_node, farther_node #move up longer branch until both branches are the same distance from the root for _ in range(depth_difference): farther_node = farther_node.ancestor #move up both branches until both sides meet while farther_node is not closer_node: farther_node = farther_node.ancestor closer_node = closer_node.ancestor return closer_node <file_sep>/LeetCode/0118.py class Solution: def generate(self, numRows: int) -> List[List[int]]: if numRows == 0: return [] result = [[1]] if numRows >= 2: result.append([1,1]) if numRows >= 3: while len(result) < numRows: prev_row_len = len(result[-1]) result.append([1]) for i in range(prev_row_len - 1): result[-1].append(result[-2][i] + result[-2][i + 1]) result[-1].append(1) return result<file_sep>/LeetCode/0179.py class Solution: def largestNumber(self, nums: List[int]) -> str: #compare both concatenation orders to determine num with higher rank def higher_rank(num1, num2): """ Returns True if num1 has higher rank than num2 """ return int(num1 + num2) > int(num2 + num1) def binary_search_insert(num, array): """ Returns the index where num should be inserted in array """ if len(array) == 0: return 0 l, r = 0, len(array) while l + 1 < r: mid = l + (r - l) // 2 if higher_rank(num, array[mid]): r = mid else: l = mid if higher_rank(num, array[l]): return l else: return r #Convert nums into strings, then use binary search to build sorted list sorted_nums = [] for num in nums: str_num = str(num) sorted_nums.insert(binary_search_insert(str_num, sorted_nums), str_num) #Check for edge case with only 0 in nums if len(sorted_nums) > 0 and sorted_nums[0] == "0": return "0" #combine the sorted list to get final, largest number return ''.join(sorted_nums)<file_sep>/Other/mock_technical_2020.05.28.py ''' Given a positive int n, print all jumping numbers smaller than or equal to n. A number is called a jumping number if all adjacent digits in it differ by 1. For example, 8987 and 4343456 are jumping numbers, but 796 and 89098 are not. All single digit numbers are considered as jumping numbers. Example: Input: 105 Output: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 21, 23, 32, 34, 43, 45, 54, 56, 65, 67, 76, 78, 87, 89, 98, 101] ''' def jumping_numbers(n): """ return the list of jumping numbers not larger than n. """ def jumping(x, n, result): """ given a jumping number, create new jumping numbers by appending digits. """ if x <= n: result.append(x) d = x%10 if x <= n//10: #Make sure the next jumping numbers won't be too large. if d > 0: jumping(x*10+d-1, n, result) if d < 9: jumping(x*10+d+1, n, result) if n < 10: return [_ for _ in range(n+1)] result = [0] #Keep 0 outside loop so we don't try appending digits to a leading 0. 
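# Worked example (n = 105): the seed 1 expands to 10 and 12, and 10 expands to
# 101; expansion stops once x > n // 10, since appending any digit would exceed n.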
for x in range(1, 10): jumping(x, n, result) return sorted(result) <file_sep>/LeetCode/0748.py def shortestCompletingWord(licensePlate, words): def letter_count(string): letters = {} for char in string: if char.isalpha(): if char.lower() not in letters: letters[char.lower()] = 0 letters[char.lower()] += 1 return letters plate_letters = letter_count(licensePlate) shortest_len = float("inf") shortest_word = "" for word in words: cont = False word_letters = letter_count(word) for letter in plate_letters: if (letter not in word_letters or word_letters[letter] < plate_letters[letter]): cont = True break if cont: continue if len(word) < shortest_len: shortest_len = len(word) shortest_word = word return shortest_word #TESTING CODE tests = [("1s3 PSt", ["step","steps","stripe","stepple"], "steps"), ("1s3 456", ["looks","pest","stew","show"], "pest"), ("Ah71752", ["suggest","letter","of","husband","easy","education","drug","prevent","writer","old"], "husband"), ("OgEu755", ["enough","these","play","wide","wonder","box","arrive","money","tax","thus"], "enough"), ("iMSlpe4", ["claim","consumer","student","camera","public","never","wonder","simple","thought","use"], "simple")] for test in tests: print(shortestCompletingWord(test[0], test[1]) == test[2]) <file_sep>/AlgoExpert/river_sizes.py from collections import deque def riverSizes(matrix): def flood(matrix, river_sizes, river_id, x_pos, y_pos): queue = deque() queue.append((x_pos,y_pos)) if not river_id in river_sizes: river_sizes[river_id] = 0 while queue: x, y = queue.popleft() if matrix[y][x] != 1: continue matrix[y][x] = river_id river_sizes[river_id] += 1 #check north neighbor if y > 0 and matrix[y - 1][x] == 1: queue.append((x, y - 1)) #check east neighbor if x < len(matrix[0]) - 1 and matrix[y][x + 1] == 1: queue.append((x + 1, y)) #check south neighbor if y < len(matrix) - 1 and matrix[y + 1][x] == 1: queue.append((x, y + 1)) #check west neighbor if x > 0 and matrix[y][x - 1] == 1: queue.append((x - 1, y)) river_id = 2 #unique id for each river. Starts at 2 because matrix already uses 0 and 1. 
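# Note: flood() relabels every cell of one connected component with its river_id,
# so each cell is enqueued at most a constant number of times and the full scan
# stays O(width * height).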
sizes = {} #dictionary mapping river_id to the size of that river for y in range(len(matrix)): for x in range(len(matrix[0])): if matrix[y][x] == 1: flood(matrix, sizes, river_id, x, y) #flood river, marking all connected squares river_id += 1 result = [] for river in sizes: result.append(sizes[river]) return result<file_sep>/Coderust/remove_duplicates_from_string.py def remove_duplicates(string): seen = set() read_index, write_index = 0, 0 while read_index < len(string): if string[read_index] not in seen: seen.add(string[read_index]) string[write_index] = string[read_index] write_index += 1 read_index += 1 while len(string) > write_index: string.pop() return string <file_sep>/LeetCode/0583.py class Solution: def minDistance(self, word1: str, word2: str) -> int: #helper function to find largest common substring length def lcsl(w1, w2, len1, len2, calc_d): if len1 == 0 or len2 == 0: return 0 #memoization if (w1, w2, len1, len2) in calc_d: return calc_d[(w1, w2, len1, len2)] if w1[len1 - 1] == w2[len2 - 1]: result = 1 + lcsl(w1, w2, len1 - 1, len2 - 1, calc_d) calc_d[(w1, w2, len1, len2)] = result return result else: result = max(lcsl(w1, w2, len1 - 1, len2, calc_d), lcsl(w1, w2, len1, len2 - 1, calc_d)) calc_d[(w1, w2, len1, len2)] = result return result if len(word1) == 0: return len(word2) if len(word2) == 0: return len(word1) calculated = {} return len(word1) + len(word2) - 2 * lcsl(word1, word2, len(word1), len(word2), calculated)<file_sep>/LeetCode/0532.py class Solution: def findPairs(self, nums: List[int], k: int) -> int: if k < 0: return 0 if k == 0: doubles = set() counted = set() for num in nums: if num in counted: doubles.add(num) counted.add(num) return len(doubles) counted = set() pairs = set() for num in nums: if num not in counted: counted.add(num) if num - k in counted: pairs.add((num - k, num)) if num + k in counted: pairs.add((num, num + k)) return len(pairs)<file_sep>/LeetCode/0543.py # Definition for a binary tree node. 
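# Note on the approach below: max_length() returns the depth of a single branch
# for the parent's use, while left + right at each node updates max_diameter.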
# class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution: def diameterOfBinaryTree(self, root: TreeNode) -> int: max_diameter = 0 def max_length(Node): nonlocal max_diameter if not Node: return 0 #null node cannot contribute left = max_length(Node.left) #length of left branch right = max_length(Node.right) #length of right branch reset_diameter = left + right #diameter if we use Node as new root max_diameter = max(max_diameter, reset_diameter) return 1 + max(left, right) #max length between using left and right branches max_length(root) return max_diameter<file_sep>/LeetCode/0015.py class Solution: def threeSum(self, nums: List[int]) -> List[List[int]]: if len(nums) < 3: return [] sorted_nums = sorted(nums) #sort now, so all solutions will automatically have elements sorted solutions = set() #Set of all solutions found so far result = [] #List of solutions, to be returned at end of function target_nums = {} #dictionary of all values and their possible indices for index, num in enumerate(sorted_nums): if num not in target_nums: target_nums[num] = [] #initialize list of indices with value "num" target_nums[num].append(index) #add this index to list of indices with value "num" nums_len = len(sorted_nums) for i in range(nums_len-2): for j in range(i+1, nums_len-1): target = -1 * (sorted_nums[i] + sorted_nums[j]) #we want to check if this value exists in target_nums if target in target_nums: largest_index = target_nums[target][-1] if largest_index > j: #check if index is greater than j, to make sure we don't double-count solution = (sorted_nums[i], sorted_nums[j], target) if solution not in solutions: solutions.add(solution) result.append([solution[0], solution[1], solution[2]]) return result <file_sep>/LeetCode/0003.cpp class Solution { public: int lengthOfLongestSubstring(string s) { std::set<char> current_chars; int longest_len = 0; int current_len = 0; auto start_it = s.begin(); auto end_it = s.begin(); while(end_it != s.end()) { if(start_it == end_it) { current_chars.insert(*end_it); end_it++; current_len++; } else if(current_chars.find(*end_it) == current_chars.end()) { current_chars.insert(*end_it); end_it++; current_len++; } else { current_chars.erase(*start_it); start_it++; current_len--; } if(current_len > longest_len) { longest_len = current_len; } } return(longest_len); } };<file_sep>/LeetCode/0001.py class Solution: def twoSum(self, nums: List[int], target: int) -> List[int]: index_of = {} for index, num in enumerate(nums): if num not in index_of: index_of[num] = [] index_of[num].append(index) for index, num in enumerate(nums): second_num = target - num if second_num in index_of: for second_index in index_of[second_num]: if second_index != index: return [index, second_index] return [-1, -1]<file_sep>/LeetCode/0434.py def countSegments(s): segments = 0 last_char_was_space = True for char in s: if char == " ": last_char_was_space = True else: if last_char_was_space: last_char_was_space = False segments += 1 return segments #TESTING CODE BELOW THIS POINT tests = [ "Hallo", "multiple words (and punctuation!) in this sentence.", "", " ", " ", ] answers = [1, 7, 0, 0, 0] for i in range(len(tests)): print(countSegments(tests[i]) == answers[i]) <file_sep>/LeetCode/0066.py ''' Given a non-empty array of decimal digits representing a non-negative integer, increment one to the integer. The digits are stored such that the most significant digit is at the head of the list, and each element in the array contains a single digit. 
You may assume the integer does not contain any leading zero, except the number 0 itself. Constraints: 1 <= digits.length <= 100 0 <= digits[i] <= 9 ''' def plusOne(digits): increment = True for i in range(len(digits) - 1, -1, -1): if digits[i] < 9: digits[i] += 1 return digits digits[i] = 0 digits.insert(0, 1) return digits #TEST CODE tests = ([1,2,3], [5,9], [9], [0]) answers = ([1,2,4], [6,0], [1,0], [1]) print("test inputs: ", tests) for i, test in enumerate(tests): if plusOne(test) == answers[i]: print("test PASSED") else: print("test FAIL") print("test outputs: ", tests) <file_sep>/LeetCode/0038.py class Solution: def countAndSay(self, n: int) -> str: def next_term(term): """ given a term, return the next term in the sequence """ result = [] prev_digit = term[0] count = 0 for digit in term: if digit == prev_digit: count += 1 else: result.append(str(count)) result.append(prev_digit) prev_digit = digit count = 1 result.append(str(count)) result.append(prev_digit) return result term_num = 1 result = ["1"] for _ in range(n-1): result = next_term(result) return ''.join(result) <file_sep>/Coderust/find_all_palindrome_substrings.py def find_all_palindrome_substrings(string): def find_palindromes_from_index(s, idx): num_palindromes = 0 #count odd-numbered palindromes, then even-numbered ones. for l_idx, r_idx in [[idx - 1, idx + 1], [idx, idx + 1]]: while l_idx >= 0 and r_idx < len(string): if s[l_idx] == s[r_idx]: num_palindromes += 1 l_idx -= 1 r_idx += 1 else: break return num_palindromes palindromes = 0 for i in range(len(string)): palindromes += find_palindromes_from_index(string, i) return palindromes<file_sep>/LeetCode/0628.py class Solution: def maximumProduct(self, nums: List[int]) -> int: most_positive = [float("-inf"), float("-inf"), float("-inf")] most_negative = [0,0] for num in nums: if num > most_positive[0]: if num <= most_positive[1]: most_positive[0] = num elif num <= most_positive[2]: most_positive[0] = most_positive[1] most_positive[1] = num else: most_positive[0] = most_positive[1] most_positive[1] = most_positive[2] most_positive[2] = num if num < most_negative[1]: if num < most_negative[0]: most_negative[1] = most_negative[0] most_negative[0] = num else: most_negative[1] = num return most_positive[2] * max(most_positive[0] * most_positive[1], most_negative[0] * most_negative[1])<file_sep>/LeetCode/0242.py class Solution: def isAnagram(self, s: str, t: str) -> bool: if len(s) != len(t): #edge case with mismatched length return False s_letters = {} #dictionary of letters in s, and their frequency #record how many of what letter are in s for letter in s: if letter not in s_letters: s_letters[letter] = 0 s_letters[letter] += 1 #loop through t, checking against s_letters to determine if anagram for letter in t: if letter not in s_letters: #too many of this letter, or wrong type return False else: s_letters[letter] -= 1 #decrease number of remaining letters of this type if s_letters[letter] == 0: del s_letters[letter] #ran out of this letter, so delete its entry if not s_letters: #if there are no entries, we've used every letter in s return True else: return False <file_sep>/LeetCode/0621.py import heapq #from collections import deque class Solution: def leastInterval(self, tasks: List[str], n: int) -> int: result = 0 temp = {} #temporary dictionary for counting how many of each task are needed for task in tasks: if task not in temp: temp[task] = 1 else: temp[task] += 1 active_heap = [] #This heap keeps track of tasks not on cooldown #Create a max heap by pushing tuples of 
(inverted) count and task name for task in temp: heapq.heappush(active_heap, (-temp[task], task)) cooldowns = {} #Dictionary of all tasks on cooldown and their cooldown values and counters. #Iterate until there are no tasks left to do. Pick active tasks with largest count. while active_heap or cooldowns: #For any tasks on cooldown, update their timers and/or transfer them to active heap. for task in cooldowns: cooldown, counter = cooldowns[task] cooldown -= 1 if cooldown == 0: del cooldowns[task] heapq.heappush(active_heap, (-counter, task)) #Check for any tasks off cooldown, otherwise idle. if active_heap: neg_count, task = heapq.heappop(active_heap) count = -neg_count count -= 1 #If count is still positive after decrement, move task to cooldown if count > 0: cooldowns[task] = (n, count) result += 1 #Update number of tasks/idles return result <file_sep>/LeetCode/0101.py # Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None from collections import deque class Solution: def isSymmetric(self, root: TreeNode) -> bool: if not root: #check for null root return True left_queue = deque() left_queue.append(root.left) right_queue = deque() right_queue.append(root.right) while right_queue or left_queue: left_node = left_queue.popleft() right_node = right_queue.popleft() if not left_node or not right_node: #check for null nodes if left_node != right_node: return False else: continue #both nodes must be null if left_node.val != right_node.val: return False left_queue.append(left_node.left) left_queue.append(left_node.right) right_queue.append(right_node.right) right_queue.append(right_node.left) return True <file_sep>/LeetCode/0692.py class Solution: def topKFrequent(self, words: List[str], k: int) -> List[str]: word_dict = {} for word in words: if word not in word_dict: word_dict[word] = 1 else: word_dict[word] += 1 #word_list = sorted(word_dict, key=lambda x: word_dict[x], reverse=True) #return word_list[:k] freq_dict = {} for key in word_dict: value = word_dict[key] if value not in freq_dict: freq_dict[value] = [] freq_dict[value].append(key) freq_list = sorted(freq_dict, reverse=True) #create a sorted list of frequencies ordered_list = [] for freq in freq_list: ordered_list += sorted(freq_dict[freq]) return ordered_list[:k] <file_sep>/LeetCode/0257.py #Definition for a binary tree node. class TreeNode: def __init__(self, val=0, left=None, right=None): self.val = val self.left = left self.right = right def binaryTreePaths(root): if not root: return [] def findPaths(node): if not node.right and not node.left: #leaf node return [str(node.val)] result = [] if node.right: for path in findPaths(node.right): result.append(str(node.val) + "->" + path) if node.left: for path in findPaths(node.left): result.append(str(node.val) + "->" + path) return result return findPaths(root) <file_sep>/LeetCode/0001.java class Solution { public int[] twoSum(int[] nums, int target) { HashMap<Integer, ArrayList<Integer>> values = new HashMap<>();// maps values to lists of indices with those values for(int i = 0; i < nums.length; i++) { if(!values.containsKey(nums[i])) { values.put(nums[i], new ArrayList<Integer>()); } values.get(nums[i]).add(i); int currentTarget = target - nums[i]; if(values.containsKey(currentTarget)) { for(int indx: values.get(currentTarget)) { if(indx != i) { return new int[]{indx, i}; } } } } return new int[]{-1, -1}; } }<file_sep>/LeetCode/0450.py # Definition for a binary tree node. 
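# Note: for a node with two children, the code below swaps in the largest value
# of the left subtree (the in-order predecessor) and then deletes that node.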
# class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution: def deleteNode(self, root: TreeNode, key: int) -> TreeNode: def largest_node(node): largest = node while(largest.right != None): largest = largest.right return largest def deleteNodeHelper(root, key): if root == None: #base case return root if key < root.val: #check left branch root.left = deleteNodeHelper(root.left, key) elif key > root.val: #check right branch root.right = deleteNodeHelper(root.right, key) else: if root.left == None: #no left branch return root.right elif root.right == None: #no right branch return root.left else: #both branches present new_root = largest_node(root.left) root.val = new_root.val root.left = deleteNodeHelper(root.left, new_root.val) return root return deleteNodeHelper(root, key) <file_sep>/Coderust/in-order_iterator_of_a_binary_tree.py class InorderIterator: def __init__(self, root): self.stack = [] if root is not None: self.stack.append(root) node = root while node.left is not None: self.stack.append(node.left) node = node.left def hasNext(self): if not self.stack: return False return True # getNext returns null if there are no more elements in tree def getNext(self): if not self.hasNext(): return None current_node = self.stack.pop() # If right subtree exists, add its left side to the stack. if current_node.right is not None: node = current_node.right self.stack.append(node) while node.left is not None: self.stack.append(node.left) node = node.left return current_node def inorder_using_iterator(root): iter = InorderIterator(root) result = "" while iter.hasNext(): ptr = iter.getNext() result += str(ptr.data) + " " return result<file_sep>/LeetCode/0472.py from functools import lru_cache class Solution: def findAllConcatenatedWordsInADict(self, words: List[str]) -> List[str]: concats = set() #set of concatenated words words_set = set(words) #set of all input words, for quick lookup @lru_cache(maxsize=15000) def is_concat(word): for i in range(len(word)): word_start = word[:i] word_end = word[i:] if word_start != "" and word_start in words_set: #check if beginning of word is in words_set if word_end in words_set or is_concat(word_end): #check if end is in words_set, or is a concat return True return False for word in words: if is_concat(word): concats.add(word) return list(concats)<file_sep>/AlgoExpert/levenshtein_distance.py def levenshteinDistance(str1, str2): if len(str1) == 0: return len(str2) if len(str2) == 0: return len(str1) #dp table for cost of converting substrings of str1 into substrings of str2 #note that the range we use goes to len + 1, because we need a row and #column for empty strings dp = [[0 for _ in range(len(str1) + 1)] for _ in range(len(str2) + 1)] #initialize first row and first column for i in range(len(str1) + 1): dp[0][i] = i for j in range(len(str2) + 1): dp[j][0] = j for j in range(1, len(str2) + 1): for i in range(1, len(str1) + 1): #note: I use i - 1 and j - 1 because the table is 1 wider and taller #than the string lengths, to account for the empty substrings. 
if str1[i - 1] == str2[j - 1]: dp[j][i] = dp[j - 1][i - 1] else: add_cost = dp[j][i - 1] + 1 del_cost = dp[j - 1][i] + 1 sub_cost = dp[j - 1][i - 1] + 1 dp[j][i] = min(add_cost, del_cost, sub_cost) return dp[-1][-1] ''' "" a b c -str1(i) "" 0 1 2 3 y 1 1 2 3 a 2 1 2 3 b 3 2 1 2 d 4 3 2 2 str2(j) "" a b b -str1(i) "" 0 1 2 3 b 1 1 1 2 b 2 1 1 1 a 3 2 2 2 str2(j) ''' test0 = ("abc", "yabd") test1 = ("abb", "bba") tests = [test0, test1] for str1, str2 in tests: print((str1, str2)) print(levenshteinDistance(str1, str2)) <file_sep>/README.md # Practice My code for various coding practice such as LeetCode or AlgoExpert questions. <file_sep>/LeetCode/0374.py # NOTE: This won't actually run as is. This is just what I submitted on LeetCode. # The guess API is already defined for you. # @param num, your guess # @return -1 if my number is lower, 1 if my number is higher, otherwise return 0 # def guess(num: int) -> int: class Solution: def guessNumber(self, n: int) -> int: l, r = 1, n while l + 1 < r: mid = l + (r - l) // 2 if guess(mid) < 0: r = mid elif guess(mid) > 0: l = mid else: return mid if guess(l) == 0: return l if guess(r) == 0: return r return -1 <file_sep>/LeetCode/0104.py # Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None from collections import deque class Solution: def maxDepth(self, root: TreeNode) -> int: max_depth = 0 if not root: return max_depth queue = deque() queue.append((root,1)) while(queue): node, depth = queue.popleft() if depth > max_depth: max_depth = depth if node.left: queue.append((node.left, depth+1)) if node.right: queue.append((node.right, depth+1)) return max_depth
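# Quick usage sketch (my addition, not part of the original submission): on
# LeetCode, TreeNode is predefined before Solution is created; to try this
# locally you would need to define TreeNode *above* the Solution class, since
# the type hint in maxDepth is evaluated when the class body runs. With that
# in place, a three-level tree gives a depth of 3:
#
#   root = TreeNode(3)
#   root.left = TreeNode(9)
#   root.right = TreeNode(20)
#   root.right.left = TreeNode(15)
#   root.right.right = TreeNode(7)
#   Solution().maxDepth(root)  # -> 3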
5677ba0a684bf0c1878b1b7f530929be19915688
[ "Markdown", "Java", "Python", "C++" ]
119
Python
ecgaebler/Practice
147ed3cbb934561c1cef156058bf92e60a645494
fb325a56fb09e357658a77fb8ef1b2683d689ecb
refs/heads/master
<repo_name>tabaskoFive/Rows<file_sep>/README.md # Rows o/ npm start <file_sep>/index.js "use strict" import './main.css'; const Row = function (num) { this._content = document.createElement("div"); this._content.classList.add("row"); for (let i = 0; i < num; i++) { let block = document.createElement("div"); block.classList.add("block"); this._content.appendChild(block); } document.getElementById("root").appendChild(this._content); } Row.prototype.addText = function (str) { this._content.innerHTML = str; }; const parse = () => { fetch('./testdata.json') .then(response => response.json()) .then(json => { json.rows.forEach(element => { new Row(element); }); }) } parse();
a40fc8e526b3c6f176306f9d0cbc761e3f5b63eb
[ "Markdown", "JavaScript" ]
2
Markdown
tabaskoFive/Rows
adc534c2e59193f8ea1c174cc71635c0d7c8ae57
7e7c3cf765848f2b0e59b72a0e804c1a29f63216
refs/heads/master
<repo_name>Kartikksaxena/FrontEnd-ToDo-List<file_sep>/toDo.js const addBtn = document.querySelector("#addTodoBtn"); var BoxNo= 0; const makeDiv=()=>{ const texts = document.querySelector("#newTodoInput"); if(texts.value != ""){ const lists = document.querySelector("#todoList"); const newelem = document.createElement("LI"); newelem.id = ++BoxNo; newelem.innerHTML = texts.value; lists.append(newelem); texts.value=""; } // console.log(newelem,"Kartik"); }; addBtn.addEventListener("click",makeDiv);
9df351cc3e5f9aff478d374fbeada41637fd25c9
[ "JavaScript" ]
1
JavaScript
Kartikksaxena/FrontEnd-ToDo-List
ade392a58481c687c3a29d6fc16d0c5159199adb
e0fe4ed90ef92cc9c61c88f284463a9fa7851b13
refs/heads/master
<file_sep>flag = 1; flag2 = 1; function insertarOp(tecla){ switch(tecla){ case ' + ': if (flag == 1){ document.getElementById('screen').value = document.getElementById('screen').value + tecla; } flag = 0; flag2 = 0; break; case ' - ': if (flag == 1){ document.getElementById('screen').value = document.getElementById('screen').value + tecla; } flag = 0; flag2 = 0; break; case ' / ': if (flag2 == 1){ document.getElementById('screen').value = document.getElementById('screen').value + tecla; } flag2 = 0; break; case ' * ': if (flag2 == 1){ document.getElementById('screen').value = document.getElementById('screen').value + tecla; } flag2 = 0; break; } } function insertarNum(num){ if (document.getElementById('screen').value != "SyntaxError" && flag != 3){ document.getElementById('screen').value = document.getElementById('screen').value + num; flag = 1; flag2 = 1; } } function result(){ if (document.getElementById('screen').value != "" && document.getElementById('screen').value != "SyntaxError"){ document.getElementById('screen').value = eval(document.getElementById('screen').value); } else { document.getElementById('screen').value = ""; } if (document.getElementById('screen').value.length > 7){ document.getElementById('screen').value = (eval(document.getElementById('screen').value)).toExponential(); } if (document.getElementById('screen').value == "Infinity"){ document.getElementById('screen').value = "SyntaxError"; } } function clearAll(){ document.getElementById('screen').value = ""; flag = 1; flag2 = 0; }
ef54586e84d3646b80576bc301b74020e0c8ace3
[ "JavaScript" ]
1
JavaScript
GabrielAlonso7/html-calculator
c3578b881a1c842d52b7e32296eabe66b3191d06
983b2428c69116a9155baeadd63b89cca8acc297
refs/heads/master
<repo_name>ma34s/MarkdownLocalWiki<file_sep>/README.md MarkdownLocalWiki ====================== This source code provided by Serene diary almost day-to-day http://kazu-s-diary-2.cocolog-nifty.com/blog/2016/07/htajquerymark-1.html Thank you for a wonderful tool. Whit is this? ======== Standalone Markdown wiki by using marked.js(custom) and HTA. <file_sep>/wiki/core/local_wiki.js /*------------------------------------------------------------------** Definition Value of VBScript **------------------------------------------------------------------*/ // 保存データの種類 // StreamTypeEnum // http://msdn.microsoft.com/ja-jp/library/cc389884.aspx var adTypeBinary = 1; // バイナリ var adTypeText = 2; // テキスト // 読み込み方法 // StreamReadEnum // http://msdn.microsoft.com/ja-jp/library/cc389881.aspx var adReadAll = -1; // 全行 var adReadLine = -2; // 一行ごと // 書き込み方法 // StreamWriteEnum // http://msdn.microsoft.com/ja-jp/library/cc389886.aspx var adWriteChar = 0; // 改行なし var adWriteLine = 1; // 改行あり // ファイルの保存方法 // SaveOptionsEnum // http://msdn.microsoft.com/ja-jp/library/cc389870.aspx var adSaveCreateNotExist = 1; // ない場合は新規作成 var adSaveCreateOverWrite = 2; // ある場合は上書き /*------------------------------------------------------------------** Local Wiki **------------------------------------------------------------------*/ //グローバル変数を設定 var fso = new ActiveXObject("Scripting.FileSystemObject"); var oBaseFolder = getBaseFolder(); var pageNameStack = new Array(); //このHTMLファイルが置かれているフォルダのパス名を取得する function getBaseFolder(){ return fso.GetFolder(CONFIG.base_dirctory); } //エクスプローラでフォルダ・ファイルを開く function OpenFolder(Path){ // alert(Path); var ss = Path.split(','); // Shell関連の操作を提供するオブジェクトその2を取得 var sh = new ActiveXObject( "Shell.Application" ); var Path2 = ""; // alert(ss.length); for(var i=0; i < ss.length; i++){ if(!(ss[i] == "")){ Path2 += ss[i] + '\\'; }; } // alert(Path2); //エクスプローラで開く sh.Open(Path2); // オブジェクトを解放 sh = null; } //ページ名を指定してページを開く function open(pagename){ var content = getContent(pagename); if(! 
content){ return edit(pagename); } // var renderer = new marked.Renderer(); // renderer.link = function(href, title, text) { // renderer.link = function (text, level) { // alert("text=" + text + "level" + level ); // alert(href + " " + title + " " + text ); // }; // renderer.heading = function (text, level) { // alert("text=" + text + "level" + level ); // // var escapedText = text.toLowerCase().replace(/[^\w]+/g, '-'); // return '<h' + level + '><a name="' + // escapedText + // '" class="anchor" href="#' + // escapedText + // '"><span class="header-link"></span></a>' + // text + '</h' + level + '>'; // }; // alert(marked('# heading+', { renderer: renderer })); // var html = marked(content, { renderer: renderer }); //いわゆるmarked.jsを使用(一部改造) var html = marked(content); //ファイル名、更新日時等を付与 if(CONFIG.showFileInfo == true) { id('FileName').innerText = getFilePathAndName(pagename); id('DateCreated').innerText = VALUES.PageCreateDateTitle+":" + getDateCreated(pagename); id('DateLastModified').innerText = VALUES.PageUpdateDateTitle+":" + getDateLastModified(pagename); } //表示履歴 setHistory(pagename); getHistory(); setPageName(pagename); setContent(html); showEditLink(); createToc(); } //履歴保存 function setHistory(pagename) { if(CONFIG.showHistory== false) { return; } //スタックに履歴を保存 if (0 == pageNameStack.length) { //初回は無条件で保存 pageNameStack.push(pagename); } else { //保存されている履歴外のみ保存 var matched = false; for (var i = 0; i < pageNameStack.length; i++) { var tmp = pageNameStack[i]; if (tmp == pagename) { matched = true; } } if (matched == false) { pageNameStack.push(pagename); } } } function getHistory() { if(CONFIG.showHistory== false) { return; } //スタック内の履歴を表示 var html = VALUES.HistoryTitle + ":" for (var i = 0; i < pageNameStack.length; i++) { var tmp = pageNameStack[i]; html = html + "<a href='javascript:open(\"" + tmp + "\")'>" + tmp + "</a> "; } html = html + " "; id('history').innerHTML = html; } //マークダウン(md)ファイル名をフルパスで取得 function getFilePathAndName(pagename){ var s = ''; var filepath = getFilePath(pagename); var f = fso.GetFile(filepath); s = f.Path ; return(s); } //ファイルの作成日時を取得する function getDateCreated(pagename){ var s = ''; var filepath = getFilePath(pagename); var f = fso.GetFile(filepath); var CreateDate = new Date(f.DateCreated); s += CreateDate.toLocaleDateString() + " " + CreateDate.toLocaleTimeString(); return(s); } //ファイルの更新日時を取得する function getDateLastModified(pagename) { var s = ''; var filepath = getFilePath(pagename); var f = fso.GetFile(filepath); var ModDate = new Date(f.DateLastModified); s += ModDate.toLocaleDateString() + " " + ModDate.toLocaleTimeString(); return (s); } //ページ名を指定してページを編集する function edit(pagename){ var content = getContent(pagename); if(checkPageName(pagename) == false) {//Out of work directory window.alert("page name is invalid : "+pagename); // 警告ダイアログを表示 return; } // var html = // "<form onsubmit='save(\"" + pagename + "\"); return false;'>" // + "<textarea cols='120' rows='30' id='textarea' wrap='off'>" + content + "</textarea><br />" // + "<input type='submit' value='保存'><br />" // + "</form>"; //編集方法の修正 textareaへの格納はフォーム生成後のtextareaへテキスト入力 var html = "<form onsubmit='save(\"" + pagename + "\"); return false;'>" + "<textarea cols='120' rows='30' id='textarea' wrap='off'>" + "</textarea><br />" + "<input type='submit' value='保存'><br />" + "</form>"; setPageName(pagename); setContent(html); id('textarea').innerText = content; hideEditLink(); } //いま見ているページを編集する function editCurrentPage(){ pagename = id('headerH1').innerText; edit(pagename); } //ページ名チェック //@todo 
ファイル名/フォルダ名のWinsows禁則文字チェック function checkPageName(pagename) { var filepath = getFilePath(pagename); var absPath = fso.GetAbsolutePathName(filepath); if(absPath.indexOf(oBaseFolder.Path+"\\")<0) {//Out of work directory return false; } //改修箇所が多くなるので、現時点ではSubdirサポートしない absPath = absPath.replace(oBaseFolder.Path+"\\",""); var dirs = absPath.split("\\"); if(dirs.length>1) { return false; } return true; } //フォームのテキストエリアの中身を、指定したページのコンテンツとして保存する function save(pagename){ //編集方法の修正 textareaへの格納はフォーム生成後のtextareaへテキスト入力 // var content = id('textarea').value; var content = id('textarea').innerText; var filepath = getFilePath(pagename); if(checkPageName(pagename) == false) {//Out of work directory return; } utf8_saveToFile(filepath, content); open(pagename); } //ページのリストを取得する function getPageList(isToc){ var enuFiles = new Enumerator(oBaseFolder.Files); var myFiles = []; for (; !enuFiles.atEnd(); enuFiles.moveNext() ){ var FilePath = enuFiles.item(); var ExtensionName = fso.GetExtensionName(FilePath); // 拡張子を取得 var BaseName = fso.GetBaseName(FilePath); // ベースネームを取得 if(ExtensionName == "md"){ // 拡張子がmdだったら if( isToc == true ) {//目次作成時に、トップと一覧ページは除外する if( BaseName == CONFIG.topPage) { continue; } if( BaseName == VALUES.pagelistTitle) { continue; } } myFiles.push(BaseName); } } return myFiles; } //ページ一覧/検索画面を表示する function viweCreatedList(myFiles,title){ var list = []; var listHeader = "|"+VALUES.ListFileNameTitle + "|"+VALUES.ListCreateDateTitle + "|"+VALUES.ListUpdateDateTitle; + "|" list.push(listHeader); list.push('|:-|:-:|:-:|'); //ファイル保存用、表示用のフォーマット変換 for(var i = 0; i < myFiles.length; i++){ // list.push('<li><a href="javascript:open(\'' + myFiles[i] + '\');">' + myFiles[i] + '</a></li>'); // list.push('<li>[' + myFiles[i] + '](' + myFiles[i] + ')</li>' ); list.push('|[' + myFiles[i] + '](' + myFiles[i] + ')|' + getDateCreated(myFiles[i]) +'|' + getDateLastModified(myFiles[i]) +'|' ); } var content = list.join("\r\n"); if(CONFIG.showHistory==true) {//ファイル保存 var filepath = getFilePath( title ); utf8_saveToFile(filepath, content); } //いわゆるmarked.jsを使用(一部改造) var html = marked(content); //表示履歴 setHistory( title ); getHistory(); //表示 setPageName( title ); setContent( html ); showEditLink(); } //ページ一覧画面を表示する function openIndexPage(){ var openIndexPageName = VALUES.pagelistTitle; var myFiles = getPageList(false); viweCreatedList(myFiles,openIndexPageName); } //サイドのTOC作成 function createToc(){ var myFiles = getPageList(true); var list = []; for(var i = 0; i < myFiles.length; i++){ list.push(' - [' + myFiles[i] + '](' + myFiles[i] + ')' ); } var content = list.join("\r\n"); var html = marked(content); id('toc').innerHTML = html; fixed_side_toc(); } //ページ名を指定して、該当するマークダウン(.md)のパス名を取得する function getFilePath(pagename){ var filename = pagename + ".md"; return oBaseFolder.Path + "\\" + filename; } //ページ名を指定して、該当するマークダウンファイルの中身を取得する function getContent(pagename){ var content = ''; var filepath = getFilePath(pagename); if(fso.fileExists(filepath)){ content = utf8_readAll(filepath); } return content; } //メニューの「編集」リンクを表示する function showEditLink() { if(CONFIG.read_only_mode == false) { id('editLink').style.display = "inline"; } } //メニューの「編集」リンクを非表示にする function hideEditLink() { id('editLink').style.display = "none"; } //コンテンツを画面に表示する function setContent(html) { id('content').innerHTML = html; //ハイライト対応 $('#content pre code').each(function(i, e) { hljs.highlightBlock(e, e.className); }); } //ページ名を画面に表示する function setPageName(pagename) { if(pagename){ id('headerH1').innerText = pagename; } } //画面のページ名を取得する function 
getPageName() { return id('headerH1').innerText; } //HTMLの要素を取得する function id(s) { return document.getElementById(s); } //検索にヒットしたページの一覧画面を表示する function FindIndexPage() { var enuFiles = new Enumerator(oBaseFolder.Files); var myFiles = []; var FindIndexPageName = VALUES.FindTitle; for (; !enuFiles.atEnd(); enuFiles.moveNext()) { var FilePath = enuFiles.item(); var ExtensionName = fso.GetExtensionName(FilePath); // 拡張子を取得 var BaseName = fso.GetBaseName(FilePath); // ベースネームを取得 if (ExtensionName == "md") { // 拡張子がmdだったら //ファイル名検索 var database = FilePath; var sword = Text1.value; var check = BaseName.indexOf(sword, 0); if (0 <= check) { //ファイル名を格納 myFiles.push(BaseName); } else { //ファイルの中身を検索 if (0 <= openAndSerch(BaseName) ) { //ファイル名を格納 myFiles.push(BaseName); } } } } viweCreatedList(myFiles,FindIndexPageName + Text1.value); } //ページ名を指定してページを開き、検索文字がヒットしたかチェックする function openAndSerch(pagename) { var content = getContent(pagename); if (!content) { return edit(pagename); } //ファイル名検索 var database = content; var sword = Text1.value; var check = database.indexOf(sword, 0); return check } //新規作成 function NewPage() { var pagename = Text2.value; // window.confirm(pagename); if (pagename) { open(pagename); } Text2.value = ''; } //リネーム function ReNamePage() { var srcPagename = getPageName(); var dstPagename = Text2.value; // 「OK」時の処理開始 + 確認ダイアログの表示 if (window.confirm(dstPagename + ' にリネームしますか?')) { var srcFilepath = getFilePath(srcPagename); var dstFilepath = getFilePath(dstPagename); fso.MoveFile(srcFilepath, dstFilepath); window.alert(srcPagename + ' を ' + dstPagename + ' にリネームしました。'); open(dstPagename); } else { window.alert('キャンセルされました。'); // 警告ダイアログを表示 } Text2.value = ''; } //削除 function DeletePage() { var pagename = getPageName(); // 「OK」時の処理開始 + 確認ダイアログの表示 if (window.confirm(pagename + ' を削除しますか?')) { var pagename = getPageName(); var filepath = getFilePath(pagename); fso.DeleteFile(filepath); window.alert(pagename + ' を削除しました。'); openTopPage(); } else { window.alert('キャンセルされました。'); // 警告ダイアログを表示 } Text2.value = ''; } function utf8_readAll(filename){ var sr = new ActiveXObject("ADODB.Stream"); sr.Type = adTypeText; sr.charset = "utf-8"; sr.Open(); sr.LoadFromFile( filename ); var temp = sr.ReadText( adReadAll ); sr.Close(); return temp; } function utf8_saveToFile(filename, text) { // ADODB.Streamのモード var adTypeBinary = 1; var adTypeText = 2; // ADODB.Streamを作成 var pre = new ActiveXObject("ADODB.Stream"); // 最初はテキストモードでUTF-8で書き込む pre.Type = adTypeText; pre.Charset = 'UTF-8'; pre.Open(); pre.WriteText(text); // バイナリモードにするためにPositionを一度0に戻す // Readするためにはバイナリタイプでないといけない pre.Position = 0; pre.Type = adTypeBinary; // Positionを3にしてから読み込むことで最初の3バイトをスキップする // つまりBOMをスキップします pre.Position = 3; var bin = pre.Read(); pre.Close(); // 読み込んだバイナリデータをバイナリデータとしてファイルに出力する // ここは一般的な書き方なので説明を省略 var stm = new ActiveXObject("ADODB.Stream"); stm.Type = adTypeBinary; stm.Open(); stm.Write(bin); stm.SaveToFile(filename, 2); // force overwrite stm.Close(); }; //TopPageを表示する function openTopPage(){ open(CONFIG.topPage); } function makeLink(item,func,text) { id(item).innerHTML = '<a href="javascript:'+func+'" id="'+text+'A">'+ text+'</a>|'; } function initNavigation(){ marked.setOptions({ image_base: CONFIG.base_dirctory +"/" }) //editLink makeLink('topPageLink','openTopPage()',CONFIG.topPage); makeLink('editLink','editCurrentPage()',VALUES.editTitle); makeLink('PagelistLink','openIndexPage()',VALUES.pagelistTitle); makeLink('SarchLink','FindIndexPage()',VALUES.sarchTitle); 
makeLink('newPageLink','NewPage()',VALUES.newPageTitle); makeLink('renamePageLink','ReNamePage()',VALUES.reNamePageTitle); makeLink('deletePageLink','DeletePage()',VALUES.deletePageTitle); if(CONFIG.showHistory == false) { id('HRhistry').style.display = "none"; } if(CONFIG.showFileInfo == false) { id('HRfileinfo').style.display = "none" } if(CONFIG.read_only_mode == true) { id('editLink').style.display = "none"; id('newPageLink').style.display = "none"; id('renamePageLink').style.display = "none"; id('edittext2').style.display = "none"; id('deletePageLink').style.display = "none"; } openTopPage(); } <file_sep>/wiki/config.js /*------------------------------------------------------------------** Wiki Config **------------------------------------------------------------------*/ var CONFIG = { topPage: 'top_page', base_dirctory: './doc', showHistory: true, showFileInfo: true, read_only_mode: false } /*------------------------------------------------------------------** Value **------------------------------------------------------------------*/ var VALUES = { editTitle: '編集', pagelistTitle: 'ページ一覧', sarchTitle: '検索', newPageTitle: '新規作成', reNamePageTitle: 'リネーム', deletePageTitle: '削除', ListFileNameTitle: 'ファイル名', ListCreateDateTitle: '作成日', ListUpdateDateTitle: '更新日', PageCreateDateTitle: '作成日時', PageUpdateDateTitle: '更新日時', HistoryTitle: '表示履歴', FindTitle: '【検索結果】' };
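// A minimal localization sketch (my addition, not shipped with the tool):
// because VALUES is a plain global object, an English UI only needs the
// entries overwritten after config.js loads. Only keys defined above are used:
//   VALUES.editTitle = 'Edit';
//   VALUES.pagelistTitle = 'Page list';
//   VALUES.sarchTitle = 'Search';
//   VALUES.newPageTitle = 'New page';
//   VALUES.reNamePageTitle = 'Rename';
//   VALUES.deletePageTitle = 'Delete';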
41e1e24a54dcca51a115ce180ce91d9f62ba29ed
[ "Markdown", "JavaScript" ]
3
Markdown
ma34s/MarkdownLocalWiki
60b9e032f1ed00846d890afaf896423fb930374a
b17d3110a4d7bfaa31bc171befea0d3293b53ad1
refs/heads/master
<file_sep>package com.cg.service; import java.util.ArrayList; import com.cg.model.TdsMaster; public interface TdsService { TdsMaster getById(int id); }
cee732f643b052c395595d0bdba76ecb2e5c73c0
[ "Java" ]
1
Java
JayashreeAlladi/Module3
d5f1fe41fb6652c6b3b735be24eda661c41c2f54
20f4c1967c1d96a8b81f6efc6367579e41261df2
refs/heads/master
<repo_name>AbiHill/WDI_LDN_PROJECT4<file_sep>/src/components/events/ShowRoute.js import React from 'react'; import axios from 'axios'; import Auth from '../../lib/Auth'; import GoogleMap from './GoogleMap'; import { Link } from 'react-router-dom'; class ShowRoute extends React.Component { state = { event: null, message: '', joined: null, checked: true }; componentDidMount() { axios.get(`/api/events/${this.props.match.params.id}`) .then(res => this.setState({ event: res.data })); } handleDelete = () => { axios.delete(`/api/events/${this.props.match.params.id}`, { headers: { Authorization: `Bearer ${Auth.getToken()}` } }) .then(() => this.props.history.push('/events')); } joinEvent = () => { this.setState({ message: 'You\'ve Successfully Joined!'}); console.log(this.state.event._id); console.log(Auth.getToken()); axios.put(`/api/me/join/${this.state.event._id}`, this.state.event._id, { headers: { Authorization: `Bearer ${Auth.getToken()}`} }) .then(res => console.log(res)) .catch(err => console.error(err)); } leaveEvent = () => { this.setState({ message: 'You\'ve Successfully left this event!'}); axios.put(`/api/me/leave/${this.state.event._id}`, this.state.event._id, { headers: { Authorization: `Bearer ${Auth.getToken()}`} }) .then(res => console.log(res)) .catch(err => console.error(err)); } handleToggle = () => { this.setState({ checked: !this.state.checked }); } render() { console.log(this.state.event); const userId = Auth.getPayload().sub; return ( this.state.event ? ( <div className="container"> <div className="show-container"> <i id="falcon-pages" className="fab fa-phoenix-framework"></i> <h1 className="title">{this.state.event.name}</h1> <h2 className="subtitle">{this.state.event.sport}</h2> <div className="event-show-top"> <div className="columns is-multiline"> <div className="column is-half"> <img className="image" src={`${this.state.event.image}`} /> </div> <div className="column is-half"> <h4>{this.state.event.name}</h4> <h5>{this.state.event.sport}</h5> <h6>Event Organiser:<br/><span>{this.state.event.createdBy.username}</span></h6> <h6>Date:<br/><span>{this.state.event.dateTime.split('T')[0].split('-')[2]}/{this.state.event.dateTime.split('T')[0].split('-')[1]}/{this.state.event.dateTime.split('T')[0].split('-')[0]}</span></h6> <h6>Time:<br/> <span>{this.state.event.dateTime.split('T')[1].split(':00.')[0]}</span></h6> <h6>Team Size:<br/> <span>{this.state.event.teamSize}</span></h6> <h6>Address:<br/><span>{this.state.event.address}</span></h6> <h6>Info:<br/> <span>{this.state.event.description}</span></h6> {this.state.event.createdBy && this.state.event.createdBy._id === userId && <div className="delete-edit-container"> <Link className="edit-button-show-page" to={`/events/${this.state.event._id}/edit`}>Edit</Link> <button onClick={this.handleDelete}>Delete</button> </div> } { this.state.event.createdBy && this.state.event.createdBy._id !== userId && Auth.isAuthenticated() && this.state.event.joinedUsers.findIndex(user => user._id === userId) === -1 && !this.state.message && <button className="button" onClick={this.joinEvent}>Join Event</button> } { this.state.event.joinedUsers.findIndex(user => user._id === userId) !== -1 && <button className="button" onClick={this.leaveEvent}>Leave Event</button> } <p>{this.state.message}</p> </div> </div> </div> <div className="ex2"> <button><label htmlFor="item-2">Attendees</label></button> <input onChange={this.handleToggle} checked={this.state.checked} type="checkbox" name="rwo" id="item-2" /> <div className="inner"> <div 
className="hide2"> <h2 className="Title">Attendees</h2> <ul className="columns"> {this.state.event.joinedUsers.map((user, i) => <li key={i} className="column is-one-third"> <div className="card"> <div className="card-content"> <h5>{user.firstName}</h5> <h6>{user.username}</h6> <img className="image" src={`${user.image}`} /> </div> </div> </li> )} </ul> </div> </div> <p className="follow"></p> </div> <GoogleMap center={this.state.event.location} /> </div> </div> ) : ( <div className="container"> <h1 className="title">LOADING</h1> </div> ) ); } } export default ShowRoute; <file_sep>/src/components/common/AutoComplete.js /* global google */ //Google Autocomplete funcationality which is utilised on the registeration form and new event form import React from 'react'; class AutoComplete extends React.Component { componentDidMount() { this.autocompleteInput = new google.maps.places.Autocomplete(this.input); this.autocompleteInput.addListener('place_changed', () => { const place = this.autocompleteInput.getPlace(); this.props.onChange({ target: { name: this.input.name, value: { address: place.formatted_address, location: place.geometry.location.toJSON() } } }); }); } render() { return ( <input {...this.props} type="text" ref={element => this.input = element} /> ); } } export default AutoComplete; <file_sep>/src/components/events/EditRoute.js import React from 'react'; import axios from 'axios'; import Auth from '../../lib/Auth'; import Form from './Form'; class EditRoute extends React.Component { state = { name: '', sport: '', address: '', image: '', date: 0, time: 0, description: '', teamSize: 0 } handleChange = (e) => { const { name, value } = e.target; this.setState({ [name]: value }, () => console.log(this.state)); } handleSubmit = (e) => { e.preventDefault(); // use Auth.gettoken that we made in auth to get the token for the header axios.put(`/api/events/${this.props.match.params.id}`, this.state, { headers: { Authorization: `Bearer ${Auth.getToken()}`} }) .then(() => this.props.history.push(`/events/${this.props.match.params.id}`)); } componentDidMount() { axios.get(`/api/events/${this.props.match.params.id}`) .then(res => this.setState(res.data)); } render() { return( <div className="container"> <Form handleChange={this.handleChange} handleSubmit={this.handleSubmit} data={this.state}/> </div> ); } } export default EditRoute; <file_sep>/src/components/common/FlashMessages.js //flash messages for when users log in etc import React from 'react'; import Flash from '../../lib/Flash'; const FlashMessage = () => { //get any messages that have been set and store them in a variable const messages = Flash.getMessages(); //clear the messages so that when you navigate away from that page the flash message doesn't reappear Flash.clearMessages(); return ( <div className="container"> {/* if messages exists then iterate over the messages objects (by using the keys and then mapping) and show all the messages from that object */} {messages && Object.keys(messages).map((type, i) => <div key={i} className={`notification is-${type}`}>{messages[type]}</div> )} </div> ); }; export default FlashMessage; <file_sep>/src/app.js import React from 'react'; import ReactDOM from 'react-dom'; import IndexRoute from './components/events/IndexRoute'; import ShowRoute from './components/events/ShowRoute'; import EditRoute from './components/events/EditRoute'; import NewRoute from './components/events/NewRoute'; import Navbar from './components/common/Navbar'; import Login from './components/auth/Login'; import Register from 
'./components/auth/Register';
import NotFound from './components/common/NotFound';
import FlashMessages from './components/common/FlashMessages';
import ShowProfile from './components/auth/ShowProfile';

import './scss/style.scss';
import 'bulma';

import { BrowserRouter, Route, Switch } from 'react-router-dom';
import ProtectedRoute from './components/common/ProtectedRoute';

class App extends React.Component {
  render() {
    return (
      <BrowserRouter>
        <section>
          <Navbar />
          <FlashMessages />
          <main className="section">
            <Switch>
              <ProtectedRoute exact path="/events/new" component={NewRoute} />
              <Route exact path="/events/:id/edit" component={EditRoute} />
              <ProtectedRoute exact path="/me" component={ShowProfile} />
              <Route exact path="/events/:id" component={ShowRoute} />
              <Route exact path="/events" component={IndexRoute} />
              <Route path="/register" component={Register} />
              <Route path="/" component={Login} />
              <Route path="/login" component={Login} />
              <Route component={NotFound} />
            </Switch>
          </main>
        </section>
      </BrowserRouter>
    );
  }
}

ReactDOM.render(
  <App />,
  document.getElementById('root')
);
<file_sep>/src/components/events/Form.js
import React from 'react';
import AutoComplete from '../common/AutoComplete';
import ReactFilestack from 'filestack-react';

const apiKey = '<KEY>';
const options = {
  accept: 'image/*',
  transformations: {
    crop: { aspectRatio: 0.95}
  }
};

const Form = ({ handleChange, handleSubmit, data, handleFilestack }) => {
  console.log('this is the data', data.sport);
  return(
    <div className="form-container">
      <form onSubmit={handleSubmit}>
        <div className="field">
          <label htmlFor="name">Event Name</label>
          <input className="input" placeholder="Name" name="name" onChange={handleChange} value={data.name} />
          {/* {data.errors.name && <small>{data.errors.name}</small>} */}
        </div>
        <div className="field">
          <label htmlFor="sport">Sport</label><br/>
          <select name="sport" onChange={handleChange} value={data.sport}>
            <option value="football">Football</option>
            <option value="basketball">Basketball</option>
            <option value="volleyball">Volleyball</option>
            <option value="tennis">Tennis</option>
            <option value="golf">Golf</option>
            <option value="frisbee">Frisbee</option>
            <option value="badminton">Badminton</option>
            <option value="tabletennis">Table Tennis</option>
            <option value="running">Running</option>
          </select>
          {/* {data.errors.sport && <small>{data.errors.sport}</small>} */}
        </div>
        <div className="field">
          <label htmlFor="address">Address</label>
          {/* note: this previously read data.address.adress (typo), so the input was never controlled */}
          <AutoComplete className="input" placeholder="Address" name="address" value={data.address.address} onChange={handleChange} />
          {/* {data.errors.address && <small>{data.errors.address}</small>} */}
        </div>
        <div className="field">
          {/* <label htmlFor="name">Image</label>
          <input className="input" placeholder="Image" name="image" onChange={handleChange} value={data.image} /> */}
          {/* {data.errors.image && <small>{data.errors.image}</small>} */}
        </div>
        <label htmlFor="image">Image</label>
        <ReactFilestack
          apikey={apiKey}
          buttonText="Upload Image"
          buttonClass="classname"
          // onSuccess={res => this.setState({ image: res.filesUploaded[0].url}, () => console.log(this.state))}
          options={options}
          onSuccess={res => handleFilestack(res)}
        />
        <img className="image" src={`${data.image}`} />
        {/* <div className="field">
          <label htmlFor="date">date</label>
          <input type="date" className="input" placeholder="date" name="date" onChange={handleChange} value={data.date} />
          {/* {data.errors.date && <small>{data.errors.date}</small>}
        </div> */}
        <div className="field">
          <label htmlFor="dateTime">Date & 
time</label> <input type="datetime-local" className="input" placeholder="dateTime" name="dateTime" onChange={handleChange} value={data.dateTime} /> {/* {data.errors.time && <small>{data.errors.time}</small>} */} </div> <div className="field"> <label htmlFor="description">Description</label> <textarea className="input" placeholder="description" name="description" onChange={handleChange} value={data.description} /> {/* {data.errors.description && <small>{data.errors.description}</small>} */} </div> <div className="field"> <label htmlFor="teamSize">Team Size</label> <input className="input" placeholder="Team Size" name="teamSize" onChange={handleChange} value={data.teamSize} /> {/* {data.errors.description && <small>{data.errors.description}</small>} */} </div> <button>Submit</button> </form> </div> ); }; export default Form; <file_sep>/config/environment.js const env = process.env.NODE_ENV || 'dev'; //set up for heroku port or local server 4000 const port = process.env.PORT || 4000; //set up mongo database for heroku or local server database const dbURI = process.env.MONGODB_URI || `mongodb://localhost/events-${env}`; //for authentication const secret = process.env.SECRET || 'a^yd%2GH!)zI*_4fsQ'; module.exports = { env, port, dbURI, secret }; <file_sep>/src/components/auth/ShowProfile.js import React from 'react'; import axios from 'axios'; import Auth from '../../lib/Auth'; import { Link } from 'react-router-dom'; class ShowProfile extends React.Component { state = { user: null }; componentDidMount() { axios.get('/api/me', { headers: { Authorization: `Bearer ${Auth.getToken()}`} }) // .then(res => console.log('USER ========>', res.data)); .then(res => this.setState({ user: res.data })); } render() { //this stops the rendering of the page until the user has been set in the componentDidMount stage if (!this.state.user) return false; console.log(this.state.user); return ( <div className="profile-container"> <div className="container"> <h1>{this.state.user.firstName}</h1> <div className="card"> <div className="card-content"> <img className="image" src={`${this.state.user.image}`} /> <h4>Username:</h4> <p>{this.state.user.username}</p> <h4>Email:</h4> <p>{this.state.user.email}</p> <h4>Address:</h4> <p>{this.state.user.address}</p> <h4>Mobile:</h4> <p>{this.state.user.tel}</p> </div> </div> <h3>Your Events</h3> <div className="columns is-multiline"> {this.state.user.events.map((event, i) => <li key={i} className="column is-one-third"> <Link to={`/events/${event._id}`}> <div className="card"> <div className="card-content"> <h4>{event.name}</h4> <h5>{event.sport}</h5> <img className="image" src={`${event.image}`} /> </div> </div> </Link> </li> )} </div> </div> </div> ); } } export default ShowProfile; <file_sep>/config/router.js const router = require('express').Router(); const secureRoute = require('../lib/secureRoute'); const events = require('../controllers/events'); const auth = require('../controllers/auth'); //EVENTS ROUTES router.route('/events') .get(events.index) .post(secureRoute, events.create); router.route('/events/:id') .get(events.show) .put(secureRoute, events.update) .delete(secureRoute, events.delete); //USERS router.route('/register') .post(auth.register); router.route('/login') .post(auth.login); router.get('/me', secureRoute, auth.show); router.put('/me', secureRoute, auth.update); router.put('/me/join/:eventId', secureRoute, auth.joinEvent); router.put('/me/leave/:eventId', secureRoute, auth.leaveEvent); router.route('/*') .all((req, res) => res.status(404).json({ message: 
'Not found' }));

module.exports = router;
<file_sep>/src/components/common/Navbar.js
import React from 'react';
import { Link, withRouter } from 'react-router-dom';
import Auth from '../../lib/Auth';

class Navbar extends React.Component {
  state = {
    navIsOpen: false
  }

  //to avoid binding the method inside a constructor we can just use an arrow function, because they don't care about 'this' they will just look for something to bind to
  handleToggle = () => {
    this.setState({ navIsOpen: !this.state.navIsOpen });
  }

  //will fire every time the navbar receives new props, which it does whenever a link is clicked. Also fires whenever the state changes, which is why we needed this.state.navIsOpen &&, so we don't get stuck in an infinite loop of state changes. This will now only fire when this.state.navIsOpen = true;
  componentWillUpdate() {
    this.state.navIsOpen && this.setState({ navIsOpen: false });
  }

  handleLogout = () => {
    Auth.logout();
    this.props.history.push('/events');
  }

  render() {
    return (
      <nav className="navbar">
        <div className="navbar-brand">
          <Link className="navbar-item" to="/events">
            <i id="falcon-logo" className="fab fa-phoenix-framework"></i><p className="logo-text"> R H</p>
          </Link>
          <div className={`navbar-burger ${this.state.navIsOpen? 'is-active' : ''}`} onClick={this.handleToggle} >
            <span></span>
            <span></span>
            <span></span>
          </div>
        </div>
        <div className={`navbar-menu ${this.state.navIsOpen ? 'is-active' : ''}`}>
          <div className="navbar-end">
            {Auth.isAuthenticated() && <Link id="navlink" className="navbar-item" to="/events">events</Link>}
            {Auth.isAuthenticated() && <Link id="navlink" className="navbar-item" to="/events/new">add</Link>}
            {/* below is the link to profile page....FIX */}
            {Auth.isAuthenticated() && <Link id="navlink" className="navbar-item" to="/me">profile</Link>}
            {Auth.isAuthenticated() && <a id="navlink" className="navbar-item" onClick={this.handleLogout}>logout</a>}
            {!Auth.isAuthenticated() && <Link id="navlink" className="navbar-item" to="/events">events</Link> }
            {!Auth.isAuthenticated() && <Link id="navlink" className="navbar-item" to="/login">login</Link> }
            {!Auth.isAuthenticated() && <Link id="navlink" className="navbar-item" to="/register">register</Link> }
          </div>
        </div>
      </nav>
    );
  }
}

//need withRouter so that it's as if it's wrapped in Route in app.js; this passes history, location and match into the props
export default withRouter(Navbar);
<file_sep>/controllers/auth.js
const User = require('../models/user');
const jwt = require('jsonwebtoken');
const { secret } = require('../config/environment');
const twilio = require('../lib/twilio');

// REGISTER
function register(req, res, next) {
  //corrections for the data being passed through via google places in the front end.
req.body.location = req.body.address.location;
  req.body.address = req.body.address.address;

  User.create(req.body)
    .then(user => {
      const token = jwt.sign({ sub: user._id }, secret, { expiresIn: '24h' });
      res.json({ user, token, message: 'Thank you for registering' });
    })
    .catch(next);
}

//LOGIN
function login(req, res, next) {
  User.findOne({ email: req.body.email })
    .then(user => {
      if(!user || !user.validatePassword(req.body.password)) {
        return res.status(401).json({ message: 'Unauthorized' });
      }
      const token = jwt.sign({ sub: user._id }, secret, { expiresIn: '24h' });
      res.json({ user, token, message: `Welcome back ${user.username}` });
    })
    .catch(next);
}

// PROFILE PAGE
function show(req, res, next) {
  User.findById(req.currentUser._id)
    .populate('events')
    .then(user => res.json(user))
    .catch(next);
}

function update(req, res, next) {
  User.findById(req.params.id)
    .then(user => Object.assign(user, req.body))
    .then(user => user.save())
    .then(user => res.json(user))
    .catch(next);
}

//-----------------------------------------------------------------
// USER JOIN EVENT
function joinEvent(req, res, next) {
  req.currentUser.events.push(req.params.eventId);
  req.currentUser.save()
    .then(user => {
      res.json(user);
      return twilio
        .sendSMS(user.tel, 'You have joined an event! See you there!');
    })
    .catch(next);
}

//--------------------------------------------------------------------
// USER LEAVE EVENT
function leaveEvent(req, res, next) {
  User.findById(req.currentUser._id)
    .then(user => {
      const index = user.events.indexOf(req.params.eventId);
      // guard: without this, an indexOf of -1 would slice off the wrong event
      if (index === -1) return user;
      const updatedEvents = [...user.events.slice(0, index), ...user.events.slice(index + 1)];
      user.events = updatedEvents;
      return user.save();
    })
    .then(user => {
      res.json(user);
    })
    .catch(next);
}

module.exports = { register, login, show, update, joinEvent, leaveEvent };
<file_sep>/README.md
![image](https://ga-dash.s3.amazonaws.com/production/assets/logo-9f88ae6c9c3871690e33280fcf557f33.png)

# GA WDI-32 Project 4 - MERN Full Stack App - Rec Head

For my final project, I was required to build a full-stack application using an Express API to serve data from a Mongo database, and utilising React. I play a lot of basketball in my spare time and I try to organise recreational matches at local basketball courts around London. With it sometimes being difficult to organise the teams, I thought it would be a good idea to create my own sports recreational meet-up app called Rec Head. Rec Head allows users to create their own sports events and join others' events. I've integrated the Google Maps API and Google Address Auto Complete to make it easy for the event organiser to display the location of the sports event. A highlight of this project was integrating Twilio, a cloud communications platform, to send automated text messages when a user has successfully joined an event.

##### [Click here to see website](https://rec-head.herokuapp.com/)

---

Users are required to register and log in in order to join events.

<p align="center"><img src="https://i.imgur.com/u5gHLe4.png" width="700"></p>

It's also possible for the user to filter through the events to find the type of event they are looking for.

<p align="center"><img src="https://i.imgur.com/enfs9JY.png" width="700"></p>

I worked hard on making sure the styling was sleek and consistent throughout the full website to keep the branding aligned.

<p align="center"><img src="https://i.imgur.com/IXItbgo.png" width="700"></p>

Another highlight of the project was integrating FileStack, a great tool that lets users upload images easily from their computers or social media accounts.

<p align="center"><img src="https://i.imgur.com/YkzFieY.png" width="700"></p>

Users are able to view their profile, which displays what events they are attending. Users are also able to see who is attending certain events.

<p align="center"><img src="https://i.imgur.com/2Yz2o7s.png" width="700"></p>

---

To improve the app further, I'm keen to include a feature that allows users to message each other, and to develop the events page further to showcase a calendar of events and improve the usability.

In order to run the app:

* Run `yarn` to install dependencies
* `yarn start:client`
* `yarn start:server`
<file_sep>/models/event.js
const mongoose = require('mongoose');

const eventSchema = new mongoose.Schema({
  location: {
    lat: { type: Number },
    lng: { type: Number }
  },
  name: { type: String },
  sport: { type: String },
  address: { type: String },
  image: { type: String },
  dateTime: { type: Date },
  description: { type: String },
  teamSize: { type: Number },
  createdBy: { type: mongoose.Schema.ObjectId, ref: 'User' }
}, {
  timestamps: true
});

//virtual set up to tie the user to an event
eventSchema
  .virtual('joinedUsers', {
    localField: '_id',
    foreignField: 'events',
    ref: 'User'
  });

eventSchema
  .set('toJSON', {
    getters: true,
    virtuals: true
  });

module.exports = mongoose.model('Event', eventSchema);
7a10e22732a62e1ee4b420670886d42a446fa47a
[ "JavaScript", "Markdown" ]
13
JavaScript
AbiHill/WDI_LDN_PROJECT4
cf2a9f3d0181342aa446bbe0cea19ade59d5696f
1b07fa7c8306490185840148f15d6f352dc4bf5a
refs/heads/master
<file_sep>using Newtonsoft.Json; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace Damato_App.Settings { public class ApplicationSettings { public ApplicationSettings() { DownLoadedSettings = new DownLoadedSettings(); } public LoginSettings LoginSettings { get; set; } public SearchSettings SearchSettings { get; set; } public DownLoadedSettings DownLoadedSettings { get; set; } } public class SearchSettings { public SearchSettings() { ReturnAmount = 10; } public int ReturnAmount { get; set; } } public class DownLoadedSettings { public DownLoadedSettings() { DownLoaded = new List<string>(); DownLoadFileLocation = ""; } public List<string> DownLoaded { get; set; } public string DownLoadFileLocation { get; set; } } public class LoginSettings { public string UserName { get; set; } [JsonIgnore] public string password { get { return Cipher.Decrypt(Password, "<PASSWORD>"); } set { Password = Cipher.Encrypt(value, "<PASSWORD>"); } } public string Password { get; set; } public bool KeepLogdIn { get; set; } } } <file_sep>using Damato_App.DataBase; using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; namespace Damato_App { public partial class AddTags : Form { public AddTags() { InitializeComponent(); label1.Text = TopText; } private void button7_Click(object sender, EventArgs e) { this.Close(); } private void button2_Click(object sender, EventArgs e) { ((sender as Button).Parent as TableLayoutPanel).Dispose(); } private List<bool> temp1Text = new List<bool>() { true }; private List<string> temp2Text = new List<string>() { "Add Tag" }; private void textBox1_KeyDown(object sender, KeyEventArgs e) { temp1Text[Int32.Parse(((sender as TextBox).Parent as TableLayoutPanel).Tag.ToString())] = false; } private void textBox1_Leave(object sender, EventArgs e) { if ((sender as TextBox).Text.Trim() == "") temp1Text[Int32.Parse(((sender as TextBox).Parent as TableLayoutPanel).Tag.ToString())] = true; if (temp1Text[Int32.Parse(((sender as TextBox).Parent as TableLayoutPanel).Tag.ToString())]) { (sender as TextBox).Text = temp2Text[Int32.Parse(((sender as TextBox).Parent as TableLayoutPanel).Tag.ToString())]; (sender as TextBox).Tag = null; } } private void textBox1_Enter(object sender, EventArgs e) { if (temp1Text[Int32.Parse(((sender as TextBox).Parent as TableLayoutPanel).Tag.ToString())]) (sender as TextBox).Text = ""; } public int Ccount = 0; public string TopText { get { return label1.Text; } set { label1.Text = value; } } private void textBox1_TextChanged(object sender, EventArgs e) { } private void button3_Click(object sender, EventArgs e) { temp1Text.Add(false); temp2Text.Add("Add Tag"); Ccount++; System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(AddTags)); TableLayoutPanel xtableLayoutPanel2 = new System.Windows.Forms.TableLayoutPanel(); Button xbutton2 = new System.Windows.Forms.Button(); Button xbutton3 = new System.Windows.Forms.Button(); TextBox xtextBox1 = new System.Windows.Forms.TextBox(); // // tableLayoutPanel2 // xtableLayoutPanel2.CellBorderStyle = System.Windows.Forms.TableLayoutPanelCellBorderStyle.Single; xtableLayoutPanel2.ColumnCount = 3; xtableLayoutPanel2.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(31)))), ((int)(((byte)(31)))), ((int)(((byte)(31))))); 
xtableLayoutPanel2.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle()); xtableLayoutPanel2.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 100F)); xtableLayoutPanel2.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle()); xtableLayoutPanel2.Controls.Add(xtextBox1, 1, 0); xtableLayoutPanel2.Controls.Add(xbutton2, 2, 0); xtableLayoutPanel2.Location = new System.Drawing.Point(3, 3); xtableLayoutPanel2.Name = "tableLayoutPanel2"; xtableLayoutPanel2.RowCount = 1; xtableLayoutPanel2.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 100F)); xtableLayoutPanel2.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 29F)); xtableLayoutPanel2.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 29F)); xtableLayoutPanel2.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 29F)); xtableLayoutPanel2.Size = new System.Drawing.Size(441, 30); xtableLayoutPanel2.TabIndex = 11; // // button2 // xbutton2.Cursor = System.Windows.Forms.Cursors.Hand; xbutton2.FlatAppearance.BorderSize = 0; xbutton2.FlatStyle = System.Windows.Forms.FlatStyle.Flat; xbutton2.Image = ((System.Drawing.Image)(resources.GetObject("button2.Image"))); xbutton2.Location = new System.Drawing.Point(446, 6); xbutton2.Margin = new System.Windows.Forms.Padding(5); xbutton2.Name = "button2"; xbutton2.RightToLeft = System.Windows.Forms.RightToLeft.No; xbutton2.Size = new System.Drawing.Size(18, 18); xbutton2.TabIndex = 5; xbutton2.UseVisualStyleBackColor = true; xbutton2.Click += new System.EventHandler(button2_Click); // // textBox1 // xtextBox1.Anchor = System.Windows.Forms.AnchorStyles.None; xtextBox1.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(31)))), ((int)(((byte)(31)))), ((int)(((byte)(31))))); xtextBox1.BorderStyle = System.Windows.Forms.BorderStyle.None; xtextBox1.Font = new System.Drawing.Font("Microsoft Sans Serif", 10F); xtextBox1.ForeColor = System.Drawing.Color.DarkGray; xtextBox1.Location = new System.Drawing.Point(29, 7); xtextBox1.MaxLength = 20; xtextBox1.Name = "textBox1"; xtextBox1.Size = new System.Drawing.Size(408, 16); xtextBox1.TabIndex = 7; xtextBox1.Text = ""; xtextBox1.Enter += new System.EventHandler(textBox1_Enter); xtextBox1.KeyDown += new System.Windows.Forms.KeyEventHandler(textBox1_KeyDown); xtextBox1.Leave += new System.EventHandler(textBox1_Leave); xtextBox1.TextChanged += new EventHandler(textBox1_TextChanged); flowLayoutPanel1.Controls.Add(xtableLayoutPanel2); xtableLayoutPanel2.Tag = Ccount; xtextBox1.Focus(); } public List<string> vss = new List<string>(); private void button1_Click(object sender, EventArgs e) { List<string> vs = new List<string>(); foreach (var item in flowLayoutPanel1.Controls) { string s = ((item as TableLayoutPanel).Controls[0] as TextBox).Text; if (s.Trim() != "" && s.Trim() != temp2Text[Int32.Parse((item as TableLayoutPanel).Tag.ToString())]) vs.Add(s.Trim()); else { ((item as TableLayoutPanel).Controls[0] as TextBox).Focus(); return; } } vss = vs; this.Close(); } public string Token { get; set; } private void AddTags_Load(object sender, EventArgs e) { MethodInvoker methodInvokerDelegate = async delegate () { var sss = await API.GetPresetss(Token, ""); foreach (var item in sss) { comboBox1.Items.Add(item); } this.Cursor = Cursors.Default; }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } private void comboBox1_SelectedIndexChanged(object 
sender, EventArgs e) { flowLayoutPanel1.Controls.Clear(); foreach (var item in (comboBox1.SelectedItem as Presets).Feleds.Split('*')) { Ccount++; TableLayoutPanel tableLayoutPanel2xx = new TableLayoutPanel(); TextBox textBox1xx = new TextBox(); tableLayoutPanel2xx.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(31)))), ((int)(((byte)(31)))), ((int)(((byte)(31))))); tableLayoutPanel2xx.CellBorderStyle = System.Windows.Forms.TableLayoutPanelCellBorderStyle.Single; tableLayoutPanel2xx.ColumnCount = 3; tableLayoutPanel2xx.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle()); tableLayoutPanel2xx.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 100F)); tableLayoutPanel2xx.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle()); tableLayoutPanel2xx.Controls.Add(textBox1xx, 1, 0); tableLayoutPanel2xx.Location = new System.Drawing.Point(3, 3); tableLayoutPanel2xx.Name = "tableLayoutPanel2"; tableLayoutPanel2xx.RowCount = 1; tableLayoutPanel2xx.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 100F)); tableLayoutPanel2xx.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 29F)); tableLayoutPanel2xx.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 29F)); tableLayoutPanel2xx.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 29F)); tableLayoutPanel2xx.Size = new System.Drawing.Size(441, 30); tableLayoutPanel2xx.TabIndex = 11; tableLayoutPanel2xx.Tag = Ccount; // // textBox1 // textBox1xx.Anchor = System.Windows.Forms.AnchorStyles.None; textBox1xx.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(31)))), ((int)(((byte)(31)))), ((int)(((byte)(31))))); textBox1xx.BorderStyle = System.Windows.Forms.BorderStyle.None; textBox1xx.Font = new System.Drawing.Font("Microsoft Sans Serif", 10F); textBox1xx.ForeColor = System.Drawing.Color.DarkGray; textBox1xx.Location = new System.Drawing.Point(19, 7); textBox1xx.MaxLength = 20; textBox1xx.Name = "textBox1"; textBox1xx.Size = new System.Drawing.Size(403, 16); textBox1xx.TabIndex = 7; textBox1xx.Text = item; textBox1xx.TextChanged += new System.EventHandler(this.textBox1_TextChanged); textBox1xx.Enter += new System.EventHandler(this.textBox1_Enter); textBox1xx.KeyDown += new System.Windows.Forms.KeyEventHandler(this.textBox1_KeyDown); textBox1xx.Leave += new System.EventHandler(this.textBox1_Leave); this.flowLayoutPanel1.Controls.Add(tableLayoutPanel2xx); temp1Text.Add(false); temp2Text.Add(item); flowLayoutPanel1.Refresh(); } } } } <file_sep>using Newtonsoft.Json; using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; using System.Linq; using System.Text; using System.Threading.Tasks; namespace Damato_App.DataBase { public class User { public User() { DateAdded = DateTime.Now; } [Key] [Column("Key")] public int ID { get; set; } [MaxLength(100)] [Index(IsUnique = true)] public string Name { get; set; } [NotMapped] [JsonIgnore] public string PasswordDecrypted { get { return Cipher.Decrypt(Password, "<PASSWORD>"); } set { Password = Cipher.Encrypt(value, "<PASSWORD>"); } } [JsonIgnore] public string Password { get; set; } public int Level { get; set; } [JsonIgnore] public DateTime DateAdded { get; set; } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Web; namespace Damato_API.Settings { public class OutSettings { public 
OutSettings() { FileOut = new Dictionary<string, int>();}
        public Dictionary<string, int> FileOut { get; set; }
    }
}<file_sep>using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Damato_API.DataBase
{
    public class DAMContext : DbContext
    {
        public DbSet<User> Users { get; set; }
        public DbSet<Token> Tokens { get; set; }
        public DbSet<File> Files { get; set; }
        public DbSet<Tag> Tags { get; set; }
        public DbSet<Presets> Presetss { get; set; }

        public static string Password()
        {
            using (StreamReader r = new StreamReader(@"D:\home\site\wwwroot\bin\DBSettings.json"))
            {
                var json = r.ReadToEnd();
                return json;
            }
        }

        public DAMContext()
        {
            this.Database.Connection.ConnectionString = $"Data Source=damatoapidbserver.database.windows.net,1433;Initial Catalog=DamatoAPI_db;Persist Security Info=False;User ID=Damato;Password={<PASSWORD>()};MultipleActiveResultSets=False;Encrypt=True;TrustServerCertificate=False;Connection Timeout=30";
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Data.Entity.Infrastructure;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using System.Web.Http.Description;
using Damato_API.DataBase;

namespace Damato_API.Controllers
{
    [RoutePrefix("api/Misc")]
    public class MiscController : ApiController
    {
        [HttpGet, Route("{token}/GetAllFilesTypes")]
        [ResponseType(typeof(List<string>))]
        public IHttpActionResult GetAllFilesTypes(string token)
        {
            // FirstOrDefault instead of Single, so the null check below can actually fire
            Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token);
            if (_token == null)
                return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist");
            if (_token.DateExpiered.CompareTo(DateTime.Now) < 0)
                return Content(HttpStatusCode.Unauthorized, "Token Expired");

            var result1 = db.Files;
            List<File> result2 = new List<File>();
            foreach (var item in result1)
            {
                if (item.RLevel >= _token.User.Level)
                    result2.Add(item);
            }
            List<string> result = new List<string>();
            foreach (var item in result2) // use the level-filtered list, not the raw query
            {
                result.Add(item.Path);
            }
            List<string> vs = new List<string>();
            foreach (var item in result)
            {
                vs.Add($".{item.Split('.').Last()}");
            }
            return Ok(vs.Distinct().ToList());
        }

        [HttpGet, Route("{token}/GetAllFilesTags")]
        [ResponseType(typeof(List<string>))]
        public IHttpActionResult GetAllFilesTags(string token)
        {
            Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token);
            if (_token == null)
                return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist");
            if (_token.DateExpiered.CompareTo(DateTime.Now) < 0)
                return Content(HttpStatusCode.Unauthorized, "Token Expired");

            var result1 = db.Files.Include(f => f.MainTags);
            List<File> resultf = new List<File>();
            foreach (var item in result1)
            {
                try
                {
                    if (item.RLevel >= _token.User.Level)
                        resultf.Add(item);
                }
                catch { }
            }
            List<Tag> result = new List<Tag>();
            foreach (var item in resultf)
            {
                try
                {
                    result.AddRange(item.MainTags);
                }
                catch { }
            }
            List<string> vs = new List<string>();
            foreach (var item in result)
            {
                vs.Add($"{item._Tag}");
            }
            return Ok(vs.Distinct().ToList());
        }

        private DAMContext db = new DAMContext();

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                db.Dispose();
            }
            base.Dispose(disposing);
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;

namespace Damato_API.DataBase
{
    public class Presets
    {
        public int ID { get; set; }
        public string Name { get; set; }
public string Feleds { get; set; } } }<file_sep>using System; using System.Collections.Generic; using System.Data; using System.Data.Entity; using System.Data.Entity.Infrastructure; using System.Linq; using System.Net; using System.Net.Http; using System.Web.Http; using System.Web.Http.Description; using Damato_API.DataBase; using Damato_API.Settings; using Newtonsoft.Json; namespace Damato_API.Controllers { [RoutePrefix("api/Users")] public class UsersController : ApiController { // GET: api/Users/GetNewToken [HttpPost, Route("GetNewToken")] public string GetToken(User user) { return new TokensController().NewToken(user)._Token; } [HttpGet, Route("{token}/GetOutFiles")] [ResponseType(typeof(List<string>))] public IHttpActionResult GetOutFiles(string token) { Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token); if (_token == null) return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist"); if (_token.DateExpiered.CompareTo(DateTime.Now) < 0) return Content(HttpStatusCode.Unauthorized, "Token Expired"); string json = System.IO.File.ReadAllText($@"{FilesController.PathLocation}\ApplicationSettings.json"); OutSettings Settings = JsonConvert.DeserializeObject<OutSettings>(json); List<string> s = new List<string>(); foreach (var item in Settings.FileOut) { s.Add(item.Key + $"[{item.Value}]"); } return Ok(s); } [HttpGet, Route("{token}/Getlevel")] [ResponseType(typeof(string))] public IHttpActionResult Getlevel(string token) { Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token); if (_token == null) return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist"); if (_token.DateExpiered.CompareTo(DateTime.Now) < 0) return Content(HttpStatusCode.Unauthorized, "Token Expired"); return Ok(_token.User.Level.ToString()); } private DAMContext db = new DAMContext(); // GET: api/Users public IQueryable<User> GetUsers() { return db.Users; } // GET: api/Users/5 [ResponseType(typeof(User))] public IHttpActionResult GetUser(int id) { User user = db.Users.Find(id); if (user == null) { return NotFound(); } return Ok(user); } // PUT: api/Users/5 [ResponseType(typeof(void))] public IHttpActionResult PutUser(int id, User user) { if (!ModelState.IsValid) { return BadRequest(ModelState); } if (id != user.ID) { return BadRequest(); } db.Entry(user).State = EntityState.Modified; try { db.SaveChanges(); } catch (DbUpdateConcurrencyException) { if (!UserExists(id)) { return NotFound(); } else { throw; } } return StatusCode(HttpStatusCode.NoContent); } // POST: api/Users [ResponseType(typeof(User))] public IHttpActionResult PostUser(User user) { if (!ModelState.IsValid) { return BadRequest(ModelState); } db.Users.Add(user); db.SaveChanges(); return CreatedAtRoute("DefaultApi", new { id = user.ID }, user); } // DELETE: api/Users/5 [ResponseType(typeof(User))] public IHttpActionResult DeleteUser(int id) { User user = db.Users.Find(id); if (user == null) { return NotFound(); } db.Users.Remove(user); db.SaveChanges(); return Ok(user); } protected override void Dispose(bool disposing) { if (disposing) { db.Dispose(); } base.Dispose(disposing); } private bool UserExists(int id) { return db.Users.Count(e => e.ID == id) > 0; } } }<file_sep>using Newtonsoft.Json; using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; using System.Linq; using System.Text; using System.Threading.Tasks; namespace Damato_API.DataBase { public class File { public File() { DateAdded = 
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Data.Entity.Infrastructure;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using System.Web.Http.Description;
using Damato_API.DataBase;

namespace Damato_API.Controllers
{
    public class TokensController : ApiController
    {
        public Token NewToken(User user)
        {
            user = db.Users.ToList().FirstOrDefault(u => u.Name == user.Name && u.PasswordDecrypted == user.PasswordDecrypted);
            if (user == null) return new Token() { _Token = "NotFound" };
            Token token;
            do
            {
                token = new Token() { _Token = (DateTime.Now.ToString("fffffffK") + "0<PASSWORD>").Substring(0, 10) };
            } while (TokenExists(token._Token));
            db.Tokens.Add(token);
            db.SaveChanges();
            db.Entry(token).Reload();
            token.User = user;
            db.SaveChanges();
            return token;
        }

        private DAMContext db = new DAMContext();

        // GET: api/Tokens
        public IQueryable<Token> GetTokens()
        {
            return db.Tokens;
        }

        // GET: api/Tokens/5
        [ResponseType(typeof(Token))]
        public IHttpActionResult GetToken(string id)
        {
            Token token = db.Tokens.Find(id);
            if (token == null)
            {
                return NotFound();
            }
            return Ok(token);
        }

        // PUT: api/Tokens/5
        [ResponseType(typeof(void))]
        public IHttpActionResult PutToken(string id, Token token)
        {
            if (!ModelState.IsValid)
            {
                return BadRequest(ModelState);
            }
            if (id != token._Token)
            {
                return BadRequest();
            }
            db.Entry(token).State = EntityState.Modified;
            try
            {
                db.SaveChanges();
            }
            catch (DbUpdateConcurrencyException)
            {
                if (!TokenExists(id))
                {
                    return NotFound();
                }
                else
                {
                    throw;
                }
            }
            return StatusCode(HttpStatusCode.NoContent);
        }

        // POST: api/Tokens
        [ResponseType(typeof(Token))]
        public IHttpActionResult PostToken(Token token)
        {
            if (!ModelState.IsValid)
            {
                return BadRequest(ModelState);
            }
            db.Tokens.Add(token);
            try
            {
                db.SaveChanges();
            }
            catch (DbUpdateException)
            {
                if (TokenExists(token._Token))
                {
                    return Conflict();
                }
                else
                {
                    throw;
                }
            }
            return CreatedAtRoute("DefaultApi", new { id = token._Token }, token);
        }

        // DELETE: api/Tokens/5
        [ResponseType(typeof(Token))]
        public IHttpActionResult DeleteToken(string id)
        {
            Token token = db.Tokens.Find(id);
            if (token == null)
            {
                return NotFound();
            }
            db.Tokens.Remove(token);
            db.SaveChanges();
            return Ok(token);
        }

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                db.Dispose();
            }
            base.Dispose(disposing);
        }

        private bool TokenExists(string id)
        {
            return db.Tokens.Count(e => e._Token == id) > 0;
        }
    }
}
<file_sep>using Damato_App.Settings;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;

namespace Damato_App
{
    public partial class LoginForm : Form
    {
        public LoginForm()
        {
            InitializeComponent();
            try
            {
                string json = File.ReadAllText("ApplicationSettings.json");
                applicationSettings = JsonConvert.DeserializeObject<ApplicationSettings>(json);
                if (applicationSettings.SearchSettings == null)
                    applicationSettings.SearchSettings = new SearchSettings() { ReturnAmount = 10 };
            }
            catch
            {
                applicationSettings = new ApplicationSettings() { LoginSettings = new LoginSettings(), SearchSettings = new SearchSettings() };
            }
            if (applicationSettings.LoginSettings.KeepLogdIn)
            {
                //UpdateLogin(applicationSettings.LoginSettings.UserName, applicationSettings.LoginSettings.password, applicationSettings.LoginSettings.KeepLogdIn);
                Login();
            }
        }

        private bool temp2Text = true;

        private void textBox2_KeyDown(object sender, KeyEventArgs e)
        {
            temp2Text = false;
            if (e.KeyCode == Keys.Enter)
            {
                applicationSettings.LoginSettings.UserName = textBox1.Text;
                applicationSettings.LoginSettings.password = <PASSWORD>;
                applicationSettings.LoginSettings.KeepLogdIn = checkBox1.Checked;
                //UpdateLogin(textBox1.Text, textBox2.Text, checkBox1.Checked);
                Login();
                //First Time Fix
                //UpdateLogin(textBox1.Text, textBox2.Text, checkBox1.Checked);
            }
        }

        public ApplicationSettings applicationSettings;

        private void Login()
        {
            textBox1.Enabled = false;
            textBox2.Enabled = false;
            this.Hide();
            //label4.Visible = true;
            this.Cursor = Cursors.WaitCursor;
            button1.Cursor = Cursors.WaitCursor;
            button1.Click -= new
System.EventHandler(button1_Click); //button1.Click -= MethodInvoker methodInvokerDelegate = async delegate () { string json = JsonConvert.SerializeObject(applicationSettings); File.WriteAllText("ApplicationSettings.json", json); string token; try { token = await API.GetNewToken(applicationSettings.LoginSettings.UserName, applicationSettings.LoginSettings.Password); } catch { token = ""; } this.Cursor = Cursors.Default; button1.Cursor = Cursors.Default; button1.Click += new System.EventHandler(button1_Click); if (token.Length == 10) { MainForm main = new MainForm(token); this.Hide(); main.Show(); } else { label4.Visible = true; textBox1.Enabled = true; textBox2.Enabled = true; this.Show(); } }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } private void textBox2_Leave(object sender, EventArgs e) { if (textBox2.Text.Trim() == "") temp2Text = true; if (temp2Text) { textBox2.Text = "Enter Password"; textBox2.UseSystemPasswordChar = false; } } private void textBox2_Enter(object sender, EventArgs e) { if (temp2Text) { textBox2.Text = ""; textBox2.UseSystemPasswordChar = true; } } private bool temp1Text = true; private void textBox1_KeyDown(object sender, KeyEventArgs e) { temp1Text = false; } private void textBox1_Leave(object sender, EventArgs e) { if (textBox1.Text.Trim() == "") temp1Text = true; if (temp1Text) textBox1.Text = "Enter User"; } private void textBox1_Enter(object sender, EventArgs e) { if (temp1Text) textBox1.Text = ""; } private void button7_Click(object sender, EventArgs e) { Application.Exit(); } private void button8_Click(object sender, EventArgs e) { this.WindowState = FormWindowState.Minimized; } private void button1_Click(object sender, EventArgs e) { temp2Text = false; applicationSettings.LoginSettings.UserName = textBox1.Text; applicationSettings.LoginSettings.password = <PASSWORD>; applicationSettings.LoginSettings.KeepLogdIn = checkBox1.Checked; //UpdateLogin(textBox1.Text, textBox2.Text, checkBox1.Checked); Login(); //First Time Fix //UpdateLogin(textBox1.Text, textBox2.Text, checkBox1.Checked); } private void OpenLink(object sender, LinkLabelLinkClickedEventArgs e) { System.Diagnostics.Process.Start("https://github.com/RaptorHunter56/Damato/"); } } } <file_sep>using System; using System.Collections.Generic; using System.ComponentModel; using System.Drawing; using System.Data; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; namespace Damato_App.UserControls { public partial class TemplatesControl : UserControl { public TemplatesControl() { InitializeComponent(); } public string Token { get; set; } public Dictionary<Label, int> keyValuePairs = new Dictionary<Label, int>(); private async void button7_ClickAsync(object sender, EventArgs e) { await API.DeletePresets(Token, keyValuePairs[(((sender as Button).Parent as TableLayoutPanel).Controls[0] as Label)].ToString(), ""); ((sender as Button).Parent as TableLayoutPanel).Dispose(); } private void button1_ClickAsync(object sender, EventArgs e) { NewPlan(); } private void NewPlan() { System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(TemplatesControl)); TableLayoutPanel xtableLayoutPanel2 = new TableLayoutPanel(); TextBox xtextBox2 = new TextBox(); TextBox xtextBox1 = new TextBox(); Button xbutton2 = new Button(); // // tableLayoutPanel2 // xtableLayoutPanel2.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(43)))), ((int)(((byte)(43)))), 
((int)(((byte)(43))))); xtableLayoutPanel2.ColumnCount = 3; xtableLayoutPanel2.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 33.33F)); xtableLayoutPanel2.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 66.67F)); xtableLayoutPanel2.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle()); xtableLayoutPanel2.Controls.Add(xtextBox2, 1, 0); xtableLayoutPanel2.Controls.Add(xtextBox1, 0, 0); xtableLayoutPanel2.Controls.Add(xbutton2, 2, 0); xtableLayoutPanel2.Dock = System.Windows.Forms.DockStyle.Top; xtableLayoutPanel2.Location = new System.Drawing.Point(5, 35); xtableLayoutPanel2.Name = "tableLayoutPanel2"; xtableLayoutPanel2.RowCount = 1; xtableLayoutPanel2.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 100F)); xtableLayoutPanel2.Size = new System.Drawing.Size(440, 30); xtableLayoutPanel2.TabIndex = 2; // // textBox1 // xtextBox1.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right))); xtextBox1.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(43)))), ((int)(((byte)(43)))), ((int)(((byte)(43))))); xtextBox1.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle; xtextBox1.Font = new System.Drawing.Font("Microsoft Sans Serif", 10F); xtextBox1.ForeColor = System.Drawing.Color.DarkGray; xtextBox1.Location = new System.Drawing.Point(3, 3); xtextBox1.MaxLength = 20; xtextBox1.Name = "textBox1"; xtextBox1.Size = new System.Drawing.Size(134, 23); xtextBox1.TabIndex = 8; // // textBox2 // xtextBox2.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right))); xtextBox2.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(43)))), ((int)(((byte)(43)))), ((int)(((byte)(43))))); xtextBox2.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle; xtextBox2.Font = new System.Drawing.Font("Microsoft Sans Serif", 10F); xtextBox2.ForeColor = System.Drawing.Color.DarkGray; xtextBox2.Location = new System.Drawing.Point(143, 7); xtextBox2.MaxLength = 20; xtextBox2.Name = "textBox2"; xtextBox2.Size = new System.Drawing.Size(274, 23); xtextBox2.TabIndex = 9; // // button2 // xbutton2.Cursor = System.Windows.Forms.Cursors.Hand; xbutton2.Dock = System.Windows.Forms.DockStyle.Right; xbutton2.FlatAppearance.BorderSize = 0; xbutton2.FlatStyle = System.Windows.Forms.FlatStyle.Flat; xbutton2.Image = ((System.Drawing.Image)(resources.GetObject("button2.Image"))); xbutton2.Location = new System.Drawing.Point(421, 0); xbutton2.Margin = new System.Windows.Forms.Padding(5); xbutton2.Name = "button2"; xbutton2.Size = new System.Drawing.Size(30, 30); xbutton2.TabIndex = 5; xbutton2.UseVisualStyleBackColor = true; xbutton2.Click += new System.EventHandler(this.button2_Click); panel1.Controls.Add(xtableLayoutPanel2); xtableLayoutPanel2.BringToFront(); xtextBox1.Focus(); } private void button8_ClickAsync(object sender, EventArgs e) { this.Cursor = Cursors.WaitCursor; string s1 = keyValuePairs[((sender as Button).Parent as TableLayoutPanel).Controls[1] as Label].ToString(); keyValuePairs.Remove(((sender as Button).Parent as TableLayoutPanel).Controls[1] as Label); TableLayoutPanel ss = ((sender as Button).Parent as TableLayoutPanel); MethodInvoker methodInvokerDelegate = async delegate () { await API.DeletePresets(Token, s1, ""); ss.Dispose(); this.Cursor = Cursors.Default; }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else 
methodInvokerDelegate(); } private void button2_Click(object sender, EventArgs e) { this.Cursor = Cursors.WaitCursor; string s1 = (((sender as Button).Parent as TableLayoutPanel).Controls[1] as TextBox).Text; string s2 = (((sender as Button).Parent as TableLayoutPanel).Controls[0] as TextBox).Text; MethodInvoker methodInvokerDelegate = async delegate () { int temp = await API.PostPresets(Token, "", new DataBase.Presets() { Name = s1, Feleds = s2 }); keyValuePairs.Add(((sender as Button).Parent as TableLayoutPanel).Controls[1] as Label, temp); this.Cursor = Cursors.Default; }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(TemplatesControl)); // // button7 // (sender as Button).Cursor = System.Windows.Forms.Cursors.Hand; (sender as Button).Dock = System.Windows.Forms.DockStyle.Right; (sender as Button).FlatAppearance.BorderSize = 0; (sender as Button).FlatStyle = System.Windows.Forms.FlatStyle.Flat; (sender as Button).Image = ((System.Drawing.Image)(resources.GetObject("button7.Image"))); (sender as Button).Location = new System.Drawing.Point(421, 0); (sender as Button).Margin = new System.Windows.Forms.Padding(5); (sender as Button).Name = "button7"; (sender as Button).Size = new System.Drawing.Size(24, 30); (sender as Button).TabIndex = 5; (sender as Button).UseVisualStyleBackColor = true; (sender as Button).Click -= new System.EventHandler(this.button2_Click); (sender as Button).Click += new System.EventHandler(this.button8_ClickAsync); Label xlabel2 = new Label(); Label xlabel1 = new Label(); // // label2 // xlabel2.Dock = System.Windows.Forms.DockStyle.Fill; xlabel2.FlatStyle = System.Windows.Forms.FlatStyle.Flat; xlabel2.Font = new System.Drawing.Font("Microsoft Sans Serif", 10F); xlabel2.ForeColor = System.Drawing.Color.Gray; xlabel2.Location = new System.Drawing.Point(143, 0); xlabel2.Name = "label2"; xlabel2.Padding = new System.Windows.Forms.Padding(6, 0, 0, 0); xlabel2.RightToLeft = System.Windows.Forms.RightToLeft.No; xlabel2.Size = new System.Drawing.Size(274, 30); xlabel2.TabIndex = 7; xlabel2.Text = s2; xlabel2.TextAlign = System.Drawing.ContentAlignment.MiddleLeft; // // label1 // xlabel1.Dock = System.Windows.Forms.DockStyle.Fill; xlabel1.FlatStyle = System.Windows.Forms.FlatStyle.Flat; xlabel1.Font = new System.Drawing.Font("Microsoft Sans Serif", 12F); xlabel1.ForeColor = System.Drawing.Color.DarkGray; xlabel1.Location = new System.Drawing.Point(3, 0); xlabel1.Name = "label1"; xlabel1.Padding = new System.Windows.Forms.Padding(6, 0, 0, 0); xlabel1.RightToLeft = System.Windows.Forms.RightToLeft.No; xlabel1.Size = new System.Drawing.Size(134, 30); xlabel1.TabIndex = 6; xlabel1.Text = s1; xlabel1.TextAlign = System.Drawing.ContentAlignment.MiddleLeft; (((sender as Button).Parent as TableLayoutPanel).Controls[1] as TextBox).Dispose(); (((sender as Button).Parent as TableLayoutPanel).Controls[0] as TextBox).Dispose(); ((sender as Button).Parent as TableLayoutPanel).Controls.Add(xlabel2, 1, 0); ((sender as Button).Parent as TableLayoutPanel).Controls.Add(xlabel1, 0, 0); } private void TemplatesControl_Load(object sender, EventArgs e) { MethodInvoker methodInvokerDelegate = async delegate () { var sss = await API.GetPresetss(Token, ""); foreach (var item in sss) { System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(TemplatesControl)); TableLayoutPanel 
xtableLayoutPanel2 = new TableLayoutPanel(); Label xlabel2 = new Label(); Label xlabel1 = new Label(); Button xbutton2 = new Button(); // // tableLayoutPanel2 // xtableLayoutPanel2.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(43)))), ((int)(((byte)(43)))), ((int)(((byte)(43))))); xtableLayoutPanel2.ColumnCount = 3; xtableLayoutPanel2.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 33.33F)); xtableLayoutPanel2.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 66.67F)); xtableLayoutPanel2.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle()); xtableLayoutPanel2.Controls.Add(xlabel2, 1, 0); xtableLayoutPanel2.Controls.Add(xlabel1, 0, 0); xtableLayoutPanel2.Controls.Add(xbutton2, 2, 0); xtableLayoutPanel2.Dock = System.Windows.Forms.DockStyle.Top; xtableLayoutPanel2.Location = new System.Drawing.Point(5, 35); xtableLayoutPanel2.Name = "tableLayoutPanel2"; xtableLayoutPanel2.RowCount = 1; xtableLayoutPanel2.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 100F)); xtableLayoutPanel2.Size = new System.Drawing.Size(440, 30); xtableLayoutPanel2.TabIndex = 2; // // label2 // xlabel2.Dock = System.Windows.Forms.DockStyle.Fill; xlabel2.FlatStyle = System.Windows.Forms.FlatStyle.Flat; xlabel2.Font = new System.Drawing.Font("Microsoft Sans Serif", 10F); xlabel2.ForeColor = System.Drawing.Color.Gray; xlabel2.Location = new System.Drawing.Point(143, 0); xlabel2.Name = "label2"; xlabel2.Padding = new System.Windows.Forms.Padding(6, 0, 0, 0); xlabel2.RightToLeft = System.Windows.Forms.RightToLeft.No; xlabel2.Size = new System.Drawing.Size(274, 30); xlabel2.TabIndex = 7; xlabel2.Text = item.Feleds; xlabel2.TextAlign = System.Drawing.ContentAlignment.MiddleLeft; // // label1 // xlabel1.Dock = System.Windows.Forms.DockStyle.Fill; xlabel1.FlatStyle = System.Windows.Forms.FlatStyle.Flat; xlabel1.Font = new System.Drawing.Font("Microsoft Sans Serif", 12F); xlabel1.ForeColor = System.Drawing.Color.DarkGray; xlabel1.Location = new System.Drawing.Point(3, 0); xlabel1.Name = "label1"; xlabel1.Padding = new System.Windows.Forms.Padding(6, 0, 0, 0); xlabel1.RightToLeft = System.Windows.Forms.RightToLeft.No; xlabel1.Size = new System.Drawing.Size(134, 30); xlabel1.TabIndex = 6; xlabel1.Text = item.Name; xlabel1.TextAlign = System.Drawing.ContentAlignment.MiddleLeft; keyValuePairs.Add(xlabel1, item.ID); // // button2 // xbutton2.Cursor = System.Windows.Forms.Cursors.Hand; xbutton2.Cursor = System.Windows.Forms.Cursors.Hand; xbutton2.Dock = System.Windows.Forms.DockStyle.Right; xbutton2.FlatAppearance.BorderSize = 0; xbutton2.FlatStyle = System.Windows.Forms.FlatStyle.Flat; xbutton2.Image = ((System.Drawing.Image)(resources.GetObject("button7.Image"))); xbutton2.Location = new System.Drawing.Point(421, 0); xbutton2.Margin = new System.Windows.Forms.Padding(5); xbutton2.Name = "button7"; xbutton2.Size = new System.Drawing.Size(24, 30); xbutton2.TabIndex = 5; xbutton2.UseVisualStyleBackColor = true; xbutton2.Click -= new System.EventHandler(this.button2_Click); xbutton2.Click += new System.EventHandler(this.button8_ClickAsync); panel1.Controls.Add(xtableLayoutPanel2); xtableLayoutPanel2.BringToFront(); } this.Cursor = Cursors.Default; }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } } } <file_sep>using System; using System.Collections.Generic; using System.Data; using System.Data.Entity; using System.Data.Entity.Infrastructure; using 
System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using System.Web.Http.Description;
using Damato_API.DataBase;
using Damato_API.Settings;
using Newtonsoft.Json;

namespace Damato_API.Controllers
{
    [RoutePrefix("api/Files")]
    public class FilesController : ApiController
    {
        public static string PathLocation = @"D:\home\Offical_Folder";

        // GET: api/Files/2460348+13/GetRecentFiles
        [HttpGet, Route("{token}/GetRecentFiles")]
        [ResponseType(typeof(List<File>))]
        public IHttpActionResult GetRecentFiles(string token, int amount = 10)
        {
            Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token);
            if (_token == null) return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist");
            if (_token.DateExpiered.CompareTo(DateTime.Now) < 0) return Content(HttpStatusCode.Unauthorized, "Token Expired");
            IEnumerable<File> files = db.Files.Include(f => f.User).Include(f => f.MainTags).OrderBy(f => f.DateAdded);
            files = files.Reverse().Take(amount);
            foreach (var item in files)
            {
                item.User.Password = "*****";
            }
            if (files == null) return NotFound();
            return Ok(files.Where(f => f.RLevel >= _token.User.Level));
        }

        // POST: api/Files/2460348+13/DownloadFile
        [HttpPost, Route("{token}/DownloadFile")]
        [ResponseType(typeof(string))]
        public IHttpActionResult DownloadFile(string token, string filename)
        {
            Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token);
            if (_token == null) return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist");
            if (_token.DateExpiered.CompareTo(DateTime.Now) < 0) return Content(HttpStatusCode.Unauthorized, "Token Expired");
            File _file = db.Files.Where(f => f.Path.Contains(filename) && f.WLevel >= _token.User.Level).FirstOrDefault();
            if (_file == null) return NotFound();
            OutSettings o;
            try
            {
                string json1 = System.IO.File.ReadAllText($@"{PathLocation}\ApplicationSettings.json");
                o = JsonConvert.DeserializeObject<OutSettings>(json1);
            }
            catch (Exception)
            {
                o = new OutSettings();
            }
            o.FileOut.Add(_file.PathParts.Last(), _token.User.ID);
            string json = JsonConvert.SerializeObject(o);
            System.IO.File.WriteAllText($@"{PathLocation}\ApplicationSettings.json", json);
            byte[] temp = System.IO.File.ReadAllBytes(_file.Path);
            return Ok(Convert.ToBase64String(temp));
        }

        // POST: api/Files/2460348+13/UploadFile
        [HttpPost, Route("{token}/UploadFile/{returnfile}")]
        [ResponseType(typeof(void))]
        public IHttpActionResult UploadFile(string token, TFile file, string returnfile = "false")
        {
            Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token);
            if (_token == null) return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist");
            if (_token.DateExpiered.CompareTo(DateTime.Now) < 0) return Content(HttpStatusCode.Unauthorized, "Token Expired");
            if (returnfile == "true")
            {
                if (db.Files.Where(f => f.Path == $@"{PathLocation}\{file.Path}" && f.WLevel >= _token.User.Level).FirstOrDefault() == null)
                    return Content(HttpStatusCode.Unauthorized, "File Does Not Exist");
            }
            else
            {
                // Keep appending "_{coppy}" before the extension until the name is free
                int coppy = 1;
                while (System.IO.File.Exists($@"{PathLocation}\{file.Path}"))
                {
                    try
                    {
                        file.Path = file.Path.Substring(0, file.Path.Length - file.Path.Split('_').Last().Length - 1) + $"_{coppy}." + file.Path.Split('.').Last();
                    }
                    catch
                    {
                        file.Path = file.Path.Substring(0, file.Path.Length - file.Path.Split('.').Last().Length - 1) + $"_{coppy}." + file.Path.Split('.').Last();
                    }
                    coppy++;
                }
            }
            System.IO.File.WriteAllBytes($@"{PathLocation}\{file.Path}", file.File);
            Damato_API.DataBase.File file2 = new Damato_API.DataBase.File()
            {
                Path = $@"{PathLocation}\{file.Path}",
                Level = $"{_token.User.Level},{_token.User.Level},{_token.User.Level}"
            };
            if (returnfile == "true")
            {
                string json = System.IO.File.ReadAllText($@"{PathLocation}\ApplicationSettings.json");
                OutSettings o = JsonConvert.DeserializeObject<OutSettings>(json);
                o.FileOut.Remove(file2.PathParts.Last());
                json = JsonConvert.SerializeObject(o);
                System.IO.File.WriteAllText($@"{PathLocation}\ApplicationSettings.json", json);
            }
            else
            {
                db.Files.Add(file2);
                db.SaveChanges();
                db.Entry(file2).Reload();
                var user = db.Users.ToList().FirstOrDefault(u => u.ID == _token.User.ID);
                file2.User = user;
                db.SaveChanges();
            }
            return StatusCode(HttpStatusCode.NoContent);
        }
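
        // A short illustration of the collision handling in UploadFile above
        // (filenames hypothetical): uploading "report.pdf" when "report.pdf"
        // already exists on disk produces "report_1.pdf", then "report_2.pdf",
        // and so on, because the loop re-appends "_{coppy}" before the
        // extension until the name is free. UploadFileTaged below applies the
        // same logic before attaching tags.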
        // POST: api/Files/2460348+13/UploadFileTaged
        [HttpPost, Route("{token}/UploadFileTaged/{returnfile}")]
        [ResponseType(typeof(void))]
        public IHttpActionResult UploadFileTaged(string token, CFile file, string returnfile = "false")
        {
            Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token);
            if (_token == null) return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist");
            if (_token.DateExpiered.CompareTo(DateTime.Now) < 0) return Content(HttpStatusCode.Unauthorized, "Token Expired");
            if (returnfile == "true")
            {
                if (db.Files.Where(f => f.Path == $@"{PathLocation}\{file.Path}" && f.WLevel >= _token.User.Level).FirstOrDefault() == null)
                    return Content(HttpStatusCode.Unauthorized, "File Does Not Exist");
            }
            else
            {
                int coppy = 1;
                while (System.IO.File.Exists($@"{PathLocation}\{file.Path}"))
                {
                    try
                    {
                        file.Path = file.Path.Substring(0, file.Path.Length - file.Path.Split('_').Last().Length - 1) + $"_{coppy}." + file.Path.Split('.').Last();
                    }
                    catch
                    {
                        file.Path = file.Path.Substring(0, file.Path.Length - file.Path.Split('.').Last().Length - 1) + $"_{coppy}." + file.Path.Split('.').Last();
                    }
                    coppy++;
                }
            }
            System.IO.File.WriteAllBytes($@"{PathLocation}\{file.Path}", file.File);
            Damato_API.DataBase.File file2 = new Damato_API.DataBase.File()
            {
                Path = $@"{PathLocation}\{file.Path}",
                Level = $"{_token.User.Level},{_token.User.Level},{_token.User.Level}"
            };
            if (returnfile == "true")
            {
                string json = System.IO.File.ReadAllText($@"{PathLocation}\ApplicationSettings.json");
                OutSettings o = JsonConvert.DeserializeObject<OutSettings>(json);
                o.FileOut.Remove(file2.PathParts.Last());
                json = JsonConvert.SerializeObject(o);
                System.IO.File.WriteAllText($@"{PathLocation}\ApplicationSettings.json", json);
            }
            else
            {
                db.Files.Add(file2);
                db.SaveChanges();
                db.Entry(file2).Reload();
                foreach (var item in file.Tags)
                {
                    db.Entry(file2).Reload();
                    Damato_API.DataBase.Tag s = new Tag() { _Tag = item, File_ID = file2 };
                    db.Tags.Add(s);
                    db.SaveChanges();
                    db.Entry(s).Reload();
                    db.Entry(file2).Reload();
                }
                db.Entry(file2).Reload();
                var user = db.Users.ToList().FirstOrDefault(u => u.ID == _token.User.ID);
                file2.User = user;
                db.SaveChanges();
            }
            return StatusCode(HttpStatusCode.NoContent);
        }

        // GET: api/Files/2460348+13/SearchRecentFiles
        [HttpGet, Route("{token}/SearchRecentFiles")]
        [ResponseType(typeof(List<File>))]
        public IHttpActionResult SearchRecentFiles(string token, [FromUri] string[] search, int amount = 10)
        {
            Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token);
            if (_token == null) return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist");
            if (_token.DateExpiered.CompareTo(DateTime.Now) < 0) return Content(HttpStatusCode.Unauthorized, "Token Expired");
            IEnumerable<File> files = db.Files.Include(f => f.User).Include(f => f.MainTags).OrderBy(f => f.DateAdded);
            // ".ext" terms filter by file extension
            List<File> temp = new List<File>();
            foreach (var item in search.Where(s => s[0] == '.'))
            {
                temp.AddRange(files.Where(f => ("." + f.Path.Split('.').Last()) == item));
            }
            if (temp.Count() > 0) files = temp;
            // "*tag" terms filter by tag
            List<File> filestemp = new List<File>();
            foreach (var item in search.Where(s => s[0] == '*'))
            {
                filestemp = new List<File>();
                foreach (var item77 in files)
                {
                    if (item77.MainTags.ToList().Where(g => g._Tag == item.Substring(1)).Count() > 0)
                        filestemp.Add(item77);
                }
                if (filestemp.Count() > 0) files = filestemp;
            }
            // Everything else filters by filename substring
            foreach (var item in search.Where(s => s[0] != '.' && s[0] != '*'))
            {
                files = files.Where(f => (f.Path.Split('\\').Last().Contains(item)));
            }
            files = files.Reverse().Take(amount);
            foreach (var item in files)
            {
                item.User.Password = "*****";
            }
            if (files == null) return NotFound();
            return Ok(files.Where(f => f.RLevel >= _token.User.Level));
        }

        private DAMContext db = new DAMContext();

        // GET: api/Files
        public IQueryable<File> GetFiles()
        {
            return db.Files;
        }

        // GET: api/Files/5
        [ResponseType(typeof(File))]
        public IHttpActionResult GetFile(int id)
        {
            File file = db.Files.Find(id);
            if (file == null)
            {
                return NotFound();
            }
            return Ok(file);
        }

        // PUT: api/Files/5
        [ResponseType(typeof(void))]
        public IHttpActionResult PutFile(int id, File file)
        {
            if (!ModelState.IsValid)
            {
                return BadRequest(ModelState);
            }
            if (id != file.ID)
            {
                return BadRequest();
            }
            db.Entry(file).State = EntityState.Modified;
            try
            {
                db.SaveChanges();
            }
            catch (DbUpdateConcurrencyException)
            {
                if (!FileExists(id))
                {
                    return NotFound();
                }
                else
                {
                    throw;
                }
            }
            return StatusCode(HttpStatusCode.NoContent);
        }

        // POST: api/Files
        [ResponseType(typeof(File))]
        public IHttpActionResult PostFile(File file)
        {
            if (!ModelState.IsValid)
            {
                return BadRequest(ModelState);
            }
            db.Files.Add(file);
            db.SaveChanges();
            return CreatedAtRoute("DefaultApi", new { id = file.ID }, file);
        }

        // DELETE: api/Files/5
        [ResponseType(typeof(File))]
        public IHttpActionResult DeleteFile(int id)
        {
            File file = db.Files.Find(id);
            if (file == null)
            {
                return NotFound();
            }
            db.Files.Remove(file);
            db.SaveChanges();
            return Ok(file);
        }

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                db.Dispose();
            }
            base.Dispose(disposing);
        }

        private bool FileExists(int id)
        {
            return db.Files.Count(e => e.ID == id) > 0;
        }
    }
}
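
// A sketch of how a client is expected to call the SearchRecentFiles endpoint
// above, based on the prefix conventions in its body ('.'-prefixed terms match
// the file extension, '*'-prefixed terms match a tag, anything else matches
// the filename); the token and search values here are hypothetical:
//
//   GET api/Files/{token}/SearchRecentFiles?search=.png&search=*logo&search=draft&amount=10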
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Data.Entity.Infrastructure;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using System.Web.Http.Description;
using Damato_API.DataBase;

namespace Damato_API.Controllers
{
    [RoutePrefix("api/Presets")]
    public class PresetsController : ApiController
    {
        private DAMContext db = new DAMContext();

        // GET: api/Presets
        [HttpGet, Route("{token}/GetPresetss")]
        [ResponseType(typeof(IQueryable<Presets>))]
        public IHttpActionResult GetPresetss(string token)
        {
            Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token);
            if (_token == null) return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist");
            if (_token.DateExpiered.CompareTo(DateTime.Now) < 0) return Content(HttpStatusCode.Unauthorized, "Token Expired");
            return Ok(db.Presetss);
        }

        // GET: api/Presets/5
        [ResponseType(typeof(Presets))]
        public IHttpActionResult GetPresets(int id)
        {
            Presets presets = db.Presetss.Find(id);
            if (presets == null)
            {
                return NotFound();
            }
            return Ok(presets);
        }

        // PUT: api/Presets/5
        [ResponseType(typeof(void))]
        public IHttpActionResult PutPresets(int id, Presets presets)
        {
            if (!ModelState.IsValid)
            {
                return BadRequest(ModelState);
            }
            if (id != presets.ID)
            {
                return BadRequest();
            }
            db.Entry(presets).State = EntityState.Modified;
            try
            {
                db.SaveChanges();
            }
            catch (DbUpdateConcurrencyException)
            {
                if (!PresetsExists(id))
                {
                    return NotFound();
                }
                else
                {
                    throw;
                }
            }
            return StatusCode(HttpStatusCode.NoContent);
        }

        // POST: api/Presets
        [HttpPost, Route("{token}/PostPresets")]
        [ResponseType(typeof(Presets))]
        public IHttpActionResult PostPresets(string token, Presets presets)
        {
            Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token);
            if (_token == null) return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist");
            if (_token.DateExpiered.CompareTo(DateTime.Now) < 0) return Content(HttpStatusCode.Unauthorized, "Token Expired");
            if (!ModelState.IsValid)
            {
                return BadRequest(ModelState);
            }
            db.Presetss.Add(presets);
            db.SaveChanges();
            db.Entry(presets).GetDatabaseValues();
            return Ok(presets);
        }

        // DELETE: api/Presets/5
        [HttpDelete, Route("{token}/DeletePresets/{id}")]
        [ResponseType(typeof(Presets))]
        public IHttpActionResult DeletePresets(string token, int id)
        {
            Token _token = db.Tokens.Include(t => t.User).FirstOrDefault(t => t._Token == token);
            if (_token == null) return Content(HttpStatusCode.Unauthorized, "Token Does Not Exist");
            if (_token.DateExpiered.CompareTo(DateTime.Now) < 0) return Content(HttpStatusCode.Unauthorized, "Token Expired");
            Presets presets = db.Presetss.Find(id);
            if (presets == null)
            {
                return NotFound();
            }
            db.Presetss.Remove(presets);
            db.SaveChanges();
            return Ok(presets);
        }

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                db.Dispose();
            }
            base.Dispose(disposing);
        }

        private bool PresetsExists(int id)
        {
            return db.Presetss.Count(e => e.ID == id) > 0;
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Damato_App.DataBase
{
    public class Tag
    {
        public Tag()
        {
            DateAdded = DateTime.Now;
        }

        [Key]
        public int Key { get; set; }
        public string _Tag { get; set; }
        public DateTime DateAdded { get; set; }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using Damato_App.Settings;
using Newtonsoft.Json;
using System.IO;

namespace Damato_App.UserControls
{
    public partial class SettingsControl : UserControl
    {
        public SettingsControl(ApplicationSettings applicationSettings)
        {
            _ApplicationSettings = applicationSettings;
            if (_ApplicationSettings.SearchSettings == null)
                _ApplicationSettings.SearchSettings = new SearchSettings() { ReturnAmount = 10 };
            InitializeComponent();
            pictureBox1.Height = pictureBox1.Width;
            numericUpDown1.Controls.RemoveAt(0);
        }

        public bool checkBox1checked;
        public bool checkBox1Checked
        {
            get { return checkBox1checked; }
            set
            {
                checkBox1checked = value;
                if (checkBox1checked)
                    pictureBox1.Image = global::Damato_App.Properties.Resources.icons8_Toggle_On_32px_1;
                else
                    pictureBox1.Image = global::Damato_App.Properties.Resources.icons8_Toggle_Off_32px_1;
            }
        }

        public ApplicationSettings _ApplicationSettings;

        private void SettingsControl_Load(object sender, EventArgs e)
        {
            checkBox1Checked = _ApplicationSettings.LoginSettings.KeepLogdIn;
            numericUpDown1.Value = _ApplicationSettings.SearchSettings.ReturnAmount;
            textBox1.Text = _ApplicationSettings.DownLoadedSettings.DownLoadFileLocation;
            //C:\Users\<NAME>\Desktop\Temp
        }

        public void update()
        {
            string json = JsonConvert.SerializeObject(_ApplicationSettings);
            File.WriteAllText("ApplicationSettings.json", json);
            (this.Parent.Parent.Parent as MainForm).ApplicationSettings = _ApplicationSettings;
        }

        private void checkBox1_CheckedChanged(object sender, EventArgs e)
        {
            _ApplicationSettings.LoginSettings.KeepLogdIn = checkBox1Checked;
            update();
        }

        private void pictureBox1_Click(object sender, EventArgs e)
        {
            checkBox1Checked = !checkBox1Checked;
        }

        private void numericUpDown1_ValueChanged(object sender, EventArgs e)
        {
            _ApplicationSettings.SearchSettings.ReturnAmount =
Int32.Parse(numericUpDown1.Value.ToString()); } private void textBox1_TextChanged(object sender, EventArgs e) { _ApplicationSettings.DownLoadedSettings.DownLoadFileLocation = textBox1.Text; } } } <file_sep>using Newtonsoft.Json; using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Linq; using System.Web; namespace Damato_API.DataBase { public class Tag { public Tag() { DateAdded = DateTime.Now; } [Key] public int Key { get; set; } public string _Tag { get; set; } public DateTime DateAdded { get; set; } [JsonIgnore] public File File_ID { get; set; } } }<file_sep>using System; using System.Collections.Generic; using System.Data.Entity; using System.Linq; using System.Text; using System.Threading.Tasks; namespace Damato_App.DataBase { class DAMContext : DbContext { } } <file_sep>using System; using System.Collections.Generic; using System.ComponentModel; using System.Drawing; using System.Data; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; namespace Damato_App.UserControls { public partial class CheckTreeView : UserControl { private static CheckBox newCheckBox(string text = "Subcategory") { CheckBox checkBox = new CheckBox(); // // checkBox // checkBox.AutoSize = true; checkBox.Dock = System.Windows.Forms.DockStyle.Top; checkBox.ForeColor = System.Drawing.SystemColors.ControlDark; checkBox.Location = new System.Drawing.Point(5, 0); checkBox.Size = new System.Drawing.Size(164, 17); checkBox.TabIndex = 2; checkBox.Text = text; checkBox.UseVisualStyleBackColor = true; return checkBox; } private readonly int itemHeight = 17; public CheckTreeView() { InitializeComponent(); subcategoryChatch = new List<string>(); } public string Category { get { return label1.Text; } set { label1.Text = value; } } [Editor("System.Windows.Forms.Design.StringCollectionEditor, System.Design", "System.Drawing.Design.UITypeEditor, System.Drawing")] [DesignerSerializationVisibility(DesignerSerializationVisibility.Content)] public List<string> Subcategory { get { return subcategoryChatch; } set { subcategoryChatch = value; } } private void UpdateSub() { List<CheckBox> boxes = new List<CheckBox>(); foreach (var item in subcategoryChatch) { boxes.Add(newCheckBox(item)); } subcategory = boxes; } private List<string> subcategoryChatch { get; set; } //{ // get // { // List<string> vs = new List<string>(); // foreach (var item in subcategory) { vs.Add(item.Text); } // return vs; // } // set // { // List<CheckBox> boxes = new List<CheckBox>(); // foreach (var item in value) { boxes.Add(newCheckBox(item)); } // subcategory = boxes; // } //} private List<CheckBox> subcategory { get { List<CheckBox> boxes = new List<CheckBox>(); foreach (var item in panel1.Controls) { boxes.Add(item as CheckBox); } return boxes; } set { panel1.Controls.Clear(); foreach (var item in value) { panel1.Controls.Add(item); } } } private void checkBox1_CheckedChanged(object sender, EventArgs e) { for (int i = 0; i <= (subcategory.Count - 1); i++) { if (checkBox1.Checked) subcategory[i].Checked = true; else subcategory[i].Checked = false; } } private void CheckTreeView_Load(object sender, EventArgs e) { UpdateSub(); VisibleSub = !VisibleSub; for (int i = 0; i <= (subcategory.Count - 1); i++) { if (checkBox1.Checked) subcategory[i].Visible = VisibleSub; else subcategory[i].Visible = VisibleSub; } } private bool VisibleSub = true; private void checkBox1_Click(object sender, EventArgs e) { VisibleSub = !VisibleSub; for (int i = 0; i <= (subcategory.Count - 
1); i++) { if (checkBox1.Checked) subcategory[i].Visible = VisibleSub; else subcategory[i].Visible = VisibleSub; } } internal List<string> GetAllChecked() { List<string> Return = new List<string>(); for (int i = 0; i <= (subcategory.Count - 1); i++) { if (subcategory[i].Checked) Return.Add(subcategory[i].Text); } return Return; } } } <file_sep>using Damato_App.DataBase; using Damato_App.Settings; using Damato_App.UserControls; using Newtonsoft.Json; using Newtonsoft.Json.Linq; using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Net.Http; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; namespace Damato_App { public partial class MainForm : Form { int panelWidth; bool Hidden; public ApplicationSettings ApplicationSettings; public string Token; public int level; public MainForm(string token) { string json = System.IO.File.ReadAllText("ApplicationSettings.json"); ApplicationSettings = JsonConvert.DeserializeObject<ApplicationSettings>(json); InitializeComponent(); panelWidth = 150; Hidden = false; Token = token.Trim('"'); this.Cursor = Cursors.WaitCursor; MethodInvoker methodInvokerDelegate = async delegate () { level = Int32.Parse((await API.Getlevel(Token)).Trim('"')); }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } public MainForm() { InitializeComponent(); panelWidth = 150; Hidden = false; string json = System.IO.File.ReadAllText("ApplicationSettings.json"); ApplicationSettings = JsonConvert.DeserializeObject<ApplicationSettings>(json); } private void button1_Click(object sender, EventArgs e) { timer1.Start(); } private void timer1_Tick(object sender, EventArgs e) { if (Hidden) { PanelSlide.Width = PanelSlide.Width + 10; if (PanelSlide.Width >= panelWidth) { timer1.Stop(); Hidden = false; this.Refresh(); } } else { PanelSlide.Width = PanelSlide.Width - 10; if (PanelSlide.Width <= 0) { timer1.Stop(); Hidden = true; this.Refresh(); } } } private void button7_Click(object sender, EventArgs e) { Application.Exit(); } private void button8_Click(object sender, EventArgs e) { this.WindowState = FormWindowState.Minimized; } private void panel3_DragDrop(object sender, DragEventArgs e) { string[] FileList = (string[])e.Data.GetData(DataFormats.FileDrop, false); List<string> vs = new List<string>(); foreach (var item in FileList) { try { string[] allfiles = System.IO.Directory.GetFiles(item, "*.*", System.IO.SearchOption.AllDirectories); foreach (var item2 in allfiles) { vs.Add(item2); } } catch { vs.Add(item); } } foreach (var item in vs) { this.Cursor = Cursors.WaitCursor; MethodInvoker methodInvokerDelegate = async delegate () { AddTags a = new AddTags() { TopText = item, Token = Token }; a.ShowDialog(); if (a.vss.Count() == 0) { this.Cursor = Cursors.Default; return; } try { await API.UploadFile(Token, item, a.vss); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); await API.UploadFile(Token, item, a.vss); } List<File> names; try { names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount); } names.Reverse(); panel3.Controls.Clear(); foreach (File item2 in names) { addtocontrole(item2); } this.Cursor = 
Cursors.Default; foreach (var item2 in panel3.Controls) { (item2 as TableLayoutPanel).Controls[0].Visible = isdonwnload; } List<string> filetypes; try { filetypes = await API.GetAllFilesTypes(Token); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); filetypes = await API.GetAllFilesTypes(Token); } filetypes.Sort(); filetypes.Reverse(); CheckTreeView view = new CheckTreeView(); // // checkTreeView1 // view.AutoSize = true; view.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(31)))), ((int)(((byte)(31)))), ((int)(((byte)(31))))); view.Category = "File Types"; view.Dock = System.Windows.Forms.DockStyle.Top; view.Location = new System.Drawing.Point(1, 1); view.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); view.MinimumSize = new System.Drawing.Size(0, 42); view.Name = "checkTreeView1"; view.Padding = new System.Windows.Forms.Padding(8); view.Size = new System.Drawing.Size(126, 90); view.Subcategory = filetypes; view.TabIndex = 0; panel6.Controls.Clear(); panel6.Controls.Add(view); this.Cursor = Cursors.Default; foreach (var item2 in panel3.Controls) { (item2 as TableLayoutPanel).Controls[0].Visible = false; } try { filetypes = await API.GetAllFilesTags(Token); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); filetypes = await API.GetAllFilesTags(Token); } filetypes.Sort(); filetypes.Reverse(); CheckTreeView view2 = new CheckTreeView(); // // checkTreeView1 // view2.AutoSize = true; view2.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(31)))), ((int)(((byte)(31)))), ((int)(((byte)(31))))); view2.Category = "Tags"; view2.Dock = System.Windows.Forms.DockStyle.Top; view2.Location = new System.Drawing.Point(1, 1); view2.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); view2.MinimumSize = new System.Drawing.Size(0, 42); view2.Name = "checkTreeView1"; view2.Padding = new System.Windows.Forms.Padding(8); view2.Size = new System.Drawing.Size(126, 90); view2.Subcategory = filetypes; view2.TabIndex = 0; panel6.Controls.Add(view2); this.Cursor = Cursors.Default; foreach (var item2 in panel3.Controls) { (item2 as TableLayoutPanel).Controls[0].Visible = false; } }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } } private void panel3_DragEnter(object sender, DragEventArgs e) { if (e.Data.GetDataPresent(DataFormats.FileDrop)) e.Effect = DragDropEffects.Copy; else e.Effect = DragDropEffects.None; } private bool tempText = true; private void textBox1_KeyDown(object sender, KeyEventArgs e) { tempText = false; } private void textBox1_Leave(object sender, EventArgs e) { if (textBox1.Text.Trim() == "") tempText = true; if (tempText) textBox1.Text = "Search DAM"; } private void textBox1_Enter(object sender, EventArgs e) { if (tempText) textBox1.Text = ""; } public bool issettings = false; private void button5_Click(object sender, EventArgs e) { issettings = true; button1.Click -= new System.EventHandler(this.button1_Click); Hidden = false; timer1.Start(); panel3.Controls.Clear(); panel3.Controls.Add(new SettingsControl(ApplicationSettings) { Dock = DockStyle.Fill }); } private void button2_Click(object sender, EventArgs e) { if (issettings || ispre) { string json = JsonConvert.SerializeObject(ApplicationSettings); System.IO.File.WriteAllText("ApplicationSettings.json", json); } issettings = false; ispre = false; isdonwnload = false; button1.Click += new 
System.EventHandler(this.button1_Click); panel3.Controls.Clear(); this.Cursor = Cursors.WaitCursor; MethodInvoker methodInvokerDelegate = async delegate () { List<File> names; try { names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount); } names.Reverse(); foreach (File item in names) { addtocontrole(item); } this.Cursor = Cursors.Default; foreach (var item in panel3.Controls) { (item as TableLayoutPanel).Controls[0].Visible = false; } }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } public static Image GetImage(string ext) { switch (ext.Split('.').Last()) { case "cs": return global::Damato_App.Properties.Resources.cs_Image; case "css": return global::Damato_App.Properties.Resources.css_Image; case "csv": return global::Damato_App.Properties.Resources.csv_Image; case "dll": return global::Damato_App.Properties.Resources.dll_Image; case "dmg": return global::Damato_App.Properties.Resources.dmg_Image; case "doc": return global::Damato_App.Properties.Resources.doc_Image; case "eps": return global::Damato_App.Properties.Resources.eps_Image; case "exe": return global::Damato_App.Properties.Resources.exe_Image; case "flv": return global::Damato_App.Properties.Resources.flv_Image; case "gif": return global::Damato_App.Properties.Resources.gif_Image; case "gis": return global::Damato_App.Properties.Resources.gis_Image; case "gpx": return global::Damato_App.Properties.Resources.gpx_Image; case "html": return global::Damato_App.Properties.Resources.html_Image; case "ico": return global::Damato_App.Properties.Resources.ico_Image; case "jp2": return global::Damato_App.Properties.Resources.jp2_Image; case "jpg": return global::Damato_App.Properties.Resources.jpg_Image; case "kml": return global::Damato_App.Properties.Resources.kml_Image; case "kmz": return global::Damato_App.Properties.Resources.kmz_Image; case "mov": return global::Damato_App.Properties.Resources.mov_Image; case "mp3": return global::Damato_App.Properties.Resources.mp3_Image; case "mpg": return global::Damato_App.Properties.Resources.mpg_Image; case "nmea": return global::Damato_App.Properties.Resources.nmea_Image; case "ogg": return global::Damato_App.Properties.Resources.ogg_Image; case "osm": return global::Damato_App.Properties.Resources.osm_Image; case "otf": return global::Damato_App.Properties.Resources.otf_Image; case "png": return global::Damato_App.Properties.Resources.png_Image; case "ppt": return global::Damato_App.Properties.Resources.ppt_Image; case "psd": return global::Damato_App.Properties.Resources.psd_Image; case "rar": return global::Damato_App.Properties.Resources.rar_Image; case "tar": return global::Damato_App.Properties.Resources.tar_Image; case "tif": return global::Damato_App.Properties.Resources.tif_Image; case "ttf": return global::Damato_App.Properties.Resources.ttf_Image; case "txt": return global::Damato_App.Properties.Resources.txt_Image; case "wav": return global::Damato_App.Properties.Resources.wav_Image; case "wma": return global::Damato_App.Properties.Resources.wma_Image; case "woff": return global::Damato_App.Properties.Resources.woff_Image; case "zip": return global::Damato_App.Properties.Resources.zip_Image; default: return global::Damato_App.Properties.Resources._default_Image; } } public void 
addtocontrole(File item)
        {
            //panel3.Controls.Add(new SettingsControl(ApplicationSettings) { Dock = DockStyle.Fill });
            TableLayoutPanel tableLayoutPanelx = new TableLayoutPanel();
            PictureBox pictureBoxx = new PictureBox();
            PictureBox pictureBoxx2 = new PictureBox();
            Label label1x = new Label();
            Label label2x = new Label();
            Label label4x = new Label();
            //
            // pictureBox2
            //
            pictureBoxx.Dock = System.Windows.Forms.DockStyle.Fill;
            pictureBoxx.Image = GetImage(item.PathParts.Last());
            pictureBoxx.Location = new System.Drawing.Point(3, 3);
            pictureBoxx.Name = "pictureBox2";
            pictureBoxx.Size = new System.Drawing.Size(24, 24);
            pictureBoxx.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
            pictureBoxx.TabIndex = 0;
            pictureBoxx.TabStop = false;
            //
            // label2
            //
            label1x.Anchor = System.Windows.Forms.AnchorStyles.Left;
            label1x.AutoSize = true;
            label1x.ForeColor = System.Drawing.SystemColors.ControlDark;
            label1x.Location = new System.Drawing.Point(33, 3);
            label1x.Name = "label2";
            label1x.Padding = new System.Windows.Forms.Padding(0, 2, 2, 2);
            label1x.Size = new System.Drawing.Size(75, 24);
            label1x.TabIndex = 3;
            label1x.Text = item.PathParts.Last();
            //
            // label3
            //
            label2x.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right)));
            label2x.AutoSize = true;
            label2x.ForeColor = System.Drawing.SystemColors.ControlDarkDark;
            label2x.Location = new System.Drawing.Point(411, 3);
            label2x.Name = "label3";
            label2x.Padding = new System.Windows.Forms.Padding(0, 2, 2, 2);
            label2x.Size = new System.Drawing.Size(91, 24);
            label2x.TabIndex = 4;
            label2x.Text = item.DateAdded.ToShortDateString();
            //
            // pictureBoxx2
            //
            pictureBoxx2.Dock = System.Windows.Forms.DockStyle.Fill;
            pictureBoxx2.Image = global::Damato_App.Properties.Resources.icons8_Down_Arrow_26px;
            pictureBoxx2.Tag = "Down";
            pictureBoxx2.Location = new System.Drawing.Point(508, 3);
            pictureBoxx2.Name = "pictureBox3";
            pictureBoxx2.Size = new System.Drawing.Size(24, 24);
            pictureBoxx2.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
            pictureBoxx2.TabIndex = 5;
            pictureBoxx2.TabStop = false;
            pictureBoxx2.Click += new System.EventHandler(pictureBox3_Click);
            //
            // tableLayoutPanel1
            //
            tableLayoutPanelx.ColumnCount = 4;
            tableLayoutPanelx.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Absolute, 30F));
            tableLayoutPanelx.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 100F));
            tableLayoutPanelx.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle());
            tableLayoutPanelx.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Absolute, 30F));
            tableLayoutPanelx.Controls.Add(pictureBoxx2, 3, 0);
            tableLayoutPanelx.Controls.Add(label2x, 2, 0);
            tableLayoutPanelx.Controls.Add(label1x, 1, 0);
            tableLayoutPanelx.Controls.Add(pictureBoxx, 0, 0);
            tableLayoutPanelx.Controls.Add(label4x, 1, 1);
            tableLayoutPanelx.Dock = System.Windows.Forms.DockStyle.Top;
            tableLayoutPanelx.Location = new System.Drawing.Point(0, 0);
            tableLayoutPanelx.Name = "tableLayoutPanelx";
            tableLayoutPanelx.RowCount = 2;
            // Row styles for the per-item panel being built here, not the
            // form's designer field tableLayoutPanel1:
            tableLayoutPanelx.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 30F));
            tableLayoutPanelx.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 100F));
            tableLayoutPanelx.Size = new System.Drawing.Size(535, 30);
            tableLayoutPanelx.TabIndex = 0;
            label1x.MouseEnter += new System.EventHandler(this.tableLayoutPanel1_MouseEnter);
            label1x.MouseLeave += new System.EventHandler(this.tableLayoutPanel1_MouseLeave);
            //
            // label4
            //
            label4x.Anchor = System.Windows.Forms.AnchorStyles.Left;
            label4x.AutoSize = true;
            // Span the tag label across the icon/name columns of the new panel
            tableLayoutPanelx.SetColumnSpan(label4x, 2);
            label4x.Font = new System.Drawing.Font("Microsoft Sans Serif", 10F);
            label4x.ForeColor = System.Drawing.SystemColors.ControlDark;
            label4x.Location = new System.Drawing.Point(33, 33);
            label4x.Name = "label4";
            label4x.Padding = new System.Windows.Forms.Padding(0, 2, 2, 2);
            label4x.Size = new System.Drawing.Size(67, 21);
            label4x.TabIndex = 6;
            string total = "";
            foreach (var item22 in item.MainTags)
            {
                total += " " + item22._Tag + ", ";
            }
            total = total.Trim().Trim(',');
            label4x.Text = total;
            panel3.Controls.Add(tableLayoutPanelx);
        }

        private void tableLayoutPanel1_MouseEnter(object sender, EventArgs e)
        {
            ((sender as Label).Parent as TableLayoutPanel).BackColor = Color.FromArgb(35, 35, 35);
            ((sender as Label).Parent as TableLayoutPanel).Size = new System.Drawing.Size(535, 58);
        }

        private void tableLayoutPanel1_MouseLeave(object sender, EventArgs e)
        {
            ((sender as Label).Parent as TableLayoutPanel).BackColor = Color.FromArgb(31, 31, 31);
            ((sender as Label).Parent as TableLayoutPanel).Size = new System.Drawing.Size(535, 30);
        }

        private void MainForm_Load(object sender, EventArgs e)
        {
            panel3.Controls.Clear();
            this.Cursor = Cursors.WaitCursor;
            MethodInvoker methodInvokerDelegate = async delegate ()
            {
                List<File> names;
                try
                {
                    names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount);
                }
                catch
                {
                    Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password);
                    names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount);
                }
                names.Reverse();
                foreach (File item in names)
                {
                    addtocontrole(item);
                }
                List<string> filetypes;
                try
                {
                    filetypes = await API.GetAllFilesTypes(Token);
                }
                catch
                {
                    Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password);
                    filetypes = await API.GetAllFilesTypes(Token);
                }
                filetypes.Sort();
                filetypes.Reverse();
                CheckTreeView view = new CheckTreeView();
                //
                // checkTreeView1
                //
                view.AutoSize = true;
                view.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(31)))), ((int)(((byte)(31)))), ((int)(((byte)(31)))));
                view.Category = "File Types";
                view.Dock = System.Windows.Forms.DockStyle.Top;
                view.Location = new System.Drawing.Point(1, 1);
                view.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5);
                view.MinimumSize = new System.Drawing.Size(0, 42);
                view.Name = "checkTreeView1";
                view.Padding = new System.Windows.Forms.Padding(8);
                view.Size = new System.Drawing.Size(126, 90);
                view.Subcategory = filetypes;
                view.TabIndex = 0;
                panel6.Controls.Clear();
                panel6.Controls.Add(view);
                this.Cursor = Cursors.Default;
                foreach (var item in panel3.Controls)
                {
                    (item as TableLayoutPanel).Controls[0].Visible = false;
                }
                try
                {
                    filetypes = await API.GetAllFilesTags(Token);
                }
                catch
                {
                    Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password);
                    filetypes = await API.GetAllFilesTags(Token);
                }
                filetypes.Sort();
                filetypes.Reverse();
                CheckTreeView view2 = new CheckTreeView();
                //
                // checkTreeView1
                //
                view2.AutoSize = true;
                view2.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(31)))), ((int)(((byte)(31)))), ((int)(((byte)(31)))));
                view2.Category = "Tags";
                view2.Dock = System.Windows.Forms.DockStyle.Top;
                view2.Location = new System.Drawing.Point(1, 1);
                view2.Margin = new
System.Windows.Forms.Padding(4, 5, 4, 5); view2.MinimumSize = new System.Drawing.Size(0, 42); view2.Name = "checkTreeView1"; view2.Padding = new System.Windows.Forms.Padding(8); view2.Size = new System.Drawing.Size(126, 90); view2.Subcategory = filetypes; view2.TabIndex = 0; panel6.Controls.Add(view2); this.Cursor = Cursors.Default; foreach (var item2 in panel3.Controls) { (item2 as TableLayoutPanel).Controls[0].Visible = false; } }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } public bool isdonwnload = false; private void button4_Click(object sender, EventArgs e) { if (issettings || ispre) { string json = JsonConvert.SerializeObject(ApplicationSettings); System.IO.File.WriteAllText("ApplicationSettings.json", json); this.Cursor = Cursors.WaitCursor; MethodInvoker methodInvokerDelegate = async delegate () { panel3.Controls.Clear(); List<File> names; try { names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount); } names.Reverse(); foreach (File item in names) { addtocontrole(item); } this.Cursor = Cursors.Default; foreach (var item in panel3.Controls) { (item as TableLayoutPanel).Controls[0].Visible = true; } List<string> outfiles; try { outfiles = await API.GetOutFiles(Token); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); outfiles = await API.GetOutFiles(Token); } Dictionary<string, int> outfiles2 = new Dictionary<string, int>(); foreach (var item in outfiles) { outfiles2.Add(item.Substring(0, item.Length - item.Split('[').Last().Length + 1), Int32.Parse(item.Split('[').Last().TrimEnd(']'))); } foreach (var item in panel3.Controls) { if (outfiles2.Keys.Contains(((item as TableLayoutPanel).Controls[2] as Label).Text)) { if (outfiles2[((item as TableLayoutPanel).Controls[2] as Label).Text] >= level) { ((item as TableLayoutPanel).Controls[0] as PictureBox).Image = global::Damato_App.Properties.Resources.icons8_Up_26px; ((item as TableLayoutPanel).Controls[0] as PictureBox).Tag = "Up"; } else { ((item as TableLayoutPanel).Controls[0] as PictureBox).Visible = false; } } } }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } else { this.Cursor = Cursors.WaitCursor; MethodInvoker methodInvokerDelegate = async delegate () { foreach (var item in panel3.Controls) { (item as TableLayoutPanel).Controls[0].Visible = true; } List<string> outfiles; try { outfiles = await API.GetOutFiles(Token); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); outfiles = await API.GetOutFiles(Token); } Dictionary<string, int> outfiles2 = new Dictionary<string, int>(); foreach (var item in outfiles) { outfiles2.Add(item.Substring(0, item.Length - item.Split('[').Last().Length + 1), Int32.Parse(item.Split('[').Last().TrimEnd(']'))); } foreach (var item in panel3.Controls) { if (outfiles2.Keys.Contains(((item as TableLayoutPanel).Controls[2] as Label).Text)) { if (outfiles2[((item as TableLayoutPanel).Controls[2] as Label).Text] >= level) { ((item as TableLayoutPanel).Controls[0] as PictureBox).Image = global::Damato_App.Properties.Resources.icons8_Up_26px; ((item as TableLayoutPanel).Controls[0] as PictureBox).Tag = "Up"; } 
else { ((item as TableLayoutPanel).Controls[0] as PictureBox).Visible = false; } } } this.Cursor = Cursors.Default; }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } isdonwnload = true; button1.Click -= new System.EventHandler(this.button1_Click); Hidden = false; timer1.Start(); } private void pictureBox3_Click(object sender, EventArgs e) { if ((((sender as PictureBox).Parent as TableLayoutPanel).Controls[0] as PictureBox).Tag.ToString() == "Up") { this.Cursor = Cursors.WaitCursor; MethodInvoker methodInvokerDelegate1 = async delegate () { try { await API.UploadFile(Token, $"{ApplicationSettings.DownLoadedSettings.DownLoadFileLocation}\\{(((sender as PictureBox).Parent as TableLayoutPanel).Controls[2] as Label).Text}", "true"); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); await API.UploadFile(Token, $"{ApplicationSettings.DownLoadedSettings.DownLoadFileLocation}\\{(((sender as PictureBox).Parent as TableLayoutPanel).Controls[2] as Label).Text}", "true"); } System.IO.File.Delete($"{ApplicationSettings.DownLoadedSettings.DownLoadFileLocation}\\{(((sender as PictureBox).Parent as TableLayoutPanel).Controls[2] as Label).Text}"); List<File> names; try { names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount); } names.Reverse(); panel3.Controls.Clear(); foreach (File item2 in names) { addtocontrole(item2); } this.Cursor = Cursors.Default; foreach (var item2 in panel3.Controls) { (item2 as TableLayoutPanel).Controls[0].Visible = isdonwnload; } List<string> outfiles; try { outfiles = await API.GetOutFiles(Token); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); outfiles = await API.GetOutFiles(Token); } foreach (var item in panel3.Controls) { if (outfiles.Contains(((item as TableLayoutPanel).Controls[2] as Label).Text)) { ((item as TableLayoutPanel).Controls[0] as PictureBox).Image = global::Damato_App.Properties.Resources.icons8_Up_26px; ((item as TableLayoutPanel).Controls[0] as PictureBox).Tag = "Up"; } } }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate1); else methodInvokerDelegate1(); return; } (((sender as PictureBox).Parent as TableLayoutPanel).Controls[0] as PictureBox).Tag = "Up"; //(sender as PictureBox) //(((sender as PictureBox).Parent as TableLayoutPanel).Controls[2] as Label).Text = ""; this.Cursor = Cursors.WaitCursor; MethodInvoker methodInvokerDelegate = async delegate () { try { await API.DownloadFile(Token, (((sender as PictureBox).Parent as TableLayoutPanel).Controls[2] as Label).Text, ApplicationSettings.DownLoadedSettings.DownLoadFileLocation); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); await API.DownloadFile(Token, (((sender as PictureBox).Parent as TableLayoutPanel).Controls[2] as Label).Text, ApplicationSettings.DownLoadedSettings.DownLoadFileLocation); } List<File> names; try { names = await API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); names = await 
API.GetRecentFiles(Token, ApplicationSettings.SearchSettings.ReturnAmount); } names.Reverse(); panel3.Controls.Clear(); foreach (File item2 in names) { addtocontrole(item2); } this.Cursor = Cursors.Default; foreach (var item2 in panel3.Controls) { (item2 as TableLayoutPanel).Controls[0].Visible = isdonwnload; } List<string> outfiles; try { outfiles = await API.GetOutFiles(Token); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); outfiles = await API.GetOutFiles(Token); } foreach (var item in panel3.Controls) { if (outfiles.Contains(((item as TableLayoutPanel).Controls[2] as Label).Text)) { ((item as TableLayoutPanel).Controls[0] as PictureBox).Image = global::Damato_App.Properties.Resources.icons8_Up_26px; ((item as TableLayoutPanel).Controls[0] as PictureBox).Tag = "Up"; } } }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } private void button9_Click(object sender, EventArgs e) { issettings = false; ispre = false; isdonwnload = false; button1.Click += new System.EventHandler(this.button1_Click); panel3.Controls.Clear(); this.Cursor = Cursors.WaitCursor; MethodInvoker methodInvokerDelegate = async delegate () { List<string> ss = new List<string>(); if (textBox1.Text.Trim() != "" && textBox1.Text.Trim() != "Search DAM") ss.Add(textBox1.Text); foreach (var item in panel6.Controls) { if ((item as CheckTreeView).Category == "Tags") { foreach (var item3 in (item as CheckTreeView).GetAllChecked()) { ss.Add("*" + item3); } } else { ss.AddRange((item as CheckTreeView).GetAllChecked()); } } List<File> names; try { names = await API.SearchRecentFiles(Token, ss, ApplicationSettings.SearchSettings.ReturnAmount); } catch { Token = await API.GetNewToken(ApplicationSettings.LoginSettings.UserName, ApplicationSettings.LoginSettings.Password); names = await API.SearchRecentFiles(Token, ss, ApplicationSettings.SearchSettings.ReturnAmount); } names.Reverse(); panel3.Controls.Clear(); foreach (File item in names) { addtocontrole(item); } this.Cursor = Cursors.Default; foreach (var item in panel3.Controls) { (item as TableLayoutPanel).Controls[0].Visible = false; } }; if (this.InvokeRequired) this.Invoke(methodInvokerDelegate); else methodInvokerDelegate(); } public bool ispre = false; private void button3_Click(object sender, EventArgs e) { ispre = true; button1.Click -= new System.EventHandler(this.button1_Click); Hidden = false; timer1.Start(); panel3.Controls.Clear(); panel3.Controls.Add(new TemplatesControl() { Dock = DockStyle.Fill, Token = Token}); } } public static class API { public static HttpClient _api = new HttpClient() { BaseAddress = new Uri("https://damatoapi.azurewebsites.net/api/") }; public static async Task<List<File>> GetRecentFiles(string token, int amount = 10) { HttpResponseMessage response = await _api.GetAsync($"Files/{token}/GetRecentFiles?amount={amount}"); if (response.IsSuccessStatusCode) return JArray.Parse((await response.Content.ReadAsStringAsync())).ToObject<List<File>>(); else throw new Exception(); //return new List<File>(); } public static async Task<List<string>> GetAllFilesTypes(string token) { //Misc/3124084%2B00/GetAllFilesTags HttpResponseMessage response = await _api.GetAsync($"Misc/{token}/GetAllFilesTypes"); if (response.IsSuccessStatusCode) return JArray.Parse((await response.Content.ReadAsStringAsync())).ToObject<List<string>>(); else throw new Exception(); //return new List<string>(); } public static async Task<List<string>> 
GetAllFilesTags(string token) { HttpResponseMessage response = await _api.GetAsync($"Misc/{token}/GetAllFilesTags"); if (response.IsSuccessStatusCode) return JArray.Parse((await response.Content.ReadAsStringAsync())).ToObject<List<string>>(); else throw new Exception(); //return new List<string>(); } public static async Task<string> GetNewToken(string name, string password) { //\{\"Name\":\"{}\",\"Password\":\"{}\"\} HttpContent _content = new StringContent($"{"{"}\"Name\":\"{name}\",\"Password\":\"{password}\"{"}"}", Encoding.UTF8, "application/json"); HttpResponseMessage response = await _api.PostAsync($"Users/GetNewToken", _content); if (response.IsSuccessStatusCode) return (await response.Content.ReadAsStringAsync()).Trim('"'); return "Fail"; } public static async Task<bool> UploadFile(string token, string filepath, string reupload = "false") { byte[] temp = System.IO.File.ReadAllBytes(filepath);//{filepath.Split('\\').Last()} //{ "Path": "string", "File": "AxD//w==" } HttpContent _content = new StringContent($"{"{"} \"Path\": \"{filepath.Split('\\').Last()}\", \"File\": \"{ Convert.ToBase64String(System.IO.File.ReadAllBytes(filepath))}\" {"}"}", Encoding.UTF8, "application/json"); HttpResponseMessage response = await _api.PostAsync($"Files/{token}/UploadFile/{reupload}", _content); if (response.IsSuccessStatusCode) return true; else throw new Exception(); // } public static async Task<bool> UploadFile(string token, string filepath, List<string> reupload) { byte[] temp = System.IO.File.ReadAllBytes(filepath);//{filepath.Split('\\').Last()} //{ "Path": "string", "File": "AxD//w==" } //{ "Path": "string", "File": "AxD//w==", Tags": [ "Test20", "Test20"] } string tt = ", \"Tags\": [ "; foreach (var item in reupload) { tt += $"\"{item}\", "; } tt = tt.TrimEnd().TrimEnd(',') + ']'; HttpContent _content = new StringContent($"{"{"} \"Path\": \"{filepath.Split('\\').Last()}\", \"File\": \"{ Convert.ToBase64String(System.IO.File.ReadAllBytes(filepath))}\" {tt} {"}"}", Encoding.UTF8, "application/json"); HttpResponseMessage response = await _api.PostAsync($"Files/{token}/UploadFileTaged/false", _content); if (response.IsSuccessStatusCode) return true; else throw new Exception(); // } public static async Task<bool> DownloadFile(string token, string filename, string filepath) { //api/Files/0132995%2B13/DownloadFile?filename=2.txt HttpResponseMessage response = await _api.PostAsync($"Files/{token}/DownloadFile?filename={filename}", null); if (response.IsSuccessStatusCode) { string ss = await response.Content.ReadAsStringAsync(); System.IO.File.WriteAllBytes($"{filepath}\\{filename}", Convert.FromBase64String(ss.Trim('"'))); } else throw new Exception(); // return true; } public static async Task<List<string>> GetOutFiles(string token) { HttpResponseMessage response = await _api.GetAsync($"Users/{token}/GetOutFiles"); if (response.IsSuccessStatusCode) return JArray.Parse((await response.Content.ReadAsStringAsync())).ToObject<List<string>>(); else throw new Exception(); //return new List<string>(); } public static async Task<string> Getlevel(string token) { //\{\"Name\":\"{}\",\"Password\":\"{}\"\} HttpResponseMessage response = await _api.GetAsync($"Users/{token}/Getlevel"); if (response.IsSuccessStatusCode) return await response.Content.ReadAsStringAsync(); return "Fail"; } public static async Task<List<File>> SearchRecentFiles(string token, List<string> search, int amount = 10) { string input = $"Files/{token}/SearchRecentFiles?amount={amount}"; foreach (var item in search) { input += 
$"&search={item}"; } HttpResponseMessage response = await _api.GetAsync(input); if (response.IsSuccessStatusCode) return JArray.Parse((await response.Content.ReadAsStringAsync())).ToObject<List<File>>(); else throw new Exception(); //return new List<File>(); } //public static async Task<List<Flag>> ByCurrency(string code) //{ // string[] vs = code.Split(','); // List<string> vss = new List<string>(); // List<Flag> vsf = new List<Flag>(); // foreach (var item in vs) { vss.Add(item.Split('(').Last().TrimEnd(')')); } // foreach (var item in vss) // { // HttpResponseMessage response = await _api.GetAsync($"currency/{item}?fields=name;flag"); // if (response.IsSuccessStatusCode) // vsf.AddRange(JArray.Parse((await response.Content.ReadAsStringAsync())).ToObject<List<Flag>>()); // } // return vsf; //} public static async Task<bool> DeletePresets(string token, string filename, string filepath) { //api/Files/0132995%2B13/DownloadFile?filename=2.txt HttpResponseMessage response = await _api.DeleteAsync($"Presets/{token}/DeletePresets/{filename}"); // return true; } public static async Task<int> PostPresets(string token, string filename, Presets filepath) { //{ "Name": "Passport", "Feleds": "Name*DOB*ID_No." } HttpContent s = new StringContent($"{"{"} \"Name\": \"{filepath.Name}\", \"Feleds\": \"{filepath.Feleds}\" {"}"}", Encoding.UTF8, "application/json"); HttpResponseMessage response = await _api.PostAsync($"Presets/{token}/PostPresets", s); if (response.IsSuccessStatusCode) { string dd = await response.Content.ReadAsStringAsync(); return Int32.Parse(dd.Split('"')[2].Trim(',').Trim(':')); } else throw new Exception(); } public static async Task<List<Presets>> GetPresetss(string token, string filename) { //api/Files/0132995%2B13/DownloadFile?filename=2.txt HttpResponseMessage response = await _api.GetAsync($"Presets/{token}/GetPresetss"); // if (response.IsSuccessStatusCode) return JArray.Parse((await response.Content.ReadAsStringAsync())).ToObject<List<Presets>>(); else throw new Exception(); } } } <file_sep># Damato Personal Data Asset Management
057cbfc23564dc225a4de27e84faed8c75424499
[ "Markdown", "C#" ]
25
C#
RaptorHunter56/Damato
a27920f6ddeacaa58d0a37235565f89ce0df2be7
81a9497743df6cfa1ef7fb782fea8216284537f5
refs/heads/master
<file_sep>#include<iostream>
#include<vector>
#include<climits>  // for INT_MIN
using namespace std;

// Find the maximum subarray that crosses the midpoint
int cross_max_subarray(vector<int> &a, int &low, int mid, int &high)
{
    int sum_L = 0, sum_R = 0; // running sums on the left and right halves
    int max_L = INT_MIN;      // best sum on the left
    int max_R = INT_MIN;      // best sum on the right
    int min_i;                // index where the best left sum starts
    int max_j;                // index where the best right sum ends

    // scan from mid down to low, tracking the best left sum
    for (int i = mid; i >= low; i--) {
        sum_L += a[i];
        if (sum_L > max_L) {
            max_L = sum_L;
            min_i = i;
        }
    }
    // scan from mid+1 up to high, tracking the best right sum
    for (int j = mid + 1; j <= high; j++) {
        sum_R += a[j];
        if (sum_R > max_R) {
            max_R = sum_R;
            max_j = j;
        }
    }
    low = min_i;  // report the left index back to the caller
    high = max_j; // report the right index back to the caller
    return max_L + max_R;
}

// Solve the original problem recursively
int max_subarray(vector<int> &a, int low, int high)
{
    if (low == high)
        return a[low];
    else {
        int mid = (low + high) / 2;
        int L, R, M;
        L = max_subarray(a, low, mid);             // solve the left subproblem
        R = max_subarray(a, mid + 1, high);        // solve the right subproblem
        M = cross_max_subarray(a, low, mid, high); // solve the crossing subproblem
        //cout << low << "," << high << endl;      // print each candidate interval; the last one printed is the maximum subarray's interval
        // return whichever of the three subproblems yields the largest sum
        if (L >= R && L >= M)
            return L;
        else if (R >= L && R >= M)
            return R;
        else
            return M;
    }
}

int main()
{
    int n[] = { 13,-3,-25,20,-3,-16,-23,18,20,-7,12,-5,-22,15,-4,7 };
    vector<int> a(n, n+16);
    // print the input
    for (int i = 0; i < a.size(); i++)
        cout << a[i] << " ";
    cout << endl;

    int result;
    result = max_subarray(a, 0, a.size()-1);
    cout << result << endl;
    return 0;
}
<file_sep># Divide and Conquer

1. Maximum subarray problem
2. Tower of Hanoi
<file_sep>/// Notes:
/// Let n be the number of disks to move, and let A, B, C label the pegs.
/// 1. When n = 1, move the disk from A to C directly; this serves as the termination condition.
/// 2. When n = 2, move the first disk to B, then the second disk to C, and finally the first disk to C. The disks therefore travel: A-B, A-C, B-C.
/// 3. When n > 2, treat disks 1 through n-1 as one unit (playing the role of the first disk) and disk n alone as another (the second disk); their moves are then: A-B (n-1 disks), A-C (the nth disk), B-C (n-1 disks).
/// hanoi(n, A, B, C) performs the moves; the call means: move n disks from A to C.
#include <iostream>
using namespace std;

int k = 0; // total number of moves

// recursive step
void hanoi(int n, char A, char B, char C)
{
    /// Function notes:
    /// 1. Move from A to C, with B as the auxiliary peg.

    // termination condition: only one disk, move it straight from A to C
    if (n == 1) {
        cout << "Move disk 1 from " << A << " to " << C << endl;
        k += 1;
    }
    else {
        hanoi(n - 1, A, C, B); // move n-1 disks from A to B
        cout << "Move disk " << n << " from " << A << " to " << C << endl; // move the nth disk from A to C
        k += 1;
        hanoi(n - 1, B, A, C); // move n-1 disks from B to C
    }
}

int main()
{
    int n;
    cout << "Enter the number of disks: ";
    cin >> n;
    // A, B, C label the pegs
    hanoi(n, 'A', 'B', 'C');
    cout << "Moved " << k << " times in total" << endl;
    cin.get();
    cin.get();
    return 0;
}
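For reference, the running costs of the two routines follow directly from their recursions (standard results, added here; they are not stated in the repo's README):

```latex
% Tower of Hanoi: one move in the base case, two recursive calls plus one
% move otherwise, so the move counter k satisfies
T(1) = 1, \qquad T(n) = 2\,T(n-1) + 1 \quad\Longrightarrow\quad T(n) = 2^{n} - 1 .

% Maximum subarray: cross_max_subarray does \Theta(n) work at each level, so
T(n) = 2\,T(n/2) + \Theta(n) \quad\Longrightarrow\quad T(n) = \Theta(n \log n) .
```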
6d92ad623bedc3d2451a173a206d622195e6ac7d
[ "Markdown", "C++" ]
3
C++
sunnyyeah/Algorithm
09ba8068166fde18810e25df20f2a3f628983472
da7cb81c79e2312783ccf02d6acf508beb574ae7
refs/heads/master
<repo_name>rafsan18/online-class<file_sep>/README.md
# Online Class

## Online Class | [Live link](https://odera-online-learning-platform.netlify.app/)

It is a simple React SPA where

- People can select a course
- The selected course is added to the cart
- The cart lists the added courses along with their individual prices and the total price

Technologies Used:

- React.js
- React-Bootstrap

Deployed on Netlify.
<file_sep>/src/components/CartList/CartList.js
import React from "react";

const CartList = ({ cartItem }) => {
    const { img, title, price } = cartItem;
    return (
        <div className="row">
            <img
                className="col-md-3"
                style={{ width: "30px", height: "30px" }}
                src={img}
                alt=""
            />
            <h6 className="col-md-6">{title}</h6>
            <p className="col-md-3">${price}</p>
        </div>
    );
};

export default CartList;
<file_sep>/src/components/Course/Course.js
import React from "react";

const Course = ({ course, handleAddCourse }) => {
    const { img, title, name, info, price } = course;
    return (
        <div className="col-md-4 mb-3">
            <div className="card" style={{ height: "100%" }}>
                <img src={img} style={{ height: "200px" }} className="card-img-top" alt="..." />
                <div className="card-body d-flex flex-column justify-content-between">
                    <div>
                        <h5 className="card-title">{title}</h5>
                        <p className="card-text">{info}</p>
                        <small>
                            By: <strong>{name}</strong>
                        </small>
                        <h6>${price}</h6>
                    </div>
                    <button onClick={() => handleAddCourse(course)} className="btn btn-danger">
                        Enroll Now
                    </button>
                </div>
            </div>
        </div>
    );
};

export default Course;
<file_sep>/src/fakeData/courses.js
var courses = [
    { title: "SEO 2020: Complete Guide", name: "<NAME>", price: 19.99, img: "https://i.ibb.co/mG2sSdh/seo.jpg", info: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Facere, qui?", id: 201 },
    { title: "The Complete Ruby on Rails Developer Course", name: "<NAME>", price: 123.99, img: "https://i.ibb.co/KDq09tX/ruby-on-rails.jpg", info: "Lorem ipsum dolor sit, amet consectetur adipisicing elit. Adipisci, nulla.", id: 202 },
    { title: "React: The Complete Guide", name: "<NAME>", price: 69.99, img: "https://i.ibb.co/WzXvQkc/react-js.png", info: "Lorem ipsum dolor, sit amet consectetur adipisicing elit. Id, atque!", id: 203 },
    { title: "Learn Python Programming Masterclass", name: "<NAME>", price: 89.99, img: "https://i.ibb.co/120xhPK/python.png", info: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Repellendus, enim?", id: 204 },
    { title: "PHP for Beginners", name: "<NAME>", price: 59.99, img: "https://i.ibb.co/cXzBxCQ/php.png", info: "Lorem ipsum dolor sit, amet consectetur adipisicing elit. Facere, eum?", id: 205 },
    { title: "Adobe Photoshop CC: Complete Beginner to Advance Guide", name: "<NAME>", price: 79.99, img: "https://i.ibb.co/xHSF44z/photoshop.png", info: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Aliquid, illum?", id: 206 },
    { title: "Maya for Beginners", name: "<NAME>", price: 139.99, img: "https://i.ibb.co/f1sDNbv/maya.jpg", info: "Lorem ipsum dolor sit amet, consectetur adipisicing elit. Ut, voluptatem.", id: 207 },
    { title: "Complete Guide of Lightroom", name: "<NAME>", price: 129.99, img: "https://i.ibb.co/ZdYFVLT/lightroom.png", info: "Lorem ipsum, dolor sit amet consectetur adipisicing elit. Nostrum, blanditiis.", id: 208 },
    { title: "Java Programming Masterclass", name: "<NAME>", price: 169.99, img: "https://i.ibb.co/Xjw64rG/java.jpg", info: "Lorem ipsum, dolor sit amet consectetur adipisicing elit. Molestias, beatae?", id: 209 },
    { title: "Complete JavaScript Course 2020", name: "<NAME>", price: 99.99, img: "https://i.ibb.co/BBKN86b/javascript.jpg", info: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Incidunt, itaque.", id: 210 },
    { title: "Complete Front-End Web Development Guide", name: "<NAME>", price: 119.99, img: "https://i.ibb.co/Wpky7FS/front-end-development.jpg", info: "Lorem ipsum, dolor sit amet consectetur adipisicing elit. Doloribus, harum?", id: 211 },
    { title: "Learn Content Writing", name: "<NAME>", price: 29.99, img: "https://i.ibb.co/MpnM6HJ/content-writing.png", info: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Cumque, dignissimos!", id: 212 },
    { title: "Complete AutoCad Training Course 2021", name: "<NAME>", price: 69.99, img: "https://i.ibb.co/j46S2B0/autodesk-autocad.png", info: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Facere, doloremque?", id: 213 },
    { title: "Complete Logo Design Course Using Adobe Illustrator: 2020", name: "<NAME>", price: 49.99, img: "https://i.ibb.co/LRBkvvz/adobe-illustrator.jpg", info: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Numquam, provident?", id: 214 },
    { title: "Beginners to Advance Guide on Angular JS", name: "<NAME>", price: 79.99, img: "https://i.ibb.co/vJs2GZc/angular-js.jpg", info: "Lorem ipsum, dolor sit amet consectetur adipisicing elit. Est, animi!", id: 215 },
];

export default courses;
26201e99d7022e56a8e395a376627cccc3fb56f8
[ "Markdown", "JavaScript" ]
4
Markdown
rafsan18/online-class
dfb63d4e203b52376d5c24059ca9d95efbc4389b
9a84307c6dd66aa3e1cc261bebb9c5fd5e97a84a
refs/heads/master
<repo_name>HyperSuprime-Cam/distEst<file_sep>/python/hsc/meas/match/hscDistortion.py import math import lsst.afw.geom as afwGeom import lsst.afw.image as afwImage import lsst.pipette.distortion as pipDist import hsc.meas.match.distest as distest class HscDistortion(pipDist.CameraDistortion): """ Adapter class for the pipette framework. """ def __init__(self, ccd, config): """Constructor @param ccd Ccd for distortion (sets position relative to center) @param config Configuration for distortion """ self.ccd = ccd self.pixelSize = ccd.getPixelSize() self.transform = ccd.getGlobalTransform() self.inverseTransform = self.transform.invert() angle = ccd.getOrientation().getNQuarter() * math.pi/2 self.cos, self.sin = math.cos(angle), math.sin(angle) def _rotate(self, x, y, reverse=False): sin = - self.sin if reverse else self.sin return self.cos * x + sin * y, self.cos * y - sin * x def _distortPosition(self, x, y, direction=None, elevation=30.0, copy=True): """Distort/undistort a position. @param x X coordinate to distort. pixels from focal plane center. @param y Y coordinate to distort @param direction "toIdeal" or "toActual" @returns Copy of input source with distorted/undistorted coordinates """ if direction == "toIdeal": point = self.transform(afwGeom.PointD(x, y)) x, y = point.getX() / self.pixelSize, point.getY() / self.pixelSize distX, distY = distest.getUndistortedPosition(x, y, elevation) return self._rotate(distX, distY, reverse=False) if direction == "toActual": x, y = self._rotate(x, y, reverse=True) undistX, undistY = distest.getDistortedPositionIterative(x, y, elevation) point = afwGeom.PointD(undistX * self.pixelSize, undistY * self.pixelSize) point = self.inverseTransform(point) return point.getX(), point.getY() raise RuntimeError("unknown distortion direction: %s" % (direction)) # Need to get elevation in here -- CPL def actualToIdeal(self, sources, copy=True): return self._distortSources(sources, direction="toIdeal", copy=copy) def idealToActual(self, sources, copy=True): return self._distortSources(sources, direction="toActual", copy=copy) <file_sep>/example/DISTforSIM.cc //-------------------------------------------------- //Calculating Distortion for HSC Simulation // //Last modification : 2012/02/25 //-------------------------------------------------- #include<iostream> #include<cmath> #include"./DISTforSIM.h" using namespace std; int main(){ int i,j; CL_DISTforSIM DSIM; DSIM.EL=85; DSIM.ORDER=9; DSIM.DNUM=121; DSIM.DNUM=401*401; DSIM.F_DSIM_NEWCCDtoSKY(); DSIM.F_DSIM_NEWSKYtoCCD(); for(i=0;i<11;i+=1) for(j=0;j<11;j+=1){ DSIM.REAL_X_CCD[11*i+j][0]=4000*i-20000; DSIM.REAL_X_CCD[11*i+j][1]=4000*j-20000; DSIM.REAL_X_SKY[11*i+j][0]=4000*i-20000; DSIM.REAL_X_SKY[11*i+j][1]=4000*j-20000; } for(i=0;i<401;i+=1) for(j=0;j<401;j+=1){ DSIM.REAL_X_CCD[401*i+j][0]=100*i-20000; DSIM.REAL_X_CCD[401*i+j][1]=100*j-20000; DSIM.REAL_X_SKY[401*i+j][0]=100*i-20000; DSIM.REAL_X_SKY[401*i+j][1]=100*j-20000; } DSIM.F_DSIM_CCDtoSKY(); if(DSIM.ERROR==1)return 0; for(i=0;i<DSIM.DNUM;i++) cout << DSIM.REAL_X_CCD[i][0] << " " << DSIM.REAL_X_CCD[i][1] << " " << DSIM.PREDICT_X_SKY[i][0] << " " << DSIM.PREDICT_X_SKY[i][1] << endl; DSIM.F_DSIM_SKYtoCCD(); if(DSIM.ERROR==1)return 0; for(i=0;i<DSIM.DNUM;i++) cout << DSIM.REAL_X_SKY[i][0] << " " << DSIM.REAL_X_SKY[i][1] << " " << DSIM.PREDICT_X_CCD[i][0] << " " << DSIM.PREDICT_X_CCD[i][1] << endl; cout << DSIM.Coef_CCDtoSKY[0][10] << " " << DSIM.Coef_CCDtoSKY[1][1] << endl; cout << DSIM.Coef_SKYtoCCD[0][10] << " " << DSIM.Coef_SKYtoCCD[1][1] << endl; 
DSIM.F_DSIM_DELCCDtoSKY(); DSIM.F_DSIM_DELSKYtoCCD(); return 0; } void CL_DISTforSIM::F_DSIM_CCDtoSKY(){ int i,j,ij,NUM; double PX[10][2]; F_DSIM_GETCOEFCCDtoSKY(); if(ERROR==1)return; for(NUM=0;NUM<DNUM;NUM++){ PREDICT_X_SKY[NUM][0]=PREDICT_X_SKY[NUM][1]=0; PX[0][0]=1.0; PX[0][1]=1.0; for(i=1;i<ORDER+1;i++) for(j=0;j< 2;j++) PX[i][j]=PX[i-1][j]*REAL_X_CCD[NUM][j]; ij=0; for(i=0;i<ORDER+1;i++) for(j=0;j<ORDER+1;j++) if(i+j<ORDER+1){ PREDICT_X_SKY[NUM][0]+=Coef_CCDtoSKY[0][ij]*PX[i][0]*PX[j][1]; PREDICT_X_SKY[NUM][1]+=Coef_CCDtoSKY[1][ij]*PX[i][0]*PX[j][1]; ij++; } // PREDICT_X_SKY[NUM][2]=EAL_X_CCD[NUM][2] //if(NUM%100==0) //cout << NUM << " / 20001 " << endl; } } void CL_DISTforSIM::F_DSIM_SKYtoCCD(){ int i,j,ij,NUM; double PX[10][2]; F_DSIM_GETCOEFSKYtoCCD(); if(ERROR==1)return; for(NUM=0;NUM<DNUM;NUM++){ PREDICT_X_CCD[NUM][0]=PREDICT_X_CCD[NUM][1]=0; PX[0][0]=1.0; PX[0][1]=1.0; for(i=1;i<ORDER+1;i++) for(j=0;j< 2;j++) PX[i][j]=PX[i-1][j]*REAL_X_SKY[NUM][j]; ij=0; for(i=0;i<ORDER+1;i++) for(j=0;j<ORDER+1;j++) if(i+j<ORDER+1){ PREDICT_X_CCD[NUM][0]+=Coef_SKYtoCCD[0][ij]*PX[i][0]*PX[j][1]; PREDICT_X_CCD[NUM][1]+=Coef_SKYtoCCD[1][ij]*PX[i][0]*PX[j][1]; ij++; } // PREDICT_X_CCD[NUM][2]=EAL_X_SKY[NUM][2] //if(NUM%100==0) //cout << NUM << " / 20001 " << endl; } } void CL_DISTforSIM::F_DSIM_NEWCCDtoSKY(){ int NUM,N; REAL_X_CCD = new double*[DNUM]; PREDICT_X_SKY = new double*[DNUM]; for(NUM=0;NUM<DNUM;NUM++){ REAL_X_CCD[NUM] = new double[3]; PREDICT_X_SKY[NUM] = new double[3]; for(N=0 ;N<3 ;N++) REAL_X_CCD[NUM][N]=PREDICT_X_SKY[NUM][N]=0; } } void CL_DISTforSIM::F_DSIM_NEWSKYtoCCD(){ int NUM,N; REAL_X_SKY = new double*[DNUM]; PREDICT_X_CCD = new double*[DNUM]; for(NUM=0;NUM<DNUM;NUM++){ REAL_X_SKY[NUM] = new double[3]; PREDICT_X_CCD[NUM] = new double[3]; for(N=0 ;N<3 ;N++) REAL_X_SKY[NUM][N]=PREDICT_X_CCD[NUM][N]=0; } } void CL_DISTforSIM::F_DSIM_DELCCDtoSKY(){ int NUM; for(NUM=0;NUM<DNUM;NUM++){ delete [] REAL_X_CCD[NUM]; delete [] PREDICT_X_SKY[NUM]; } delete [] REAL_X_CCD; delete [] PREDICT_X_SKY; } void CL_DISTforSIM::F_DSIM_DELSKYtoCCD(){ int NUM; for(NUM=0;NUM<DNUM;NUM++){ delete [] REAL_X_SKY[NUM]; delete [] PREDICT_X_CCD[NUM]; } delete [] REAL_X_SKY; delete [] PREDICT_X_CCD; } void CL_DISTforSIM::F_DSIM_GETCOEFCCDtoSKY(){ ERROR=0; if(EL==30){ Coef_CCDtoSKY[0][ 0] = -7.173920e-06; Coef_CCDtoSKY[0][ 1] = 4.100930e-10; Coef_CCDtoSKY[0][ 2] = -9.038870e-14; Coef_CCDtoSKY[0][ 3] = 1.181910e-18; Coef_CCDtoSKY[0][ 4] = -6.095620e-23; Coef_CCDtoSKY[0][ 5] = -4.676950e-26; Coef_CCDtoSKY[0][ 6] = 1.712380e-30; Coef_CCDtoSKY[0][ 7] = 1.839700e-34; Coef_CCDtoSKY[0][ 8] = -3.558260e-39; Coef_CCDtoSKY[0][ 9] = -2.758770e-43; Coef_CCDtoSKY[0][10] = 1.000010e+00; Coef_CCDtoSKY[0][11] = 1.403710e-09; Coef_CCDtoSKY[0][12] = -1.055930e-10; Coef_CCDtoSKY[0][13] = 3.530060e-18; Coef_CCDtoSKY[0][14] = -6.212070e-21; Coef_CCDtoSKY[0][15] = 1.497910e-26; Coef_CCDtoSKY[0][16] = -1.121990e-28; Coef_CCDtoSKY[0][17] = -2.234020e-35; Coef_CCDtoSKY[0][18] = 1.016860e-37; Coef_CCDtoSKY[0][19] = 1.178560e-12; Coef_CCDtoSKY[0][20] = -8.802990e-17; Coef_CCDtoSKY[0][21] = 1.321880e-20; Coef_CCDtoSKY[0][22] = 3.549880e-25; Coef_CCDtoSKY[0][23] = -2.046800e-29; Coef_CCDtoSKY[0][24] = 1.517790e-33; Coef_CCDtoSKY[0][25] = 2.074410e-40; Coef_CCDtoSKY[0][26] = -1.999240e-42; Coef_CCDtoSKY[0][27] = -1.055400e-10; Coef_CCDtoSKY[0][28] = 3.594110e-18; Coef_CCDtoSKY[0][29] = -1.235440e-20; Coef_CCDtoSKY[0][30] = 3.020890e-26; Coef_CCDtoSKY[0][31] = -3.377650e-28; Coef_CCDtoSKY[0][32] = -7.030490e-35; Coef_CCDtoSKY[0][33] = 
4.100930e-37; Coef_CCDtoSKY[0][34] = -2.740490e-20; Coef_CCDtoSKY[0][35] = 2.337200e-24; Coef_CCDtoSKY[0][36] = -2.161310e-28; Coef_CCDtoSKY[0][37] = -1.076600e-32; Coef_CCDtoSKY[0][38] = 2.079970e-37; Coef_CCDtoSKY[0][39] = -8.390050e-42; Coef_CCDtoSKY[0][40] = -7.009580e-21; Coef_CCDtoSKY[0][41] = 1.434630e-26; Coef_CCDtoSKY[0][42] = -3.372520e-28; Coef_CCDtoSKY[0][43] = -6.674840e-35; Coef_CCDtoSKY[0][44] = 6.155570e-37; Coef_CCDtoSKY[0][45] = 1.847370e-28; Coef_CCDtoSKY[0][46] = -1.760820e-32; Coef_CCDtoSKY[0][47] = 7.849140e-37; Coef_CCDtoSKY[0][48] = 5.738010e-41; Coef_CCDtoSKY[0][49] = -1.078560e-28; Coef_CCDtoSKY[0][50] = -2.137870e-35; Coef_CCDtoSKY[0][51] = 4.085390e-37; Coef_CCDtoSKY[0][52] = -3.494790e-37; Coef_CCDtoSKY[0][53] = 3.779800e-41; Coef_CCDtoSKY[0][54] = 9.407650e-38; Coef_CCDtoSKY[1][ 0] = -4.702030e-04; Coef_CCDtoSKY[1][ 1] = 1.000020e+00; Coef_CCDtoSKY[1][ 2] = -1.377560e-09; Coef_CCDtoSKY[1][ 3] = -1.055360e-10; Coef_CCDtoSKY[1][ 4] = 3.994190e-18; Coef_CCDtoSKY[1][ 5] = -7.045480e-21; Coef_CCDtoSKY[1][ 6] = 1.562290e-26; Coef_CCDtoSKY[1][ 7] = -1.077150e-28; Coef_CCDtoSKY[1][ 8] = -2.439530e-35; Coef_CCDtoSKY[1][ 9] = 9.382480e-38; Coef_CCDtoSKY[1][10] = -1.405140e-10; Coef_CCDtoSKY[1][11] = 4.001470e-14; Coef_CCDtoSKY[1][12] = 3.019860e-18; Coef_CCDtoSKY[1][13] = -4.046310e-22; Coef_CCDtoSKY[1][14] = -1.533940e-26; Coef_CCDtoSKY[1][15] = 8.547770e-31; Coef_CCDtoSKY[1][16] = 1.354380e-36; Coef_CCDtoSKY[1][17] = 1.816990e-40; Coef_CCDtoSKY[1][18] = 9.225600e-45; Coef_CCDtoSKY[1][19] = -2.798340e-09; Coef_CCDtoSKY[1][20] = -1.055890e-10; Coef_CCDtoSKY[1][21] = 4.588130e-18; Coef_CCDtoSKY[1][22] = -1.240320e-20; Coef_CCDtoSKY[1][23] = 3.138050e-26; Coef_CCDtoSKY[1][24] = -3.370070e-28; Coef_CCDtoSKY[1][25] = -7.433100e-35; Coef_CCDtoSKY[1][26] = 4.081320e-37; Coef_CCDtoSKY[1][27] = 6.938850e-18; Coef_CCDtoSKY[1][28] = -1.565950e-21; Coef_CCDtoSKY[1][29] = -1.177570e-25; Coef_CCDtoSKY[1][30] = 1.078070e-29; Coef_CCDtoSKY[1][31] = 6.109120e-34; Coef_CCDtoSKY[1][32] = -1.871860e-38; Coef_CCDtoSKY[1][33] = -1.254400e-43; Coef_CCDtoSKY[1][34] = 4.685390e-19; Coef_CCDtoSKY[1][35] = -6.196180e-21; Coef_CCDtoSKY[1][36] = 1.617100e-26; Coef_CCDtoSKY[1][37] = -3.374570e-28; Coef_CCDtoSKY[1][38] = -7.837900e-35; Coef_CCDtoSKY[1][39] = 6.147450e-37; Coef_CCDtoSKY[1][40] = -8.656790e-26; Coef_CCDtoSKY[1][41] = 1.523500e-29; Coef_CCDtoSKY[1][42] = 8.723320e-34; Coef_CCDtoSKY[1][43] = -4.048160e-38; Coef_CCDtoSKY[1][44] = -3.893160e-42; Coef_CCDtoSKY[1][45] = 7.519870e-28; Coef_CCDtoSKY[1][46] = -1.122970e-28; Coef_CCDtoSKY[1][47] = -2.924320e-35; Coef_CCDtoSKY[1][48] = 4.095050e-37; Coef_CCDtoSKY[1][49] = 3.843590e-34; Coef_CCDtoSKY[1][50] = -4.523070e-38; Coef_CCDtoSKY[1][51] = -1.035610e-42; Coef_CCDtoSKY[1][52] = -2.251020e-36; Coef_CCDtoSKY[1][53] = 1.019210e-37; Coef_CCDtoSKY[1][54] = -5.682850e-43; }else if(EL==35){ Coef_CCDtoSKY[0][ 0] = -2.137500e-05; Coef_CCDtoSKY[0][ 1] = 1.411250e-10; Coef_CCDtoSKY[0][ 2] = 2.449680e-13; Coef_CCDtoSKY[0][ 3] = 3.923590e-18; Coef_CCDtoSKY[0][ 4] = -1.428200e-21; Coef_CCDtoSKY[0][ 5] = -5.456450e-26; Coef_CCDtoSKY[0][ 6] = 3.485800e-30; Coef_CCDtoSKY[0][ 7] = 2.001600e-34; Coef_CCDtoSKY[0][ 8] = -6.609410e-39; Coef_CCDtoSKY[0][ 9] = -3.286570e-43; Coef_CCDtoSKY[0][10] = 1.000010e+00; Coef_CCDtoSKY[0][11] = 1.114950e-09; Coef_CCDtoSKY[0][12] = -1.055930e-10; Coef_CCDtoSKY[0][13] = 3.097270e-18; Coef_CCDtoSKY[0][14] = -6.195130e-21; Coef_CCDtoSKY[0][15] = 1.035100e-26; Coef_CCDtoSKY[0][16] = -1.122650e-28; Coef_CCDtoSKY[0][17] = 
-1.530230e-35; Coef_CCDtoSKY[0][18] = 1.017940e-37; Coef_CCDtoSKY[0][19] = 3.014020e-12; Coef_CCDtoSKY[0][20] = -4.994930e-17; Coef_CCDtoSKY[0][21] = -2.402320e-20; Coef_CCDtoSKY[0][22] = -3.568670e-28; Coef_CCDtoSKY[0][23] = 9.786230e-29; Coef_CCDtoSKY[0][24] = 2.169050e-33; Coef_CCDtoSKY[0][25] = -1.578610e-38; Coef_CCDtoSKY[0][26] = -1.154110e-42; Coef_CCDtoSKY[0][27] = -1.055380e-10; Coef_CCDtoSKY[0][28] = 3.009320e-18; Coef_CCDtoSKY[0][29] = -1.235170e-20; Coef_CCDtoSKY[0][30] = 2.096240e-26; Coef_CCDtoSKY[0][31] = -3.379110e-28; Coef_CCDtoSKY[0][32] = -4.426720e-35; Coef_CCDtoSKY[0][33] = 4.104520e-37; Coef_CCDtoSKY[0][34] = -6.157330e-20; Coef_CCDtoSKY[0][35] = 1.615000e-24; Coef_CCDtoSKY[0][36] = 2.983480e-28; Coef_CCDtoSKY[0][37] = -5.027010e-33; Coef_CCDtoSKY[0][38] = -9.410700e-37; Coef_CCDtoSKY[0][39] = -1.927320e-41; Coef_CCDtoSKY[0][40] = -7.034470e-21; Coef_CCDtoSKY[0][41] = 1.132300e-26; Coef_CCDtoSKY[0][42] = -3.371360e-28; Coef_CCDtoSKY[0][43] = -4.877710e-35; Coef_CCDtoSKY[0][44] = 6.159530e-37; Coef_CCDtoSKY[0][45] = 3.758520e-28; Coef_CCDtoSKY[0][46] = -1.393360e-32; Coef_CCDtoSKY[0][47] = -6.781470e-37; Coef_CCDtoSKY[0][48] = 4.120480e-41; Coef_CCDtoSKY[0][49] = -1.077110e-28; Coef_CCDtoSKY[0][50] = -1.744340e-35; Coef_CCDtoSKY[0][51] = 4.080450e-37; Coef_CCDtoSKY[0][52] = -6.704750e-37; Coef_CCDtoSKY[0][53] = 3.298090e-41; Coef_CCDtoSKY[0][54] = 9.383010e-38; Coef_CCDtoSKY[1][ 0] = -3.378910e-04; Coef_CCDtoSKY[1][ 1] = 1.000010e+00; Coef_CCDtoSKY[1][ 2] = -1.105760e-09; Coef_CCDtoSKY[1][ 3] = -1.055370e-10; Coef_CCDtoSKY[1][ 4] = 3.344790e-18; Coef_CCDtoSKY[1][ 5] = -7.038610e-21; Coef_CCDtoSKY[1][ 6] = 1.174700e-26; Coef_CCDtoSKY[1][ 7] = -1.077250e-28; Coef_CCDtoSKY[1][ 8] = -1.839490e-35; Coef_CCDtoSKY[1][ 9] = 9.386450e-38; Coef_CCDtoSKY[1][10] = 3.145220e-10; Coef_CCDtoSKY[1][11] = -1.073330e-14; Coef_CCDtoSKY[1][12] = 2.908490e-18; Coef_CCDtoSKY[1][13] = 1.915280e-23; Coef_CCDtoSKY[1][14] = -2.572700e-26; Coef_CCDtoSKY[1][15] = 4.593470e-31; Coef_CCDtoSKY[1][16] = 2.814560e-35; Coef_CCDtoSKY[1][17] = -1.145350e-39; Coef_CCDtoSKY[1][18] = 4.528890e-44; Coef_CCDtoSKY[1][19] = -2.239070e-09; Coef_CCDtoSKY[1][20] = -1.055900e-10; Coef_CCDtoSKY[1][21] = 3.707750e-18; Coef_CCDtoSKY[1][22] = -1.234430e-20; Coef_CCDtoSKY[1][23] = 2.340120e-26; Coef_CCDtoSKY[1][24] = -3.371260e-28; Coef_CCDtoSKY[1][25] = -5.444450e-35; Coef_CCDtoSKY[1][26] = 4.079790e-37; Coef_CCDtoSKY[1][27] = -2.172250e-17; Coef_CCDtoSKY[1][28] = 3.659290e-22; Coef_CCDtoSKY[1][29] = -8.064030e-26; Coef_CCDtoSKY[1][30] = -2.685750e-30; Coef_CCDtoSKY[1][31] = 8.565340e-34; Coef_CCDtoSKY[1][32] = 5.017260e-40; Coef_CCDtoSKY[1][33] = -1.124970e-42; Coef_CCDtoSKY[1][34] = 3.504270e-19; Coef_CCDtoSKY[1][35] = -6.197310e-21; Coef_CCDtoSKY[1][36] = 1.309640e-26; Coef_CCDtoSKY[1][37] = -3.381190e-28; Coef_CCDtoSKY[1][38] = -5.857920e-35; Coef_CCDtoSKY[1][39] = 6.164370e-37; Coef_CCDtoSKY[1][40] = 3.647830e-25; Coef_CCDtoSKY[1][41] = -1.378990e-30; Coef_CCDtoSKY[1][42] = 1.815430e-34; Coef_CCDtoSKY[1][43] = 1.614070e-38; Coef_CCDtoSKY[1][44] = -3.630520e-42; Coef_CCDtoSKY[1][45] = 7.121870e-28; Coef_CCDtoSKY[1][46] = -1.122030e-28; Coef_CCDtoSKY[1][47] = -2.428600e-35; Coef_CCDtoSKY[1][48] = 4.110340e-37; Coef_CCDtoSKY[1][49] = -2.076600e-33; Coef_CCDtoSKY[1][50] = -4.280090e-39; Coef_CCDtoSKY[1][51] = 1.229790e-42; Coef_CCDtoSKY[1][52] = -1.945330e-36; Coef_CCDtoSKY[1][53] = 1.016440e-37; Coef_CCDtoSKY[1][54] = 3.627180e-42; }else if(EL==40){ Coef_CCDtoSKY[0][ 0]= -2.084070e-05; Coef_CCDtoSKY[0][ 1]= 
-1.963860e-09; Coef_CCDtoSKY[0][ 2]= 1.979540e-13; Coef_CCDtoSKY[0][ 3]= -5.170150e-18; Coef_CCDtoSKY[0][ 4]= -4.968360e-22; Coef_CCDtoSKY[0][ 5]= 1.244720e-25; Coef_CCDtoSKY[0][ 6]= 3.742730e-31; Coef_CCDtoSKY[0][ 7]= -4.565560e-34; Coef_CCDtoSKY[0][ 8]= -9.690280e-40; Coef_CCDtoSKY[0][ 9]= 8.130360e-43; Coef_CCDtoSKY[0][10] = 1.000010e+00; Coef_CCDtoSKY[0][11] = 9.056420e-10; Coef_CCDtoSKY[0][12] = -1.055920e-10; Coef_CCDtoSKY[0][13] = 2.581550e-18; Coef_CCDtoSKY[0][14] = -6.202430e-21; Coef_CCDtoSKY[0][15] = 8.450960e-27; Coef_CCDtoSKY[0][16] = -1.121730e-28; Coef_CCDtoSKY[0][17] = -1.260240e-35; Coef_CCDtoSKY[0][18] = 1.016010e-37; Coef_CCDtoSKY[0][19] = 3.088630e-12; Coef_CCDtoSKY[0][20] = 3.855580e-16; Coef_CCDtoSKY[0][21] = -2.487370e-20; Coef_CCDtoSKY[0][22] = -6.432670e-25; Coef_CCDtoSKY[0][23] = 4.747680e-29; Coef_CCDtoSKY[0][24] = -5.067810e-33; Coef_CCDtoSKY[0][25] = 7.160410e-39; Coef_CCDtoSKY[0][26] = 2.151510e-43; Coef_CCDtoSKY[0][27] = -1.055380e-10; Coef_CCDtoSKY[0][28] = 2.738030e-18; Coef_CCDtoSKY[0][29] = -1.230560e-20; Coef_CCDtoSKY[0][30] = 1.531210e-26; Coef_CCDtoSKY[0][31] = -3.381330e-28; Coef_CCDtoSKY[0][32] = -3.477040e-35; Coef_CCDtoSKY[0][33] = 4.106570e-37; Coef_CCDtoSKY[0][34] = -6.558860e-20; Coef_CCDtoSKY[0][35] = -9.777860e-24; Coef_CCDtoSKY[0][36] = 3.971590e-28; Coef_CCDtoSKY[0][37] = 2.569850e-32; Coef_CCDtoSKY[0][38] = -5.032320e-37; Coef_CCDtoSKY[0][39] = 5.573430e-41; Coef_CCDtoSKY[0][40] = -7.025090e-21; Coef_CCDtoSKY[0][41] = 7.166600e-27; Coef_CCDtoSKY[0][42] = -3.375000e-28; Coef_CCDtoSKY[0][43] = -3.190800e-35; Coef_CCDtoSKY[0][44] = 6.170190e-37; Coef_CCDtoSKY[0][45] = 4.128730e-28; Coef_CCDtoSKY[0][46] = 6.914870e-32; Coef_CCDtoSKY[0][47] = -1.390040e-36; Coef_CCDtoSKY[0][48] = -1.725840e-40; Coef_CCDtoSKY[0][49] = -1.077760e-28; Coef_CCDtoSKY[0][50] = -1.006580e-35; Coef_CCDtoSKY[0][51] = 4.088630e-37; Coef_CCDtoSKY[0][52] = -7.505810e-37; Coef_CCDtoSKY[0][53] = -1.297780e-40; Coef_CCDtoSKY[0][54] = 9.396770e-38; Coef_CCDtoSKY[1][ 0]= -2.709680e-04; Coef_CCDtoSKY[1][ 1]= 1.000010e+00; Coef_CCDtoSKY[1][ 2]= -9.025700e-10; Coef_CCDtoSKY[1][ 3]= -1.055350e-10; Coef_CCDtoSKY[1][ 4]= 2.739980e-18; Coef_CCDtoSKY[1][ 5]= -7.056710e-21; Coef_CCDtoSKY[1][ 6]= 9.458720e-27; Coef_CCDtoSKY[1][ 7]= -1.076500e-28; Coef_CCDtoSKY[1][ 8]= -1.468200e-35; Coef_CCDtoSKY[1][ 9]= 9.377090e-38; Coef_CCDtoSKY[1][10] = 4.147740e-10; Coef_CCDtoSKY[1][11] = -4.045570e-14; Coef_CCDtoSKY[1][12] = -1.592150e-19; Coef_CCDtoSKY[1][13] = 5.211300e-22; Coef_CCDtoSKY[1][14] = -1.655540e-26; Coef_CCDtoSKY[1][15] = -1.466310e-30; Coef_CCDtoSKY[1][16] = 4.620480e-35; Coef_CCDtoSKY[1][17] = 2.037400e-40; Coef_CCDtoSKY[1][18] = 2.058260e-44; Coef_CCDtoSKY[1][19] = -1.831630e-09; Coef_CCDtoSKY[1][20] = -1.055880e-10; Coef_CCDtoSKY[1][21] = 3.010450e-18; Coef_CCDtoSKY[1][22] = -1.239800e-20; Coef_CCDtoSKY[1][23] = 1.934530e-26; Coef_CCDtoSKY[1][24] = -3.369470e-28; Coef_CCDtoSKY[1][25] = -4.467130e-35; Coef_CCDtoSKY[1][26] = 4.078820e-37; Coef_CCDtoSKY[1][27] = -2.641930e-17; Coef_CCDtoSKY[1][28] = 1.347570e-21; Coef_CCDtoSKY[1][29] = 5.875500e-26; Coef_CCDtoSKY[1][30] = -1.237830e-29; Coef_CCDtoSKY[1][31] = 3.725130e-34; Coef_CCDtoSKY[1][32] = 2.778820e-38; Coef_CCDtoSKY[1][33] = -1.257250e-42; Coef_CCDtoSKY[1][34] = 2.843930e-19; Coef_CCDtoSKY[1][35] = -6.213760e-21; Coef_CCDtoSKY[1][36] = 1.111600e-26; Coef_CCDtoSKY[1][37] = -3.374460e-28; Coef_CCDtoSKY[1][38] = -4.969560e-35; Coef_CCDtoSKY[1][39] = 6.148930e-37; Coef_CCDtoSKY[1][40] = 4.179260e-25; Coef_CCDtoSKY[1][41] = 
-1.118290e-29; Coef_CCDtoSKY[1][42] = -1.093610e-33; Coef_CCDtoSKY[1][43] = 3.838440e-38; Coef_CCDtoSKY[1][44] = -9.770890e-44; Coef_CCDtoSKY[1][45] = 5.804680e-28; Coef_CCDtoSKY[1][46] = -1.121850e-28; Coef_CCDtoSKY[1][47] = -2.078980e-35; Coef_CCDtoSKY[1][48] = 4.094240e-37; Coef_CCDtoSKY[1][49] = -2.279640e-33; Coef_CCDtoSKY[1][50] = 3.049450e-38; Coef_CCDtoSKY[1][51] = 3.834400e-42; Coef_CCDtoSKY[1][52] = -1.573790e-36; Coef_CCDtoSKY[1][53] = 1.017230e-37; Coef_CCDtoSKY[1][54] = 3.877240e-42; }else if(EL==45){ Coef_CCDtoSKY[0][ 0]= -2.700480e-05; Coef_CCDtoSKY[0][ 1]= -1.232140e-09; Coef_CCDtoSKY[0][ 2]= 1.806400e-13; Coef_CCDtoSKY[0][ 3]= 9.041180e-18; Coef_CCDtoSKY[0][ 4]= -2.958750e-22; Coef_CCDtoSKY[0][ 5]= 1.742020e-26; Coef_CCDtoSKY[0][ 6]= 4.989580e-31; Coef_CCDtoSKY[0][ 7]= -1.912370e-34; Coef_CCDtoSKY[0][ 8]= -4.773630e-39; Coef_CCDtoSKY[0][ 9]= 3.882870e-43; Coef_CCDtoSKY[0][10] = 1.000000e+00; Coef_CCDtoSKY[0][11] = 7.543180e-10; Coef_CCDtoSKY[0][12] = -1.055900e-10; Coef_CCDtoSKY[0][13] = 2.133880e-18; Coef_CCDtoSKY[0][14] = -6.200950e-21; Coef_CCDtoSKY[0][15] = 6.604400e-27; Coef_CCDtoSKY[0][16] = -1.122340e-28; Coef_CCDtoSKY[0][17] = -9.226420e-36; Coef_CCDtoSKY[0][18] = 1.017530e-37; Coef_CCDtoSKY[0][19] = 4.049790e-12; Coef_CCDtoSKY[0][20] = 2.057490e-16; Coef_CCDtoSKY[0][21] = -2.416610e-20; Coef_CCDtoSKY[0][22] = -1.618130e-24; Coef_CCDtoSKY[0][23] = 2.927520e-29; Coef_CCDtoSKY[0][24] = 1.950910e-33; Coef_CCDtoSKY[0][25] = 1.103110e-37; Coef_CCDtoSKY[0][26] = -1.631820e-42; Coef_CCDtoSKY[0][27] = -1.055360e-10; Coef_CCDtoSKY[0][28] = 2.071340e-18; Coef_CCDtoSKY[0][29] = -1.239500e-20; Coef_CCDtoSKY[0][30] = 1.522610e-26; Coef_CCDtoSKY[0][31] = -3.376770e-28; Coef_CCDtoSKY[0][32] = -3.346560e-35; Coef_CCDtoSKY[0][33] = 4.101500e-37; Coef_CCDtoSKY[0][34] = -8.642940e-20; Coef_CCDtoSKY[0][35] = -4.757750e-24; Coef_CCDtoSKY[0][36] = 4.038200e-28; Coef_CCDtoSKY[0][37] = 2.906640e-32; Coef_CCDtoSKY[0][38] = -6.457650e-37; Coef_CCDtoSKY[0][39] = -1.483030e-41; Coef_CCDtoSKY[0][40] = -7.042120e-21; Coef_CCDtoSKY[0][41] = 6.915150e-27; Coef_CCDtoSKY[0][42] = -3.368000e-28; Coef_CCDtoSKY[0][43] = -3.364750e-35; Coef_CCDtoSKY[0][44] = 6.149490e-37; Coef_CCDtoSKY[0][45] = 5.435520e-28; Coef_CCDtoSKY[0][46] = 3.293190e-32; Coef_CCDtoSKY[0][47] = -1.319440e-36; Coef_CCDtoSKY[0][48] = -1.201820e-40; Coef_CCDtoSKY[0][49] = -1.076960e-28; Coef_CCDtoSKY[0][50] = -9.526290e-36; Coef_CCDtoSKY[0][51] = 4.074750e-37; Coef_CCDtoSKY[0][52] = -9.868790e-37; Coef_CCDtoSKY[0][53] = -6.712580e-41; Coef_CCDtoSKY[0][54] = 9.383580e-38; Coef_CCDtoSKY[1][ 0]= -3.311230e-04; Coef_CCDtoSKY[1][ 1]= 1.000010e+00; Coef_CCDtoSKY[1][ 2]= -7.408420e-10; Coef_CCDtoSKY[1][ 3]= -1.055350e-10; Coef_CCDtoSKY[1][ 4]= 2.128720e-18; Coef_CCDtoSKY[1][ 5]= -7.057510e-21; Coef_CCDtoSKY[1][ 6]= 8.688840e-27; Coef_CCDtoSKY[1][ 7]= -1.076300e-28; Coef_CCDtoSKY[1][ 8]= -1.356260e-35; Coef_CCDtoSKY[1][ 9]= 9.373120e-38; Coef_CCDtoSKY[1][10] = 6.759490e-11; Coef_CCDtoSKY[1][11] = 5.298080e-14; Coef_CCDtoSKY[1][12] = -4.922510e-18; Coef_CCDtoSKY[1][13] = -3.596810e-22; Coef_CCDtoSKY[1][14] = 2.686980e-26; Coef_CCDtoSKY[1][15] = 3.679840e-31; Coef_CCDtoSKY[1][16] = -3.592460e-35; Coef_CCDtoSKY[1][17] = -5.719250e-41; Coef_CCDtoSKY[1][18] = 2.037110e-44; Coef_CCDtoSKY[1][19] = -1.517240e-09; Coef_CCDtoSKY[1][20] = -1.055890e-10; Coef_CCDtoSKY[1][21] = 2.439740e-18; Coef_CCDtoSKY[1][22] = -1.238220e-20; Coef_CCDtoSKY[1][23] = 1.707040e-26; Coef_CCDtoSKY[1][24] = -3.369220e-28; Coef_CCDtoSKY[1][25] = -4.112550e-35; 
Coef_CCDtoSKY[1][26] = 4.075910e-37; Coef_CCDtoSKY[1][27] = -1.813260e-18; Coef_CCDtoSKY[1][28] = -2.094840e-21; Coef_CCDtoSKY[1][29] = 1.933810e-25; Coef_CCDtoSKY[1][30] = 1.145170e-29; Coef_CCDtoSKY[1][31] = -8.633020e-34; Coef_CCDtoSKY[1][32] = -6.852500e-39; Coef_CCDtoSKY[1][33] = 5.699830e-43; Coef_CCDtoSKY[1][34] = 2.138770e-19; Coef_CCDtoSKY[1][35] = -6.206460e-21; Coef_CCDtoSKY[1][36] = 9.220790e-27; Coef_CCDtoSKY[1][37] = -3.376400e-28; Coef_CCDtoSKY[1][38] = -4.071180e-35; Coef_CCDtoSKY[1][39] = 6.154720e-37; Coef_CCDtoSKY[1][40] = 4.423710e-28; Coef_CCDtoSKY[1][41] = 1.872410e-29; Coef_CCDtoSKY[1][42] = -1.626040e-33; Coef_CCDtoSKY[1][43] = -6.125880e-38; Coef_CCDtoSKY[1][44] = 4.752610e-42; Coef_CCDtoSKY[1][45] = 6.366340e-28; Coef_CCDtoSKY[1][46] = -1.121860e-28; Coef_CCDtoSKY[1][47] = -1.756750e-35; Coef_CCDtoSKY[1][48] = 4.096850e-37; Coef_CCDtoSKY[1][49] = 1.076810e-34; Coef_CCDtoSKY[1][50] = -4.428060e-38; Coef_CCDtoSKY[1][51] = 3.249460e-42; Coef_CCDtoSKY[1][52] = -1.596630e-36; Coef_CCDtoSKY[1][53] = 1.016910e-37; Coef_CCDtoSKY[1][54] = -3.040980e-43; }else if(EL==50){ Coef_CCDtoSKY[0][ 0]= -1.599210e-05; Coef_CCDtoSKY[0][ 1]= 8.243510e-10; Coef_CCDtoSKY[0][ 2]= 1.194280e-13; Coef_CCDtoSKY[0][ 3]= -1.469910e-18; Coef_CCDtoSKY[0][ 4]= -2.491130e-22; Coef_CCDtoSKY[0][ 5]= -1.273500e-26; Coef_CCDtoSKY[0][ 6]= 2.066920e-31; Coef_CCDtoSKY[0][ 7]= 6.444970e-35; Coef_CCDtoSKY[0][ 8]= -3.179790e-40; Coef_CCDtoSKY[0][ 9]= -1.608250e-43; Coef_CCDtoSKY[0][10] = 1.000000e+00; Coef_CCDtoSKY[0][11] = 6.336530e-10; Coef_CCDtoSKY[0][12] = -1.055920e-10; Coef_CCDtoSKY[0][13] = 1.667960e-18; Coef_CCDtoSKY[0][14] = -6.183580e-21; Coef_CCDtoSKY[0][15] = 6.470750e-27; Coef_CCDtoSKY[0][16] = -1.123230e-28; Coef_CCDtoSKY[0][17] = -9.997120e-36; Coef_CCDtoSKY[0][18] = 1.019330e-37; Coef_CCDtoSKY[0][19] = 2.416640e-12; Coef_CCDtoSKY[0][20] = -1.424200e-16; Coef_CCDtoSKY[0][21] = -1.554090e-20; Coef_CCDtoSKY[0][22] = 3.913140e-25; Coef_CCDtoSKY[0][23] = 2.269790e-29; Coef_CCDtoSKY[0][24] = 2.159860e-34; Coef_CCDtoSKY[0][25] = -1.049090e-39; Coef_CCDtoSKY[0][26] = 1.723250e-42; Coef_CCDtoSKY[0][27] = -1.055370e-10; Coef_CCDtoSKY[0][28] = 1.642040e-18; Coef_CCDtoSKY[0][29] = -1.235390e-20; Coef_CCDtoSKY[0][30] = 1.274350e-26; Coef_CCDtoSKY[0][31] = -3.377500e-28; Coef_CCDtoSKY[0][32] = -2.742620e-35; Coef_CCDtoSKY[0][33] = 4.100070e-37; Coef_CCDtoSKY[0][34] = -5.223160e-20; Coef_CCDtoSKY[0][35] = 3.327090e-24; Coef_CCDtoSKY[0][36] = 2.611270e-28; Coef_CCDtoSKY[0][37] = -8.950820e-33; Coef_CCDtoSKY[0][38] = -2.260360e-37; Coef_CCDtoSKY[0][39] = -9.818060e-42; Coef_CCDtoSKY[0][40] = -7.030900e-21; Coef_CCDtoSKY[0][41] = 6.653440e-27; Coef_CCDtoSKY[0][42] = -3.371510e-28; Coef_CCDtoSKY[0][43] = -3.040650e-35; Coef_CCDtoSKY[0][44] = 6.155880e-37; Coef_CCDtoSKY[0][45] = 3.336210e-28; Coef_CCDtoSKY[0][46] = -2.176790e-32; Coef_CCDtoSKY[0][47] = -9.858050e-37; Coef_CCDtoSKY[0][48] = 5.139260e-41; Coef_CCDtoSKY[0][49] = -1.077290e-28; Coef_CCDtoSKY[0][50] = -1.001300e-35; Coef_CCDtoSKY[0][51] = 4.082310e-37; Coef_CCDtoSKY[0][52] = -6.118940e-37; Coef_CCDtoSKY[0][53] = 3.640850e-41; Coef_CCDtoSKY[0][54] = 9.386570e-38; Coef_CCDtoSKY[1][ 0]= -1.960410e-04; Coef_CCDtoSKY[1][ 1]= 1.000010e+00; Coef_CCDtoSKY[1][ 2]= -6.187200e-10; Coef_CCDtoSKY[1][ 3]= -1.055350e-10; Coef_CCDtoSKY[1][ 4]= 1.745270e-18; Coef_CCDtoSKY[1][ 5]= -7.050030e-21; Coef_CCDtoSKY[1][ 6]= 7.562580e-27; Coef_CCDtoSKY[1][ 7]= -1.076520e-28; Coef_CCDtoSKY[1][ 8]= -1.206460e-35; Coef_CCDtoSKY[1][ 9]= 9.374410e-38; Coef_CCDtoSKY[1][10] = 
5.198970e-10; Coef_CCDtoSKY[1][11] = -4.533500e-14; Coef_CCDtoSKY[1][12] = 2.539750e-18; Coef_CCDtoSKY[1][13] = 2.727170e-22; Coef_CCDtoSKY[1][14] = -2.374630e-26; Coef_CCDtoSKY[1][15] = -1.323150e-33; Coef_CCDtoSKY[1][16] = 7.468310e-35; Coef_CCDtoSKY[1][17] = -6.620330e-40; Coef_CCDtoSKY[1][18] = 7.863080e-45; Coef_CCDtoSKY[1][19] = -1.268790e-09; Coef_CCDtoSKY[1][20] = -1.055900e-10; Coef_CCDtoSKY[1][21] = 2.286320e-18; Coef_CCDtoSKY[1][22] = -1.237740e-20; Coef_CCDtoSKY[1][23] = 1.261670e-26; Coef_CCDtoSKY[1][24] = -3.370340e-28; Coef_CCDtoSKY[1][25] = -3.096510e-35; Coef_CCDtoSKY[1][26] = 4.080820e-37; Coef_CCDtoSKY[1][27] = -3.299470e-17; Coef_CCDtoSKY[1][28] = 1.806180e-21; Coef_CCDtoSKY[1][29] = -8.068890e-26; Coef_CCDtoSKY[1][30] = -9.912000e-30; Coef_CCDtoSKY[1][31] = 4.880940e-34; Coef_CCDtoSKY[1][32] = 5.643960e-39; Coef_CCDtoSKY[1][33] = -1.760620e-42; Coef_CCDtoSKY[1][34] = 2.240250e-19; Coef_CCDtoSKY[1][35] = -6.196470e-21; Coef_CCDtoSKY[1][36] = 5.316380e-27; Coef_CCDtoSKY[1][37] = -3.376360e-28; Coef_CCDtoSKY[1][38] = -2.728360e-35; Coef_CCDtoSKY[1][39] = 6.151310e-37; Coef_CCDtoSKY[1][40] = 5.206630e-25; Coef_CCDtoSKY[1][41] = -1.566670e-29; Coef_CCDtoSKY[1][42] = 6.712640e-34; Coef_CCDtoSKY[1][43] = 5.370890e-38; Coef_CCDtoSKY[1][44] = 7.812320e-44; Coef_CCDtoSKY[1][45] = 3.700360e-28; Coef_CCDtoSKY[1][46] = -1.122560e-28; Coef_CCDtoSKY[1][47] = -8.763590e-36; Coef_CCDtoSKY[1][48] = 4.100240e-37; Coef_CCDtoSKY[1][49] = -2.843100e-33; Coef_CCDtoSKY[1][50] = 3.493860e-38; Coef_CCDtoSKY[1][51] = -2.477870e-42; Coef_CCDtoSKY[1][52] = -1.196210e-36; Coef_CCDtoSKY[1][53] = 1.018060e-37; Coef_CCDtoSKY[1][54] = 4.872670e-42; }else if(EL==55){ Coef_CCDtoSKY[0][ 0]= -2.010880e-05; Coef_CCDtoSKY[0][ 1]= 1.907070e-09; Coef_CCDtoSKY[0][ 2]= 2.217430e-13; Coef_CCDtoSKY[0][ 3]= -6.776740e-18; Coef_CCDtoSKY[0][ 4]= -1.247870e-21; Coef_CCDtoSKY[0][ 5]= -5.468910e-26; Coef_CCDtoSKY[0][ 6]= 3.104090e-30; Coef_CCDtoSKY[0][ 7]= 3.192120e-34; Coef_CCDtoSKY[0][ 8]= -7.313170e-39; Coef_CCDtoSKY[0][ 9]= -6.313300e-43; Coef_CCDtoSKY[0][10] = 1.000000e+00; Coef_CCDtoSKY[0][11] = 5.244980e-10; Coef_CCDtoSKY[0][12] = -1.055890e-10; Coef_CCDtoSKY[0][13] = 1.416100e-18; Coef_CCDtoSKY[0][14] = -6.227030e-21; Coef_CCDtoSKY[0][15] = 5.309990e-27; Coef_CCDtoSKY[0][16] = -1.120960e-28; Coef_CCDtoSKY[0][17] = -8.141780e-36; Coef_CCDtoSKY[0][18] = 1.015490e-37; Coef_CCDtoSKY[0][19] = 2.830180e-12; Coef_CCDtoSKY[0][20] = -3.362400e-16; Coef_CCDtoSKY[0][21] = -2.208890e-20; Coef_CCDtoSKY[0][22] = 1.702760e-24; Coef_CCDtoSKY[0][23] = 8.716070e-29; Coef_CCDtoSKY[0][24] = -2.383930e-34; Coef_CCDtoSKY[0][25] = 2.615230e-38; Coef_CCDtoSKY[0][26] = 2.213260e-42; Coef_CCDtoSKY[0][27] = -1.055360e-10; Coef_CCDtoSKY[0][28] = 1.490700e-18; Coef_CCDtoSKY[0][29] = -1.240220e-20; Coef_CCDtoSKY[0][30] = 9.795210e-27; Coef_CCDtoSKY[0][31] = -3.373930e-28; Coef_CCDtoSKY[0][32] = -2.198670e-35; Coef_CCDtoSKY[0][33] = 4.093090e-37; Coef_CCDtoSKY[0][34] = -5.748760e-20; Coef_CCDtoSKY[0][35] = 8.023440e-24; Coef_CCDtoSKY[0][36] = 2.744890e-28; Coef_CCDtoSKY[0][37] = -3.510410e-32; Coef_CCDtoSKY[0][38] = -9.656580e-37; Coef_CCDtoSKY[0][39] = -6.816530e-42; Coef_CCDtoSKY[0][40] = -7.033390e-21; Coef_CCDtoSKY[0][41] = 4.659520e-27; Coef_CCDtoSKY[0][42] = -3.368420e-28; Coef_CCDtoSKY[0][43] = -2.182710e-35; Coef_CCDtoSKY[0][44] = 6.144660e-37; Coef_CCDtoSKY[0][45] = 3.480650e-28; Coef_CCDtoSKY[0][46] = -5.545560e-32; Coef_CCDtoSKY[0][47] = -5.541860e-37; Coef_CCDtoSKY[0][48] = 1.691940e-40; Coef_CCDtoSKY[0][49] = -1.077580e-28; 
Coef_CCDtoSKY[0][50] = -6.450690e-36; Coef_CCDtoSKY[0][51] = 4.077410e-37; Coef_CCDtoSKY[0][52] = -6.173370e-37; Coef_CCDtoSKY[0][53] = 1.068300e-40; Coef_CCDtoSKY[0][54] = 9.396480e-38; Coef_CCDtoSKY[1][ 0]= -1.830480e-04; Coef_CCDtoSKY[1][ 1]= 1.000010e+00; Coef_CCDtoSKY[1][ 2]= -5.163460e-10; Coef_CCDtoSKY[1][ 3]= -1.055340e-10; Coef_CCDtoSKY[1][ 4]= 1.545510e-18; Coef_CCDtoSKY[1][ 5]= -7.055310e-21; Coef_CCDtoSKY[1][ 6]= 5.594510e-27; Coef_CCDtoSKY[1][ 7]= -1.076340e-28; Coef_CCDtoSKY[1][ 8]= -8.632010e-36; Coef_CCDtoSKY[1][ 9]= 9.373980e-38; Coef_CCDtoSKY[1][10] = -7.406750e-10; Coef_CCDtoSKY[1][11] = -3.659180e-14; Coef_CCDtoSKY[1][12] = 7.938520e-18; Coef_CCDtoSKY[1][13] = 5.279530e-22; Coef_CCDtoSKY[1][14] = -1.215750e-26; Coef_CCDtoSKY[1][15] = -1.633200e-30; Coef_CCDtoSKY[1][16] = -5.042270e-35; Coef_CCDtoSKY[1][17] = 4.019800e-40; Coef_CCDtoSKY[1][18] = -3.034110e-46; Coef_CCDtoSKY[1][19] = -1.052830e-09; Coef_CCDtoSKY[1][20] = -1.055890e-10; Coef_CCDtoSKY[1][21] = 1.746900e-18; Coef_CCDtoSKY[1][22] = -1.240730e-20; Coef_CCDtoSKY[1][23] = 1.092780e-26; Coef_CCDtoSKY[1][24] = -3.367770e-28; Coef_CCDtoSKY[1][25] = -2.467820e-35; Coef_CCDtoSKY[1][26] = 4.074860e-37; Coef_CCDtoSKY[1][27] = 4.319790e-17; Coef_CCDtoSKY[1][28] = 1.137640e-21; Coef_CCDtoSKY[1][29] = -4.024940e-25; Coef_CCDtoSKY[1][30] = -1.189540e-29; Coef_CCDtoSKY[1][31] = 8.160990e-34; Coef_CCDtoSKY[1][32] = 2.946620e-38; Coef_CCDtoSKY[1][33] = 1.134460e-42; Coef_CCDtoSKY[1][34] = 1.331780e-19; Coef_CCDtoSKY[1][35] = -6.204360e-21; Coef_CCDtoSKY[1][36] = 6.276930e-27; Coef_CCDtoSKY[1][37] = -3.373960e-28; Coef_CCDtoSKY[1][38] = -2.806340e-35; Coef_CCDtoSKY[1][39] = 6.143670e-37; Coef_CCDtoSKY[1][40] = -6.366680e-25; Coef_CCDtoSKY[1][41] = -8.909740e-30; Coef_CCDtoSKY[1][42] = 4.298290e-33; Coef_CCDtoSKY[1][43] = 3.312390e-38; Coef_CCDtoSKY[1][44] = -7.715580e-42; Coef_CCDtoSKY[1][45] = 5.302780e-28; Coef_CCDtoSKY[1][46] = -1.122340e-28; Coef_CCDtoSKY[1][47] = -1.178700e-35; Coef_CCDtoSKY[1][48] = 4.094150e-37; Coef_CCDtoSKY[1][49] = 3.289400e-33; Coef_CCDtoSKY[1][50] = 2.436530e-38; Coef_CCDtoSKY[1][51] = -1.100210e-41; Coef_CCDtoSKY[1][52] = -1.274730e-36; Coef_CCDtoSKY[1][53] = 1.018080e-37; Coef_CCDtoSKY[1][54] = -5.393710e-42; }else if(EL==60){ Coef_CCDtoSKY[0][ 0]= -2.129860e-05; Coef_CCDtoSKY[0][ 1]= -2.819700e-10; Coef_CCDtoSKY[0][ 2]= -1.533690e-13; Coef_CCDtoSKY[0][ 3]= -2.126310e-19; Coef_CCDtoSKY[0][ 4]= 4.644770e-22; Coef_CCDtoSKY[0][ 5]= 1.657380e-26; Coef_CCDtoSKY[0][ 6]= 1.030950e-30; Coef_CCDtoSKY[0][ 7]= -7.249060e-35; Coef_CCDtoSKY[0][ 8]= -1.853480e-39; Coef_CCDtoSKY[0][ 9]= 1.348190e-43; Coef_CCDtoSKY[0][10] = 1.000000e+00; Coef_CCDtoSKY[0][11] = 4.256840e-10; Coef_CCDtoSKY[0][12] = -1.055900e-10; Coef_CCDtoSKY[0][13] = 1.308860e-18; Coef_CCDtoSKY[0][14] = -6.195210e-21; Coef_CCDtoSKY[0][15] = 3.499760e-27; Coef_CCDtoSKY[0][16] = -1.122540e-28; Coef_CCDtoSKY[0][17] = -4.938950e-36; Coef_CCDtoSKY[0][18] = 1.017910e-37; Coef_CCDtoSKY[0][19] = 3.536350e-12; Coef_CCDtoSKY[0][20] = 5.386880e-17; Coef_CCDtoSKY[0][21] = 1.748230e-20; Coef_CCDtoSKY[0][22] = -1.591140e-25; Coef_CCDtoSKY[0][23] = -6.771620e-29; Coef_CCDtoSKY[0][24] = -4.017820e-34; Coef_CCDtoSKY[0][25] = -6.740880e-39; Coef_CCDtoSKY[0][26] = -2.623710e-43; Coef_CCDtoSKY[0][27] = -1.055360e-10; Coef_CCDtoSKY[0][28] = 1.461820e-18; Coef_CCDtoSKY[0][29] = -1.237180e-20; Coef_CCDtoSKY[0][30] = 6.070600e-27; Coef_CCDtoSKY[0][31] = -3.377550e-28; Coef_CCDtoSKY[0][32] = -1.372910e-35; Coef_CCDtoSKY[0][33] = 4.102420e-37; Coef_CCDtoSKY[0][34] = 
-8.243210e-20; Coef_CCDtoSKY[0][35] = -1.340900e-24; Coef_CCDtoSKY[0][36] = -2.138010e-28; Coef_CCDtoSKY[0][37] = 4.317510e-33; Coef_CCDtoSKY[0][38] = 7.099630e-37; Coef_CCDtoSKY[0][39] = 5.618920e-42; Coef_CCDtoSKY[0][40] = -7.029230e-21; Coef_CCDtoSKY[0][41] = 2.507230e-27; Coef_CCDtoSKY[0][42] = -3.370060e-28; Coef_CCDtoSKY[0][43] = -1.264090e-35; Coef_CCDtoSKY[0][44] = 6.154240e-37; Coef_CCDtoSKY[0][45] = 5.567690e-28; Coef_CCDtoSKY[0][46] = 9.466690e-33; Coef_CCDtoSKY[0][47] = 4.295920e-37; Coef_CCDtoSKY[0][48] = -2.540640e-41; Coef_CCDtoSKY[0][49] = -1.077710e-28; Coef_CCDtoSKY[0][50] = -3.341230e-36; Coef_CCDtoSKY[0][51] = 4.079770e-37; Coef_CCDtoSKY[0][52] = -1.054500e-36; Coef_CCDtoSKY[0][53] = -1.807560e-41; Coef_CCDtoSKY[0][54] = 9.397340e-38; Coef_CCDtoSKY[1][ 0]= -8.203230e-05; Coef_CCDtoSKY[1][ 1]= 1.000000e+00; Coef_CCDtoSKY[1][ 2]= -4.342650e-10; Coef_CCDtoSKY[1][ 3]= -1.055350e-10; Coef_CCDtoSKY[1][ 4]= 1.381540e-18; Coef_CCDtoSKY[1][ 5]= -7.046280e-21; Coef_CCDtoSKY[1][ 6]= 4.182720e-27; Coef_CCDtoSKY[1][ 7]= -1.076700e-28; Coef_CCDtoSKY[1][ 8]= -6.548260e-36; Coef_CCDtoSKY[1][ 9]= 9.378480e-38; Coef_CCDtoSKY[1][10] = -3.753060e-11; Coef_CCDtoSKY[1][11] = 4.690050e-14; Coef_CCDtoSKY[1][12] = 1.611720e-18; Coef_CCDtoSKY[1][13] = -2.504770e-22; Coef_CCDtoSKY[1][14] = -3.611080e-26; Coef_CCDtoSKY[1][15] = 1.443610e-31; Coef_CCDtoSKY[1][16] = 4.578610e-35; Coef_CCDtoSKY[1][17] = -4.858100e-40; Coef_CCDtoSKY[1][18] = -1.443830e-44; Coef_CCDtoSKY[1][19] = -8.713480e-10; Coef_CCDtoSKY[1][20] = -1.055890e-10; Coef_CCDtoSKY[1][21] = 1.505560e-18; Coef_CCDtoSKY[1][22] = -1.236050e-20; Coef_CCDtoSKY[1][23] = 8.492970e-27; Coef_CCDtoSKY[1][24] = -3.371660e-28; Coef_CCDtoSKY[1][25] = -1.932680e-35; Coef_CCDtoSKY[1][26] = 4.082680e-37; Coef_CCDtoSKY[1][27] = -1.030330e-18; Coef_CCDtoSKY[1][28] = -2.033420e-21; Coef_CCDtoSKY[1][29] = 3.531420e-26; Coef_CCDtoSKY[1][30] = 8.540800e-30; Coef_CCDtoSKY[1][31] = 1.166690e-33; Coef_CCDtoSKY[1][32] = 1.280460e-39; Coef_CCDtoSKY[1][33] = -8.367450e-43; Coef_CCDtoSKY[1][34] = 1.294100e-19; Coef_CCDtoSKY[1][35] = -6.207950e-21; Coef_CCDtoSKY[1][36] = 4.991680e-27; Coef_CCDtoSKY[1][37] = -3.377490e-28; Coef_CCDtoSKY[1][38] = -2.265540e-35; Coef_CCDtoSKY[1][39] = 6.160900e-37; Coef_CCDtoSKY[1][40] = 5.346530e-26; Coef_CCDtoSKY[1][41] = 2.018370e-29; Coef_CCDtoSKY[1][42] = -1.729030e-33; Coef_CCDtoSKY[1][43] = -5.439880e-38; Coef_CCDtoSKY[1][44] = -6.275720e-42; Coef_CCDtoSKY[1][45] = 3.257870e-28; Coef_CCDtoSKY[1][46] = -1.121720e-28; Coef_CCDtoSKY[1][47] = -9.335790e-36; Coef_CCDtoSKY[1][48] = 4.099360e-37; Coef_CCDtoSKY[1][49] = -4.184950e-34; Coef_CCDtoSKY[1][50] = -5.281960e-38; Coef_CCDtoSKY[1][51] = 9.296320e-42; Coef_CCDtoSKY[1][52] = -8.515630e-37; Coef_CCDtoSKY[1][53] = 1.016580e-37; Coef_CCDtoSKY[1][54] = 7.904800e-43; }else if(EL==65){ Coef_CCDtoSKY[0][ 0]= -1.158220e-05; Coef_CCDtoSKY[0][ 1]= -1.453420e-09; Coef_CCDtoSKY[0][ 2]= -1.266090e-14; Coef_CCDtoSKY[0][ 3]= 1.591090e-17; Coef_CCDtoSKY[0][ 4]= 5.737350e-23; Coef_CCDtoSKY[0][ 5]= -1.579350e-26; Coef_CCDtoSKY[0][ 6]= 4.388200e-31; Coef_CCDtoSKY[0][ 7]= -1.001430e-34; Coef_CCDtoSKY[0][ 8]= -9.919030e-40; Coef_CCDtoSKY[0][ 9]= 1.611670e-43; Coef_CCDtoSKY[0][10] = 1.000000e+00; Coef_CCDtoSKY[0][11] = 3.489860e-10; Coef_CCDtoSKY[0][12] = -1.055880e-10; Coef_CCDtoSKY[0][13] = 1.052590e-18; Coef_CCDtoSKY[0][14] = -6.215700e-21; Coef_CCDtoSKY[0][15] = 2.828920e-27; Coef_CCDtoSKY[0][16] = -1.121970e-28; Coef_CCDtoSKY[0][17] = -3.938350e-36; Coef_CCDtoSKY[0][18] = 1.017710e-37; 
Coef_CCDtoSKY[0][19] = 1.843960e-12; Coef_CCDtoSKY[0][20] = 2.200180e-16; Coef_CCDtoSKY[0][21] = 9.996410e-22; Coef_CCDtoSKY[0][22] = -2.339560e-24; Coef_CCDtoSKY[0][23] = -1.333090e-29; Coef_CCDtoSKY[0][24] = 4.125680e-33; Coef_CCDtoSKY[0][25] = 2.947580e-39; Coef_CCDtoSKY[0][26] = 6.939210e-43; Coef_CCDtoSKY[0][27] = -1.055320e-10; Coef_CCDtoSKY[0][28] = 1.021870e-18; Coef_CCDtoSKY[0][29] = -1.245700e-20; Coef_CCDtoSKY[0][30] = 5.604990e-27; Coef_CCDtoSKY[0][31] = -3.371490e-28; Coef_CCDtoSKY[0][32] = -1.221960e-35; Coef_CCDtoSKY[0][33] = 4.089250e-37; Coef_CCDtoSKY[0][34] = -4.160010e-20; Coef_CCDtoSKY[0][35] = -4.638510e-24; Coef_CCDtoSKY[0][36] = 5.376010e-30; Coef_CCDtoSKY[0][37] = 3.854050e-32; Coef_CCDtoSKY[0][38] = 1.268040e-37; Coef_CCDtoSKY[0][39] = -4.907450e-41; Coef_CCDtoSKY[0][40] = -7.079860e-21; Coef_CCDtoSKY[0][41] = 3.317410e-27; Coef_CCDtoSKY[0][42] = -3.364610e-28; Coef_CCDtoSKY[0][43] = -1.145620e-35; Coef_CCDtoSKY[0][44] = 6.138810e-37; Coef_CCDtoSKY[0][45] = 2.743720e-28; Coef_CCDtoSKY[0][46] = 2.944650e-32; Coef_CCDtoSKY[0][47] = -1.165540e-37; Coef_CCDtoSKY[0][48] = -1.310640e-40; Coef_CCDtoSKY[0][49] = -1.075230e-28; Coef_CCDtoSKY[0][50] = -5.660930e-36; Coef_CCDtoSKY[0][51] = 4.068660e-37; Coef_CCDtoSKY[0][52] = -5.126950e-37; Coef_CCDtoSKY[0][53] = -5.790880e-41; Coef_CCDtoSKY[0][54] = 9.357640e-38; Coef_CCDtoSKY[1][ 0]= -1.306120e-04; Coef_CCDtoSKY[1][ 1]= 1.000000e+00; Coef_CCDtoSKY[1][ 2]= -3.438450e-10; Coef_CCDtoSKY[1][ 3]= -1.055350e-10; Coef_CCDtoSKY[1][ 4]= 9.468750e-19; Coef_CCDtoSKY[1][ 5]= -7.044170e-21; Coef_CCDtoSKY[1][ 6]= 4.432160e-27; Coef_CCDtoSKY[1][ 7]= -1.076780e-28; Coef_CCDtoSKY[1][ 8]= -7.149010e-36; Coef_CCDtoSKY[1][ 9]= 9.379650e-38; Coef_CCDtoSKY[1][10] = 6.234380e-11; Coef_CCDtoSKY[1][11] = 1.813660e-14; Coef_CCDtoSKY[1][12] = -7.864700e-18; Coef_CCDtoSKY[1][13] = 4.855790e-23; Coef_CCDtoSKY[1][14] = 6.252670e-27; Coef_CCDtoSKY[1][15] = -6.270670e-31; Coef_CCDtoSKY[1][16] = 3.859490e-36; Coef_CCDtoSKY[1][17] = -1.110010e-40; Coef_CCDtoSKY[1][18] = 1.844690e-44; Coef_CCDtoSKY[1][19] = -7.081790e-10; Coef_CCDtoSKY[1][20] = -1.055890e-10; Coef_CCDtoSKY[1][21] = 1.298690e-18; Coef_CCDtoSKY[1][22] = -1.236780e-20; Coef_CCDtoSKY[1][23] = 6.277540e-27; Coef_CCDtoSKY[1][24] = -3.370470e-28; Coef_CCDtoSKY[1][25] = -1.428310e-35; Coef_CCDtoSKY[1][26] = 4.080320e-37; Coef_CCDtoSKY[1][27] = -2.971010e-18; Coef_CCDtoSKY[1][28] = -9.650050e-22; Coef_CCDtoSKY[1][29] = 4.356530e-25; Coef_CCDtoSKY[1][30] = 1.095150e-30; Coef_CCDtoSKY[1][31] = -2.907240e-34; Coef_CCDtoSKY[1][32] = 1.360970e-38; Coef_CCDtoSKY[1][33] = -2.723250e-43; Coef_CCDtoSKY[1][34] = 1.193610e-19; Coef_CCDtoSKY[1][35] = -6.212420e-21; Coef_CCDtoSKY[1][36] = 2.990940e-27; Coef_CCDtoSKY[1][37] = -3.376500e-28; Coef_CCDtoSKY[1][38] = -1.380530e-35; Coef_CCDtoSKY[1][39] = 6.152500e-37; Coef_CCDtoSKY[1][40] = 3.169200e-26; Coef_CCDtoSKY[1][41] = 1.035170e-29; Coef_CCDtoSKY[1][42] = -5.407690e-33; Coef_CCDtoSKY[1][43] = -2.536820e-38; Coef_CCDtoSKY[1][44] = 2.521470e-42; Coef_CCDtoSKY[1][45] = 2.152180e-28; Coef_CCDtoSKY[1][46] = -1.121400e-28; Coef_CCDtoSKY[1][47] = -5.149410e-36; Coef_CCDtoSKY[1][48] = 4.098070e-37; Coef_CCDtoSKY[1][49] = -8.454610e-35; Coef_CCDtoSKY[1][50] = -2.571540e-38; Coef_CCDtoSKY[1][51] = 1.760120e-41; Coef_CCDtoSKY[1][52] = -6.510310e-37; Coef_CCDtoSKY[1][53] = 1.015890e-37; Coef_CCDtoSKY[1][54] = -2.752720e-44; }else if(EL==70){ Coef_CCDtoSKY[0][ 0]= -1.061130e-05; Coef_CCDtoSKY[0][ 1]= 9.879510e-11; Coef_CCDtoSKY[0][ 2]= -1.005630e-14; Coef_CCDtoSKY[0][ 3]= 
7.076870e-18; Coef_CCDtoSKY[0][ 4]= -4.270090e-22; Coef_CCDtoSKY[0][ 5]= -3.215680e-26; Coef_CCDtoSKY[0][ 6]= 2.080060e-30; Coef_CCDtoSKY[0][ 7]= 5.272120e-35; Coef_CCDtoSKY[0][ 8]= -2.899380e-39; Coef_CCDtoSKY[0][ 9]= -1.165170e-43; Coef_CCDtoSKY[0][10] = 1.000000e+00; Coef_CCDtoSKY[0][11] = 2.788280e-10; Coef_CCDtoSKY[0][12] = -1.055880e-10; Coef_CCDtoSKY[0][13] = 7.613930e-19; Coef_CCDtoSKY[0][14] = -6.207900e-21; Coef_CCDtoSKY[0][15] = 2.546030e-27; Coef_CCDtoSKY[0][16] = -1.122050e-28; Coef_CCDtoSKY[0][17] = -3.466520e-36; Coef_CCDtoSKY[0][18] = 1.017110e-37; Coef_CCDtoSKY[0][19] = 1.635440e-12; Coef_CCDtoSKY[0][20] = -3.735770e-17; Coef_CCDtoSKY[0][21] = 4.602640e-21; Coef_CCDtoSKY[0][22] = -7.372260e-25; Coef_CCDtoSKY[0][23] = 1.113760e-29; Coef_CCDtoSKY[0][24] = 2.770020e-33; Coef_CCDtoSKY[0][25] = -4.130790e-38; Coef_CCDtoSKY[0][26] = 8.493000e-43; Coef_CCDtoSKY[0][27] = -1.055330e-10; Coef_CCDtoSKY[0][28] = 6.868950e-19; Coef_CCDtoSKY[0][29] = -1.244180e-20; Coef_CCDtoSKY[0][30] = 6.097510e-27; Coef_CCDtoSKY[0][31] = -3.374320e-28; Coef_CCDtoSKY[0][32] = -1.490930e-35; Coef_CCDtoSKY[0][33] = 4.098450e-37; Coef_CCDtoSKY[0][34] = -3.634200e-20; Coef_CCDtoSKY[0][35] = 1.194040e-24; Coef_CCDtoSKY[0][36] = -9.986720e-29; Coef_CCDtoSKY[0][37] = 8.441580e-33; Coef_CCDtoSKY[0][38] = 1.937710e-38; Coef_CCDtoSKY[0][39] = -3.461170e-41; Coef_CCDtoSKY[0][40] = -7.068930e-21; Coef_CCDtoSKY[0][41] = 3.361360e-27; Coef_CCDtoSKY[0][42] = -3.363760e-28; Coef_CCDtoSKY[0][43] = -1.413620e-35; Coef_CCDtoSKY[0][44] = 6.139430e-37; Coef_CCDtoSKY[0][45] = 2.382430e-28; Coef_CCDtoSKY[0][46] = -8.784020e-33; Coef_CCDtoSKY[0][47] = 4.032590e-37; Coef_CCDtoSKY[0][48] = -5.445120e-42; Coef_CCDtoSKY[0][49] = -1.075670e-28; Coef_CCDtoSKY[0][50] = -5.887280e-36; Coef_CCDtoSKY[0][51] = 4.065290e-37; Coef_CCDtoSKY[0][52] = -4.440200e-37; Coef_CCDtoSKY[0][53] = 1.238960e-41; Coef_CCDtoSKY[0][54] = 9.364170e-38; Coef_CCDtoSKY[1][ 0]= -7.275360e-05; Coef_CCDtoSKY[1][ 1]= 1.000000e+00; Coef_CCDtoSKY[1][ 2]= -2.745560e-10; Coef_CCDtoSKY[1][ 3]= -1.055370e-10; Coef_CCDtoSKY[1][ 4]= 8.627980e-19; Coef_CCDtoSKY[1][ 5]= -7.018680e-21; Coef_CCDtoSKY[1][ 6]= 2.757670e-27; Coef_CCDtoSKY[1][ 7]= -1.078140e-28; Coef_CCDtoSKY[1][ 8]= -4.336990e-36; Coef_CCDtoSKY[1][ 9]= 9.403260e-38; Coef_CCDtoSKY[1][10] = -1.219910e-10; Coef_CCDtoSKY[1][11] = -8.763470e-14; Coef_CCDtoSKY[1][12] = 1.357300e-17; Coef_CCDtoSKY[1][13] = 5.705330e-22; Coef_CCDtoSKY[1][14] = -7.106530e-26; Coef_CCDtoSKY[1][15] = -5.022780e-31; Coef_CCDtoSKY[1][16] = 1.011460e-36; Coef_CCDtoSKY[1][17] = -4.945990e-41; Coef_CCDtoSKY[1][18] = 7.733780e-44; Coef_CCDtoSKY[1][19] = -5.574280e-10; Coef_CCDtoSKY[1][20] = -1.055900e-10; Coef_CCDtoSKY[1][21] = 9.737460e-19; Coef_CCDtoSKY[1][22] = -1.234860e-20; Coef_CCDtoSKY[1][23] = 4.635870e-27; Coef_CCDtoSKY[1][24] = -3.371770e-28; Coef_CCDtoSKY[1][25] = -9.733920e-36; Coef_CCDtoSKY[1][26] = 4.084210e-37; Coef_CCDtoSKY[1][27] = -9.393000e-19; Coef_CCDtoSKY[1][28] = 3.523910e-21; Coef_CCDtoSKY[1][29] = -5.196520e-25; Coef_CCDtoSKY[1][30] = -1.860280e-29; Coef_CCDtoSKY[1][31] = 2.861590e-33; Coef_CCDtoSKY[1][32] = 1.048500e-38; Coef_CCDtoSKY[1][33] = -8.526560e-43; Coef_CCDtoSKY[1][34] = 8.619060e-20; Coef_CCDtoSKY[1][35] = -6.200660e-21; Coef_CCDtoSKY[1][36] = 3.368460e-27; Coef_CCDtoSKY[1][37] = -3.378050e-28; Coef_CCDtoSKY[1][38] = -1.284970e-35; Coef_CCDtoSKY[1][39] = 6.154570e-37; Coef_CCDtoSKY[1][40] = 1.204440e-25; Coef_CCDtoSKY[1][41] = -3.197900e-29; Coef_CCDtoSKY[1][42] = 3.660630e-33; Coef_CCDtoSKY[1][43] = 
1.008310e-37; Coef_CCDtoSKY[1][44] = -1.773260e-41; Coef_CCDtoSKY[1][45] = 1.864510e-28; Coef_CCDtoSKY[1][46] = -1.122030e-28; Coef_CCDtoSKY[1][47] = -6.932290e-36; Coef_CCDtoSKY[1][48] = 4.103060e-37; Coef_CCDtoSKY[1][49] = -1.077680e-33; Coef_CCDtoSKY[1][50] = 7.677870e-38; Coef_CCDtoSKY[1][51] = -3.546340e-42; Coef_CCDtoSKY[1][52] = -4.889690e-37; Coef_CCDtoSKY[1][53] = 1.016870e-37; Coef_CCDtoSKY[1][54] = 2.265810e-42; }else if(EL==75){ Coef_CCDtoSKY[0][ 0]= -4.380370e-06; Coef_CCDtoSKY[0][ 1]= -1.995040e-10; Coef_CCDtoSKY[0][ 2]= -1.817160e-13; Coef_CCDtoSKY[0][ 3]= 5.617380e-18; Coef_CCDtoSKY[0][ 4]= 2.964630e-22; Coef_CCDtoSKY[0][ 5]= -4.136840e-26; Coef_CCDtoSKY[0][ 6]= 1.251490e-30; Coef_CCDtoSKY[0][ 7]= 1.350210e-34; Coef_CCDtoSKY[0][ 8]= -1.833610e-39; Coef_CCDtoSKY[0][ 9]= -2.459310e-43; Coef_CCDtoSKY[0][10] = 1.000000e+00; Coef_CCDtoSKY[0][11] = 2.007060e-10; Coef_CCDtoSKY[0][12] = -1.055900e-10; Coef_CCDtoSKY[0][13] = 6.600740e-19; Coef_CCDtoSKY[0][14] = -6.189660e-21; Coef_CCDtoSKY[0][15] = 1.620700e-27; Coef_CCDtoSKY[0][16] = -1.122810e-28; Coef_CCDtoSKY[0][17] = -2.491050e-36; Coef_CCDtoSKY[0][18] = 1.018330e-37; Coef_CCDtoSKY[0][19] = 8.616620e-13; Coef_CCDtoSKY[0][20] = 1.040290e-17; Coef_CCDtoSKY[0][21] = 2.342430e-20; Coef_CCDtoSKY[0][22] = -4.253810e-25; Coef_CCDtoSKY[0][23] = -5.310430e-29; Coef_CCDtoSKY[0][24] = 2.046700e-33; Coef_CCDtoSKY[0][25] = -2.211180e-38; Coef_CCDtoSKY[0][26] = 1.731020e-43; Coef_CCDtoSKY[0][27] = -1.055360e-10; Coef_CCDtoSKY[0][28] = 7.971890e-19; Coef_CCDtoSKY[0][29] = -1.237090e-20; Coef_CCDtoSKY[0][30] = 1.561240e-27; Coef_CCDtoSKY[0][31] = -3.377410e-28; Coef_CCDtoSKY[0][32] = -3.722440e-36; Coef_CCDtoSKY[0][33] = 4.102940e-37; Coef_CCDtoSKY[0][34] = -2.256700e-20; Coef_CCDtoSKY[0][35] = 1.831590e-25; Coef_CCDtoSKY[0][36] = -3.534470e-28; Coef_CCDtoSKY[0][37] = 3.929320e-33; Coef_CCDtoSKY[0][38] = 6.098000e-37; Coef_CCDtoSKY[0][39] = -2.359170e-41; Coef_CCDtoSKY[0][40] = -7.040310e-21; Coef_CCDtoSKY[0][41] = 7.398160e-28; Coef_CCDtoSKY[0][42] = -3.369280e-28; Coef_CCDtoSKY[0][43] = -1.452360e-36; Coef_CCDtoSKY[0][44] = 6.150350e-37; Coef_CCDtoSKY[0][45] = 1.645070e-28; Coef_CCDtoSKY[0][46] = -4.046500e-33; Coef_CCDtoSKY[0][47] = 1.108910e-36; Coef_CCDtoSKY[0][48] = 4.139220e-42; Coef_CCDtoSKY[0][49] = -1.076870e-28; Coef_CCDtoSKY[0][50] = -1.198030e-36; Coef_CCDtoSKY[0][51] = 4.076940e-37; Coef_CCDtoSKY[0][52] = -3.242150e-37; Coef_CCDtoSKY[0][53] = 1.283090e-41; Coef_CCDtoSKY[0][54] = 9.379990e-38; Coef_CCDtoSKY[1][ 0]= -8.286180e-05; Coef_CCDtoSKY[1][ 1]= 1.000000e+00; Coef_CCDtoSKY[1][ 2]= -2.043080e-10; Coef_CCDtoSKY[1][ 3]= -1.055360e-10; Coef_CCDtoSKY[1][ 4]= 5.734950e-19; Coef_CCDtoSKY[1][ 5]= -7.031790e-21; Coef_CCDtoSKY[1][ 6]= 2.754290e-27; Coef_CCDtoSKY[1][ 7]= -1.077510e-28; Coef_CCDtoSKY[1][ 8]= -4.929120e-36; Coef_CCDtoSKY[1][ 9]= 9.393580e-38; Coef_CCDtoSKY[1][10] = -2.012200e-10; Coef_CCDtoSKY[1][11] = 5.215440e-14; Coef_CCDtoSKY[1][12] = 8.202230e-18; Coef_CCDtoSKY[1][13] = -1.367250e-22; Coef_CCDtoSKY[1][14] = -3.571670e-26; Coef_CCDtoSKY[1][15] = -6.255230e-31; Coef_CCDtoSKY[1][16] = 4.801170e-35; Coef_CCDtoSKY[1][17] = -8.679450e-41; Coef_CCDtoSKY[1][18] = -1.184220e-44; Coef_CCDtoSKY[1][19] = -4.173330e-10; Coef_CCDtoSKY[1][20] = -1.055890e-10; Coef_CCDtoSKY[1][21] = 8.467120e-19; Coef_CCDtoSKY[1][22] = -1.238130e-20; Coef_CCDtoSKY[1][23] = 2.317620e-27; Coef_CCDtoSKY[1][24] = -3.369240e-28; Coef_CCDtoSKY[1][25] = -4.791800e-36; Coef_CCDtoSKY[1][26] = 4.077870e-37; Coef_CCDtoSKY[1][27] = 9.621110e-18; 
Coef_CCDtoSKY[1][28] = -2.268360e-21; Coef_CCDtoSKY[1][29] = -3.570260e-25; Coef_CCDtoSKY[1][30] = 7.894370e-30; Coef_CCDtoSKY[1][31] = 1.145440e-33; Coef_CCDtoSKY[1][32] = 1.340330e-38; Coef_CCDtoSKY[1][33] = -9.379170e-43; Coef_CCDtoSKY[1][34] = 7.940900e-20; Coef_CCDtoSKY[1][35] = -6.207820e-21; Coef_CCDtoSKY[1][36] = 1.734180e-27; Coef_CCDtoSKY[1][37] = -3.375970e-28; Coef_CCDtoSKY[1][38] = -5.680250e-36; Coef_CCDtoSKY[1][39] = 6.149470e-37; Coef_CCDtoSKY[1][40] = -1.170020e-25; Coef_CCDtoSKY[1][41] = 2.081400e-29; Coef_CCDtoSKY[1][42] = 3.516930e-33; Coef_CCDtoSKY[1][43] = -6.731210e-38; Coef_CCDtoSKY[1][44] = -5.956520e-42; Coef_CCDtoSKY[1][45] = 7.247450e-29; Coef_CCDtoSKY[1][46] = -1.121890e-28; Coef_CCDtoSKY[1][47] = -3.616860e-36; Coef_CCDtoSKY[1][48] = 4.098260e-37; Coef_CCDtoSKY[1][49] = 5.022440e-34; Coef_CCDtoSKY[1][50] = -4.451430e-38; Coef_CCDtoSKY[1][51] = -9.308030e-42; Coef_CCDtoSKY[1][52] = -2.769340e-37; Coef_CCDtoSKY[1][53] = 1.017020e-37; Coef_CCDtoSKY[1][54] = -6.874890e-43; }else if(EL==80){ Coef_CCDtoSKY[0][ 0]= -8.991570e-06; Coef_CCDtoSKY[0][ 1]= 1.214350e-09; Coef_CCDtoSKY[0][ 2]= -6.738320e-14; Coef_CCDtoSKY[0][ 3]= -1.241740e-18; Coef_CCDtoSKY[0][ 4]= -1.185820e-22; Coef_CCDtoSKY[0][ 5]= -3.724970e-26; Coef_CCDtoSKY[0][ 6]= 1.642410e-30; Coef_CCDtoSKY[0][ 7]= 1.822850e-34; Coef_CCDtoSKY[0][ 8]= -2.682240e-39; Coef_CCDtoSKY[0][ 9]= -4.071530e-43; Coef_CCDtoSKY[0][10] = 1.000000e+00; Coef_CCDtoSKY[0][11] = 1.360180e-10; Coef_CCDtoSKY[0][12] = -1.055900e-10; Coef_CCDtoSKY[0][13] = 3.576400e-19; Coef_CCDtoSKY[0][14] = -6.207290e-21; Coef_CCDtoSKY[0][15] = 1.653190e-27; Coef_CCDtoSKY[0][16] = -1.121870e-28; Coef_CCDtoSKY[0][17] = -2.695560e-36; Coef_CCDtoSKY[0][18] = 1.017020e-37; Coef_CCDtoSKY[0][19] = 1.453970e-12; Coef_CCDtoSKY[0][20] = -2.186110e-16; Coef_CCDtoSKY[0][21] = 1.032910e-20; Coef_CCDtoSKY[0][22] = 6.308230e-25; Coef_CCDtoSKY[0][23] = -1.440050e-29; Coef_CCDtoSKY[0][24] = 6.597510e-34; Coef_CCDtoSKY[0][25] = -2.071340e-38; Coef_CCDtoSKY[0][26] = 3.085060e-42; Coef_CCDtoSKY[0][27] = -1.055360e-10; Coef_CCDtoSKY[0][28] = 5.602120e-19; Coef_CCDtoSKY[0][29] = -1.237050e-20; Coef_CCDtoSKY[0][30] = 2.322260e-27; Coef_CCDtoSKY[0][31] = -3.375210e-28; Coef_CCDtoSKY[0][32] = -6.745100e-36; Coef_CCDtoSKY[0][33] = 4.094520e-37; Coef_CCDtoSKY[0][34] = -3.345750e-20; Coef_CCDtoSKY[0][35] = 5.280320e-24; Coef_CCDtoSKY[0][36] = -1.685160e-28; Coef_CCDtoSKY[0][37] = -1.515780e-32; Coef_CCDtoSKY[0][38] = 2.129150e-37; Coef_CCDtoSKY[0][39] = -2.060420e-41; Coef_CCDtoSKY[0][40] = -7.042060e-21; Coef_CCDtoSKY[0][41] = -1.460060e-29; Coef_CCDtoSKY[0][42] = -3.371320e-28; Coef_CCDtoSKY[0][43] = -3.598480e-36; Coef_CCDtoSKY[0][44] = 6.151350e-37; Coef_CCDtoSKY[0][45] = 2.244570e-28; Coef_CCDtoSKY[0][46] = -3.616670e-32; Coef_CCDtoSKY[0][47] = 5.734320e-37; Coef_CCDtoSKY[0][48] = 9.052570e-41; Coef_CCDtoSKY[0][49] = -1.076670e-28; Coef_CCDtoSKY[0][50] = 4.928080e-37; Coef_CCDtoSKY[0][51] = 4.083770e-37; Coef_CCDtoSKY[0][52] = -4.236470e-37; Coef_CCDtoSKY[0][53] = 6.543910e-41; Coef_CCDtoSKY[0][54] = 9.375290e-38; Coef_CCDtoSKY[1][ 0]= -6.384140e-05; Coef_CCDtoSKY[1][ 1]= 1.000000e+00; Coef_CCDtoSKY[1][ 2]= -1.352740e-10; Coef_CCDtoSKY[1][ 3]= -1.055370e-10; Coef_CCDtoSKY[1][ 4]= 3.376170e-19; Coef_CCDtoSKY[1][ 5]= -7.026540e-21; Coef_CCDtoSKY[1][ 6]= 1.994590e-27; Coef_CCDtoSKY[1][ 7]= -1.077560e-28; Coef_CCDtoSKY[1][ 8]= -3.137120e-36; Coef_CCDtoSKY[1][ 9]= 9.390610e-38; Coef_CCDtoSKY[1][10] = 3.131940e-11; Coef_CCDtoSKY[1][11] = 4.884370e-14; Coef_CCDtoSKY[1][12] = 
2.208890e-18; Coef_CCDtoSKY[1][13] = -1.926930e-23; Coef_CCDtoSKY[1][14] = 3.471930e-27; Coef_CCDtoSKY[1][15] = -1.120280e-30; Coef_CCDtoSKY[1][16] = 2.754920e-35; Coef_CCDtoSKY[1][17] = 1.786330e-40; Coef_CCDtoSKY[1][18] = -3.720120e-46; Coef_CCDtoSKY[1][19] = -2.809130e-10; Coef_CCDtoSKY[1][20] = -1.055910e-10; Coef_CCDtoSKY[1][21] = 5.830220e-19; Coef_CCDtoSKY[1][22] = -1.232310e-20; Coef_CCDtoSKY[1][23] = 2.554010e-27; Coef_CCDtoSKY[1][24] = -3.373040e-28; Coef_CCDtoSKY[1][25] = -7.467310e-36; Coef_CCDtoSKY[1][26] = 4.085110e-37; Coef_CCDtoSKY[1][27] = -3.936390e-19; Coef_CCDtoSKY[1][28] = -2.352170e-21; Coef_CCDtoSKY[1][29] = -1.550480e-25; Coef_CCDtoSKY[1][30] = 5.811170e-30; Coef_CCDtoSKY[1][31] = -3.107800e-34; Coef_CCDtoSKY[1][32] = 2.109960e-38; Coef_CCDtoSKY[1][33] = -6.292960e-43; Coef_CCDtoSKY[1][34] = 5.537060e-20; Coef_CCDtoSKY[1][35] = -6.185400e-21; Coef_CCDtoSKY[1][36] = 4.365380e-28; Coef_CCDtoSKY[1][37] = -3.380230e-28; Coef_CCDtoSKY[1][38] = -4.286860e-36; Coef_CCDtoSKY[1][39] = 6.160400e-37; Coef_CCDtoSKY[1][40] = -1.388240e-26; Coef_CCDtoSKY[1][41] = 2.356040e-29; Coef_CCDtoSKY[1][42] = 2.486740e-33; Coef_CCDtoSKY[1][43] = -6.488460e-38; Coef_CCDtoSKY[1][44] = 3.306830e-42; Coef_CCDtoSKY[1][45] = 5.579430e-29; Coef_CCDtoSKY[1][46] = -1.122670e-28; Coef_CCDtoSKY[1][47] = -1.251570e-37; Coef_CCDtoSKY[1][48] = 4.107300e-37; Coef_CCDtoSKY[1][49] = 1.491310e-34; Coef_CCDtoSKY[1][50] = -5.608390e-38; Coef_CCDtoSKY[1][51] = -1.053310e-41; Coef_CCDtoSKY[1][52] = -2.394730e-37; Coef_CCDtoSKY[1][53] = 1.017740e-37; Coef_CCDtoSKY[1][54] = -2.789820e-43; }else if(EL==85){ Coef_CCDtoSKY[0][ 0]= -1.344370e-05; Coef_CCDtoSKY[0][ 1]= -1.732880e-10; Coef_CCDtoSKY[0][ 2]= 2.586470e-14; Coef_CCDtoSKY[0][ 3]= -1.258910e-17; Coef_CCDtoSKY[0][ 4]= -2.976040e-22; Coef_CCDtoSKY[0][ 5]= 1.050210e-25; Coef_CCDtoSKY[0][ 6]= 1.417200e-30; Coef_CCDtoSKY[0][ 7]= -3.655400e-34; Coef_CCDtoSKY[0][ 8]= -3.418840e-39; Coef_CCDtoSKY[0][ 9]= 7.535100e-43; Coef_CCDtoSKY[0][10] = 1.000000e+00; Coef_CCDtoSKY[0][11] = 8.105030e-11; Coef_CCDtoSKY[0][12] = -1.055900e-10; Coef_CCDtoSKY[0][13] = 7.163650e-20; Coef_CCDtoSKY[0][14] = -6.205160e-21; Coef_CCDtoSKY[0][15] = 1.255880e-27; Coef_CCDtoSKY[0][16] = -1.121700e-28; Coef_CCDtoSKY[0][17] = -1.785140e-36; Coef_CCDtoSKY[0][18] = 1.016320e-37; Coef_CCDtoSKY[0][19] = 2.050150e-12; Coef_CCDtoSKY[0][20] = 8.764360e-17; Coef_CCDtoSKY[0][21] = -1.637400e-21; Coef_CCDtoSKY[0][22] = 8.128720e-25; Coef_CCDtoSKY[0][23] = 1.065880e-29; Coef_CCDtoSKY[0][24] = -4.889720e-33; Coef_CCDtoSKY[0][25] = 1.329180e-38; Coef_CCDtoSKY[0][26] = -3.721300e-42; Coef_CCDtoSKY[0][27] = -1.055370e-10; Coef_CCDtoSKY[0][28] = -3.922990e-20; Coef_CCDtoSKY[0][29] = -1.233960e-20; Coef_CCDtoSKY[0][30] = 4.220270e-27; Coef_CCDtoSKY[0][31] = -3.378930e-28; Coef_CCDtoSKY[0][32] = -9.776870e-36; Coef_CCDtoSKY[0][33] = 4.103270e-37; Coef_CCDtoSKY[0][34] = -4.471180e-20; Coef_CCDtoSKY[0][35] = -3.101910e-24; Coef_CCDtoSKY[0][36] = 1.420630e-29; Coef_CCDtoSKY[0][37] = -4.685630e-33; Coef_CCDtoSKY[0][38] = -1.491060e-37; Coef_CCDtoSKY[0][39] = 7.054280e-41; Coef_CCDtoSKY[0][40] = -7.022510e-21; Coef_CCDtoSKY[0][41] = 2.194530e-27; Coef_CCDtoSKY[0][42] = -3.372240e-28; Coef_CCDtoSKY[0][43] = -1.178800e-35; Coef_CCDtoSKY[0][44] = 6.161270e-37; Coef_CCDtoSKY[0][45] = 2.872500e-28; Coef_CCDtoSKY[0][46] = 2.740990e-32; Coef_CCDtoSKY[0][47] = 2.766310e-38; Coef_CCDtoSKY[0][48] = -4.592800e-41; Coef_CCDtoSKY[0][49] = -1.077660e-28; Coef_CCDtoSKY[0][50] = -3.687840e-36; Coef_CCDtoSKY[0][51] = 4.082930e-37; 
Coef_CCDtoSKY[0][52] = -5.286180e-37; Coef_CCDtoSKY[0][53] = -6.017310e-41; Coef_CCDtoSKY[0][54] = 9.391710e-38; Coef_CCDtoSKY[1][ 0]= 1.491910e-05; Coef_CCDtoSKY[1][ 1]= 1.000000e+00; Coef_CCDtoSKY[1][ 2]= -7.718470e-11; Coef_CCDtoSKY[1][ 3]= -1.055370e-10; Coef_CCDtoSKY[1][ 4]= 3.425560e-19; Coef_CCDtoSKY[1][ 5]= -7.026120e-21; Coef_CCDtoSKY[1][ 6]= -4.344700e-29; Coef_CCDtoSKY[1][ 7]= -1.077410e-28; Coef_CCDtoSKY[1][ 8]= 2.791650e-37; Coef_CCDtoSKY[1][ 9]= 9.387410e-38; Coef_CCDtoSKY[1][10] = 7.452400e-10; Coef_CCDtoSKY[1][11] = -4.158320e-15; Coef_CCDtoSKY[1][12] = -1.171980e-17; Coef_CCDtoSKY[1][13] = 3.090320e-23; Coef_CCDtoSKY[1][14] = 7.892520e-28; Coef_CCDtoSKY[1][15] = 3.101610e-32; Coef_CCDtoSKY[1][16] = 8.729790e-35; Coef_CCDtoSKY[1][17] = -3.005430e-40; Coef_CCDtoSKY[1][18] = 4.069600e-44; Coef_CCDtoSKY[1][19] = -1.486730e-10; Coef_CCDtoSKY[1][20] = -1.055900e-10; Coef_CCDtoSKY[1][21] = 4.796850e-19; Coef_CCDtoSKY[1][22] = -1.234730e-20; Coef_CCDtoSKY[1][23] = -1.072930e-27; Coef_CCDtoSKY[1][24] = -3.371820e-28; Coef_CCDtoSKY[1][25] = 3.687020e-36; Coef_CCDtoSKY[1][26] = 4.082110e-37; Coef_CCDtoSKY[1][27] = -4.340780e-17; Coef_CCDtoSKY[1][28] = 1.096850e-22; Coef_CCDtoSKY[1][29] = 6.538760e-25; Coef_CCDtoSKY[1][30] = -1.267160e-30; Coef_CCDtoSKY[1][31] = -6.048240e-34; Coef_CCDtoSKY[1][32] = 1.945410e-39; Coef_CCDtoSKY[1][33] = -2.390300e-42; Coef_CCDtoSKY[1][34] = 3.356080e-20; Coef_CCDtoSKY[1][35] = -6.189720e-21; Coef_CCDtoSKY[1][36] = -8.811880e-28; Coef_CCDtoSKY[1][37] = -3.378630e-28; Coef_CCDtoSKY[1][38] = 3.974800e-36; Coef_CCDtoSKY[1][39] = 6.161020e-37; Coef_CCDtoSKY[1][40] = 6.331990e-25; Coef_CCDtoSKY[1][41] = -7.875850e-32; Coef_CCDtoSKY[1][42] = -7.733490e-33; Coef_CCDtoSKY[1][43] = 5.369790e-39; Coef_CCDtoSKY[1][44] = 8.733720e-42; Coef_CCDtoSKY[1][45] = 7.446190e-29; Coef_CCDtoSKY[1][46] = -1.122730e-28; Coef_CCDtoSKY[1][47] = 1.958260e-36; Coef_CCDtoSKY[1][48] = 4.103030e-37; Coef_CCDtoSKY[1][49] = -3.210840e-33; Coef_CCDtoSKY[1][50] = -2.868900e-39; Coef_CCDtoSKY[1][51] = 2.237080e-41; Coef_CCDtoSKY[1][52] = -2.779040e-37; Coef_CCDtoSKY[1][53] = 1.018280e-37; Coef_CCDtoSKY[1][54] = 5.139690e-42;
}else{ cout << "EL must be one of 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85" << endl; ERROR=1; }
}

void CL_DISTforSIM::F_DSIM_GETCOEFSKYtoCCD(){ ERROR=0; if(EL==30){ Coef_SKYtoCCD[0][ 0] = 7.393140e-06; Coef_SKYtoCCD[0][ 1] = -5.877480e-10; Coef_SKYtoCCD[0][ 2] = 1.142960e-13; Coef_SKYtoCCD[0][ 3] = 6.611770e-20; Coef_SKYtoCCD[0][ 4] = 6.349700e-24; Coef_SKYtoCCD[0][ 5] = 5.431530e-26; Coef_SKYtoCCD[0][ 6] = -2.083990e-30; Coef_SKYtoCCD[0][ 7] = -2.404220e-34; Coef_SKYtoCCD[0][ 8] = 4.740790e-39; Coef_SKYtoCCD[0][ 9] = 3.846270e-43; Coef_SKYtoCCD[0][10] = 9.999960e-01; Coef_SKYtoCCD[0][11] = -1.407940e-09; Coef_SKYtoCCD[0][12] = 1.054550e-10; Coef_SKYtoCCD[0][13] = -3.499810e-18; Coef_SKYtoCCD[0][14] = 4.115880e-20; Coef_SKYtoCCD[0][15] = -1.962310e-26; Coef_SKYtoCCD[0][16] = 1.252990e-28; Coef_SKYtoCCD[0][17] = 1.387970e-35; Coef_SKYtoCCD[0][18] = 3.432080e-38; Coef_SKYtoCCD[0][19] = -1.278470e-12; Coef_SKYtoCCD[0][20] = 1.252940e-16; Coef_SKYtoCCD[0][21] = -1.742730e-20; Coef_SKYtoCCD[0][22] = -6.442570e-25; Coef_SKYtoCCD[0][23] = 3.193910e-29; Coef_SKYtoCCD[0][24] = -1.613030e-33; Coef_SKYtoCCD[0][25] = -3.351160e-39; Coef_SKYtoCCD[0][26] = 2.819900e-42; Coef_SKYtoCCD[0][27] = 1.053890e-10; Coef_SKYtoCCD[0][28] = -3.572880e-18; Coef_SKYtoCCD[0][29] = 8.224160e-20; Coef_SKYtoCCD[0][30] = -3.953730e-26; Coef_SKYtoCCD[0][31] = 3.776090e-28; Coef_SKYtoCCD[0][32] =
4.608360e-35; Coef_SKYtoCCD[0][33] = 1.319840e-37; Coef_SKYtoCCD[0][34] = 3.134660e-20; Coef_SKYtoCCD[0][35] = -3.434580e-24; Coef_SKYtoCCD[0][36] = 3.031460e-28; Coef_SKYtoCCD[0][37] = 1.784910e-32; Coef_SKYtoCCD[0][38] = -3.365020e-37; Coef_SKYtoCCD[0][39] = 7.298070e-42; Coef_SKYtoCCD[0][40] = 4.225910e-20; Coef_SKYtoCCD[0][41] = -1.883930e-26; Coef_SKYtoCCD[0][42] = 3.766980e-28; Coef_SKYtoCCD[0][43] = 4.128540e-35; Coef_SKYtoCCD[0][44] = 1.973620e-37; Coef_SKYtoCCD[0][45] = -2.229840e-28; Coef_SKYtoCCD[0][46] = 2.707590e-32; Coef_SKYtoCCD[0][47] = -1.177980e-36; Coef_SKYtoCCD[0][48] = -9.479310e-41; Coef_SKYtoCCD[0][49] = 1.189270e-28; Coef_SKYtoCCD[0][50] = 1.270250e-35; Coef_SKYtoCCD[0][51] = 1.349140e-37; Coef_SKYtoCCD[0][52] = 4.450900e-37; Coef_SKYtoCCD[0][53] = -6.138690e-41; Coef_SKYtoCCD[0][54] = 4.624990e-38; Coef_SKYtoCCD[1][ 0] = 5.136390e-04; Coef_SKYtoCCD[1][ 1] = 9.999870e-01; Coef_SKYtoCCD[1][ 2] = 1.369060e-09; Coef_SKYtoCCD[1][ 3] = 1.053800e-10; Coef_SKYtoCCD[1][ 4] = -3.004360e-18; Coef_SKYtoCCD[1][ 5] = 4.230150e-20; Coef_SKYtoCCD[1][ 6] = -2.066170e-26; Coef_SKYtoCCD[1][ 7] = 1.187120e-28; Coef_SKYtoCCD[1][ 8] = 1.833870e-35; Coef_SKYtoCCD[1][ 9] = 4.666230e-38; Coef_SKYtoCCD[1][10] = 9.299380e-11; Coef_SKYtoCCD[1][11] = -2.803040e-14; Coef_SKYtoCCD[1][12] = -1.887900e-18; Coef_SKYtoCCD[1][13] = 3.601900e-22; Coef_SKYtoCCD[1][14] = 1.954070e-26; Coef_SKYtoCCD[1][15] = -9.983450e-31; Coef_SKYtoCCD[1][16] = -1.930500e-35; Coef_SKYtoCCD[1][17] = -1.297050e-40; Coef_SKYtoCCD[1][18] = -5.906830e-45; Coef_SKYtoCCD[1][19] = 2.794060e-09; Coef_SKYtoCCD[1][20] = 1.054510e-10; Coef_SKYtoCCD[1][21] = -2.607370e-18; Coef_SKYtoCCD[1][22] = 8.229770e-20; Coef_SKYtoCCD[1][23] = -4.224370e-26; Coef_SKYtoCCD[1][24] = 3.763650e-28; Coef_SKYtoCCD[1][25] = 5.961710e-35; Coef_SKYtoCCD[1][26] = 1.355180e-37; Coef_SKYtoCCD[1][27] = -4.422330e-18; Coef_SKYtoCCD[1][28] = 1.084310e-21; Coef_SKYtoCCD[1][29] = 4.743330e-26; Coef_SKYtoCCD[1][30] = -9.465880e-30; Coef_SKYtoCCD[1][31] = -7.184400e-34; Coef_SKYtoCCD[1][32] = 2.251080e-38; Coef_SKYtoCCD[1][33] = 5.115060e-43; Coef_SKYtoCCD[1][34] = 5.078700e-19; Coef_SKYtoCCD[1][35] = 4.114160e-20; Coef_SKYtoCCD[1][36] = -2.236410e-26; Coef_SKYtoCCD[1][37] = 3.771810e-28; Coef_SKYtoCCD[1][38] = 6.986340e-35; Coef_SKYtoCCD[1][39] = 1.984910e-37; Coef_SKYtoCCD[1][40] = 5.287460e-26; Coef_SKYtoCCD[1][41] = -1.072360e-29; Coef_SKYtoCCD[1][42] = 9.937320e-35; Coef_SKYtoCCD[1][43] = 3.169870e-38; Coef_SKYtoCCD[1][44] = 4.207550e-42; Coef_SKYtoCCD[1][45] = -1.292720e-27; Coef_SKYtoCCD[1][46] = 1.254250e-28; Coef_SKYtoCCD[1][47] = 3.153750e-35; Coef_SKYtoCCD[1][48] = 1.328430e-37; Coef_SKYtoCCD[1][49] = -2.275250e-34; Coef_SKYtoCCD[1][50] = 3.438820e-38; Coef_SKYtoCCD[1][51] = -2.505850e-42; Coef_SKYtoCCD[1][52] = 5.010380e-36; Coef_SKYtoCCD[1][53] = 3.397200e-38; Coef_SKYtoCCD[1][54] = 3.452790e-43; }else if(EL==35){ Coef_SKYtoCCD[0][ 0]= 2.269370e-05; Coef_SKYtoCCD[0][ 1]= -3.089960e-10; Coef_SKYtoCCD[0][ 2]= -2.801000e-13; Coef_SKYtoCCD[0][ 3]= -3.278350e-18; Coef_SKYtoCCD[0][ 4]= 1.796060e-21; Coef_SKYtoCCD[0][ 5]= 6.566470e-26; Coef_SKYtoCCD[0][ 6]= -4.635930e-30; Coef_SKYtoCCD[0][ 7]= -2.641980e-34; Coef_SKYtoCCD[0][ 8]= 8.874440e-39; Coef_SKYtoCCD[0][ 9]= 4.572740e-43; Coef_SKYtoCCD[0][10] = 9.999980e-01; Coef_SKYtoCCD[0][11] = -1.117820e-09; Coef_SKYtoCCD[0][12] = 1.054560e-10; Coef_SKYtoCCD[0][13] = -3.115980e-18; Coef_SKYtoCCD[0][14] = 4.113710e-20; Coef_SKYtoCCD[0][15] = -1.366220e-26; Coef_SKYtoCCD[0][16] = 1.254000e-28; Coef_SKYtoCCD[0][17] 
= 7.641890e-36; Coef_SKYtoCCD[0][18] = 3.414370e-38; Coef_SKYtoCCD[0][19] = -3.361360e-12; Coef_SKYtoCCD[0][20] = 8.482700e-17; Coef_SKYtoCCD[0][21] = 2.863920e-20; Coef_SKYtoCCD[0][22] = -1.993980e-25; Coef_SKYtoCCD[0][23] = -1.311540e-28; Coef_SKYtoCCD[0][24] = -2.626750e-33; Coef_SKYtoCCD[0][25] = 3.720950e-38; Coef_SKYtoCCD[0][26] = 1.872760e-42; Coef_SKYtoCCD[0][27] = 1.053870e-10; Coef_SKYtoCCD[0][28] = -2.998270e-18; Coef_SKYtoCCD[0][29] = 8.223940e-20; Coef_SKYtoCCD[0][30] = -2.778210e-26; Coef_SKYtoCCD[0][31] = 3.778390e-28; Coef_SKYtoCCD[0][32] = 2.097420e-35; Coef_SKYtoCCD[0][33] = 1.313550e-37; Coef_SKYtoCCD[0][34] = 7.235990e-20; Coef_SKYtoCCD[0][35] = -2.654150e-24; Coef_SKYtoCCD[0][36] = -3.681550e-28; Coef_SKYtoCCD[0][37] = 1.049600e-32; Coef_SKYtoCCD[0][38] = 1.301910e-36; Coef_SKYtoCCD[0][39] = 2.341060e-41; Coef_SKYtoCCD[0][40] = 4.229520e-20; Coef_SKYtoCCD[0][41] = -1.500290e-26; Coef_SKYtoCCD[0][42] = 3.765350e-28; Coef_SKYtoCCD[0][43] = 2.756870e-35; Coef_SKYtoCCD[0][44] = 1.966770e-37; Coef_SKYtoCCD[0][45] = -4.664040e-28; Coef_SKYtoCCD[0][46] = 2.314870e-32; Coef_SKYtoCCD[0][47] = 8.484790e-37; Coef_SKYtoCCD[0][48] = -7.373310e-41; Coef_SKYtoCCD[0][49] = 1.187100e-28; Coef_SKYtoCCD[0][50] = 1.081730e-35; Coef_SKYtoCCD[0][51] = 1.356600e-37; Coef_SKYtoCCD[0][52] = 8.798210e-37; Coef_SKYtoCCD[0][53] = -5.671330e-41; Coef_SKYtoCCD[0][54] = 4.663910e-38; Coef_SKYtoCCD[1][ 0]= 3.645040e-04; Coef_SKYtoCCD[1][ 1]= 9.999920e-01; Coef_SKYtoCCD[1][ 2]= 1.100330e-09; Coef_SKYtoCCD[1][ 3]= 1.053830e-10; Coef_SKYtoCCD[1][ 4]= -2.583930e-18; Coef_SKYtoCCD[1][ 5]= 4.229580e-20; Coef_SKYtoCCD[1][ 6]= -1.560880e-26; Coef_SKYtoCCD[1][ 7]= 1.187300e-28; Coef_SKYtoCCD[1][ 8]= 1.324730e-35; Coef_SKYtoCCD[1][ 9]= 4.658790e-38; Coef_SKYtoCCD[1][10] = -4.014870e-10; Coef_SKYtoCCD[1][11] = -1.915990e-14; Coef_SKYtoCCD[1][12] = -1.662180e-18; Coef_SKYtoCCD[1][13] = 2.554780e-22; Coef_SKYtoCCD[1][14] = 3.220190e-26; Coef_SKYtoCCD[1][15] = -1.061180e-30; Coef_SKYtoCCD[1][16] = -5.331800e-35; Coef_SKYtoCCD[1][17] = 1.596840e-39; Coef_SKYtoCCD[1][18] = -5.009500e-44; Coef_SKYtoCCD[1][19] = 2.235960e-09; Coef_SKYtoCCD[1][20] = 1.054530e-10; Coef_SKYtoCCD[1][21] = -2.181000e-18; Coef_SKYtoCCD[1][22] = 8.222630e-20; Coef_SKYtoCCD[1][23] = -3.118630e-26; Coef_SKYtoCCD[1][24] = 3.765300e-28; Coef_SKYtoCCD[1][25] = 3.995880e-35; Coef_SKYtoCCD[1][26] = 1.357100e-37; Coef_SKYtoCCD[1][27] = 2.836560e-17; Coef_SKYtoCCD[1][28] = 6.549000e-22; Coef_SKYtoCCD[1][29] = -1.967170e-27; Coef_SKYtoCCD[1][30] = -5.342970e-30; Coef_SKYtoCCD[1][31] = -1.045420e-33; Coef_SKYtoCCD[1][32] = 9.264150e-39; Coef_SKYtoCCD[1][33] = 1.811240e-42; Coef_SKYtoCCD[1][34] = 4.322790e-19; Coef_SKYtoCCD[1][35] = 4.113970e-20; Coef_SKYtoCCD[1][36] = -1.772830e-26; Coef_SKYtoCCD[1][37] = 3.781130e-28; Coef_SKYtoCCD[1][38] = 4.843090e-35; Coef_SKYtoCCD[1][39] = 1.959750e-37; Coef_SKYtoCCD[1][40] = -4.925260e-25; Coef_SKYtoCCD[1][41] = -5.748190e-30; Coef_SKYtoCCD[1][42] = 1.039040e-33; Coef_SKYtoCCD[1][43] = 2.268420e-38; Coef_SKYtoCCD[1][44] = 4.086590e-42; Coef_SKYtoCCD[1][45] = -1.168560e-27; Coef_SKYtoCCD[1][46] = 1.253250e-28; Coef_SKYtoCCD[1][47] = 2.581330e-35; Coef_SKYtoCCD[1][48] = 1.304980e-37; Coef_SKYtoCCD[1][49] = 2.922680e-33; Coef_SKYtoCCD[1][50] = 1.462080e-38; Coef_SKYtoCCD[1][51] = -5.827570e-42; Coef_SKYtoCCD[1][52] = 4.208590e-36; Coef_SKYtoCCD[1][53] = 3.433030e-38; Coef_SKYtoCCD[1][54] = -5.359750e-42; }else if(EL==40){ Coef_SKYtoCCD[0][ 0]= 2.328360e-05; Coef_SKYtoCCD[0][ 1]= 2.381320e-09; Coef_SKYtoCCD[0][ 2]= 
-2.484280e-13; Coef_SKYtoCCD[0][ 3]= 2.830380e-18; Coef_SKYtoCCD[0][ 4]= 7.184550e-22; Coef_SKYtoCCD[0][ 5]= -1.477350e-25; Coef_SKYtoCCD[0][ 6]= -6.373640e-31; Coef_SKYtoCCD[0][ 7]= 5.983650e-34; Coef_SKYtoCCD[0][ 8]= 1.343480e-39; Coef_SKYtoCCD[0][ 9]= -1.116330e-42; Coef_SKYtoCCD[0][10] = 9.999980e-01; Coef_SKYtoCCD[0][11] = -9.064790e-10; Coef_SKYtoCCD[0][12] = 1.054570e-10; Coef_SKYtoCCD[0][13] = -2.619520e-18; Coef_SKYtoCCD[0][14] = 4.114490e-20; Coef_SKYtoCCD[0][15] = -1.107430e-26; Coef_SKYtoCCD[0][16] = 1.252910e-28; Coef_SKYtoCCD[0][17] = 6.203850e-36; Coef_SKYtoCCD[0][18] = 3.438490e-38; Coef_SKYtoCCD[0][19] = -3.619800e-12; Coef_SKYtoCCD[0][20] = -4.805250e-16; Coef_SKYtoCCD[0][21] = 3.263100e-20; Coef_SKYtoCCD[0][22] = 1.251700e-24; Coef_SKYtoCCD[0][23] = -7.175490e-29; Coef_SKYtoCCD[0][24] = 5.961560e-33; Coef_SKYtoCCD[0][25] = -5.372780e-40; Coef_SKYtoCCD[0][26] = -1.178580e-42; Coef_SKYtoCCD[0][27] = 1.053880e-10; Coef_SKYtoCCD[0][28] = -2.794220e-18; Coef_SKYtoCCD[0][29] = 8.217810e-20; Coef_SKYtoCCD[0][30] = -2.031120e-26; Coef_SKYtoCCD[0][31] = 3.781460e-28; Coef_SKYtoCCD[0][32] = 1.510430e-35; Coef_SKYtoCCD[0][33] = 1.310410e-37; Coef_SKYtoCCD[0][34] = 8.094510e-20; Coef_SKYtoCCD[0][35] = 1.267980e-23; Coef_SKYtoCCD[0][36] = -5.457410e-28; Coef_SKYtoCCD[0][37] = -4.183880e-32; Coef_SKYtoCCD[0][38] = 7.779720e-37; Coef_SKYtoCCD[0][39] = -6.629390e-41; Coef_SKYtoCCD[0][40] = 4.227490e-20; Coef_SKYtoCCD[0][41] = -9.533900e-27; Coef_SKYtoCCD[0][42] = 3.770670e-28; Coef_SKYtoCCD[0][43] = 1.143540e-35; Coef_SKYtoCCD[0][44] = 1.950990e-37; Coef_SKYtoCCD[0][45] = -5.382120e-28; Coef_SKYtoCCD[0][46] = -9.381300e-32; Coef_SKYtoCCD[0][47] = 2.005510e-36; Coef_SKYtoCCD[0][48] = 2.689770e-40; Coef_SKYtoCCD[0][49] = 1.188430e-28; Coef_SKYtoCCD[0][50] = 2.997510e-36; Coef_SKYtoCCD[0][51] = 1.343550e-37; Coef_SKYtoCCD[0][52] = 1.036880e-36; Coef_SKYtoCCD[0][53] = 1.852940e-40; Coef_SKYtoCCD[0][54] = 4.636000e-38; Coef_SKYtoCCD[1][ 0]= 2.922740e-04; Coef_SKYtoCCD[1][ 1]= 9.999940e-01; Coef_SKYtoCCD[1][ 2]= 8.983460e-10; Coef_SKYtoCCD[1][ 3]= 1.053820e-10; Coef_SKYtoCCD[1][ 4]= -2.127820e-18; Coef_SKYtoCCD[1][ 5]= 4.231330e-20; Coef_SKYtoCCD[1][ 6]= -1.252060e-26; Coef_SKYtoCCD[1][ 7]= 1.186650e-28; Coef_SKYtoCCD[1][ 8]= 1.024160e-35; Coef_SKYtoCCD[1][ 9]= 4.665940e-38; Coef_SKYtoCCD[1][10] = -3.158820e-10; Coef_SKYtoCCD[1][11] = 1.150960e-14; Coef_SKYtoCCD[1][12] = -4.250670e-19; Coef_SKYtoCCD[1][13] = -3.562390e-22; Coef_SKYtoCCD[1][14] = 1.621510e-26; Coef_SKYtoCCD[1][15] = 1.616770e-30; Coef_SKYtoCCD[1][16] = -3.903250e-35; Coef_SKYtoCCD[1][17] = -4.424390e-40; Coef_SKYtoCCD[1][18] = -2.311380e-44; Coef_SKYtoCCD[1][19] = 1.829020e-09; Coef_SKYtoCCD[1][20] = 1.054520e-10; Coef_SKYtoCCD[1][21] = -1.746980e-18; Coef_SKYtoCCD[1][22] = 8.229070e-20; Coef_SKYtoCCD[1][23] = -2.586850e-26; Coef_SKYtoCCD[1][24] = 3.763360e-28; Coef_SKYtoCCD[1][25] = 3.306220e-35; Coef_SKYtoCCD[1][26] = 1.357390e-37; Coef_SKYtoCCD[1][27] = 2.143800e-17; Coef_SKYtoCCD[1][28] = -2.949340e-22; Coef_SKYtoCCD[1][29] = -2.270900e-26; Coef_SKYtoCCD[1][30] = 6.658070e-30; Coef_SKYtoCCD[1][31] = -4.504350e-34; Coef_SKYtoCCD[1][32] = -3.107190e-38; Coef_SKYtoCCD[1][33] = 1.182560e-42; Coef_SKYtoCCD[1][34] = 3.553820e-19; Coef_SKYtoCCD[1][35] = 4.116140e-20; Coef_SKYtoCCD[1][36] = -1.506730e-26; Coef_SKYtoCCD[1][37] = 3.772320e-28; Coef_SKYtoCCD[1][38] = 4.249660e-35; Coef_SKYtoCCD[1][39] = 1.980650e-37; Coef_SKYtoCCD[1][40] = -3.618710e-25; Coef_SKYtoCCD[1][41] = 3.282250e-30; Coef_SKYtoCCD[1][42] = 7.157430e-34; 
Coef_SKYtoCCD[1][43] = -1.965750e-39; Coef_SKYtoCCD[1][44] = 9.942470e-43; Coef_SKYtoCCD[1][45] = -9.421620e-28; Coef_SKYtoCCD[1][46] = 1.252970e-28; Coef_SKYtoCCD[1][47] = 2.235020e-35; Coef_SKYtoCCD[1][48] = 1.327930e-37; Coef_SKYtoCCD[1][49] = 2.108060e-33; Coef_SKYtoCCD[1][50] = -1.919900e-38; Coef_SKYtoCCD[1][51] = -3.075950e-42; Coef_SKYtoCCD[1][52] = 3.395970e-36; Coef_SKYtoCCD[1][53] = 3.422290e-38; Coef_SKYtoCCD[1][54] = -3.831240e-42; }else if(EL==45){ Coef_SKYtoCCD[0][ 0]= 2.986930e-05; Coef_SKYtoCCD[0][ 1]= 1.517550e-09; Coef_SKYtoCCD[0][ 2]= -2.335250e-13; Coef_SKYtoCCD[0][ 3]= -1.228710e-17; Coef_SKYtoCCD[0][ 4]= 5.154190e-22; Coef_SKYtoCCD[0][ 5]= -1.625760e-26; Coef_SKYtoCCD[0][ 6]= -7.612880e-31; Coef_SKYtoCCD[0][ 7]= 2.439950e-34; Coef_SKYtoCCD[0][ 8]= 6.218740e-39; Coef_SKYtoCCD[0][ 9]= -5.341090e-43; Coef_SKYtoCCD[0][10] = 9.999990e-01; Coef_SKYtoCCD[0][11] = -7.558340e-10; Coef_SKYtoCCD[0][12] = 1.054540e-10; Coef_SKYtoCCD[0][13] = -2.150700e-18; Coef_SKYtoCCD[0][14] = 4.114820e-20; Coef_SKYtoCCD[0][15] = -8.776300e-27; Coef_SKYtoCCD[0][16] = 1.253500e-28; Coef_SKYtoCCD[0][17] = 3.705520e-36; Coef_SKYtoCCD[0][18] = 3.420280e-38; Coef_SKYtoCCD[0][19] = -4.688390e-12; Coef_SKYtoCCD[0][20] = -2.679360e-16; Coef_SKYtoCCD[0][21] = 3.202450e-20; Coef_SKYtoCCD[0][22] = 2.253730e-24; Coef_SKYtoCCD[0][23] = -5.303010e-29; Coef_SKYtoCCD[0][24] = -3.051970e-33; Coef_SKYtoCCD[0][25] = -1.332720e-37; Coef_SKYtoCCD[0][26] = 2.344550e-42; Coef_SKYtoCCD[0][27] = 1.053850e-10; Coef_SKYtoCCD[0][28] = -2.083360e-18; Coef_SKYtoCCD[0][29] = 8.229670e-20; Coef_SKYtoCCD[0][30] = -2.004530e-26; Coef_SKYtoCCD[0][31] = 3.774860e-28; Coef_SKYtoCCD[0][32] = 1.892280e-35; Coef_SKYtoCCD[0][33] = 1.318740e-37; Coef_SKYtoCCD[0][34] = 1.051950e-19; Coef_SKYtoCCD[0][35] = 6.589120e-24; Coef_SKYtoCCD[0][36] = -5.505780e-28; Coef_SKYtoCCD[0][37] = -4.298060e-32; Coef_SKYtoCCD[0][38] = 9.926410e-37; Coef_SKYtoCCD[0][39] = 2.644790e-41; Coef_SKYtoCCD[0][40] = 4.230160e-20; Coef_SKYtoCCD[0][41] = -9.069960e-27; Coef_SKYtoCCD[0][42] = 3.760940e-28; Coef_SKYtoCCD[0][43] = 1.884300e-35; Coef_SKYtoCCD[0][44] = 1.981830e-37; Coef_SKYtoCCD[0][45] = -6.979780e-28; Coef_SKYtoCCD[0][46] = -4.855350e-32; Coef_SKYtoCCD[0][47] = 1.874370e-36; Coef_SKYtoCCD[0][48] = 1.877110e-40; Coef_SKYtoCCD[0][49] = 1.187080e-28; Coef_SKYtoCCD[0][50] = 3.894080e-36; Coef_SKYtoCCD[0][51] = 1.364120e-37; Coef_SKYtoCCD[0][52] = 1.341120e-36; Coef_SKYtoCCD[0][53] = 1.055660e-40; Coef_SKYtoCCD[0][54] = 4.659800e-38; Coef_SKYtoCCD[1][ 0]= 3.516810e-04; Coef_SKYtoCCD[1][ 1]= 9.999960e-01; Coef_SKYtoCCD[1][ 2]= 7.366520e-10; Coef_SKYtoCCD[1][ 3]= 1.053830e-10; Coef_SKYtoCCD[1][ 4]= -1.598020e-18; Coef_SKYtoCCD[1][ 5]= 4.231770e-20; Coef_SKYtoCCD[1][ 6]= -1.141780e-26; Coef_SKYtoCCD[1][ 7]= 1.186270e-28; Coef_SKYtoCCD[1][ 8]= 1.026910e-35; Coef_SKYtoCCD[1][ 9]= 4.673500e-38; Coef_SKYtoCCD[1][10] = 5.284960e-11; Coef_SKYtoCCD[1][11] = -8.815790e-14; Coef_SKYtoCCD[1][12] = 1.273550e-18; Coef_SKYtoCCD[1][13] = 5.798300e-22; Coef_SKYtoCCD[1][14] = -1.652660e-26; Coef_SKYtoCCD[1][15] = -2.994630e-31; Coef_SKYtoCCD[1][16] = 2.878570e-35; Coef_SKYtoCCD[1][17] = -2.071330e-40; Coef_SKYtoCCD[1][18] = -1.804850e-44; Coef_SKYtoCCD[1][19] = 1.514840e-09; Coef_SKYtoCCD[1][20] = 1.054530e-10; Coef_SKYtoCCD[1][21] = -1.383860e-18; Coef_SKYtoCCD[1][22] = 8.227850e-20; Coef_SKYtoCCD[1][23] = -2.273980e-26; Coef_SKYtoCCD[1][24] = 3.762510e-28; Coef_SKYtoCCD[1][25] = 3.295400e-35; Coef_SKYtoCCD[1][26] = 1.362710e-37; Coef_SKYtoCCD[1][27] = -4.730130e-18; 
Coef_SKYtoCCD[1][28] = 3.673610e-21; Coef_SKYtoCCD[1][29] = -2.244910e-26; Coef_SKYtoCCD[1][30] = -2.122510e-29; Coef_SKYtoCCD[1][31] = 5.328370e-34; Coef_SKYtoCCD[1][32] = 8.005440e-39; Coef_SKYtoCCD[1][33] = -4.782980e-43; Coef_SKYtoCCD[1][34] = 3.230750e-19; Coef_SKYtoCCD[1][35] = 4.115420e-20; Coef_SKYtoCCD[1][36] = -1.247640e-26; Coef_SKYtoCCD[1][37] = 3.774600e-28; Coef_SKYtoCCD[1][38] = 3.429890e-35; Coef_SKYtoCCD[1][39] = 1.973770e-37; Coef_SKYtoCCD[1][40] = 9.335420e-26; Coef_SKYtoCCD[1][41] = -3.354630e-29; Coef_SKYtoCCD[1][42] = -2.436180e-34; Coef_SKYtoCCD[1][43] = 1.292910e-37; Coef_SKYtoCCD[1][44] = -2.936280e-42; Coef_SKYtoCCD[1][45] = -9.925680e-28; Coef_SKYtoCCD[1][46] = 1.252900e-28; Coef_SKYtoCCD[1][47] = 1.896370e-35; Coef_SKYtoCCD[1][48] = 1.324570e-37; Coef_SKYtoCCD[1][49] = -5.970100e-34; Coef_SKYtoCCD[1][50] = 7.679510e-38; Coef_SKYtoCCD[1][51] = 2.390550e-42; Coef_SKYtoCCD[1][52] = 3.235110e-36; Coef_SKYtoCCD[1][53] = 3.428690e-38; Coef_SKYtoCCD[1][54] = 1.133190e-42; }else if(EL==50){ Coef_SKYtoCCD[0][ 0]= 1.797920e-05; Coef_SKYtoCCD[0][ 1]= -8.988780e-10; Coef_SKYtoCCD[0][ 2]= -1.536960e-13; Coef_SKYtoCCD[0][ 3]= 2.163850e-18; Coef_SKYtoCCD[0][ 4]= 3.726820e-22; Coef_SKYtoCCD[0][ 5]= 1.433210e-26; Coef_SKYtoCCD[0][ 6]= -3.362340e-31; Coef_SKYtoCCD[0][ 7]= -8.146850e-35; Coef_SKYtoCCD[0][ 8]= 4.655250e-40; Coef_SKYtoCCD[0][ 9]= 2.141360e-43; Coef_SKYtoCCD[0][10] = 9.999990e-01; Coef_SKYtoCCD[0][11] = -6.351380e-10; Coef_SKYtoCCD[0][12] = 1.054560e-10; Coef_SKYtoCCD[0][13] = -1.672130e-18; Coef_SKYtoCCD[0][14] = 4.112780e-20; Coef_SKYtoCCD[0][15] = -8.465690e-27; Coef_SKYtoCCD[0][16] = 1.254630e-28; Coef_SKYtoCCD[0][17] = 6.083850e-36; Coef_SKYtoCCD[0][18] = 3.395370e-38; Coef_SKYtoCCD[0][19] = -2.847510e-12; Coef_SKYtoCCD[0][20] = 1.618900e-16; Coef_SKYtoCCD[0][21] = 2.087910e-20; Coef_SKYtoCCD[0][22] = -5.392710e-25; Coef_SKYtoCCD[0][23] = -3.620970e-29; Coef_SKYtoCCD[0][24] = -1.754950e-34; Coef_SKYtoCCD[0][25] = 4.984340e-39; Coef_SKYtoCCD[0][26] = -2.181970e-42; Coef_SKYtoCCD[0][27] = 1.053860e-10; Coef_SKYtoCCD[0][28] = -1.639570e-18; Coef_SKYtoCCD[0][29] = 8.224750e-20; Coef_SKYtoCCD[0][30] = -1.666660e-26; Coef_SKYtoCCD[0][31] = 3.775770e-28; Coef_SKYtoCCD[0][32] = 1.463750e-35; Coef_SKYtoCCD[0][33] = 1.321110e-37; Coef_SKYtoCCD[0][34] = 6.474670e-20; Coef_SKYtoCCD[0][35] = -3.942570e-24; Coef_SKYtoCCD[0][36] = -3.660130e-28; Coef_SKYtoCCD[0][37] = 1.259770e-32; Coef_SKYtoCCD[0][38] = 3.758130e-37; Coef_SKYtoCCD[0][39] = 1.141590e-41; Coef_SKYtoCCD[0][40] = 4.229040e-20; Coef_SKYtoCCD[0][41] = -8.702490e-27; Coef_SKYtoCCD[0][42] = 3.765490e-28; Coef_SKYtoCCD[0][43] = 1.889750e-35; Coef_SKYtoCCD[0][44] = 1.972690e-37; Coef_SKYtoCCD[0][45] = -4.364710e-28; Coef_SKYtoCCD[0][46] = 2.677070e-32; Coef_SKYtoCCD[0][47] = 1.440740e-36; Coef_SKYtoCCD[0][48] = -7.317790e-41; Coef_SKYtoCCD[0][49] = 1.187390e-28; Coef_SKYtoCCD[0][50] = 6.123070e-36; Coef_SKYtoCCD[0][51] = 1.353650e-37; Coef_SKYtoCCD[0][52] = 8.479680e-37; Coef_SKYtoCCD[0][53] = -4.576490e-41; Coef_SKYtoCCD[0][54] = 4.657720e-38; Coef_SKYtoCCD[1][ 0]= 2.164050e-04; Coef_SKYtoCCD[1][ 1]= 9.999970e-01; Coef_SKYtoCCD[1][ 2]= 6.145470e-10; Coef_SKYtoCCD[1][ 3]= 1.053840e-10; Coef_SKYtoCCD[1][ 4]= -1.283460e-18; Coef_SKYtoCCD[1][ 5]= 4.231180e-20; Coef_SKYtoCCD[1][ 6]= -1.000060e-26; Coef_SKYtoCCD[1][ 7]= 1.186390e-28; Coef_SKYtoCCD[1][ 8]= 9.727720e-36; Coef_SKYtoCCD[1][ 9]= 4.674530e-38; Coef_SKYtoCCD[1][10] = -6.248770e-10; Coef_SKYtoCCD[1][11] = 2.842780e-14; Coef_SKYtoCCD[1][12] = -2.524920e-18; 
Coef_SKYtoCCD[1][13] = -1.855960e-22; Coef_SKYtoCCD[1][14] = 2.410690e-26; Coef_SKYtoCCD[1][15] = -4.259650e-32; Coef_SKYtoCCD[1][16] = -9.242810e-35; Coef_SKYtoCCD[1][17] = 8.657800e-40; Coef_SKYtoCCD[1][18] = 4.031700e-45; Coef_SKYtoCCD[1][19] = 1.267250e-09; Coef_SKYtoCCD[1][20] = 1.054540e-10; Coef_SKYtoCCD[1][21] = -1.436220e-18; Coef_SKYtoCCD[1][22] = 8.227050e-20; Coef_SKYtoCCD[1][23] = -1.710300e-26; Coef_SKYtoCCD[1][24] = 3.764250e-28; Coef_SKYtoCCD[1][25] = 2.339320e-35; Coef_SKYtoCCD[1][26] = 1.355160e-37; Coef_SKYtoCCD[1][27] = 4.124150e-17; Coef_SKYtoCCD[1][28] = -1.235940e-21; Coef_SKYtoCCD[1][29] = 9.521770e-26; Coef_SKYtoCCD[1][30] = 7.359800e-30; Coef_SKYtoCCD[1][31] = -4.645500e-34; Coef_SKYtoCCD[1][32] = -6.388550e-39; Coef_SKYtoCCD[1][33] = 2.172810e-42; Coef_SKYtoCCD[1][34] = 2.125830e-19; Coef_SKYtoCCD[1][35] = 4.114240e-20; Coef_SKYtoCCD[1][36] = -7.479220e-27; Coef_SKYtoCCD[1][37] = 3.774660e-28; Coef_SKYtoCCD[1][38] = 2.022470e-35; Coef_SKYtoCCD[1][39] = 1.977990e-37; Coef_SKYtoCCD[1][40] = -6.821550e-25; Coef_SKYtoCCD[1][41] = 1.184610e-29; Coef_SKYtoCCD[1][42] = -1.044470e-33; Coef_SKYtoCCD[1][43] = -4.011780e-38; Coef_SKYtoCCD[1][44] = -7.664840e-43; Coef_SKYtoCCD[1][45] = -6.054340e-28; Coef_SKYtoCCD[1][46] = 1.253810e-28; Coef_SKYtoCCD[1][47] = 7.921540e-36; Coef_SKYtoCCD[1][48] = 1.319740e-37; Coef_SKYtoCCD[1][49] = 3.928680e-33; Coef_SKYtoCCD[1][50] = -3.046830e-38; Coef_SKYtoCCD[1][51] = 4.544880e-42; Coef_SKYtoCCD[1][52] = 2.478850e-36; Coef_SKYtoCCD[1][53] = 3.412370e-38; Coef_SKYtoCCD[1][54] = -7.138560e-42; }else if(EL==55){ Coef_SKYtoCCD[0][ 0]= 2.120070e-05; Coef_SKYtoCCD[0][ 1]= -2.291680e-09; Coef_SKYtoCCD[0][ 2]= -2.531430e-13; Coef_SKYtoCCD[0][ 3]= 1.046850e-17; Coef_SKYtoCCD[0][ 4]= 1.589060e-21; Coef_SKYtoCCD[0][ 5]= 6.049880e-26; Coef_SKYtoCCD[0][ 6]= -4.145420e-30; Coef_SKYtoCCD[0][ 7]= -4.102910e-34; Coef_SKYtoCCD[0][ 8]= 9.741080e-39; Coef_SKYtoCCD[0][ 9]= 8.641820e-43; Coef_SKYtoCCD[0][10] = 9.999990e-01; Coef_SKYtoCCD[0][11] = -5.253410e-10; Coef_SKYtoCCD[0][12] = 1.054530e-10; Coef_SKYtoCCD[0][13] = -1.430630e-18; Coef_SKYtoCCD[0][14] = 4.118110e-20; Coef_SKYtoCCD[0][15] = -6.878720e-27; Coef_SKYtoCCD[0][16] = 1.251640e-28; Coef_SKYtoCCD[0][17] = 4.611350e-36; Coef_SKYtoCCD[0][18] = 3.449670e-38; Coef_SKYtoCCD[0][19] = -3.130330e-12; Coef_SKYtoCCD[0][20] = 4.223410e-16; Coef_SKYtoCCD[0][21] = 2.610060e-20; Coef_SKYtoCCD[0][22] = -2.467650e-24; Coef_SKYtoCCD[0][23] = -1.183920e-28; Coef_SKYtoCCD[0][24] = 9.799160e-34; Coef_SKYtoCCD[0][25] = -1.715210e-38; Coef_SKYtoCCD[0][26] = -2.852930e-42; Coef_SKYtoCCD[0][27] = 1.053860e-10; Coef_SKYtoCCD[0][28] = -1.505890e-18; Coef_SKYtoCCD[0][29] = 8.230190e-20; Coef_SKYtoCCD[0][30] = -1.290190e-26; Coef_SKYtoCCD[0][31] = 3.771300e-28; Coef_SKYtoCCD[0][32] = 1.113800e-35; Coef_SKYtoCCD[0][33] = 1.330430e-37; Coef_SKYtoCCD[0][34] = 6.693290e-20; Coef_SKYtoCCD[0][35] = -1.060460e-23; Coef_SKYtoCCD[0][36] = -3.319670e-28; Coef_SKYtoCCD[0][37] = 5.270670e-32; Coef_SKYtoCCD[0][38] = 1.334770e-36; Coef_SKYtoCCD[0][39] = 5.019650e-43; Coef_SKYtoCCD[0][40] = 4.228540e-20; Coef_SKYtoCCD[0][41] = -6.157970e-27; Coef_SKYtoCCD[0][42] = 3.761960e-28; Coef_SKYtoCCD[0][43] = 1.118550e-35; Coef_SKYtoCCD[0][44] = 1.987520e-37; Coef_SKYtoCCD[0][45] = -4.275380e-28; Coef_SKYtoCCD[0][46] = 7.736750e-32; Coef_SKYtoCCD[0][47] = 6.550670e-37; Coef_SKYtoCCD[0][48] = -2.611650e-40; Coef_SKYtoCCD[0][49] = 1.188200e-28; Coef_SKYtoCCD[0][50] = 2.548950e-36; Coef_SKYtoCCD[0][51] = 1.359040e-37; Coef_SKYtoCCD[0][52] = 
8.010650e-37; Coef_SKYtoCCD[0][53] = -1.582080e-40; Coef_SKYtoCCD[0][54] = 4.636130e-38; Coef_SKYtoCCD[1][ 0]= 1.911380e-04; Coef_SKYtoCCD[1][ 1]= 9.999980e-01; Coef_SKYtoCCD[1][ 2]= 5.144330e-10; Coef_SKYtoCCD[1][ 3]= 1.053830e-10; Coef_SKYtoCCD[1][ 4]= -1.202430e-18; Coef_SKYtoCCD[1][ 5]= 4.231710e-20; Coef_SKYtoCCD[1][ 6]= -7.304140e-27; Coef_SKYtoCCD[1][ 7]= 1.186280e-28; Coef_SKYtoCCD[1][ 8]= 5.951210e-36; Coef_SKYtoCCD[1][ 9]= 4.672400e-38; Coef_SKYtoCCD[1][10] = 6.594260e-10; Coef_SKYtoCCD[1][11] = 1.848420e-14; Coef_SKYtoCCD[1][12] = -5.934070e-18; Coef_SKYtoCCD[1][13] = -3.959770e-22; Coef_SKYtoCCD[1][14] = 6.318040e-27; Coef_SKYtoCCD[1][15] = 1.730890e-30; Coef_SKYtoCCD[1][16] = 6.645440e-35; Coef_SKYtoCCD[1][17] = -8.013280e-40; Coef_SKYtoCCD[1][18] = -1.142290e-44; Coef_SKYtoCCD[1][19] = 1.051350e-09; Coef_SKYtoCCD[1][20] = 1.054530e-10; Coef_SKYtoCCD[1][21] = -1.040810e-18; Coef_SKYtoCCD[1][22] = 8.231630e-20; Coef_SKYtoCCD[1][23] = -1.440320e-26; Coef_SKYtoCCD[1][24] = 3.760350e-28; Coef_SKYtoCCD[1][25] = 1.692160e-35; Coef_SKYtoCCD[1][26] = 1.364410e-37; Coef_SKYtoCCD[1][27] = -4.100540e-17; Coef_SKYtoCCD[1][28] = -6.646030e-22; Coef_SKYtoCCD[1][29] = 3.257670e-25; Coef_SKYtoCCD[1][30] = 7.746280e-30; Coef_SKYtoCCD[1][31] = -6.926310e-34; Coef_SKYtoCCD[1][32] = -3.048560e-38; Coef_SKYtoCCD[1][33] = -1.469560e-42; Coef_SKYtoCCD[1][34] = 2.393590e-19; Coef_SKYtoCCD[1][35] = 4.115530e-20; Coef_SKYtoCCD[1][36] = -8.427500e-27; Coef_SKYtoCCD[1][37] = 3.770810e-28; Coef_SKYtoCCD[1][38] = 2.320010e-35; Coef_SKYtoCCD[1][39] = 1.991000e-37; Coef_SKYtoCCD[1][40] = 6.448750e-25; Coef_SKYtoCCD[1][41] = 7.990490e-30; Coef_SKYtoCCD[1][42] = -3.672890e-33; Coef_SKYtoCCD[1][43] = -1.001710e-38; Coef_SKYtoCCD[1][44] = 7.821570e-42; Coef_SKYtoCCD[1][45] = -7.853770e-28; Coef_SKYtoCCD[1][46] = 1.253410e-28; Coef_SKYtoCCD[1][47] = 1.246130e-35; Coef_SKYtoCCD[1][48] = 1.329690e-37; Coef_SKYtoCCD[1][49] = -3.559170e-33; Coef_SKYtoCCD[1][50] = -3.516970e-38; Coef_SKYtoCCD[1][51] = 9.564310e-42; Coef_SKYtoCCD[1][52] = 2.444940e-36; Coef_SKYtoCCD[1][53] = 3.413050e-38; Coef_SKYtoCCD[1][54] = 6.246260e-42; }else if(EL==60){ Coef_SKYtoCCD[0][ 0]= 2.368950e-05; Coef_SKYtoCCD[0][ 1]= 3.438640e-10; Coef_SKYtoCCD[0][ 2]= 1.709680e-13; Coef_SKYtoCCD[0][ 3]= -2.719250e-19; Coef_SKYtoCCD[0][ 4]= -6.313830e-22; Coef_SKYtoCCD[0][ 5]= -1.870400e-26; Coef_SKYtoCCD[0][ 6]= -1.124960e-30; Coef_SKYtoCCD[0][ 7]= 9.183840e-35; Coef_SKYtoCCD[0][ 8]= 2.528880e-39; Coef_SKYtoCCD[0][ 9]= -1.825040e-43; Coef_SKYtoCCD[0][10] = 9.999990e-01; Coef_SKYtoCCD[0][11] = -4.255710e-10; Coef_SKYtoCCD[0][12] = 1.054550e-10; Coef_SKYtoCCD[0][13] = -1.341850e-18; Coef_SKYtoCCD[0][14] = 4.113810e-20; Coef_SKYtoCCD[0][15] = -4.599730e-27; Coef_SKYtoCCD[0][16] = 1.253960e-28; Coef_SKYtoCCD[0][17] = 1.469510e-36; Coef_SKYtoCCD[0][18] = 3.411580e-38; Coef_SKYtoCCD[0][19] = -4.105990e-12; Coef_SKYtoCCD[0][20] = -6.751190e-17; Coef_SKYtoCCD[0][21] = -2.049260e-20; Coef_SKYtoCCD[0][22] = 2.631130e-25; Coef_SKYtoCCD[0][23] = 9.138410e-29; Coef_SKYtoCCD[0][24] = 3.898940e-34; Coef_SKYtoCCD[0][25] = -2.961370e-39; Coef_SKYtoCCD[0][26] = 3.373340e-43; Coef_SKYtoCCD[0][27] = 1.053860e-10; Coef_SKYtoCCD[0][28] = -1.506110e-18; Coef_SKYtoCCD[0][29] = 8.226190e-20; Coef_SKYtoCCD[0][30] = -8.276270e-27; Coef_SKYtoCCD[0][31] = 3.776490e-28; Coef_SKYtoCCD[0][32] = 3.757430e-36; Coef_SKYtoCCD[0][33] = 1.315920e-37; Coef_SKYtoCCD[0][34] = 1.003650e-19; Coef_SKYtoCCD[0][35] = 1.751130e-24; Coef_SKYtoCCD[0][36] = 2.659850e-28; Coef_SKYtoCCD[0][37] = 
-6.799480e-33; Coef_SKYtoCCD[0][38] = -9.819040e-37; Coef_SKYtoCCD[0][39] = -6.108700e-42; Coef_SKYtoCCD[0][40] = 4.228110e-20; Coef_SKYtoCCD[0][41] = -3.507480e-27; Coef_SKYtoCCD[0][42] = 3.764180e-28; Coef_SKYtoCCD[0][43] = 2.675820e-36; Coef_SKYtoCCD[0][44] = 1.973190e-37; Coef_SKYtoCCD[0][45] = -7.134530e-28; Coef_SKYtoCCD[0][46] = -1.295150e-32; Coef_SKYtoCCD[0][47] = -5.856250e-37; Coef_SKYtoCCD[0][48] = 3.937840e-41; Coef_SKYtoCCD[0][49] = 1.188300e-28; Coef_SKYtoCCD[0][50] = -2.696740e-37; Coef_SKYtoCCD[0][51] = 1.355850e-37; Coef_SKYtoCCD[0][52] = 1.427430e-36; Coef_SKYtoCCD[0][53] = 2.606690e-41; Coef_SKYtoCCD[0][54] = 4.636150e-38; Coef_SKYtoCCD[1][ 0]= 8.640180e-05; Coef_SKYtoCCD[1][ 1]= 9.999990e-01; Coef_SKYtoCCD[1][ 2]= 4.329550e-10; Coef_SKYtoCCD[1][ 3]= 1.053850e-10; Coef_SKYtoCCD[1][ 4]= -1.102760e-18; Coef_SKYtoCCD[1][ 5]= 4.230180e-20; Coef_SKYtoCCD[1][ 6]= -5.576850e-27; Coef_SKYtoCCD[1][ 7]= 1.186940e-28; Coef_SKYtoCCD[1][ 8]= 4.319060e-36; Coef_SKYtoCCD[1][ 9]= 4.663720e-38; Coef_SKYtoCCD[1][10] = -2.346600e-11; Coef_SKYtoCCD[1][11] = -3.259480e-14; Coef_SKYtoCCD[1][12] = -2.761060e-19; Coef_SKYtoCCD[1][13] = 1.463720e-22; Coef_SKYtoCCD[1][14] = 3.854760e-26; Coef_SKYtoCCD[1][15] = 7.223990e-32; Coef_SKYtoCCD[1][16] = -5.812490e-35; Coef_SKYtoCCD[1][17] = 5.687010e-40; Coef_SKYtoCCD[1][18] = 1.532030e-44; Coef_SKYtoCCD[1][19] = 8.703940e-10; Coef_SKYtoCCD[1][20] = 1.054540e-10; Coef_SKYtoCCD[1][21] = -9.321000e-19; Coef_SKYtoCCD[1][22] = 8.224240e-20; Coef_SKYtoCCD[1][23] = -1.128150e-26; Coef_SKYtoCCD[1][24] = 3.766630e-28; Coef_SKYtoCCD[1][25] = 1.278100e-35; Coef_SKYtoCCD[1][26] = 1.351140e-37; Coef_SKYtoCCD[1][27] = 4.668370e-18; Coef_SKYtoCCD[1][28] = 1.475400e-21; Coef_SKYtoCCD[1][29] = -1.190020e-25; Coef_SKYtoCCD[1][30] = -6.098920e-30; Coef_SKYtoCCD[1][31] = -1.289370e-33; Coef_SKYtoCCD[1][32] = -6.445060e-39; Coef_SKYtoCCD[1][33] = 1.181950e-42; Coef_SKYtoCCD[1][34] = 1.731680e-19; Coef_SKYtoCCD[1][35] = 4.115360e-20; Coef_SKYtoCCD[1][36] = -6.715660e-27; Coef_SKYtoCCD[1][37] = 3.776780e-28; Coef_SKYtoCCD[1][38] = 1.854690e-35; Coef_SKYtoCCD[1][39] = 1.962100e-37; Coef_SKYtoCCD[1][40] = -1.133550e-25; Coef_SKYtoCCD[1][41] = -1.475060e-29; Coef_SKYtoCCD[1][42] = 3.046520e-33; Coef_SKYtoCCD[1][43] = 4.900330e-38; Coef_SKYtoCCD[1][44] = 7.029810e-42; Coef_SKYtoCCD[1][45] = -4.989570e-28; Coef_SKYtoCCD[1][46] = 1.252840e-28; Coef_SKYtoCCD[1][47] = 9.766060e-36; Coef_SKYtoCCD[1][48] = 1.319700e-37; Coef_SKYtoCCD[1][49] = 7.795260e-34; Coef_SKYtoCCD[1][50] = 3.627110e-38; Coef_SKYtoCCD[1][51] = -1.496080e-41; Coef_SKYtoCCD[1][52] = 1.735960e-36; Coef_SKYtoCCD[1][53] = 3.431480e-38; Coef_SKYtoCCD[1][54] = -1.476640e-42; }else if(EL==65){ Coef_SKYtoCCD[0][ 0]= 1.287590e-05; Coef_SKYtoCCD[0][ 1]= 1.716000e-09; Coef_SKYtoCCD[0][ 2]= 8.654280e-15; Coef_SKYtoCCD[0][ 3]= -2.031180e-17; Coef_SKYtoCCD[0][ 4]= -6.860970e-23; Coef_SKYtoCCD[0][ 5]= 2.840890e-26; Coef_SKYtoCCD[0][ 6]= -5.236490e-31; Coef_SKYtoCCD[0][ 7]= 1.170680e-34; Coef_SKYtoCCD[0][ 8]= 1.333130e-39; Coef_SKYtoCCD[0][ 9]= -2.196560e-43; Coef_SKYtoCCD[0][10] = 1.000000e+00; Coef_SKYtoCCD[0][11] = -3.487610e-10; Coef_SKYtoCCD[0][12] = 1.054520e-10; Coef_SKYtoCCD[0][13] = -1.091830e-18; Coef_SKYtoCCD[0][14] = 4.117000e-20; Coef_SKYtoCCD[0][15] = -3.635590e-27; Coef_SKYtoCCD[0][16] = 1.252840e-28; Coef_SKYtoCCD[0][17] = 9.538930e-37; Coef_SKYtoCCD[0][18] = 3.420940e-38; Coef_SKYtoCCD[0][19] = -2.143430e-12; Coef_SKYtoCCD[0][20] = -2.761430e-16; Coef_SKYtoCCD[0][21] = -5.050540e-22; Coef_SKYtoCCD[0][22] = 
3.112080e-24; Coef_SKYtoCCD[0][23] = 1.632460e-29; Coef_SKYtoCCD[0][24] = -6.202270e-33; Coef_SKYtoCCD[0][25] = -5.767770e-39; Coef_SKYtoCCD[0][26] = -2.174520e-43; Coef_SKYtoCCD[0][27] = 1.053810e-10; Coef_SKYtoCCD[0][28] = -1.042720e-18; Coef_SKYtoCCD[0][29] = 8.237150e-20; Coef_SKYtoCCD[0][30] = -7.247710e-27; Coef_SKYtoCCD[0][31] = 3.768050e-28; Coef_SKYtoCCD[0][32] = 3.516070e-36; Coef_SKYtoCCD[0][33] = 1.336020e-37; Coef_SKYtoCCD[0][34] = 5.077040e-20; Coef_SKYtoCCD[0][35] = 6.237910e-24; Coef_SKYtoCCD[0][36] = -1.618680e-29; Coef_SKYtoCCD[0][37] = -5.425180e-32; Coef_SKYtoCCD[0][38] = -1.595330e-37; Coef_SKYtoCCD[0][39] = 7.494270e-41; Coef_SKYtoCCD[0][40] = 4.234700e-20; Coef_SKYtoCCD[0][41] = -4.381310e-27; Coef_SKYtoCCD[0][42] = 3.756820e-28; Coef_SKYtoCCD[0][43] = 2.662960e-36; Coef_SKYtoCCD[0][44] = 1.995410e-37; Coef_SKYtoCCD[0][45] = -3.527540e-28; Coef_SKYtoCCD[0][46] = -4.262040e-32; Coef_SKYtoCCD[0][47] = 1.775420e-37; Coef_SKYtoCCD[0][48] = 1.974890e-40; Coef_SKYtoCCD[0][49] = 1.184880e-28; Coef_SKYtoCCD[0][50] = 3.654150e-36; Coef_SKYtoCCD[0][51] = 1.371840e-37; Coef_SKYtoCCD[0][52] = 6.968190e-37; Coef_SKYtoCCD[0][53] = 9.008560e-41; Coef_SKYtoCCD[0][54] = 4.694860e-38; Coef_SKYtoCCD[1][ 0]= 1.378710e-04; Coef_SKYtoCCD[1][ 1]= 9.999990e-01; Coef_SKYtoCCD[1][ 2]= 3.420050e-10; Coef_SKYtoCCD[1][ 3]= 1.053850e-10; Coef_SKYtoCCD[1][ 4]= -6.955870e-19; Coef_SKYtoCCD[1][ 5]= 4.230310e-20; Coef_SKYtoCCD[1][ 6]= -5.778130e-27; Coef_SKYtoCCD[1][ 7]= 1.186880e-28; Coef_SKYtoCCD[1][ 8]= 5.847670e-36; Coef_SKYtoCCD[1][ 9]= 4.664530e-38; Coef_SKYtoCCD[1][10] = -1.272810e-11; Coef_SKYtoCCD[1][11] = -2.153490e-14; Coef_SKYtoCCD[1][12] = 5.757840e-18; Coef_SKYtoCCD[1][13] = -3.969060e-23; Coef_SKYtoCCD[1][14] = 5.037810e-27; Coef_SKYtoCCD[1][15] = 8.134920e-31; Coef_SKYtoCCD[1][16] = -2.061970e-35; Coef_SKYtoCCD[1][17] = 3.801440e-42; Coef_SKYtoCCD[1][18] = -1.911710e-44; Coef_SKYtoCCD[1][19] = 7.075050e-10; Coef_SKYtoCCD[1][20] = 1.054540e-10; Coef_SKYtoCCD[1][21] = -8.562580e-19; Coef_SKYtoCCD[1][22] = 8.226280e-20; Coef_SKYtoCCD[1][23] = -8.239680e-27; Coef_SKYtoCCD[1][24] = 3.764250e-28; Coef_SKYtoCCD[1][25] = 8.013040e-36; Coef_SKYtoCCD[1][26] = 1.356110e-37; Coef_SKYtoCCD[1][27] = 9.821610e-19; Coef_SKYtoCCD[1][28] = 1.125040e-21; Coef_SKYtoCCD[1][29] = -3.751690e-25; Coef_SKYtoCCD[1][30] = -2.122350e-30; Coef_SKYtoCCD[1][31] = -7.195950e-35; Coef_SKYtoCCD[1][32] = -1.760320e-38; Coef_SKYtoCCD[1][33] = 6.781000e-43; Coef_SKYtoCCD[1][34] = 1.241890e-19; Coef_SKYtoCCD[1][35] = 4.116410e-20; Coef_SKYtoCCD[1][36] = -4.043340e-27; Coef_SKYtoCCD[1][37] = 3.774590e-28; Coef_SKYtoCCD[1][38] = 8.346880e-36; Coef_SKYtoCCD[1][39] = 1.977220e-37; Coef_SKYtoCCD[1][40] = -1.371850e-26; Coef_SKYtoCCD[1][41] = -1.177730e-29; Coef_SKYtoCCD[1][42] = 5.335860e-33; Coef_SKYtoCCD[1][43] = 3.784010e-38; Coef_SKYtoCCD[1][44] = -8.256820e-43; Coef_SKYtoCCD[1][45] = -3.498750e-28; Coef_SKYtoCCD[1][46] = 1.252190e-28; Coef_SKYtoCCD[1][47] = 4.518410e-36; Coef_SKYtoCCD[1][48] = 1.323300e-37; Coef_SKYtoCCD[1][49] = 3.608880e-35; Coef_SKYtoCCD[1][50] = 2.727160e-38; Coef_SKYtoCCD[1][51] = -1.952150e-41; Coef_SKYtoCCD[1][52] = 1.374110e-36; Coef_SKYtoCCD[1][53] = 3.444790e-38; Coef_SKYtoCCD[1][54] = 6.904580e-44; }else if(EL==70){ Coef_SKYtoCCD[0][ 0]= 1.116690e-05; Coef_SKYtoCCD[0][ 1]= -7.302460e-11; Coef_SKYtoCCD[0][ 2]= 2.124330e-14; Coef_SKYtoCCD[0][ 3]= -8.372860e-18; Coef_SKYtoCCD[0][ 4]= 4.676000e-22; Coef_SKYtoCCD[0][ 5]= 4.187980e-26; Coef_SKYtoCCD[0][ 6]= -2.642090e-30; Coef_SKYtoCCD[0][ 7]= 
-7.316370e-35; Coef_SKYtoCCD[0][ 8]= 3.968630e-39; Coef_SKYtoCCD[0][ 9]= 1.565640e-43; Coef_SKYtoCCD[0][10] = 1.000000e+00; Coef_SKYtoCCD[0][11] = -2.803870e-10; Coef_SKYtoCCD[0][12] = 1.054530e-10; Coef_SKYtoCCD[0][13] = -7.543390e-19; Coef_SKYtoCCD[0][14] = 4.115840e-20; Coef_SKYtoCCD[0][15] = -3.404150e-27; Coef_SKYtoCCD[0][16] = 1.253100e-28; Coef_SKYtoCCD[0][17] = 1.472520e-36; Coef_SKYtoCCD[0][18] = 3.426280e-38; Coef_SKYtoCCD[0][19] = -1.810650e-12; Coef_SKYtoCCD[0][20] = 3.492320e-17; Coef_SKYtoCCD[0][21] = -6.843910e-21; Coef_SKYtoCCD[0][22] = 9.237540e-25; Coef_SKYtoCCD[0][23] = -9.540060e-30; Coef_SKYtoCCD[0][24] = -3.822050e-33; Coef_SKYtoCCD[0][25] = 5.469860e-38; Coef_SKYtoCCD[0][26] = -6.432320e-43; Coef_SKYtoCCD[0][27] = 1.053830e-10; Coef_SKYtoCCD[0][28] = -6.473030e-19; Coef_SKYtoCCD[0][29] = 8.235730e-20; Coef_SKYtoCCD[0][30] = -8.290570e-27; Coef_SKYtoCCD[0][31] = 3.771670e-28; Coef_SKYtoCCD[0][32] = 1.116450e-35; Coef_SKYtoCCD[0][33] = 1.322680e-37; Coef_SKYtoCCD[0][34] = 4.242310e-20; Coef_SKYtoCCD[0][35] = -1.189990e-24; Coef_SKYtoCCD[0][36] = 1.504750e-28; Coef_SKYtoCCD[0][37] = -1.133310e-32; Coef_SKYtoCCD[0][38] = -7.008840e-38; Coef_SKYtoCCD[0][39] = 4.839850e-41; Coef_SKYtoCCD[0][40] = 4.233780e-20; Coef_SKYtoCCD[0][41] = -4.624220e-27; Coef_SKYtoCCD[0][42] = 3.755240e-28; Coef_SKYtoCCD[0][43] = 1.047720e-35; Coef_SKYtoCCD[0][44] = 1.995840e-37; Coef_SKYtoCCD[0][45] = -2.936020e-28; Coef_SKYtoCCD[0][46] = 8.814120e-33; Coef_SKYtoCCD[0][47] = -6.429460e-37; Coef_SKYtoCCD[0][48] = 1.039650e-41; Coef_SKYtoCCD[0][49] = 1.185250e-28; Coef_SKYtoCCD[0][50] = 5.249650e-36; Coef_SKYtoCCD[0][51] = 1.377580e-37; Coef_SKYtoCCD[0][52] = 5.781910e-37; Coef_SKYtoCCD[0][53] = -1.013400e-41; Coef_SKYtoCCD[0][54] = 4.688840e-38; Coef_SKYtoCCD[1][ 0]= 7.379150e-05; Coef_SKYtoCCD[1][ 1]= 9.999990e-01; Coef_SKYtoCCD[1][ 2]= 2.740270e-10; Coef_SKYtoCCD[1][ 3]= 1.053870e-10; Coef_SKYtoCCD[1][ 4]= -6.926370e-19; Coef_SKYtoCCD[1][ 5]= 4.226760e-20; Coef_SKYtoCCD[1][ 6]= -3.602580e-27; Coef_SKYtoCCD[1][ 7]= 1.188900e-28; Coef_SKYtoCCD[1][ 8]= 2.837760e-36; Coef_SKYtoCCD[1][ 9]= 4.627380e-38; Coef_SKYtoCCD[1][10] = 2.350200e-11; Coef_SKYtoCCD[1][11] = 1.099250e-13; Coef_SKYtoCCD[1][12] = -1.591030e-17; Coef_SKYtoCCD[1][13] = -7.282490e-22; Coef_SKYtoCCD[1][14] = 9.597750e-26; Coef_SKYtoCCD[1][15] = 6.138160e-31; Coef_SKYtoCCD[1][16] = -3.989010e-35; Coef_SKYtoCCD[1][17] = 5.426530e-41; Coef_SKYtoCCD[1][18] = -8.656660e-44; Coef_SKYtoCCD[1][19] = 5.567970e-10; Coef_SKYtoCCD[1][20] = 1.054550e-10; Coef_SKYtoCCD[1][21] = -6.158760e-19; Coef_SKYtoCCD[1][22] = 8.223660e-20; Coef_SKYtoCCD[1][23] = -6.131680e-27; Coef_SKYtoCCD[1][24] = 3.766270e-28; Coef_SKYtoCCD[1][25] = 4.418770e-36; Coef_SKYtoCCD[1][26] = 1.349690e-37; Coef_SKYtoCCD[1][27] = 7.727540e-18; Coef_SKYtoCCD[1][28] = -4.842960e-21; Coef_SKYtoCCD[1][29] = 6.374790e-25; Coef_SKYtoCCD[1][30] = 2.570880e-29; Coef_SKYtoCCD[1][31] = -3.882690e-33; Coef_SKYtoCCD[1][32] = -1.353680e-38; Coef_SKYtoCCD[1][33] = 1.860030e-42; Coef_SKYtoCCD[1][34] = 1.090370e-19; Coef_SKYtoCCD[1][35] = 4.114950e-20; Coef_SKYtoCCD[1][36] = -4.510860e-27; Coef_SKYtoCCD[1][37] = 3.776710e-28; Coef_SKYtoCCD[1][38] = 9.561770e-36; Coef_SKYtoCCD[1][39] = 1.973770e-37; Coef_SKYtoCCD[1][40] = -2.484750e-25; Coef_SKYtoCCD[1][41] = 4.860460e-29; Coef_SKYtoCCD[1][42] = -4.780790e-33; Coef_SKYtoCCD[1][43] = -1.519280e-37; Coef_SKYtoCCD[1][44] = 2.451970e-41; Coef_SKYtoCCD[1][45] = -3.095680e-28; Coef_SKYtoCCD[1][46] = 1.253010e-28; Coef_SKYtoCCD[1][47] = 7.588900e-36; 
Coef_SKYtoCCD[1][48] = 1.316320e-37; Coef_SKYtoCCD[1][49] = 1.943360e-33; Coef_SKYtoCCD[1][50] = -1.303960e-37; Coef_SKYtoCCD[1][51] = 5.403760e-42; Coef_SKYtoCCD[1][52] = 1.073980e-36; Coef_SKYtoCCD[1][53] = 3.431320e-38; Coef_SKYtoCCD[1][54] = -4.081070e-42; }else if(EL==75){ Coef_SKYtoCCD[0][ 0]= 4.542690e-06; Coef_SKYtoCCD[0][ 1]= 1.296800e-10; Coef_SKYtoCCD[0][ 2]= 2.211780e-13; Coef_SKYtoCCD[0][ 3]= -6.022910e-18; Coef_SKYtoCCD[0][ 4]= -4.738940e-22; Coef_SKYtoCCD[0][ 5]= 5.167470e-26; Coef_SKYtoCCD[0][ 6]= -1.416920e-30; Coef_SKYtoCCD[0][ 7]= -1.796020e-34; Coef_SKYtoCCD[0][ 8]= 2.495480e-39; Coef_SKYtoCCD[0][ 9]= 3.404300e-43; Coef_SKYtoCCD[0][10] = 1.000000e+00; Coef_SKYtoCCD[0][11] = -1.992900e-10; Coef_SKYtoCCD[0][12] = 1.054550e-10; Coef_SKYtoCCD[0][13] = -7.053790e-19; Coef_SKYtoCCD[0][14] = 4.113600e-20; Coef_SKYtoCCD[0][15] = -1.998890e-27; Coef_SKYtoCCD[0][16] = 1.254120e-28; Coef_SKYtoCCD[0][17] = 6.411820e-37; Coef_SKYtoCCD[0][18] = 3.408310e-38; Coef_SKYtoCCD[0][19] = -9.447260e-13; Coef_SKYtoCCD[0][20] = 3.963980e-18; Coef_SKYtoCCD[0][21] = -2.987850e-20; Coef_SKYtoCCD[0][22] = 4.327510e-25; Coef_SKYtoCCD[0][23] = 7.795610e-29; Coef_SKYtoCCD[0][24] = -2.726600e-33; Coef_SKYtoCCD[0][25] = 1.879840e-38; Coef_SKYtoCCD[0][26] = 1.131500e-43; Coef_SKYtoCCD[0][27] = 1.053850e-10; Coef_SKYtoCCD[0][28] = -8.569080e-19; Coef_SKYtoCCD[0][29] = 8.227350e-20; Coef_SKYtoCCD[0][30] = -1.948830e-27; Coef_SKYtoCCD[0][31] = 3.775440e-28; Coef_SKYtoCCD[0][32] = -2.887790e-36; Coef_SKYtoCCD[0][33] = 1.316990e-37; Coef_SKYtoCCD[0][34] = 2.612040e-20; Coef_SKYtoCCD[0][35] = -6.318480e-25; Coef_SKYtoCCD[0][36] = 4.778220e-28; Coef_SKYtoCCD[0][37] = -3.111430e-33; Coef_SKYtoCCD[0][38] = -9.045120e-37; Coef_SKYtoCCD[0][39] = 3.206950e-41; Coef_SKYtoCCD[0][40] = 4.230190e-20; Coef_SKYtoCCD[0][41] = -9.930950e-28; Coef_SKYtoCCD[0][42] = 3.762150e-28; Coef_SKYtoCCD[0][43] = -5.858310e-36; Coef_SKYtoCCD[0][44] = 1.981810e-37; Coef_SKYtoCCD[0][45] = -2.009800e-28; Coef_SKYtoCCD[0][46] = 8.325950e-33; Coef_SKYtoCCD[0][47] = -1.609580e-36; Coef_SKYtoCCD[0][48] = -1.482370e-41; Coef_SKYtoCCD[0][49] = 1.186840e-28; Coef_SKYtoCCD[0][50] = -8.108250e-37; Coef_SKYtoCCD[0][51] = 1.362140e-37; Coef_SKYtoCCD[0][52] = 4.180720e-37; Coef_SKYtoCCD[0][53] = -2.454560e-41; Coef_SKYtoCCD[0][54] = 4.666950e-38; Coef_SKYtoCCD[1][ 0]= 8.899000e-05; Coef_SKYtoCCD[1][ 1]= 1.000000e+00; Coef_SKYtoCCD[1][ 2]= 2.027010e-10; Coef_SKYtoCCD[1][ 3]= 1.053860e-10; Coef_SKYtoCCD[1][ 4]= -4.067880e-19; Coef_SKYtoCCD[1][ 5]= 4.228600e-20; Coef_SKYtoCCD[1][ 6]= -3.754940e-27; Coef_SKYtoCCD[1][ 7]= 1.187960e-28; Coef_SKYtoCCD[1][ 8]= 4.797460e-36; Coef_SKYtoCCD[1][ 9]= 4.642720e-38; Coef_SKYtoCCD[1][10] = 2.334020e-10; Coef_SKYtoCCD[1][11] = -2.574770e-14; Coef_SKYtoCCD[1][12] = -9.744220e-18; Coef_SKYtoCCD[1][13] = -1.508580e-23; Coef_SKYtoCCD[1][14] = 4.971630e-26; Coef_SKYtoCCD[1][15] = 8.632380e-31; Coef_SKYtoCCD[1][16] = -7.487640e-35; Coef_SKYtoCCD[1][17] = 3.833490e-41; Coef_SKYtoCCD[1][18] = 2.232670e-44; Coef_SKYtoCCD[1][19] = 4.171750e-10; Coef_SKYtoCCD[1][20] = 1.054540e-10; Coef_SKYtoCCD[1][21] = -6.046790e-19; Coef_SKYtoCCD[1][22] = 8.227860e-20; Coef_SKYtoCCD[1][23] = -3.126960e-27; Coef_SKYtoCCD[1][24] = 3.762750e-28; Coef_SKYtoCCD[1][25] = 1.171560e-38; Coef_SKYtoCCD[1][26] = 1.359200e-37; Coef_SKYtoCCD[1][27] = -1.179380e-17; Coef_SKYtoCCD[1][28] = 1.184970e-21; Coef_SKYtoCCD[1][29] = 4.390170e-25; Coef_SKYtoCCD[1][30] = -3.313780e-30; Coef_SKYtoCCD[1][31] = -1.682110e-33; Coef_SKYtoCCD[1][32] = -1.898110e-38; 
Coef_SKYtoCCD[1][33] = 1.530080e-42; Coef_SKYtoCCD[1][34] = 6.011380e-20; Coef_SKYtoCCD[1][35] = 4.115600e-20; Coef_SKYtoCCD[1][36] = -2.309640e-27; Coef_SKYtoCCD[1][37] = 3.774010e-28; Coef_SKYtoCCD[1][38] = 1.524900e-36; Coef_SKYtoCCD[1][39] = 1.981170e-37; Coef_SKYtoCCD[1][40] = 1.522440e-25; Coef_SKYtoCCD[1][41] = -1.039640e-29; Coef_SKYtoCCD[1][42] = -4.454080e-33; Coef_SKYtoCCD[1][43] = 4.759690e-38; Coef_SKYtoCCD[1][44] = 9.262920e-42; Coef_SKYtoCCD[1][45] = -1.276740e-28; Coef_SKYtoCCD[1][46] = 1.253000e-28; Coef_SKYtoCCD[1][47] = 3.366590e-36; Coef_SKYtoCCD[1][48] = 1.322750e-37; Coef_SKYtoCCD[1][49] = -6.981670e-34; Coef_SKYtoCCD[1][50] = 1.544400e-38; Coef_SKYtoCCD[1][51] = 1.205360e-41; Coef_SKYtoCCD[1][52] = 6.493160e-37; Coef_SKYtoCCD[1][53] = 3.425900e-38; Coef_SKYtoCCD[1][54] = 1.033080e-42; }else if(EL==80){ Coef_SKYtoCCD[0][ 0]= 9.543080e-06; Coef_SKYtoCCD[0][ 1]= -1.395010e-09; Coef_SKYtoCCD[0][ 2]= 8.543890e-14; Coef_SKYtoCCD[0][ 3]= 2.608860e-18; Coef_SKYtoCCD[0][ 4]= 8.024980e-23; Coef_SKYtoCCD[0][ 5]= 4.320860e-26; Coef_SKYtoCCD[0][ 6]= -2.010640e-30; Coef_SKYtoCCD[0][ 7]= -2.338080e-34; Coef_SKYtoCCD[0][ 8]= 3.620470e-39; Coef_SKYtoCCD[0][ 9]= 5.506870e-43; Coef_SKYtoCCD[0][10] = 1.000000e+00; Coef_SKYtoCCD[0][11] = -1.358610e-10; Coef_SKYtoCCD[0][12] = 1.054550e-10; Coef_SKYtoCCD[0][13] = -3.606230e-19; Coef_SKYtoCCD[0][14] = 4.115670e-20; Coef_SKYtoCCD[0][15] = -2.124670e-27; Coef_SKYtoCCD[0][16] = 1.252870e-28; Coef_SKYtoCCD[0][17] = 1.928900e-36; Coef_SKYtoCCD[0][18] = 3.427610e-38; Coef_SKYtoCCD[0][19] = -1.621430e-12; Coef_SKYtoCCD[0][20] = 2.616790e-16; Coef_SKYtoCCD[0][21] = -1.353820e-20; Coef_SKYtoCCD[0][22] = -9.407270e-25; Coef_SKYtoCCD[0][23] = 2.382130e-29; Coef_SKYtoCCD[0][24] = -6.446490e-34; Coef_SKYtoCCD[0][25] = 2.379580e-38; Coef_SKYtoCCD[0][26] = -3.889180e-42; Coef_SKYtoCCD[0][27] = 1.053850e-10; Coef_SKYtoCCD[0][28] = -5.854770e-19; Coef_SKYtoCCD[0][29] = 8.226380e-20; Coef_SKYtoCCD[0][30] = -3.073610e-27; Coef_SKYtoCCD[0][31] = 3.773060e-28; Coef_SKYtoCCD[0][32] = 4.190570e-36; Coef_SKYtoCCD[0][33] = 1.328180e-37; Coef_SKYtoCCD[0][34] = 3.929070e-20; Coef_SKYtoCCD[0][35] = -6.613150e-24; Coef_SKYtoCCD[0][36] = 2.344360e-28; Coef_SKYtoCCD[0][37] = 2.279940e-32; Coef_SKYtoCCD[0][38] = -3.340150e-37; Coef_SKYtoCCD[0][39] = 2.429100e-41; Coef_SKYtoCCD[0][40] = 4.230340e-20; Coef_SKYtoCCD[0][41] = -1.826040e-28; Coef_SKYtoCCD[0][42] = 3.765630e-28; Coef_SKYtoCCD[0][43] = 1.481870e-37; Coef_SKYtoCCD[0][44] = 1.977910e-37; Coef_SKYtoCCD[0][45] = -2.780130e-28; Coef_SKYtoCCD[0][46] = 4.742330e-32; Coef_SKYtoCCD[0][47] = -8.603880e-37; Coef_SKYtoCCD[0][48] = -1.358770e-40; Coef_SKYtoCCD[0][49] = 1.186600e-28; Coef_SKYtoCCD[0][50] = -1.905840e-36; Coef_SKYtoCCD[0][51] = 1.350600e-37; Coef_SKYtoCCD[0][52] = 5.541500e-37; Coef_SKYtoCCD[0][53] = -8.985200e-41; Coef_SKYtoCCD[0][54] = 4.673230e-38; Coef_SKYtoCCD[1][ 0]= 6.786500e-05; Coef_SKYtoCCD[1][ 1]= 1.000000e+00; Coef_SKYtoCCD[1][ 2]= 1.341210e-10; Coef_SKYtoCCD[1][ 3]= 1.053870e-10; Coef_SKYtoCCD[1][ 4]= -2.281850e-19; Coef_SKYtoCCD[1][ 5]= 4.228070e-20; Coef_SKYtoCCD[1][ 6]= -2.582380e-27; Coef_SKYtoCCD[1][ 7]= 1.187930e-28; Coef_SKYtoCCD[1][ 8]= 2.660890e-36; Coef_SKYtoCCD[1][ 9]= 4.648980e-38; Coef_SKYtoCCD[1][10] = -5.634830e-11; Coef_SKYtoCCD[1][11] = -4.317430e-14; Coef_SKYtoCCD[1][12] = -2.912850e-18; Coef_SKYtoCCD[1][13] = 7.201490e-24; Coef_SKYtoCCD[1][14] = -6.572650e-27; Coef_SKYtoCCD[1][15] = 1.276580e-30; Coef_SKYtoCCD[1][16] = -2.736510e-35; Coef_SKYtoCCD[1][17] = -3.540220e-40; 
Coef_SKYtoCCD[1][18] = 8.615830e-45; Coef_SKYtoCCD[1][19] = 2.808640e-10; Coef_SKYtoCCD[1][20] = 1.054570e-10; Coef_SKYtoCCD[1][21] = -3.975510e-19; Coef_SKYtoCCD[1][22] = 8.220810e-20; Coef_SKYtoCCD[1][23] = -3.657150e-27; Coef_SKYtoCCD[1][24] = 3.767530e-28; Coef_SKYtoCCD[1][25] = 6.442190e-36; Coef_SKYtoCCD[1][26] = 1.349700e-37; Coef_SKYtoCCD[1][27] = 1.977620e-18; Coef_SKYtoCCD[1][28] = 2.135840e-21; Coef_SKYtoCCD[1][29] = 2.197660e-25; Coef_SKYtoCCD[1][30] = -6.017450e-30; Coef_SKYtoCCD[1][31] = 4.447560e-34; Coef_SKYtoCCD[1][32] = -2.457150e-38; Coef_SKYtoCCD[1][33] = 5.869370e-43; Coef_SKYtoCCD[1][34] = 3.360340e-20; Coef_SKYtoCCD[1][35] = 4.112860e-20; Coef_SKYtoCCD[1][36] = -7.746830e-28; Coef_SKYtoCCD[1][37] = 3.779510e-28; Coef_SKYtoCCD[1][38] = 2.713290e-36; Coef_SKYtoCCD[1][39] = 1.966710e-37; Coef_SKYtoCCD[1][40] = -1.020990e-26; Coef_SKYtoCCD[1][41] = -2.150590e-29; Coef_SKYtoCCD[1][42] = -3.688840e-33; Coef_SKYtoCCD[1][43] = 7.308510e-38; Coef_SKYtoCCD[1][44] = -4.296550e-42; Coef_SKYtoCCD[1][45] = -5.792140e-29; Coef_SKYtoCCD[1][46] = 1.254020e-28; Coef_SKYtoCCD[1][47] = -6.703630e-37; Coef_SKYtoCCD[1][48] = 1.310070e-37; Coef_SKYtoCCD[1][49] = -2.399040e-35; Coef_SKYtoCCD[1][50] = 4.880140e-38; Coef_SKYtoCCD[1][51] = 1.613080e-41; Coef_SKYtoCCD[1][52] = 4.321320e-37; Coef_SKYtoCCD[1][53] = 3.415490e-38; Coef_SKYtoCCD[1][54] = 6.281920e-44; }else if(EL==85){ Coef_SKYtoCCD[0][ 0]= 1.444580e-05; Coef_SKYtoCCD[0][ 1]= 3.903490e-10; Coef_SKYtoCCD[0][ 2]= -2.973690e-14; Coef_SKYtoCCD[0][ 3]= 1.322760e-17; Coef_SKYtoCCD[0][ 4]= 3.720580e-22; Coef_SKYtoCCD[0][ 5]= -1.311190e-25; Coef_SKYtoCCD[0][ 6]= -1.841070e-30; Coef_SKYtoCCD[0][ 7]= 4.835220e-34; Coef_SKYtoCCD[0][ 8]= 4.575740e-39; Coef_SKYtoCCD[0][ 9]= -1.032790e-42; Coef_SKYtoCCD[0][10] = 1.000000e+00; Coef_SKYtoCCD[0][11] = -8.247410e-11; Coef_SKYtoCCD[0][12] = 1.054560e-10; Coef_SKYtoCCD[0][13] = -4.235350e-20; Coef_SKYtoCCD[0][14] = 4.114720e-20; Coef_SKYtoCCD[0][15] = -1.664180e-27; Coef_SKYtoCCD[0][16] = 1.253000e-28; Coef_SKYtoCCD[0][17] = 1.635100e-36; Coef_SKYtoCCD[0][18] = 3.432040e-38; Coef_SKYtoCCD[0][19] = -2.309520e-12; Coef_SKYtoCCD[0][20] = -1.360830e-16; Coef_SKYtoCCD[0][21] = 1.744350e-21; Coef_SKYtoCCD[0][22] = -7.527440e-25; Coef_SKYtoCCD[0][23] = -1.465580e-29; Coef_SKYtoCCD[0][24] = 6.547420e-33; Coef_SKYtoCCD[0][25] = -1.363660e-38; Coef_SKYtoCCD[0][26] = 3.972170e-42; Coef_SKYtoCCD[0][27] = 1.053870e-10; Coef_SKYtoCCD[0][28] = 8.761820e-20; Coef_SKYtoCCD[0][29] = 8.221920e-20; Coef_SKYtoCCD[0][30] = -5.444330e-27; Coef_SKYtoCCD[0][31] = 3.778820e-28; Coef_SKYtoCCD[0][32] = 1.069960e-35; Coef_SKYtoCCD[0][33] = 1.313470e-37; Coef_SKYtoCCD[0][34] = 5.299030e-20; Coef_SKYtoCCD[0][35] = 4.635810e-24; Coef_SKYtoCCD[0][36] = -9.143700e-30; Coef_SKYtoCCD[0][37] = 6.858510e-34; Coef_SKYtoCCD[0][38] = 2.004300e-37; Coef_SKYtoCCD[0][39] = -9.507720e-41; Coef_SKYtoCCD[0][40] = 4.227730e-20; Coef_SKYtoCCD[0][41] = -2.835680e-27; Coef_SKYtoCCD[0][42] = 3.767020e-28; Coef_SKYtoCCD[0][43] = 1.344020e-35; Coef_SKYtoCCD[0][44] = 1.962130e-37; Coef_SKYtoCCD[0][45] = -3.590410e-28; Coef_SKYtoCCD[0][46] = -4.145850e-32; Coef_SKYtoCCD[0][47] = -9.089000e-38; Coef_SKYtoCCD[0][48] = 8.653140e-41; Coef_SKYtoCCD[0][49] = 1.187970e-28; Coef_SKYtoCCD[0][50] = 4.166770e-36; Coef_SKYtoCCD[0][51] = 1.351910e-37; Coef_SKYtoCCD[0][52] = 6.981930e-37; Coef_SKYtoCCD[0][53] = 9.507550e-41; Coef_SKYtoCCD[0][54] = 4.649030e-38; Coef_SKYtoCCD[1][ 0]= -2.784370e-05; Coef_SKYtoCCD[1][ 1]= 1.000000e+00; Coef_SKYtoCCD[1][ 2]= 7.911240e-11; 
Coef_SKYtoCCD[1][ 3]= 1.053870e-10; Coef_SKYtoCCD[1][ 4]= -3.469650e-19; Coef_SKYtoCCD[1][ 5]= 4.228330e-20; Coef_SKYtoCCD[1][ 6]= 1.830780e-28;
Coef_SKYtoCCD[1][ 7]= 1.187590e-28; Coef_SKYtoCCD[1][ 8]= -1.447870e-36; Coef_SKYtoCCD[1][ 9]= 4.655800e-38; Coef_SKYtoCCD[1][10] = -8.397340e-10;
Coef_SKYtoCCD[1][11] = 4.668540e-15; Coef_SKYtoCCD[1][12] = 1.564390e-17; Coef_SKYtoCCD[1][13] = -3.760690e-23; Coef_SKYtoCCD[1][14] = -1.887270e-26;
Coef_SKYtoCCD[1][15] = 4.609750e-32; Coef_SKYtoCCD[1][16] = -9.484210e-35; Coef_SKYtoCCD[1][17] = 1.916600e-40; Coef_SKYtoCCD[1][18] = -3.365610e-44;
Coef_SKYtoCCD[1][19] = 1.490650e-10; Coef_SKYtoCCD[1][20] = 1.054560e-10; Coef_SKYtoCCD[1][21] = -4.532280e-19; Coef_SKYtoCCD[1][22] = 8.223480e-20;
Coef_SKYtoCCD[1][23] = 1.566840e-27; Coef_SKYtoCCD[1][24] = 3.766190e-28; Coef_SKYtoCCD[1][25] = -8.045720e-36; Coef_SKYtoCCD[1][26] = 1.353450e-37;
Coef_SKYtoCCD[1][27] = 5.060440e-17; Coef_SKYtoCCD[1][28] = -2.265930e-22; Coef_SKYtoCCD[1][29] = -8.743410e-25; Coef_SKYtoCCD[1][30] = 1.274370e-30;
Coef_SKYtoCCD[1][31] = 1.432920e-33; Coef_SKYtoCCD[1][32] = -2.642680e-39; Coef_SKYtoCCD[1][33] = 2.632260e-42; Coef_SKYtoCCD[1][34] = 1.260510e-20;
Coef_SKYtoCCD[1][35] = 4.113290e-20; Coef_SKYtoCCD[1][36] = 1.193110e-27; Coef_SKYtoCCD[1][37] = 3.777820e-28; Coef_SKYtoCCD[1][38] = -8.114130e-36;
Coef_SKYtoCCD[1][39] = 1.963820e-37; Coef_SKYtoCCD[1][40] = -7.685660e-25; Coef_SKYtoCCD[1][41] = 2.601890e-30; Coef_SKYtoCCD[1][42] = 1.048630e-32;
Coef_SKYtoCCD[1][43] = -5.041290e-39; Coef_SKYtoCCD[1][44] = -1.545060e-41; Coef_SKYtoCCD[1][45] = -1.007180e-28; Coef_SKYtoCCD[1][46] = 1.254120e-28;
Coef_SKYtoCCD[1][47] = -3.371640e-36; Coef_SKYtoCCD[1][48] = 1.315320e-37; Coef_SKYtoCCD[1][49] = 4.083310e-33; Coef_SKYtoCCD[1][50] = -8.714590e-39;
Coef_SKYtoCCD[1][51] = -3.104500e-41; Coef_SKYtoCCD[1][52] = 4.655860e-37; Coef_SKYtoCCD[1][53] = 3.408210e-38; Coef_SKYtoCCD[1][54] = -6.898480e-42;
  }else{
    cout << "EL must be 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80 or 85" << endl;
    ERROR=1;
  }
}
<file_sep>/example/example2.py
#!/usr/bin/env python

import hsc.meas.match.distest as distest

elevation = 30.0
x_dist = 8000.0   # pix from the fov center
y_dist = 15000.0  # pix from the fov center

x_undist, y_undist = distest.getUndistortedPosition(x_dist, y_dist, elevation)

print "distorted position: ( %lf , %lf )" % (x_dist, y_dist)
print "==>"
print "undistorted position: ( %lf , %lf )" % (x_undist, y_undist)
print ""
print " ---- "

x_dist, y_dist = distest.getDistortedPosition(x_undist, y_undist, elevation)
x_dist_iter, y_dist_iter = distest.getDistortedPositionIterative(x_undist, y_undist, elevation)

print ""
print "undistorted position: ( %lf , %lf )" % (x_undist, y_undist)
print "==>"
print "distorted position: ( %lf , %lf )" % (x_dist, y_dist)
print "distorted(iter) pos: ( %lf , %lf )" % (x_dist_iter, y_dist_iter)
<file_sep>/example/example.py
#!/usr/bin/env python

import hsc.meas.match.distest as distest

elevation = 30.0
x_undist = 10000.0  # pix from the fov center
y_undist = 10000.0  # pix from the fov center

x_dist, y_dist = distest.getDistortedPosition(x_undist, y_undist, elevation)

print "undistorted position: ( %lf , %lf )" % (x_undist, y_undist)
print "==>"
print "distorted position: ( %lf , %lf )" % (x_dist, y_dist)
print " ---- "

x_undist, y_undist = distest.getUndistortedPosition(x_dist, y_dist, elevation)

print "distorted position: ( %lf , %lf )" % (x_dist, y_dist)
print "==>"
print "undistorted position: ( %lf , %lf )" % (x_undist, y_undist)
<file_sep>/example/distEst2.py
#!/usr/bin/env python

import math
import hsc.meas.match.distest as distest

def main():
    CRPIX=distest.MAKE_Vdouble();
    CRVAL=distest.MAKE_Vdouble();
    XY=distest.MAKE_Vdouble();
    XY_GLOBL=distest.MAKE_VVdouble();
    XY_RADEC=distest.MAKE_VVdouble();

    CRPIX[0]= 0.0
    CRPIX[1]= 0.0
    CRVAL[0]= 0.0
    CRVAL[1]= 0.0

    for x in range(-20,21):
        for y in range(-20,21):
            if(math.hypot(1000*x,1000*y)<17501):
                XY=[1000*x,1000*y]
                XY_GLOBL.append(XY)

    for x in range(-20,21):
        for y in range(-20,21):
            if(math.hypot(0.05*x,0.05*y)<0.751):
                XY=[0.05*x,0.05*y]
                XY_RADEC.append(XY)

    DIST_RADEC=distest.CALC_RADEC(CRVAL,CRPIX,XY_GLOBL)
    DIST_GLOBL=distest.CALC_GLOBL(CRVAL,CRPIX,XY_GLOBL)
    DIST_RADEC_SIM=distest.CALC_RADEC_SIM(CRVAL,CRPIX,XY_GLOBL)
    DIST_GLOBL_SIM=distest.CALC_GLOBL_SIM(CRVAL,CRPIX,XY_GLOBL)

    for i in range(len(XY_GLOBL)):
        print XY_GLOBL[i][0],XY_GLOBL[i][1],DIST_RADEC[i][0],DIST_RADEC[i][1],DIST_RADEC_SIM[i][0],DIST_RADEC_SIM[i][1]

#    for i in range(len(XY_GLOBL)):
#        print XY_GLOBL[i][0],XY_GLOBL[i][1],DIST_GLOBL[i][0],DIST_GLOBL[i][1],DIST_GLOBL_SIM[i][0],DIST_GLOBL_SIM[i][1]

if __name__ == '__main__':
    main()
<file_sep>/include/hsc/meas/match/distest.h
#ifndef DISTEST_H
#define DISTEST_H

const int NSAMPLE_EL = 12; // original 12

//extern "C" {
namespace hsc {
  namespace meas {
    namespace match {
      void getDistortedPosition(float x_undist, float y_undist, float* x_dist, float* y_dist, float elevation);
      void getDistortedPositionIterative(float x_undist, float y_undist, float* x_dist, float* y_dist, float elevation);
      void getUndistortedPosition(float x_dist, float y_dist, float* x_undist, float* y_undist, float elevation);
      void getDistortedPosition_HSCSIM(float x_undist, float y_undist, float* x_dist, float* y_dist, float elevation);
      void getDistortedPositionIterative_HSCSIM(float x_undist, float y_undist, float* x_dist, float* y_dist, float elevation);
      void getUndistortedPosition_HSCSIM(float x_dist, float y_dist, float* x_undist, float* y_undist, float elevation);
    }
  }
}
//}

#endif
<file_sep>/tests/invariant.py
#!/usr/bin/env python
#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program.  If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
# import unittest import lsst.pex.exceptions import lsst.utils.tests as utilsTests import math import hsc.meas.match.distest as distest TOLERANCE = 0.01 # Tolerance for difference after reversing, pixels class InvarianceTestCase(unittest.TestCase): """A test for invariance in the forward+backward transformation""" def testInvariance(self): # These are the centers of the CCDs for x0, y0 in [(-9514.66, -95.9925), (7466.67, -4347.66), (-7391.99, -95.9925), (5344.01, -4347.66), (-5269.33, -95.9925), (3221.34, -4347.66), (-3146.66, -95.9925), (1098.67, -4347.66), (-1023.99, -95.9925), (-1023.99, -4347.66), (1098.67, -95.9925), (-3146.66, -4347.66), (3221.34, -95.9925), (-5269.33, -4347.66), (5344.01, -95.9925), (-7391.99, -4347.66), (7466.67, -95.9925), (-9514.66, -4347.66), (9589.34, -95.9925), (-11637.3, -4347.66), (11712.0, -95.9925), (-13760.0, -4347.66), (13834.7, -95.9925), (-15882.7, -4347.66), (-9514.66, 4372.34), (7466.67, -8815.99), (-7391.99, 4372.34), (5344.01, -8815.99), (-5269.33, 4372.34), (3221.34, -8815.99), (-3146.66, 4372.34), (1098.67, -8815.99), (-1023.99, 4372.34), (-1023.99, -8815.99), (1098.67, 4372.34), (-3146.66, -8815.99), (3221.34, 4372.34), (-5269.33, -8815.99), (5344.01, 4372.34), (-7391.99, -8815.99), (7466.67, 4372.34), (-9514.66, -8815.99), (9589.34, 4372.34), (-11637.3, -8815.99), (11712.0, 4372.34), (-13760.0, -8815.99), (13834.7, 4372.34), (-15882.7, -8815.99), (-9514.66, 8840.67), (7466.67, -13284.3), (-7391.99, 8840.67), (5344.01, -13284.3), (-5269.33, 8840.67), (3221.34, -13284.3), (-3146.66, 8840.67), (1098.67, -13284.3), (-1023.99, 8840.67), (-1023.99, -13284.3), (1098.67, 8840.67), (-3146.66, -13284.3), (3221.34, 8840.67), (-5269.33, -13284.3), (5344.01, 8840.67), (-7391.99, -13284.3), (7466.67, 8840.67), (-9514.66, -13284.3), (9589.34, 8840.67), (-11637.3, -13284.3), (11712.0, 8840.67), (-13760.0, -13284.3), (-7391.99, 13309.0), (5344.01, -17752.7), (-5269.33, 13309.0), (3221.34, -17752.7), (-3146.66, 13309.0), (1098.67, -17752.7), (-1023.99, 13309.0), (-1023.99, -17752.7), (1098.67, 13309.0), (-3146.66, -17752.7), (3221.34, 13309.0), (-5269.33, -17752.7), (5344.01, 13309.0), (-7391.99, -17752.7), (9589.34, -4564.33), (-11637.3, 120.674), (11712.0, -4564.33), (-13760.0, 120.674), (13834.7, -4564.33), (-15882.7, 120.674), (9589.34, -9032.66), (-11637.3, 4589.01), (11712.0, -9032.66), (-13760.0, 4589.01), (13834.7, -9032.66), (-15882.7, 4589.01), (9589.34, -13501.0), (-11637.3, 9057.34), (11712.0, -13501.0), (-13760.0, 9057.34), (-7467.7, 13309.0), (11642.67, 13309.0), (11642.67, -15575.7), (-7467.7, -15575.7)]: for elev in [30, 45, 60, 85]: xDist, yDist = distest.getDistortedPosition(x0, y0, elev) xDist_iter, yDist_iter = distest.getDistortedPositionIterative(x0, y0, elev) x1, y1 = distest.getUndistortedPosition(xDist, yDist, elev) x1_iter, y1_iter = distest.getUndistortedPosition(xDist_iter, yDist_iter, elev) diff = math.hypot(x1 - x0, y1 - y0) diff_iter = math.hypot(x1_iter - x0, y1_iter - y0) self.assertLess(diff_iter, TOLERANCE, "Not invariant at elev %f: %f,%f --> %f,%f --> %f,%f (%f)" % \ (elev, x0, y0, xDist_iter, yDist_iter, x1_iter, y1_iter, diff_iter)) # self.assertLess(diff, TOLERANCE, "Not invariant at elev %f: %f,%f --> %f,%f --> %f,%f (%f)" % \ # (elev, x0, y0, xDist, yDist, x1, y1, diff)) def testAltitudeRange(self): for elev in (30 - 1e-4, 85 + 1e-4): utilsTests.assertRaisesLsstCpp(self, lsst.pex.exceptions.InvalidParameterException, distest.getDistortedPositionIterative, 0, 0, elev) def testDistortHangs(self): """Test hanging forever on 
some positions"""
        elev = 45
        import numpy as np
        for x, y in [
            #(7887.9, -15559), # I thought this failed, but it's passing now
            (np.nan, np.nan),
            ]:
            utilsTests.assertRaisesLsstCpp(self, lsst.pex.exceptions.OutOfRangeException,
                                           distest.getDistortedPositionIterative, x, y, elev)

def suite():
    utilsTests.init()
    suites = []
    suites += unittest.makeSuite(InvarianceTestCase)
    suites += unittest.makeSuite(utilsTests.MemoryTestCase)
    return unittest.TestSuite(suites)

def run(shouldExit = False):
    utilsTests.run(suite(), shouldExit)

if __name__ == '__main__':
    run(True)
<file_sep>/SConstruct
# -*- python -*-

from lsst.sconsUtils import scripts

scripts.BasicSConstruct("distEst",
                        versionModuleName = "python/hsc/meas/match/version.py",
                        )
<file_sep>/src/distest.cc
#include<cstdio>
#include<cstdlib>
#include<cmath>

#include "hsc/meas/match/distest.h"
#include "hsc/meas/match/distest_utils2.h"

/*------------------------------------------------------------------------------*/
void hsc::meas::match::getDistortedPosition(float x_undist, float y_undist, float* x_dist, float* y_dist, float elevation)
/*------------------------------------------------------------------------------*/
{
  /* global variables for distortion coefficients by Okura-kun */
  double ***Coef;

  try {
    int ni = 4;
    int nj = NSAMPLE_EL+1;
    int nk = (XYOrder+1)*(XYOrder+1);
    Coef = new double**[ni];
    for(int i=0; i<ni; i++) {
      Coef[i] = new double*[nj];
      for(int j=0; j<nj; j++) {
        Coef[i][j]= new double[nk];
        for(int k=0; k<nk; k++) {
          Coef[i][j][k] = 0.0;
        }
      }
    }
  } catch (std::bad_alloc &) {
    std::cout << "memory allocation error - Coeff" << std::endl;
    return;
  }

  F_SETCOEF(Coef);

  // Getting transformed positions
  convUndist2DistPos(x_undist, y_undist, x_dist, y_dist, elevation, Coef);

  // Deleting Coef
  try {
    int ni = 4;
    int nj = NSAMPLE_EL+1;
    for(int i=0; i<ni; i++) {
      for(int j=0; j<nj; j++) {
        delete [] Coef[i][j];
      }
      delete [] Coef[i];
    }
    delete [] Coef;
  } catch( ... ) {
    std::cout << "something weird happened in delete Coeff" << std::endl;
    return;
  }

  return;
}

/*------------------------------------------------------------------------------*/
void hsc::meas::match::getDistortedPositionIterative(float x_undist, float y_undist, float* x_dist, float* y_dist, float elevation)
/*------------------------------------------------------------------------------*/
{
  /* global variables for distortion coefficients by Okura-kun */
  double ***Coef;

  try {
    int ni = 4;
    int nj = NSAMPLE_EL+1;
    int nk = (XYOrder+1)*(XYOrder+1);
    Coef = new double**[ni];
    for(int i=0; i<ni; i++) {
      Coef[i] = new double*[nj];
      for(int j=0; j<nj; j++) {
        Coef[i][j]= new double[nk];
        for(int k=0; k<nk; k++) {
          Coef[i][j][k] = 0.0;
        }
      }
    }
  } catch (std::bad_alloc &) {
    std::cout << "memory allocation error - Coeff" << std::endl;
    return;
  }

  F_SETCOEF(Coef);

  // Getting transformed positions
  convUndist2DistPosIterative(x_undist, y_undist, x_dist, y_dist, elevation, Coef);

  // Deleting Coef
  try {
    int ni = 4;
    int nj = NSAMPLE_EL+1;
    for(int i=0; i<ni; i++) {
      for(int j=0; j<nj; j++) {
        delete [] Coef[i][j];
      }
      delete [] Coef[i];
    }
    delete [] Coef;
  } catch( ... ) {
    std::cout << "something weird happened in delete Coeff" << std::endl;
    return;
  }

  return;
}

/*------------------------------------------------------------------------------*/
void hsc::meas::match::getUndistortedPosition(float x_dist, float y_dist, float* x_undist, float* y_undist, float elevation)
/*------------------------------------------------------------------------------*/
{
  /* global variables for distortion coefficients by Okura-kun */
  double ***Coef;

  try {
    int ni = 4;
    int nj = NSAMPLE_EL+1;
    int nk = (XYOrder+1)*(XYOrder+1);
    Coef = new double**[ni];
    for(int i=0; i<ni; i++) {
      Coef[i] = new double*[nj];
      for(int j=0; j<nj; j++) {
        Coef[i][j]= new double[nk];
        for(int k=0; k<nk; k++) {
          Coef[i][j][k] = 0.0;
        }
      }
    }
  } catch (std::bad_alloc &) {
    std::cout << "memory allocation error - Coeff" << std::endl;
    return;
  }

  F_SETCOEF(Coef);

  // Getting transformed positions
  convDist2UndistPos(x_dist, y_dist, x_undist, y_undist, elevation, Coef);

  // Deleting Coef
  try {
    int ni = 4;
    int nj = NSAMPLE_EL+1;
    for(int i=0; i<ni; i++) {
      for(int j=0; j<nj; j++) {
        delete [] Coef[i][j];
      }
      delete [] Coef[i];
    }
    delete [] Coef;
  } catch( ... ) {
    std::cout << "something weird happened in delete Coeff" << std::endl;
    return;
  }

  return;
}
<file_sep>/example/DISTforSIM.h
//--------------------------------------------------
//Calculating Distortion for HSC Simulation
//
//Last modification : 2012/02/25
//--------------------------------------------------

#include<iostream>
#include<cmath>

class CL_DISTforSIM{
  private:
  public:
    int EL;
    int ORDER;
    int DNUM;
    int ERROR;
    double **REAL_X_CCD; //[DNUM][X,Y,COUNT]
    double **REAL_X_SKY;
    double **PREDICT_X_CCD;
    double **PREDICT_X_SKY;
    double Coef_CCDtoSKY[2][55];
    double Coef_SKYtoCCD[2][55];

    void F_DSIM_CCDtoSKY();
    void F_DSIM_SKYtoCCD();
    void F_DSIM_NEWCCDtoSKY();
    void F_DSIM_NEWSKYtoCCD();
    void F_DSIM_DELCCDtoSKY();
    void F_DSIM_DELSKYtoCCD();
    void F_DSIM_GETCOEFCCDtoSKY();
    void F_DSIM_GETCOEFSKYtoCCD();
};
<file_sep>/src/LeastSquares.cc
//--------------------------------------------------
//Calculating Coefficients of Least Squares Fitting
//Output coefficients of F_LS2 are
//x^0y^0, x^0y^1, x^0y^2, x^0y^3, x^1y^0, x^1y^1, x^1y^2, x^2y^0, x^2y^1, x^3y^0 (in Order = 3)
//
//Last modification : 2010/11/08 by <NAME>
//--------------------------------------------------
#include<cmath>

void F_InvM(int MNUM,double **Min,double **Mout){
  int i,j,k;
  double Mdi,**Mtemp,**Mtemp2,**I,**Itemp;

  Mtemp  = new double*[MNUM];
  Mtemp2 = new double*[MNUM];
  I      = new double*[MNUM];
  Itemp  = new double*[MNUM];
  for(i=0;i<MNUM;i++){
    Mtemp[i]  = new double[MNUM];
    Mtemp2[i] = new double[MNUM];
    I[i]      = new double[MNUM];
    Itemp[i]  = new double[MNUM];
  }

  for(i=0;i<MNUM;i++)
  for(j=0;j<MNUM;j++){
    Mtemp[i][j]=0;
    Mtemp2[i][j]=0;
    I[i][j]=0;
    Itemp[i][j]=0;
  }

  for(i=0;i<MNUM;i++)
  for(j=0;j<MNUM;j++){
    Itemp[i][j]=0;
    if(i==j){
      I[i][j]=1;
    }else{
      I[i][j]=0;
    }
  }

  for(k=0;k<MNUM;k++){
    Mdi=Min[k][k];
    for(i=0;i<MNUM;i++){
      Min[i][k]=Min[i][k]/Mdi;
      I[i][k]  =  I[i][k]/Mdi;
    }
    for(i=0;i<MNUM;i++)
    for(j=0;j<MNUM;j++)
      Mtemp[i][j]=Min[i][j];
    for(j=0;j<MNUM;j++)
      if(j==k){
      }else{
        for(i=0;i<MNUM;i++){
          Min[i][j]-=Mtemp[k][j]*Min[i][k];
          I[i][j]  -=Mtemp[k][j]*  I[i][k];
        }
      }
  }

  for(i=0;i<MNUM;i++)
  for(j=0;j<MNUM;j++)
    Mout[i][j]=I[i][j];

  for(i=0;i<MNUM;i++){
    delete [] Mtemp[i];
    delete [] Mtemp2[i];
    delete [] I[i];
    delete [] Itemp[i];
  }
  delete [] Mtemp;
  delete [] Mtemp2;
  delete [] I;
  delete [] Itemp;
}

void F_LS1(int dataNUM,int Order,double **data,double *Coef){
  int i,j,NUM;
  double **XA,**XB,*Z;

  for(i=0;i<Order+1;i++) Coef[i]=0;

  XA = new double*[Order+1];
  XB = new double*[Order+1];
  Z  = new double[Order+1];
  for(i=0;i<Order+1;i++){
    XA[i] = new double[Order+1];
    XB[i] = new double[Order+1];
    for(j=0;j<Order+1;j++){
      XA[i][j]=XB[i][j]=Z[j]=0;
    }
  }

  for(NUM=0;NUM<dataNUM;NUM++)
  for(i=0;i<Order+1;i++){
    for(j=0;j<Order+1;j++)
      XA[i][j]+=pow(data[NUM][0],i)*pow(data[NUM][0],j);
    Z[i]+=data[NUM][1]*pow(data[NUM][0],i);
  }

  F_InvM(Order+1,XA,XB);

  for(i=0;i<Order+1;i++)
  for(j=0;j<Order+1;j++)
    Coef[i]+=XB[i][j]*Z[j];

  for(i=0;i<Order+1;i++){
    delete [] XA[i];
    delete [] XB[i];
  }
  delete [] XA;
  delete [] XB;
  delete [] Z;
}

void F_LS2(int dataNUM,int Order,double **data,double *Coef){
  int i,j,k,l,ij,kl,NUM;
  double **XA,**XB,*Z;

  XA = new double*[(Order+1)*(Order+1)];
  XB = new double*[(Order+1)*(Order+1)];
  Z  = new double[(Order+1)*(Order+1)];
  for(i=0;i<(Order+1)*(Order+1);i++){
    XA[i] = new double[(Order+1)*(Order+1)];
    XB[i] = new double[(Order+1)*(Order+1)];
  }
  for(i=0;i<(Order+1)*(Order+1);i++){
    for(j=0;j<(Order+1)*(Order+1);j++) XA[i][j]=XB[i][j]=0;
    Z[i]=0;
  }

  ij=0;
  for(i=0;i<Order+1;i++)
  for(j=0;j<Order+1;j++)
  if(i+j<Order+1){
    Coef[ij]=0;
    ij++;
  }

  for(NUM=0;NUM<dataNUM;NUM++){
    ij=0;
    for(i=0;i<Order+1;i++)
    for(j=0;j<Order+1;j++)
    if(i+j<Order+1){
      kl=0;
      for(k=0;k<Order+1;k++)
      for(l=0;l<Order+1;l++)
      if(k+l<Order+1){
        XA[ij][kl]+=pow(data[NUM][0],i+k)*pow(data[NUM][1],j+l);
        kl+=1;
      }
      Z[ij]+=data[NUM][2]*pow(data[NUM][0],i)*pow(data[NUM][1],j);
      ij+=1;
    }
  }

  F_InvM((int)((Order+1)*(Order+2)*0.5+0.1),XA,XB);

  ij=0;
  for(i=0;i<Order+1;i++)
  for(j=0;j<Order+1;j++)
  if(i+j<Order+1){
    kl=0;
    for(k=0;k<Order+1;k++)
    for(l=0;l<Order+1;l++)
    if(k+l<Order+1){
      Coef[ij]+=XB[ij][kl]*Z[kl];
      kl+=1;
    }
    ij+=1;
  }

  for(i=0;i<(Order+1)*(Order+1);i++){
    delete [] XA[i];
    delete [] XB[i];
  }
  delete [] XA;
  delete [] XB;
  delete [] Z;
}

void F_LS3(int dataNUM,int Order,double **data,double *Coef){
  int i,j,k,l,m,n,ijk,lmn,NUM;
  double **XA,**XB,*Z;

  XA = new double*[(Order+1)*(Order+1)*(Order+1)];
  XB = new double*[(Order+1)*(Order+1)*(Order+1)];
  Z  = new double[(Order+1)*(Order+1)*(Order+1)];
  for(i=0;i<(Order+1)*(Order+1)*(Order+1);i++){
    XA[i] = new double[(Order+1)*(Order+1)*(Order+1)];
    XB[i] = new double[(Order+1)*(Order+1)*(Order+1)];
  }
  for(i=0;i<(Order+1)*(Order+1)*(Order+1);i++){
    for(j=0;j<(Order+1)*(Order+1)*(Order+1);j++) XA[i][j]=XB[i][j]=0;
    Z[i]=0;
  }

  ijk=0;
  for(i=0;i<Order+1;i++)
  for(j=0;j<Order+1;j++)
  for(k=0;k<Order+1;k++)
  if(i+j+k<Order+1){
    Coef[ijk]=0;
    ijk++;
  }

  for(NUM=0;NUM<dataNUM;NUM++){
    ijk=0;
    for(i=0;i<Order+1;i++)
    for(j=0;j<Order+1;j++)
    for(k=0;k<Order+1;k++)
    if(i+j+k<Order+1){
      lmn=0;
      for(l=0;l<Order+1;l++)
      for(m=0;m<Order+1;m++)
      for(n=0;n<Order+1;n++)
      if(l+m+n<Order+1){
        XA[ijk][lmn]+=pow(data[NUM][0],i+l)*pow(data[NUM][1],j+m)*pow(data[NUM][2],k+n);
        lmn+=1;
      }
      Z[ijk]+=data[NUM][3]*pow(data[NUM][0],i)*pow(data[NUM][1],j)*pow(data[NUM][2],k);
      ijk+=1;
    }
  }

  F_InvM((int)((Order+1)*(Order+2)*(Order+3)/2.0/3.0+0.1),XA,XB);

  ijk=0;
  for(i=0;i<Order+1;i++)
  for(j=0;j<Order+1;j++)
  for(k=0;k<Order+1;k++)
  if(i+j+k<Order+1){
    lmn=0;
    for(l=0;l<Order+1;l++)
    for(m=0;m<Order+1;m++)
    for(n=0;n<Order+1;n++)
    if(l+m+n<Order+1){
      Coef[ijk]+=XB[ijk][lmn]*Z[lmn];
      lmn+=1;
    }
    ijk+=1;
  }

  for(i=0;i<(Order+1)*(Order+1)*(Order+1);i++){
    delete [] XA[i];
    delete [] XB[i];
  }
  delete [] XA;
  delete [] XB;
  delete [] Z;
}

/*
void F_LS2NN(int dataNUM,int Order,double data[][3],double Coef[]){
  int i,j,k,l,NUM;
  double *XA,*XB,*Z;

  XA=(double *)calloc((Order+1)*(Order+1)*(Order+1)*(Order+1),sizeof(double));
  XB=(double *)calloc((Order+1)*(Order+1)*(Order+1)*(Order+1),sizeof(double));
  Z=(double *)calloc( (Order+1)*(Order+1),sizeof(double));

  for(i=0;i<Order+1;i++)
  for(j=0;j<Order+1;j++)
    Coef[i+j*(Order+1)]=0;

  for(NUM=0;NUM<dataNUM;NUM++)
  for(i=0;i<Order+1;i++)
  for(j=0;j<Order+1;j++){
    for(k=0;k<Order+1;k++)
    for(l=0;l<Order+1;l++)
      XA[i+j*(Order+1)+(k+l*(Order+1))*(Order+1)*(Order+1)]+=pow(data[NUM][0],i+k)*pow(data[NUM][1],j+l);
    Z[i+j*(Order+1)]+=data[NUM][2]*pow(data[NUM][0],i)*pow(data[NUM][1],j);
  }

  F_InvM((Order+1)*(Order+1),XA,XB);

  for(i=0;i<Order+1;i++)
  for(j=0;j<Order+1;j++)
  for(k=0;k<Order+1;k++)
  for(l=0;l<Order+1;l++)
    Coef[i+j*(Order+1)]+=XB[i+j*(Order+1)+(k+l*(Order+1))*(Order+1)*(Order+1)]*Z[k+l*(Order+1)];

  free(XA);
  free(XB);
  free( Z);
}
*/
<file_sep>/include/hsc/meas/match/distest_utils2.h
#ifndef DISTEST_UTILS2_H
#define DISTEST_UTILS2_H

#include<iostream>
#include<cstdlib>
#include<fstream>
#include<cmath>

const int XYOrder = 9;

/* distortion coefficients by Okura-kun, to be accessible in any functions */
//extern double ****Coef;

//int initDistEst(double ****Coef);
//int deleteDistEst(double ****Coef);

void convUndist2DistPos(float x_undist, float y_undist, float *x_dist, float *y_dist, float elevation, double ***Coef);
void convUndist2DistPosIterative(float x_undist, float y_undist, float *x_dist, float *y_dist, float elevation, double ***Coef);
void convDist2UndistPos(float x_dist, float y_dist, float *x_undist, float *y_undist, float elevation, double ***Coef);

void F_SETCOEF(double ***Coef);
void F_SETCOEF_HSCSIM(double ***Coef);

//double F_CCDtoSKY_X(int, double, double ****, double, double);
//double F_CCDtoSKY_Y(int, double, double ****, double, double);
//double F_SKYtoCCD_X(int, double, double ****, double, double);
//double F_SKYtoCCD_Y(int, double, double ****, double, double);
//double F_CS(int CS,int ELOrder, double EL,double ****Coef, double X, double Y);

void F_CS_CCD2SKY_XY(int ELOrder, double EL, double ***Coef, double X, double Y, double *X_out, double *Y_out);
void F_CS_SKY2CCD_XY(int ELOrder, double EL, double ***Coef, double X, double Y, double *X_out, double *Y_out);

///void F_LS1(int ,int ,double **,double *);

#endif
<file_sep>/tests/pipette.py
#!/usr/bin/env python
#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program.  If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
import eups
import unittest
import lsst.utils.tests as utilsTests
import math

import lsst.afw.cameraGeom as cameraGeom
import lsst.afw.geom as afwGeom
import lsst.obs.hscSim as hscSim
import lsst.pipette.config as pipConfig
import lsst.pipette.distortion as pipDist
import hsc.meas.match.distest as distest
import hsc.meas.match.hscDistortion as hscDist

TOLERANCE = 0.1  # Tolerance for difference after reversing, pixels

class PipetteTestCase(unittest.TestCase):
    """A test for Pipette's distortion in the forward+backward transformation"""

    def setUp(self):
        Eups = eups.Eups()
        assert Eups.isSetup("pipette"), "pipette is not setup"
        assert Eups.isSetup("obs_subaru"), "obs_subaru is not setup"
        mapper = hscSim.HscSimMapper()
        self.camera = mapper.camera
        self.config = pipConfig.Config()
        self.config['class'] = "hsc.meas.match.hscDistortion.HscDistortion"

    def tearDown(self):
        del self.camera
        del self.config

    def testInvariance(self):
        for raft in self.camera:
            raft = cameraGeom.cast_Raft(raft)
            for ccd in raft:
                ccd = cameraGeom.cast_Ccd(ccd)
                dist = pipDist.createDistortion(ccd, self.config)
                self.assertTrue(isinstance(dist, hscDist.HscDistortion))
                size = ccd.getSize()
                height, width = size.getX(), size.getY()
                for x, y in ((0.0,0.0), (0.0, height), (width, 0.0), (width, height), (width/2.0,height/2.0)):
                    forward = dist.actualToIdeal(afwGeom.PointD(x, y))
                    backward = dist.idealToActual(forward)
                    diff = math.hypot(backward.getX() - x, backward.getY() - y)
                    self.assertLess(diff, TOLERANCE, "Not invariant: %s %f,%f --> %s --> %s (%f > %f)" % \
                                    (ccd.getId().getSerial(), x, y, forward, backward, diff, TOLERANCE))

def suite():
    utilsTests.init()
    suites = []
    suites += unittest.makeSuite(PipetteTestCase)
    suites += unittest.makeSuite(utilsTests.MemoryTestCase)
    return unittest.TestSuite(suites)

def run(shouldExit = False):
    utilsTests.run(suite(), shouldExit)

if __name__ == '__main__':
    run(True)
<file_sep>/include/hsc/meas/match/distest2.h
//--------------------------------------------------
//distest2.h
//
//Last Update 2013/03/15
//--------------------------------------------------

#ifndef DISTEST2_H
#define DISTEST2_H

#include<vector>
#include<iostream>

std::vector< double > MAKE_Vdouble();
std::vector< std::vector< double > > MAKE_VVdouble();
std::vector< std::vector< double > > CALC_RADEC(std::vector< double > CRVAL,std::vector< double > CRPIX,std::vector< std::vector< double > > POSITION);
std::vector< std::vector< double > > CALC_GLOBL(std::vector< double > CRVAL,std::vector< double > CRPIX,std::vector< std::vector< double > > POSITION);
std::vector< std::vector< double > > CALC_RADEC_SIM(std::vector< double > CRVAL,std::vector< double > CRPIX,std::vector< std::vector< double > > POSITION);
std::vector< std::vector< double > > CALC_GLOBL_SIM(std::vector< double > CRVAL,std::vector< double > CRPIX,std::vector< std::vector< double > > POSITION);

#endif
<file_sep>/example/display.py
#!/usr/bin/env python
#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program.  If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
import math
import numpy
import matplotlib.pyplot as plt

import lsst.afw.cameraGeom as cameraGeom
import lsst.afw.geom as afwGeom
import lsst.obs.hscSim as hscSim
import lsst.pipette.config as pipConfig
import lsst.pipette.distortion as pipDist

SAMPLE = 100

def main(camera, distortionConfig):
    fig = plt.figure(1)
    fig.clf()
    ax = fig.add_axes((0.1, 0.1, 0.8, 0.8))
#    ax.set_autoscale_on(False)
#    ax.set_ybound(lower=-0.2, upper=0.2)
#    ax.set_xbound(lower=-17, upper=-7)
    ax.set_title('Distorted CCDs')

    for raft in camera:
        raft = cameraGeom.cast_Raft(raft)
        for ccd in raft:
            ccd = cameraGeom.cast_Ccd(ccd)
            size = ccd.getSize()
            width, height = 2048, 4096
            dist = pipDist.createDistortion(ccd, distortionConfig)
            corners = ((0.0,0.0), (0.0, height), (width, height), (width, 0.0), (0.0, 0.0))
            for (x0, y0), (x1, y1) in zip(corners[0:4],corners[1:5]):
                if x0 == x1 and y0 != y1:
                    yList = numpy.linspace(y0, y1, num=SAMPLE)
                    xList = [x0] * len(yList)
                elif y0 == y1 and x0 != x1:
                    xList = numpy.linspace(x0, x1, num=SAMPLE)
                    yList = [y0] * len(xList)
                else:
                    raise RuntimeError("Should never get here")

                xDistort = []; yDistort = []
                for x, y in zip(xList, yList):
                    distorted = dist.actualToIdeal(afwGeom.Point2D(x, y))
                    xDistort.append(distorted.getX())
                    yDistort.append(distorted.getY())

                ax.plot(xDistort, yDistort, 'k-')

    plt.show()

if __name__ == '__main__':
    camera = hscSim.HscSimMapper().camera
    config = pipConfig.Config()
    config['class'] = "hsc.meas.match.hscDistortion.HscDistortion"
    main(camera, config)
<file_sep>/include/hsc/meas/match/LeastSquares.h
#ifndef LEAST_SQUARE_H
#define LEAST_SQUARE_H

//#if __cplusplus
//extern "C" {
//#endif

void F_InvM(int MNUM,double **Min,double **Mout);
void F_LS1(int dataNUM,int Order,double **data,double *Coef);
void F_LS2(int dataNUM,int Order,double **data,double *Coef);
void F_LS3(int dataNUM,int Order,double **data,double *Coef);

//#if __cplusplus
//}
//#endif

#endif
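<file_sep>/example/leastSquaresExample.cc
// NOTE: illustrative sketch only -- this file is not part of the original
// repository. It shows one way F_LS2 (src/LeastSquares.cc) could be driven,
// assuming the coefficient ordering documented there: for Order = 1 the
// output is c0 (x^0y^0), c1 (x^0y^1), c2 (x^1y^0). The sample data is made up.
#include <cstdio>
#include "hsc/meas/match/LeastSquares.h"

int main() {
    const int order = 1;  // fit z = c0 + c1*y + c2*x
    const int n = 4;      // number of (x, y, z) samples
    double samples[4][3] = {{0,0,1}, {1,0,2}, {0,1,3}, {1,1,4}};  // z = 1 + x + 2*y
    double *data[4] = {samples[0], samples[1], samples[2], samples[3]};
    double coef[3] = {0, 0, 0};  // (Order+1)(Order+2)/2 coefficients
    F_LS2(n, order, data, coef);
    std::printf("c0=%g c1=%g c2=%g\n", coef[0], coef[1], coef[2]);  // expect 1, 2, 1
    return 0;
}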
c4fe6a42f979af4740ac8cda953c19a5876f20b1
[ "C", "Python", "C++" ]
16
Python
HyperSuprime-Cam/distEst
eea7699e91c4db0e549384cfedcb3e7fc9fbd490
3baa3c37cd2a623ea8a56a2d7b4f7eed2a1202f1
refs/heads/master
<repo_name>BrahimOubbad/NestedGaph<file_sep>/app/src/main/java/com/bo/nestedgraph/ui/fragment/MainFragment.kt
package com.bo.nestedgraph.ui.fragment

import android.animation.ObjectAnimator
import android.animation.ValueAnimator
import android.graphics.PointF
import android.os.Bundle
import androidx.fragment.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.view.animation.OvershootInterpolator
import android.widget.Toast
import androidx.core.view.isVisible
import androidx.navigation.NavController
import androidx.navigation.fragment.NavHostFragment
import androidx.navigation.ui.AppBarConfiguration
import androidx.navigation.ui.setupWithNavController
import com.bo.nestedgraph.R
import com.bo.nestedgraph.databinding.FragmentMainBinding
import com.bo.nestedgraph.registerDraggableTouchListener

class MainFragment : Fragment() {

    private var _binding: FragmentMainBinding? = null
    private val binding get() = _binding!!
    private lateinit var navController: NavController

    override fun onCreateView(
        inflater: LayoutInflater,
        container: ViewGroup?,
        savedInstanceState: Bundle?
    ): View {
        _binding = FragmentMainBinding.inflate(inflater, container, false)
        return binding.root
    }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        val navHostFragment =
            childFragmentManager.findFragmentById(R.id.nav_host_main_fragment) as NavHostFragment
        navController = navHostFragment.navController
        binding.bottomMainNav.setupWithNavController(navController)

        binding.ivToolBarBack.setOnClickListener {
            requireActivity().onBackPressed()
        }

        navController.addOnDestinationChangedListener { controller, destination, arguments ->
            binding.tvBarTitle.text = destination.label
        }

        binding.ivBigRobot.registerDraggableTouchListener(
            initialPosition = { PointF(binding.ivBigRobot.x, binding.ivBigRobot.y) },
            positionListener = { x, y -> setPosition(x, y) },
            onUp = { moveToEdges() },
            showCloseButton = { binding.vClose.isVisible = it }
        )

        binding.vClose.setOnClickListener {
            Toast.makeText(requireContext(), "Close", Toast.LENGTH_SHORT).show()
//            robotHomeSpace(binding.ivBigRobot, binding.ivBarSmallRobot)
            binding.ivBigRobot.animate()
                .x(binding.ivBarSmallRobot.x)
                .y(binding.ivBarSmallRobot.y)
                .setDuration(1000)
                .scaleX(0f)
                .scaleY(0f)
                .alpha(0f)
                .withEndAction {
                    // make sure the view really ends at the target position;
                    // strictly speaking these two lines are not needed
                    binding.ivBigRobot.x = binding.ivBarSmallRobot.x
                    binding.ivBigRobot.y = binding.ivBarSmallRobot.y
                }
                .start()
        }
    }

    private fun robotHomeSpace(start: View, end: View) {
        ValueAnimator.ofFloat(start.x, end.x).apply {
            duration = 500
            addUpdateListener {
                start.x = it.animatedValue as Float
            }
            start()
        }
        ValueAnimator.ofFloat(start.y, end.y).apply {
            duration = 500
            addUpdateListener {
                start.y = it.animatedValue as Float
            }
            start()
        }
        ValueAnimator.ofFloat(1f, 0f).apply {
            duration = 500
            startDelay = 250
            addUpdateListener {
                start.scaleX = it.animatedValue as Float
            }
            start()
        }
        ValueAnimator.ofFloat(1f, 0f).apply {
            duration = 500
            startDelay = 250
            addUpdateListener {
                start.scaleY = it.animatedValue as Float
            }
            start()
        }
    }

    private fun setPosition(x: Float, y: Float) {
        val bottom =
            (binding.navHostMainFragment.y + binding.navHostMainFragment.height - binding.ivBigRobot.height)

        if (y >= bottom) {
            binding.ivBigRobot.y = bottom
        } else {
            binding.ivBigRobot.y = y
        }
        binding.ivBigRobot.x = x
    }

    private fun moveToEdges() {
        val top = binding.navHostMainFragment.y
        val bottom =
            (binding.navHostMainFragment.y + binding.navHostMainFragment.height - binding.ivBigRobot.height)

        if (binding.ivBigRobot.x < binding.navHostMainFragment.width / 2) {
            moveWithAnimationX(binding.ivBigRobot.x, 0f)
        } else {
            moveWithAnimationX(
                binding.ivBigRobot.x,
                (binding.navHostMainFragment.width - binding.ivBigRobot.width).toFloat()
            )
        }

        if (binding.ivBigRobot.y < top) {
            moveWithAnimationY(binding.ivBigRobot.y, top)
        } else if (binding.ivBigRobot.y > bottom) {
            moveWithAnimationY(binding.ivBigRobot.y, bottom)
        }
    }

    private fun moveWithAnimationX(start: Float, end: Float) {
        ObjectAnimator.ofFloat(start, end).apply {
            interpolator = OvershootInterpolator()
            duration = 250
            addUpdateListener {
                binding.ivBigRobot.x = it.animatedValue as Float
            }
        }.start()
    }

    private fun moveWithAnimationY(start: Float, end: Float) {
        ObjectAnimator.ofFloat(start, end).apply {
            interpolator = OvershootInterpolator()
            duration = 250
            addUpdateListener {
                binding.ivBigRobot.y = it.animatedValue as Float
            }
        }.start()
    }

    override fun onDestroyView() {
        super.onDestroyView()
        _binding = null
    }
}
<file_sep>/app/src/main/java/com/bo/nestedgraph/registerDraggableTouchListener.kt
package com.bo.nestedgraph

import android.content.Context
import android.graphics.PointF
import android.util.Log
import android.view.GestureDetector
import android.view.MotionEvent
import android.view.View
import androidx.core.view.GestureDetectorCompat

fun View.registerDraggableTouchListener(
    initialPosition: () -> PointF,
    positionListener: (x: Float, y: Float) -> Unit,
    onUp: () -> Unit,
    showCloseButton: (isShow: Boolean) -> Unit
) {
    WindowHeaderTouchListener(
        context,
        this,
        initialPosition,
        positionListener,
        onUp,
        showCloseButton
    )
}

class WindowHeaderTouchListener(
    val context: Context,
    view: View,
    private val initialPosition: () -> PointF,
    private val positionListener: (x: Float, y: Float) -> Unit,
    private val onUp: () -> Unit,
    private val showCloseButton: (isShow: Boolean) -> Unit,
) : View.OnTouchListener, GestureDetector.OnGestureListener {

    private var mDetector: GestureDetectorCompat
    private var isShow = false

    private var pointerStartX = 0
    private var pointerStartY = 0
    private var initialX = 0f
    private var initialY = 0f

    init {
        view.setOnTouchListener(this)
        mDetector = GestureDetectorCompat(context, this)
    }

    override fun onTouch(view: View, motionEvent: MotionEvent): Boolean {
        when (motionEvent.action) {
            MotionEvent.ACTION_DOWN -> {
                pointerStartX = motionEvent.rawX.toInt()
                pointerStartY = motionEvent.rawY.toInt()
                with(initialPosition()) {
                    initialX = x
                    initialY = y
                }
            }
            MotionEvent.ACTION_MOVE -> {
                val deltaX = motionEvent.rawX - pointerStartX
                val deltaY = motionEvent.rawY - pointerStartY
                positionListener(initialX + deltaX.toInt(), initialY + deltaY.toInt())
            }
            MotionEvent.ACTION_UP -> {
                onUp()
                view.performClick()
            }
        }
        return mDetector.onTouchEvent(motionEvent)
    }

    override fun onDown(event: MotionEvent): Boolean {
        Log.d("DEBUG_TAG", "onDown")
        return true
    }

    override fun onFling(
        event1: MotionEvent,
        event2: MotionEvent,
        velocityX: Float,
        velocityY: Float
    ): Boolean {
        Log.d("DEBUG_TAG", "onFling")
        return true
    }

    override fun onLongPress(event: MotionEvent) {
        onLongClick()
        Log.d("DEBUG_TAG", "onLongPress")
    }

    override fun onScroll(
        event1: MotionEvent,
        event2: MotionEvent,
        distanceX: Float,
        distanceY: Float
    ): Boolean {
        Log.d("DEBUG_TAG", "onScroll")
        isShow = false
        showCloseButton(isShow)
        return true
    }

    override fun onShowPress(event: MotionEvent) {
        Log.d("DEBUG_TAG", "onShowPress")
    }

    override fun onSingleTapUp(event: MotionEvent): Boolean {
        Log.d("DEBUG_TAG", "onSingleTapUp")
        if (isShow) {
            onLongClick()
        }
        return true
    }

    private fun onLongClick() {
        isShow = !isShow
        showCloseButton(isShow)
    }
}
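<file_sep>/app/src/main/java/com/bo/nestedgraph/DragExample.kt
// NOTE: illustrative sketch only -- this file is not part of the original
// repository. It shows the minimal wiring registerDraggableTouchListener()
// (defined above) expects; `someView` and this file name are hypothetical.
package com.bo.nestedgraph

import android.graphics.PointF
import android.view.View

fun attachDragging(someView: View) {
    someView.registerDraggableTouchListener(
        // where the drag starts from, queried on ACTION_DOWN
        initialPosition = { PointF(someView.x, someView.y) },
        // called repeatedly while the finger moves
        positionListener = { x, y ->
            someView.x = x
            someView.y = y
        },
        // called when the finger lifts, e.g. to snap to an edge
        onUp = { },
        // toggled by long-press / single-tap to show an overlay button
        showCloseButton = { isShow -> someView.isSelected = isShow }
    )
}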
25980bd27e56068477b11a9301d821b07a0d8fc5
[ "Kotlin" ]
2
Kotlin
BrahimOubbad/NestedGaph
f8bf7063363ad56aa43246392736f68f584dd4ce
caee344fd53b48c08820f42900d33a2f00472864
refs/heads/master
<repo_name>weihancheng/shop<file_sep>/app/Http/Controllers/Index/EnterController.php
<?php

namespace App\Http\Controllers\Index;

use App\Http\Controllers\Controller;
use App\Http\Requests\EnterRequest;

class EnterController extends Controller
{
    // Render the login page
    public function login_form()
    {
        return view('index.login');
    }

    // Login logic
    public function login(EnterRequest $request)
    {
    }
}
<file_sep>/app/Http/Middleware/PermissionMiddleware.php
<?php

namespace App\Http\Middleware;

use Closure;
use Illuminate\Support\Facades\Route;
use Illuminate\Support\Facades\DB;
use Spatie\Permission\Exceptions\PermissionDoesNotExist;

class PermissionMiddleware
{
    // use PermissionSevice;

    /**
     * Handle an incoming request.
     *
     * @param  \Illuminate\Http\Request  $request
     * @param  \Closure  $next
     * @return mixed
     */
    public function handle($request, Closure $next, string $guard = 'admin', $resource = null)
    {
        // The webmaster is exempt from permission checks
        if (!$this->isWebMaster()) {
            $permission = $this->getPermission($resource);
            $hasPermission = $this->hasPermission($permission, $guard);

            // If no permission rule is defined for this action, let the request through
            if (!$hasPermission) {
                return $next($request);
            }

            $auth = auth($guard)->user()->hasAnyPermission($permission);
            if (!$auth) {
                throw new PermissionDoesNotExist('You do not have permission to access this resource');
            }
        }

        return $next($request);
    }

    /**
     * Check whether the current user is the webmaster
     * @param string $guard
     * @return bool
     */
    private function isWebMaster($guard = 'admin'): bool
    {
        $relation = auth()->guard($guard)->user()->roles();
        $has = $relation->where('roles.name', 'webmaster')->first();
        return boolval($has);
    }

    public function hasPermission(string $permission, string $guard): bool
    {
        $where = [
            ['name', '=', $permission],
            ['guard_name', '=', $guard],
        ];
        $has = DB::table('permissions')->where($where)->first();
        return boolval($has);
    }

    public function getPermission($resource): string
    {
        $route = Route::getCurrentRoute();
        if ($resource) {
            // Map the store/update actions onto create/edit so they share permissions
            return str_replace(['@store', '@update'], ['@create', '@edit'], $route->action['controller']);
        }
        return $route->action['controller'];
    }
}
<file_sep>/config/weiy.php
<?php
/**
 * Created by PhpStorm.
 * User: <NAME>
 * Date: 2020/1/7
 * Time: 21:28
 */

return [
    'webmaster' => 'weiy'
];
<file_sep>/routes/index/web.php
<?php

/**
 * Front-end (index) route file
 */

Route::group(['prefix' => 'index', 'namespace' => 'Index'], function() {
    // Login view
    Route::get('/login', 'EnterController@login_form')->name('index.login_form');
    // Login logic
    Route::post('/login', 'EnterController@login')->name('index.login');
});
<file_sep>/routes/admin/web.php
<?php

/**
 * Admin route file
 */

// Accessible without any authentication or permission checks
Route::group(['prefix' => 'admin', 'namespace' => 'Admin'], function() {
    // Login view
    Route::get('/login', 'EnterController@login_form')->name('admin.login_form');
    // Login logic
    Route::post('/login', 'EnterController@login')->name('admin.login');
});

// Requires login and a matching access permission
Route::group(['middleware' => ['web', 'auth:admin', 'permission:admin'], 'prefix' => 'admin', 'namespace' => 'Admin'], function() {
    // Admin dashboard
    Route::get('/index/index', 'IndexController@index')->name('admin.index.index');
    // Role resource routes
    Route::name('admin')->resource('role', 'RoleController');
    // A role's permissions
    Route::get('/role/permission/{role}', 'RoleController@permission')->name('admin.role.permission.index');
    // Assign permissions to a role
    Route::post('/role/permission/{role}', 'RoleController@permissionStore')->name('admin.role.permission.store');
    // Permission resource routes
    Route::name('admin')->resource('permission', 'PermissionController');
});
<file_sep>/database/seeds/RoleTableSeeder.php
<?php

use Illuminate\Database\Seeder;
use Spatie\Permission\Models\Role;

class RoleTableSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        // Fetch all permissions
        $permissions = \Spatie\Permission\Models\Permission::all()->pluck('name');

        // Permissions withheld by default (only the webmaster may hold these)
        $guards = [
            'App\Http\Controllers\Admin\RoleController@edit',
            'App\Http\Controllers\Admin\RoleController@destroy',
            'App\Http\Controllers\Admin\RoleController@create',
            'App\Http\Controllers\Admin\RoleController@permission',
            'App\Http\Controllers\Admin\PermissionController@edit',
            'App\Http\Controllers\Admin\PermissionController@destroy',
            'App\Http\Controllers\Admin\PermissionController@create'
        ];

        foreach ($permissions as $key => $permission) {
            if (in_array($permission, $guards)) {
                unset($permissions[$key]);
            }
        }

        // Regular administrator
        $admin = Role::create([
            'name' => 'admin',
            'title' => 'Administrator',
            'guard_name' => 'admin'
        ]);
        $admin->syncPermissions($permissions);

        // Super administrator
        $superadmin = Role::create([
            'name' => 'superadmin',
            'title' => 'Super administrator',
            'guard_name' => 'admin'
        ]);
        $superadmin->syncPermissions($permissions);

        Role::create([
            'name' => 'user',
            'title' => 'Regular user',
            'guard_name' => 'admin'
        ]);

        Role::create([
            'name' => 'tourist',
            'title' => 'Guest',
            'guard_name' => 'admin'
        ]);
    }
}
<file_sep>/config/rule.php
<?php

return [
    [
        'group' => 'Role management',
        'permissions' => [
            ['title' => 'Role list', 'name' => 'App\Http\Controllers\Admin\RoleController@index', 'guard' => 'admin'],
            ['title' => 'Edit role', 'name' => 'App\Http\Controllers\Admin\RoleController@edit', 'guard' => 'admin'],
            ['title' => 'Delete role', 'name' => 'App\Http\Controllers\Admin\RoleController@destroy', 'guard' => 'admin'],
            ['title' => 'Add role', 'name' => 'App\Http\Controllers\Admin\RoleController@create', 'guard' => 'admin'],
            ['title' => "Edit a role's permissions", 'name' => 'App\Http\Controllers\Admin\RoleController@permission', 'guard' => 'admin'],
        ]
    ],
    [
        'group' => 'Permission management',
        'permissions' => [
            ['title' => 'Permission list', 'name' => 'App\Http\Controllers\Admin\PermissionController@index', 'guard' => 'admin'],
            ['title' => 'Edit permission', 'name' => 'App\Http\Controllers\Admin\PermissionController@edit', 'guard' => 'admin'],
            ['title' => 'Delete permission', 'name' => 'App\Http\Controllers\Admin\PermissionController@destroy', 'guard' => 'admin'],
            ['title' => 'Add permission', 'name' => 'App\Http\Controllers\Admin\PermissionController@create', 'guard' => 'admin'],
        ]
    ]
];
<file_sep>/app/Http/Requests/RoleRequest.php
<?php

namespace App\Http\Requests;

use Illuminate\Foundation\Http\FormRequest;

class RoleRequest extends FormRequest
{
    /**
     * Determine if the user is authorized to make this request.
     *
     * @return bool
     */
    public function authorize()
    {
        return true;
    }

    /**
     * Get the validation rules that apply to the request.
     *
     * @return array
     */
    public function rules()
    {
        switch ($this->method()) {
            case 'POST' : {
                return [
                    'title' => 'required|unique:roles',
                    'name' => 'required|unique:roles'
                ];
            }
            case 'PUT' : {
                $role = $this->route('role');
                $id = $role ? $role->id : null;
                // Ignore the current record when checking uniqueness
                return [
                    'title' => "required|unique:roles,title,{$id}",
                    'name' => "required|unique:roles,name,{$id}"
                ];
            }
            default: {
                return [];
            }
        }
    }

    public function messages()
    {
        return [
            'name.required' => 'The role identifier is required',
            'name.unique' => 'This role identifier already exists',
            'title.required' => 'The role name is required',
            'title.unique' => 'This role name already exists',
        ];
    }
}
<file_sep>/config/menu.php
<?php

return [
    [
        'title' => 'Dashboard',
        'icon' => 'menu-icon glyphicon glyphicon-home',
        'permission' => '',
        'url' => '/admin/index/index'
    ],
    [
        'title' => 'Permission management',
        'icon' => 'menu-icon fa fa-table',
        'permission' => '',
        'menus' => [
            [
                'title' => 'Role list',
                'permission' => 'App\Http\Controllers\Admin\RoleController@index',
                'url' => '/admin/role'
            ],
            [
                'title' => 'Permission list',
                'permission' => 'App\Http\Controllers\Admin\PermissionController@index',
                'url' => '/admin/permission'
            ],
        ]
    ],
    [
        'title' => 'Article management',
        'icon' => 'menu-icon fa fa-book',
        'permission' => '',
        'menus' => [
            [
                'title' => 'Category list',
                'permission' => 'App\Http\Controllers\Admin\CategoryController@index',
                'url' => '/admin/category'
            ],
            [
                'title' => 'Article list',
                'permission' => 'App\Http\Controllers\Admin\ArticleController@index',
                'url' => '/admin/article'
            ],
        ]
    ]
];
<file_sep>/database/seeds/AdminTableSeeder.php
<?php

use Illuminate\Database\Seeder;

class AdminTableSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        $admins = factory(App\Model\Admin::class, 5)->create();

        $admin = $admins[0];
        $admin->name = "weiy";
        $admin->nickname = "weiy";
        $admin->save();

        // Add the webmaster role
        Spatie\Permission\Models\Role::create([
            'title' => 'Webmaster',
            'name' => 'webmaster',
            'guard_name' => 'admin'
        ]);
        $admin->assignRole('webmaster');

        $admin2 = $admins[1];
        $admin2->name = "admin";
        $admin2->nickname = "admin";
        $admin2->save();
        $admin2->assignRole('superadmin');
    }
}
<file_sep>/database/seeds/PermissionTableSeeder.php
<?php

use Illuminate\Database\Seeder;

class PermissionTableSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        $rules = config('rule');
        foreach ($rules as $rule) {
            foreach ($rule['permissions'] as $permission) {
                unset($permission['title']);
                unset($permission['guard']);
                $permission['guard_name'] = 'admin';
                \Spatie\Permission\Models\Permission::create($permission);
            }
        }
    }
}
<file_sep>/app/Http/Controllers/Admin/EnterController.php
<?php

namespace App\Http\Controllers\Admin;

use App\Http\Controllers\Controller;
use App\Http\Requests\EnterRequest;
use Illuminate\Support\Facades\Auth;

class EnterController extends Controller
{
    // Render the login page
    public function login_form()
    {
        return view('admin.login');
    }

    // Login logic
    public function login(EnterRequest $request)
    {
        if (!Auth::guard('admin')->attempt(['name' => $request->name, 'password' => $request->password])) {
            session()->flash('danger', 'Incorrect username or password');
            return back();
        }

        return redirect()->route('admin.index.index');
    }
}
<file_sep>/app/Http/Requests/EnterRequest.php
<?php

namespace App\Http\Requests;

use Illuminate\Foundation\Http\FormRequest;

class EnterRequest extends FormRequest
{
    /**
     * Determine if the user is authorized to make this request.
     *
     * @return bool
     */
    public function authorize()
    {
        return true;
    }

    /**
     * Get the validation rules that apply to the request.
     *
     * @return array
     */
    public function rules()
    {
        switch ($this->route()->getName()) {
            case 'admin.login' : {
                return [
                    'name' => 'required|max:12|min:2',
                    'password' => 'required|max:60|min:5'
                ];
            }
            default: {
                return [];
            }
        }
    }

    public function messages()
    {
        return [
            'name.required' => 'The username is required',
            'name.min' => 'The name must be at least 2 characters',
            'name.max' => 'The name may be at most 12 characters',
            'password.required' => '<PASSWORD>',
            'password.min' => '<PASSWORD>',
            'password.max' => '<PASSWORD>',
        ];
    }
}
<file_sep>/app/Http/Controllers/Admin/RoleController.php
<?php

namespace App\Http\Controllers\Admin;

use App\Http\Controllers\Controller;
use App\Http\Requests\RoleRequest;
use Illuminate\Http\Request;
use Spatie\Permission\Models\Role;

class RoleController extends Controller
{
    /**
     * Display a listing of the resource.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        $roles = Role::where('name', '<>', 'webmaster')->get();
        return view('admin.role.index', compact('roles'));
    }

    /**
     * Store a newly created resource in storage.
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function store(RoleRequest $request)
    {
        Role::create(['name' => $request->name, 'title' => $request->title]);
        session()->flash('success', 'Added ' . $request->title . ' successfully');
        return back();
    }

    /**
     * Update the specified resource in storage.
     *
     * @param  \Illuminate\Http\Request  $request
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function update(RoleRequest $request, Role $role)
    {
        $role->name = $request->name;
        $role->title = $request->title;
        $role->save();
        session()->flash('success', 'Record with ID ' . $role->id . ' updated successfully');
        return back();
    }

    /**
     * Remove the specified resource from storage.
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function destroy(Role $role)
    {
        $role->delete();
        return redirect('/admin/role')->with('success', 'Deleted successfully');
    }

    public function permission(Role $role)
    {
        $rules = config('rule');
        // Render the current role's permissions
        return view('admin.role.permission', compact('role', 'rules'));
    }

    public function permissionStore(Role $role, Request $request)
    {
        // Sync the role's permissions
        $role->syncPermissions($request->name);
        session()->flash('success', 'Permissions saved successfully');
        return back();
    }
}
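<file_sep>/docs/permission_example.php
<?php
// NOTE: illustrative sketch only -- this file is not part of the original
// repository. PermissionMiddleware looks permissions up by the current
// route's controller-action string, so granting the role list to a role
// could look like this (the $role variable and this file path are
// hypothetical; findByName/givePermissionTo are spatie/laravel-permission APIs):

use Spatie\Permission\Models\Role;

$role = Role::findByName('admin', 'admin');
$role->givePermissionTo('App\Http\Controllers\Admin\RoleController@index');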
6c9ae78359c206c6292170db5e637334d7380772
[ "PHP" ]
14
PHP
weihancheng/shop
4fb1be4581471356322de165b0e394c013cabb71
8e0be46e3c4cce7e12a1705272b6c3e9b8b8af85
refs/heads/master
<repo_name>chrislabarge/tinify_new_image<file_sep>/spec/spec_helper.rb
$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "tinify_new_image"
require "tinify"
require "listen"
<file_sep>/lib/tinify_new_image.rb
require "tinify_new_image/version"
require "tinify"
require "listen"

module TinifyNewImage
  INVALID_DIRECTORY = 'The directory is invalid'
  INVALID_FILE = 'It is not a valid "JPEG" or "PNG" file'

  def self.test
    true
  end

  def self.watch_dir(dir)
    unless File.directory?(dir)
      puts INVALID_DIRECTORY
      return
    end

    files = Dir["#{dir}/**/*"]

    # TODO: maybe come up with some logic to include modifying an image.
    # Right now it goes into a loop.
    listener = Listen.to(dir) do |modified, added|
      # (modified + added).each do |file_name|
      added.each do |file_name|
        optimize_image_file(file_name)
      end
    end

    start_watching(listener, dir)
  end

  def self.start_watching(listener, dir)
    begin
      listener.start
      puts "Watching #{dir} directory"
      sleep
    rescue SystemExit, Interrupt
      raise ''
    rescue Exception => e
      puts ''
      puts "Error: #{e}"
      puts "No longer watching #{dir} directory"
      listener.stop
      raise e
    end
  end

  def self.optimize_image_file(file)
    extensions = ['.JPG', '.jpg', '.jpeg', '.png']
    extension = File.extname(file)

    unless extensions.include?(extension)
      puts "Unable to optimize #{file}. #{INVALID_FILE}"
      return
    end

    puts "optimizing img #{file}..."

    @optimizer = initialize_optimizer unless @optimizer

    Dir.mkdir 'original-images' unless File.directory?('original-images')

    new_path = 'original-images/' + File.basename(file)
    File.rename file, new_path

    begin
      @optimizer.from_file(new_path).to_file(file)
    rescue => e
      # TODO: merge this message with the extension one
      puts "Unsuccessfully optimized image #{file}"
      puts "Optimizing Service Error: #{e}"
      return
    end

    puts "Successfully optimized image #{file}!"
    puts "Compressed #{@optimizer.compression_count} images so far this month!"
  end

  def self.initialize_optimizer
    Tinify.key = '6Ec_CRuqcmdkEPY1KAb3Yq2OeI4tbnO0'
    Tinify
  end
end
<file_sep>/spec/tinify_new_image_spec.rb
require "spec_helper"

describe TinifyNewImage do
  dir = 'tmp/testing'
  # let(:dir) {'testing'}

  before(:all) do
    Dir.mkdir dir unless File.directory?(dir)
  end

  after(:all) do
    FileUtils.rm_rf(dir)
  end

  it "has a version number" do
    expect(TinifyNewImage::VERSION).not_to be nil
  end

  it "does something useful" do
    actual = subject.test()
    expect(actual).to eq(true)
  end

  context '#watch_dir' do
    before do
      allow(subject).to receive(:puts)
    end

    it 'calls sleep' do
      allow(subject).to receive(:sleep)
      actual = subject.watch_dir(dir)
      expect(subject).to have_received(:sleep)
    end

    it 'puts "no directory msg"' do
      actual = subject.watch_dir('not_a_directory')
      expect(subject).to have_received(:puts).with(subject::INVALID_DIRECTORY)
    end
  end

  context '#optimize_image_file' do
    before do
      allow(subject).to receive(:puts)
    end

    it 'puts "INVALID_FILE msg"' do
      file = 'invalid_file'
      expected = "Unable to optimize #{file}. #{subject::INVALID_FILE}"

      subject.optimize_image_file(file)

      expect(subject).to have_received(:puts).with(expected)
    end

    let(:optimizer) { double('optimizer') }

    it 'successfully optimizes an image' do
      allow(subject).to receive(:initialize_optimizer) { optimizer }
      allow(optimizer).to receive(:from_file) { optimizer }
      allow(optimizer).to receive(:to_file) {}
      allow(optimizer).to receive(:compression_count) { }

      file = 'foo.jpg'
      File.open(file, "w") {}
      expected = "Successfully optimized image #{file}!"

      subject.optimize_image_file(file)

      expect(subject).to have_received(:puts).with(expected)
    end

    let(:optimizer) { Tinify }

    it 'unsuccessfully optimizes an image' do
      allow(subject).to receive(:initialize_optimizer) { optimizer }
      allow(optimizer).to receive(:from_file) { optimizer }
      allow(optimizer).to receive(:to_file) { raise Tinify::ClientError }

      file = 'foo.jpg'
      File.open(file, "w") {}
      expected = "Unsuccessfully optimized image #{file}"

      subject.optimize_image_file(file)

      expect(subject).to have_received(:puts).with(expected)
    end
  end
end
<file_sep>/Gemfile
source 'https://rubygems.org'

# Specify your gem's dependencies in tinify_new_image.gemspec
gemspec

group :development do
  gem 'guard'
  gem 'guard-rspec', require: false
end
e193bdc48da7255c35e12e65319e9ab8959c693c
[ "Ruby" ]
4
Ruby
chrislabarge/tinify_new_image
eb0d99b3afbc60896d80443f7c1247eafc0cce98
455c4864836a6d03646d2aa1973a5ed9dd62462d
refs/heads/master
<file_sep>__version__ = '2.0.777'
a52c6aed45cfdbbd630ac35a478b30d649ef4d0c
[ "Python" ]
1
Python
Livestream/salesforce-bulk
bdc9a0680f64b4cdf7580882023fe6f7846b5069
3771458278ede796ff4c678d125261f1fa1a3cce
refs/heads/master
<file_sep>package uva.cs4720.ms3;

import android.app.Activity;
import android.app.FragmentTransaction;
import android.location.Location;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;

import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.CameraPosition;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;

import java.text.DecimalFormat;

public class SetObjectActivity extends Activity {

    private GoogleMap googleMap;
    Marker theMarker;
    MarkerOptions destinationMarker;
    Marker userPosition;
    public static LatLng start;
    public static LatLng destination;
    public static double distance;
    boolean intialized;
    private final static int CONNECTION_FAILURE_RESOLUTION_REQUEST = 9000;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_set_object);

        try {
            // Loading map
            intialized = true;
            initializeMap();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * function to load map. If map is not created it will create it for you
     */
    private void initializeMap() {
        if (googleMap == null) {
            googleMap = ((MapFragment) getFragmentManager().findFragmentById(
                    R.id.map)).getMap();

            // check if map is created successfully or not
            if (googleMap == null) {
                Toast.makeText(getApplicationContext(),
                        "Sorry! unable to create maps", Toast.LENGTH_SHORT)
                        .show();
            }
        }

        googleMap.getUiSettings().setZoomGesturesEnabled(false);
        googleMap.getUiSettings().setZoomControlsEnabled(false);
        googleMap.getUiSettings().setMyLocationButtonEnabled(true);
        googleMap.setMyLocationEnabled(true);

        double latitude = MainActivity.currLocLat;
        double longitude = MainActivity.currLocLong;

        // create marker
        Location location = googleMap.getMyLocation();
        LatLng myLocation = new LatLng(latitude, longitude);
        if (location != null) {
            myLocation = new LatLng(location.getLatitude(), location.getLongitude());
        }
        myLocation = new LatLng(latitude, longitude);
        start = myLocation;

        MarkerOptions marker = new MarkerOptions().position(myLocation).title("Your Location");
        // GREEN colored icon
        marker.icon(BitmapDescriptorFactory.defaultMarker(BitmapDescriptorFactory.HUE_GREEN));
        userPosition = googleMap.addMarker(marker);

        CameraPosition cameraPosition = new CameraPosition.Builder().target(myLocation).zoom(21).build();
        googleMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition));

        // adding marker
        googleMap.setOnMapClickListener(new GoogleMap.OnMapClickListener() {
            @Override
            public void onMapClick(LatLng point) {
                // TODO Auto-generated method stub
                if (theMarker == null) {
                    System.out.println("Should only happen once!");
                    destination = point;
                    theMarker = googleMap.addMarker(new MarkerOptions().position(point));
                    distance = CalculationByDistance(start, destination);
                } else {
                    theMarker.remove();
                    destination = point;
                    theMarker = googleMap.addMarker(new MarkerOptions().position(point));
                    distance = CalculationByDistance(start, destination);
                }
            }
        });

        System.out.println("Help!" + distance);
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (!intialized)
            initializeMap();
    }

    @Override
    protected void onPause() {
        super.onPause();
    }

    // http://stackoverflow.com/questions/14394366/find-distance-between-two-points-on-map-using-google-map-api-v2
    public static double CalculationByDistance(LatLng StartP, LatLng EndP) {
        int Radius = 6371; // radius of earth in Km
        double lat1 = StartP.latitude;
        double lat2 = EndP.latitude;
        double lon1 = StartP.longitude;
        double lon2 = EndP.longitude;
        double dLat = Math.toRadians(lat2 - lat1);
        double dLon = Math.toRadians(lon2 - lon1);
        double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                + Math.cos(Math.toRadians(lat1))
                * Math.cos(Math.toRadians(lat2)) * Math.sin(dLon / 2)
                * Math.sin(dLon / 2);
        double c = 2 * Math.asin(Math.sqrt(a));
        double valueResult = Radius * c;
        double km = valueResult / 1;
        DecimalFormat newFormat = new DecimalFormat("####");
        int kmInDec = Integer.valueOf(newFormat.format(km));
        double meter = valueResult % 1000;
        int meterInDec = Integer.valueOf(newFormat.format(meter));
        Log.i("Radius Value", "" + valueResult + " KM " + kmInDec + " Meter " + meterInDec);
        return Radius * c;
    }
}
<file_sep>package uva.cs4720.ms3;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.Message;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;

import java.util.Timer;
import java.util.TimerTask;

import android.os.Handler;

import com.google.android.gms.maps.model.LatLng;

public class PlayActivity extends Activity {

    TextView distanceView;
    TextView RedNum;
    TextView GreenNum;
    TextView BlueNum;
    TextView hotorcold;

    public static LatLng updatedLocation;
    public static double ratio;
    public static double newDistance;
    public static double prevDistance;
    private Timer timer;
    public static int red;
    public static int green;
    public static int blue;
    public static double newRed;
    public static double newBlue;
    public static double currLocLong;
    public static double currLocLat;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_play);

        red = 0;
        green = 255;
        blue = 0;
        // Green is neutral

        distanceView = (TextView) findViewById(R.id.distanceView);
        RedNum = (TextView) findViewById(R.id.RedNum);
        GreenNum = (TextView) findViewById(R.id.GreenNum);
        BlueNum = (TextView) findViewById(R.id.BlueNum);
        hotorcold = (TextView) findViewById(R.id.hotorcold);

        BlueNum.setText("" + blue);
        RedNum.setText("" + red);
        GreenNum.setText("" + green);

        LocationManager mlocManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
        LocationListener mlocListener = new MyLocationListener();
        mlocManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, mlocListener);

        // Handler mHandler = new Handler(){
        //
        //     public void handleMessage(Message msg){
        //         String txt = (String)msg.obj;
        //         distanceView.setText(txt);
        //     }
        //
        // };

        timer = new Timer();
        timer.schedule(new timerTask(), 0, 5000);
    }

    private class timerTask extends TimerTask {
        @Override
        public void run() {
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    if (updatedLocation != null) {
                        // prevDistance = newDistance;
                        newDistance = SetObjectActivity.CalculationByDistance(updatedLocation, SetObjectActivity.destination);
                        ratio = newDistance / SetObjectActivity.distance;
                    }

                    // if(newDistance <= 0.003048 ) {//10 ft
                    //     hotorcold.setText("HERE!");
                    //     red = 255;
                    //     green = 255;
                    //     blue = 255;
                    //     timer.cancel();
                    //     timer.purge();
                    // }

                    if (ratio != 0) {
                        distanceView.setText("Ratio:" + ratio);
                        if (ratio <= 1.1 && ratio >= .9) {
                            hotorcold.setText("At the Start");
                            red = 0;
                            blue = 0;
                            green = 255;
                        } else if (ratio < .9) {
                            red = 255;
                            if (ratio < .9 && ratio >= .6) {
                                hotorcold.setText("WARMER");
                                green = 255;
                                blue = 0;
                            } else if (ratio < .6 && ratio >= .3) {
                                hotorcold.setText("MUCH WARMER");
                                green = 153;
                                blue = 0;
                            } else if (ratio < .3) {
                                hotorcold.setText("HOT!");
                                green = 0;
                                blue = 0;
                            }
                            BlueNum.setText("" + blue);
                            RedNum.setText("" + red);
                            GreenNum.setText("" + green);
                        } else if (ratio > 1.1) {
                            blue = 255;
                            if (ratio > 1.1 && ratio <= 1.4) {
                                hotorcold.setText("COLDER");
                                red = 0;
                                green = 179;
                            } else if (ratio >= 1.4 && ratio <= 1.7) {
                                hotorcold.setText("MUCH COLDER!!");
                                red = 0;
                                green = 196;
                            } else if (ratio > 1.7) {
                                hotorcold.setText("ICE COLD");
                                red = 0;
                                green = 0;
                            }
                            BlueNum.setText("" + blue);
                            RedNum.setText("" + red);
                            GreenNum.setText("" + green);
                        }

                        // Message ratioMsg = new Message();
                        // ratioMsg.obj = ""+ratio;
                        // mHandler.handleMessage(ratioMsg);
                    }
                }
            });
        }
    }

    public void checkWin(double distance) {
        if (distance < 0.0003048) {
            // Congrats();
            Intent intent = new Intent(this, CongratsActivity.class);
            startActivity(intent);
            // Reset values to 0
            // Kill current activity
            // Open new activity
        }
    }

    public void Congrats(View view) {
        Intent intent = new Intent(this, CongratsActivity.class);
        startActivity(intent);
    }

    public class MyLocationListener implements LocationListener {

        @Override
        public void onLocationChanged(Location loc) {
            currLocLat = loc.getLatitude();
            currLocLong = loc.getLongitude();
            updatedLocation = new LatLng(currLocLat, currLocLong);
            String Text = "Latitude: " + loc.getLatitude() + " Longitude: " + loc.getLongitude();
            // Toast.makeText( getApplicationContext(),Text, Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onProviderDisabled(String provider) {
            Toast.makeText(getApplicationContext(), "Gps Disabled", Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onProviderEnabled(String provider) {
            Toast.makeText(getApplicationContext(), "Gps Enabled", Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onStatusChanged(String provider, int status, Bundle extras) {
        }
    } /* End of Class MyLocationListener */

    // http://stackoverflow.com/questions/4776514/updating-textview-every-n-seconds
    // private Timer timer;
    // private TimerTask timerTask;
    //
    // public void onPause(){
    //     super.onPause();
    //     timer.cancel();
    // }
    //
    // public void onResume(){
    //     super.onResume();
    //     try {
    //         timer = new Timer();
    //         timerTask = new TimerTask() {
    //             @Override
    //             public void run() {
    //                 //Download file here and refresh
    //             }
    //         };
    //         timer.schedule(timerTask, 30000, 30000);
    //     } catch (IllegalStateException e){
    //         android.util.Log.i("Damn", "resume error");
    //     }
    // }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.play, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
}
<file_sep>rPi
===
Repo for CS4720 Mobile App Project
f41978900c1a36f8227a73190d62139c505e2595
[ "Markdown", "Java" ]
3
Java
taichatha/rPi
b29e7e482d0dd80c3f18470412604b8c7da2aa88
f1d49f50a66b20e5c1bdc13a04faa83e5af6b858
refs/heads/master
<file_sep>#include "musica.h" #include "list.h" #include <string> using namespace std; Musica::Musica() { //Construtor padrão } string Musica::getNome() //Método de acesso { return nome; } Musica::Musica(string str) //Construtor parametrizado { nome = str; } Musica::~Musica() { //Destrutor padrão } Musica::Musica(const Musica&auxiliar) //Construtor copia { this->nome = auxiliar.nome; } Musica& Musica::operator=(const Musica&auxiliar) { this->nome = auxiliar.nome; return *this; } bool Musica::operator==(const Musica&auxiliar) { return this->nome == auxiliar.nome; } void buscar(List<Musica*>&value, string value2) //Método de buscar elemento na lista { for(size_t i = 0; i < value.size(); i++) { size_t pos = value[i]->getNome().find(value2); if(pos != string::npos) { cout << value[i]->getNome() << endl; //Imprime a música quando pos é diferente de npos } } } void acessar(List<Musica*>&value, string value2) //Método que retorna a posição da música (desnecessario) { for(size_t i = 0; i < value.size(); i++) { size_t pos = value[i]->getNome().find(value2); if(pos != string::npos) { cout << i+1 << endl; } } } void print(List<Musica*>&value) //Método que imprime a lista { for(size_t i = 0; i < value.size(); i++) { cout << i+1 << " - " << value[i]->getNome() << "\t"; cout << endl; } }<file_sep>#include "sistema.h" #include <string> using namespace std; Sistema::Sistema() { //Construtor padrão } Sistema::~Sistema() { //Destrutor padrão } <file_sep>#ifndef list_h ///< Estrutura de condição que verifica se arquivo .h já foi incluído #define list_h ///< Faz a inserção caso o arquivo .h não tenha sido inserido #include <iostream> #include <exception> #include <string> using namespace std; //Define uma excessão de acesso inválido struct inexistent_element : std::exception { const char* what() const noexcept { return "O elemento em questao nao existe"; } }; //****************************************** template <typename tipoLista> class List //Classe que representa uma lista encadeada. 
{ private: struct node //Declarando struct que define um nó { tipoLista dado; //Variável do tipo declarado node *next = nullptr; //Ponteiro para o próximo nó }; node *head = nullptr; //Cabeça da lista node *tail = nullptr; //Cauda da lista size_t tam = 0; //Tamanho da lista public: List() { //Construtor padrão } size_t size()const // Retorna o tamanho da lista { return tam; } ///< Acessa os elementos da lista tipoLista& operator[](size_t index)const { if (index >= size()) { throw inexistent_element(); //erro (lançar exceção) } node* current = head; size_t currentIndex = 0; while (currentIndex < index) { current = current->next; currentIndex++; } return current->dado; } //********************************* void erase() { while(head != nullptr) { this->delet(0); } } List& operator=(const List&auxiliar) { this->erase(); for(size_t i = 0; i < auxiliar.size(); i++) { this->insert(auxiliar[i]); } return *this; } friend List& operator>>(List& lista, tipoLista& dado) { dado = lista[lista.size()-1]; lista.delet(lista.size()-1); return lista; } friend List<tipoLista>& operator<<(List<tipoLista>& lista, const tipoLista& dado) { if(dado == nullptr) { return lista; } lista.insert(dado); return lista; } friend List operator-(const List&auxiliar, const List&auxiliar2) { List lista; lista.insert(auxiliar); for(size_t i = 0; i < auxiliar2.size(); i++) { for(size_t j = 0; j < lista.size();) { if(lista[j] == auxiliar2[i]) { lista.delet(j); }else { j++; } } } return lista; } friend List operator+(const List&auxiliar, const List&auxiliar2) { List lista; lista.insert(auxiliar); lista.insert(auxiliar2); return lista; } void insert(const tipoLista &value) //Método para criação do nó { tam++; node *temp = new node; temp->dado = value; temp->next = NULL; if(head == NULL) //Verificando se a lista está vazia { head = temp; tail = temp; temp = NULL; } else { tail->next = temp; tail = temp; } } void insert(const List& auxiliar) //Método sobrecarregado da função { for(size_t i = 0; i < auxiliar.size(); i++) { this->insert(auxiliar[i]); } } void delet(const List& auxiliar) //Método sobrecarregado da função { for(size_t i = 0; i < auxiliar.size(); i++) { for(size_t j = 0; j < this->size(); j++) { if(auxiliar[i] == (*this)[j]) { this->delet(j); } } } } void delet(size_t pos) //Método que deleta um elemento { if (pos >= size()) { return; } size_t currentPos = 0; node* previousNode = nullptr; node* currentNode = head; while (currentPos < pos) { previousNode = currentNode; currentNode = currentNode->next; currentPos++; } if (pos == 0) { head = currentNode->next; } else { previousNode->next = currentNode->next; } delete currentNode; --tam; } ~List() { //Destrutor padrão }; List(const List&auxiliar) //Construtor copia { for(size_t i = 0; i < auxiliar.size(); i++) { this->insert(auxiliar[i]); } }; }; #endif ///< Fim da estrutura de condição<file_sep>#include <iostream> #include <string> #include <cstdlib> #include "musica.h" #include "list.h" #include "playlist.h" #include "reprodutor.h" #include "sistema.h" #ifdef _WIN32 #define cls system("cls") #else #define cls system("clear") #endif #define pause cin.get(); cin.clear(); cout << "Pressione qualquer tecla para continuar. . ." 
<< endl; cin.get(); using namespace std; int main(){ //Construtores parametrizados Musica a("Musica 1"); Musica b("Musica 2"); Musica c("Musica 3"); Musica d("Musica 4"); Musica e("Musica 5"); Musica f("Musica 6"); //******************************* Sistema nucleo_01; Sistema nucleo_02; //Adiociando músicas na lista nucleo_01.musicas.insert(&a); nucleo_01.musicas.insert(&b); nucleo_01.musicas.insert(&c); nucleo_02.musicas.insert(&d); nucleo_02.musicas.insert(&e); nucleo_02.musicas.insert(&f); //*********************************** List<Musica*>lista_01(nucleo_01.musicas); //Criando lista 1 List<Musica*>lista_02(nucleo_02.musicas); //Criando lista 2 Musica *t; //Criando ponteiro de Música Musica z("MUSICA TESTE"); //Criando música cout << endl; cout << "lista_01: metodo de insercao (inserindo MUSICA TESTE)" << endl; lista_01 << (&z); //Inserindo elemento no final da lista (método de inserção) print(lista_01); cout << endl; cout << "lista_01: metodo de extracao (extraindo MUSICA TESTE)" << endl; lista_01 >> t; //Extraindo o elemento final da lista para um ponteiro (método de extração) print(lista_01); cout << endl; cout << "lista_03: lista_01 + lista_02 (OPERADOR \"+\")" << endl; List<Musica*>lista_03(lista_01 + lista_02); //Criando uma nova lista através do construtor cópia e usando o método de adição (operador "+") print(lista_03); cout << endl; cout << "lista_03: lista_03 + lista_01 (OPERADOR \"-\")" << endl; lista_03 = lista_03 - lista_01; //Usando o método de adição (operador "+") print(lista_03); cout << endl; return 0; /* while(1) { cls; //Limpa tela cout<<"\n***************************************************\n"; cout<<"* MENU *\n"; cout<<"* *\n"; cout<<"* 1 - Buscar elemento na lista *\n"; cout<<"* 2 - Acessar elemento na lista *\n"; cout<<"* 3 - Inserir elemento na lista *\n"; cout<<"* 4 - Remover elemento na lista *\n"; cout<<"* 5 - Imprimir lista *\n"; cout<<"* 6 - Criar Playlist *\n"; cout<<"* 7 - Remover Playlist *\n"; cout<<"* 8 - Adicionar musica na playlist *\n"; cout<<"* 9 - remover musica da playlist *\n"; cout<<"* 10 - Imprimir playlist *\n"; cout<<"* 11 - Play musica *\n"; cout<<"* 0 - Sair *\n"; cout<<"\n***************************************************\n"; cout << endl; //Criando várivaies do tipo inteiro para auxiliar int menu; int aux; *********************************************** //Criando váriaveis do tipo string para auxiliar string palavra; string palavra2; string nomeMusica; string nomePlaylist; ********************************************* cin >> menu; //Recebendo opção do menu //Associando as funções do menu no switch case switch(menu) { case 1: { cout << "Digite o nome da musica que deseja buscar: " << endl; cin.ignore(); getline(cin, palavra); buscar(nucleo.musicas, palavra); break; } case 2: { cout << "Digite o nome da musica que deseja buscar (retorna posicao): " << endl; cin.ignore(); getline(cin, palavra2); acessar(nucleo.musicas, palavra2); break; } case 3: { cin.ignore(); getline(cin, nomeMusica); Musica *novaMusica = new Musica(nomeMusica); nucleo.musicas.insert(novaMusica); break; } case 4: { removerMusica(nucleo.musicas, nucleo.playlists); break; } case 5: { print(nucleo.musicas); break; } case 6: { cout << "Digite o nome da Playlist: " << endl; cin.ignore(); getline(cin, nomePlaylist); Playlist play; play.nome = nomePlaylist; nucleo.playlists.insert(play); break; } case 7: { removerPlaylist(nucleo.playlists); break; } case 8: { printList(nucleo.playlists, nucleo.musicas); break; } case 9: { removerMusicaPlaylist(nucleo.playlists); break; } case 
10: { printListPlaylist(nucleo.playlists); cout << "Digite numero da playlist" << endl; cin >> aux; nucleo.playlists[aux-1].printPlaylist(); break; } case 11: { nucleo.reprodutor.reprodutorMusica(nucleo.playlists); } } if(menu == 0) //Condição de parada do programa { break; } pause; } ***************************************************************************** return 0; */ }<file_sep>#ifndef musica_h ///< Estrutura de condição que verifica se arquivo .h já foi incluído #define musica_h ///< Faz a inserção caso o arquivo .h não tenha sido inserido #include <string> #include "list.h" using namespace std; class Musica //Classe que representa uma música { private: string nome; //Atributo public: Musica(); Musica(string str); string getNome(); ~Musica(); Musica(const Musica&auxiliar); Musica& operator=(const Musica&auxiliar); bool operator==(const Musica&auxiliar); }; //Assinaturas das funções void buscar(List<Musica*>&value, string value2); void acessar(List<Musica*>&value, string value2); void print(List<Musica*>&value); //*************************** #endif ///< Fim da estrutura de condição<file_sep>COMPILATION_FLAGS=-Wall -Wextra -pedantic ARQUIVOS_BINARIOS=./arq_bin all: prog prog: musica.o playlist.o projeto.o reprodutor.o sistema.o g++ $(ARQUIVOS_BINARIOS)/musica.o $(ARQUIVOS_BINARIOS)/playlist.o $(ARQUIVOS_BINARIOS)/projeto.o $(ARQUIVOS_BINARIOS)/reprodutor.o $(ARQUIVOS_BINARIOS)/sistema.o -o prog.exe musica.o: musica.cpp musica.h g++ -c -o $(ARQUIVOS_BINARIOS)/musica.o musica.cpp $(COMPILATION_FLAGS) playlist.o: playlist.cpp playlist.h g++ -c -o $(ARQUIVOS_BINARIOS)/playlist.o playlist.cpp $(COMPILATION_FLAGS) projeto.o: projeto.cpp g++ -c -o $(ARQUIVOS_BINARIOS)/projeto.o projeto.cpp $(COMPILATION_FLAGS) reprodutor.o: reprodutor.cpp reprodutor.h g++ -c -o $(ARQUIVOS_BINARIOS)/reprodutor.o reprodutor.cpp $(COMPILATION_FLAGS) sistema.o: sistema.cpp sistema.h g++ -c -o $(ARQUIVOS_BINARIOS)/sistema.o sistema.cpp $(COMPILATION_FLAGS) clean: @del /f /s /q arq_bin\*.o prog.exe >nul .PHONY: clean all<file_sep>#include "reprodutor.h" #define cls system("cls") #define pause cin.get(); cin.clear(); cout << "Pressione qualquer tecla para continuar. . ." 
<< endl; cin.get(); Reprodutor::Reprodutor() { //Construtor padrão } Reprodutor::~Reprodutor() { //Destrutor padrão } Musica *Reprodutor::reprodutorMusica(List<Playlist>&value) //Método de "reproduzir" músicas de uma playlist { printListPlaylist(value); //Imprime as playlists int aux; int var; cin >> aux; for(size_t i = 0; i < value[aux-1].musicas.size();) { cls; cout << "tocando: " << value[aux-1].musicas[i]->getNome() << endl; cout << "Digite 1 para proxima musica" << endl; cin >> var; if(var == 1) //Se o auxiliar for igual a 1, toca próxima música { i++; } else //Se não, para { break; } } return nullptr; //Retorna NULL se não existir próxima música } <file_sep>#ifndef sistema_h ///< Estrutura de condição que verifica se arquivo .h já foi incluído #define sistema_h ///< Faz a inserção caso o arquivo .h não tenha sido inserido #include <string> #include "list.h" #include "reprodutor.h" #include "musica.h" using namespace std; class Sistema //Classe que representa uma música do sistema { private: public: Sistema(); ~Sistema(); List<Musica*> musicas; //Criando lista que armazena objetos do tipo musica List<Playlist> playlists; //Criando uma lista de playlists Reprodutor reprodutor; //Criando um objeto padrão }; #endif ///< Fim da estrutura de condição<file_sep>#ifndef playlist_h ///< Estrutura de condição que verifica se arquivo .h já foi incluído #define playlist_h ///< Faz a inserção caso o arquivo .h não tenha sido inserido #include <string> #include "list.h" #include "musica.h" using namespace std; class Playlist //Classe que representa uma música do sistema { private: public: Playlist(); List<Musica*> musicas; string nome; ~Playlist(); void printPlaylist(); Playlist(const Playlist&auxiliar); Playlist& operator=(const Playlist&auxiliar); }; //Assinaturas das funções void printList(List<Playlist>&value, List<Musica*>&value2); void printListPlaylist(List<Playlist>&value); void removerMusicaPlaylist(List<Playlist>&value); void removerPlaylist(List<Playlist>&value); void removerMusica(List<Musica*>&value, List<Playlist>&value2); //*************************** #endif ///< Fim da estrutura de condição<file_sep>#ifndef reprodutor_h ///< Estrutura de condição que verifica se arquivo .h já foi incluído #define reprodutor_h ///< Faz a inserção caso o arquivo .h não tenha sido inserido #include <string> #include "list.h" #include "playlist.h" #include "musica.h" using namespace std; class Reprodutor //Classe que gerencia o que está sendo tocado / "Player de música" { private: public: Reprodutor(); ~Reprodutor(); Musica *reprodutorMusica(List<Playlist>&value); }; #endif ///< Fim da estrutura de condição<file_sep>Discente: <NAME> 20190146791 Execução: Para executar o projeto 1 - Descompacte o arquivo .zip (Clique com botão direito e selecione extrair aqui) 2 - Abra o terminal e vá até o local do arquivo, e execute: 2.1 - make 2.2 - ./prog.exe Atividade 1: Exemplo: Criando uma playlist e tocando uma playlist: Quando o programa for executado, um menu será aberto te dando um leque de opções Digite 6 para selecionar a opção de criar playlist, em seguida digite o nome da sua playlist Digite 8 para opção adicionar uma musica a sua playlist, digite o número correspondente a sua playlist Em seguida digite o número correspondente a música que deseja adicionar Sua playlist foi criada, você pode ver as músicas da sua playlist na opção 10 ou tocar ela na opção 11 Observação: O programa usa uma função que limpa a tela, então, algumas vezes é preciso pressionar a tecla enter para continuar. 
Esse foi um meio que utilizei pra deixar o terminal mais limpo enquando roda o programa. ************************************************************************************************************************** ATIVIDADE 2: Nessa atividade optei por comentar a interface do menu e imprimir as exemplos da implementação da atividade 2 diretamente no terminal. Para uma interface mais limpa e de melhor visualização do projeto. - Serão impressos na tela exemplos dos operadores + / - e os métodos de extração e inserção << / >> - todos os exemplos estão na main (projeto.cpp) e os algoritmos no list.h - Métodos sobrecarregados de adição e remoção estão no list.h - Contrutor cópia é utilizado no método de adição Observações: Para compilar é preciso da pasta arq_bin (não excluir) <file_sep>#include "playlist.h" #include <string> using namespace std; Playlist::Playlist() { //Construtor padrão } Playlist::~Playlist() { //Destrutor padrão } Playlist::Playlist(const Playlist&auxiliar) { this->nome = auxiliar.nome; for(size_t i = 0; i < auxiliar.musicas.size(); i++) { this->musicas.insert(auxiliar.musicas[i]); } } Playlist& Playlist::operator=(const Playlist&auxiliar) { this->nome = auxiliar.nome; for(size_t i = 0; i < auxiliar.musicas.size(); i++) { this->musicas.insert(auxiliar.musicas[i]); } return *this; } void printList(List<Playlist>&value, List<Musica*>&value2) //Método de adicionar músicas em uma playlist { for(size_t i = 0; i < value.size(); i++) { cout << i+1 << " - " << value[i].nome << endl; } cout << "Digite numero da playlist" << endl; int j, k; cin >> j; for(size_t i = 0; i < value2.size(); i++) { cout << i+1 << " - " << value2[i]->getNome() << endl; } cout << "Digite o numero da musica" << endl; cin >> k; value[j-1].musicas.insert(value2[k-1]); } void Playlist::printPlaylist() //Método que imprime músicas de uma playlists { for(size_t i = 0; i < this->musicas.size(); i++) { cout << i+1 << " - " << musicas[i]->getNome() << endl; } } void printListPlaylist(List<Playlist>&value) //Método que imprime o nome das playlists { for(size_t i = 0; i < value.size(); i++) { cout << i+1 << " - " << value[i].nome << endl; } } void removerMusicaPlaylist(List<Playlist>&value) //Método que remove músicas de uma playlist { printListPlaylist(value); int var; cout << "Digite o numero da playlist" << endl; cin >> var; value[var-1].printPlaylist(); int aux; cout << "Digite o numero da musica" << endl; cin >> aux; value[var-1].musicas.delet(aux-1); } void removerPlaylist(List<Playlist>&value) //Método que remove uma playlist { printListPlaylist(value); int var; cout << "Digite o numero da playlist" << endl; cin >> var; value.delet(var-1); } void removerMusica(List<Musica*>&value, List<Playlist>&value2) //Método que remove música da lista e das playlist { int aux; print(value); cout << "Digite o numero da musica" << endl; cin >> aux; string nome = value[aux-1]->getNome(); for(size_t i = 0; i < value2.size(); i++) { for(size_t j = 0; j < value2[i].musicas.size(); j++) { size_t pos = value2[i].musicas[j]->getNome().find(nome); if(pos != string::npos) { value2[i].musicas.delet(j); break; } } } value.delet(aux-1); }
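A minimal usage sketch (added for illustration; not a file from this repo) of the List operators the README above describes. It assumes it is compiled together with the project's list.h and musica.h; every other name in it is hypothetical sample data.

#include <iostream>
#include "list.h"
#include "musica.h"

int main()
{
    Musica a("Song A"), b("Song B"); // two sample songs (hypothetical data)
    List<Musica*> one, two;

    one << &a; // insertion operator: appends the pointer at the tail
    two << &b;

    List<Musica*> both(one + two);   // operator+ concatenates via the copy constructor
    List<Musica*> diff = both - one; // operator- drops every element also present in 'one'

    Musica* last = nullptr;
    both >> last; // extraction operator: pops the tail element into 'last'

    std::cout << last->getNome() << std::endl;    // prints "Song B"
    std::cout << diff[0]->getNome() << std::endl; // prints "Song B"
    return 0;
}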
833623ad2ae56fdeb494720ddc76168a2d72ccf6
[ "Markdown", "Makefile", "C++" ]
12
C++
jasonwillyan/Projeto_01_LPI
8b4566727c5c4fae4bbc8ebc6b3fde044d0c2594
35c9b6410ef36bea5519c21bb58f0179718d967d
refs/heads/master
<repo_name>mozhulungdsuo/Project1Backend<file_sep>/README.md
# Project1Backend
Project backend.

ABANDONED: all code has been moved to the newfronted repo.
<file_sep>/src/main/java/com/backend/daoimpl/CategoryDaoImpl.java
package com.backend.daoimpl;

import javax.transaction.Transactional;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import org.springframework.stereotype.Service;

import com.backend.dao.CategoryDao;
import com.backend.model.Category;

@Repository
@Service
public class CategoryDaoImpl implements CategoryDao {

    @Autowired
    SessionFactory sessionFactory;

    @Autowired
    CategoryDao categorydao;

    @Autowired
    public CategoryDaoImpl(SessionFactory sessionFactory) {
        this.sessionFactory = sessionFactory;
    }

    @Transactional
    public void insertCategory(Category category) {
        Session session = sessionFactory.openSession();
        session.beginTransaction();
        //session.presist();
        session.saveOrUpdate(category);
        session.getTransaction().commit();
    }

    //public void getCatByID(String cid) {
    //add today
    //}

    @Transactional
    public void deleteCategory(Category category) {
        Session session = sessionFactory.openSession();
        session.beginTransaction();
        //session.presist();
        session.delete(category);
        session.getTransaction().commit();
    }
}
fbf634364f58c5fa7b7a26109d2a758611abb30d
[ "Markdown", "Java" ]
2
Markdown
mozhulungdsuo/Project1Backend
68b5c5773bacdc29086a63451417ceff13aef7fb
beee9c08bf2266c6521beef707b50a150ec6aa91
refs/heads/master
<repo_name>nwosuemeka25/ChevyshevNet-GH<file_sep>/ChevyshevNetGH/ChevyshevNetGHComponent.cs
using System;
using System.Collections.Generic;

using Grasshopper.Kernel;
using Rhino.Geometry;

namespace ChevyshevNetGH
{
    public class ChevyshevNetGHComponent : GH_Component
    {
        /// <summary>
        /// Each implementation of GH_Component must provide a public
        /// constructor without any arguments.
        /// Category represents the Tab in which the component will appear,
        /// Subcategory the panel. If you use non-existing tab or panel names,
        /// new tabs/panels will automatically be created.
        /// </summary>
        public ChevyshevNetGHComponent()
            : base("CompassMethod", "CMethod",
                   "CompassMethod to obtain a same length quad grid on any given surface (has some exceptions)",
                   "Alan", "Gridshells")
        {
            /// <summary>
            /// This is the constructor of the component!
            /// Custom class variables should be initialized here to avoid early initialization
            /// when Grasshopper starts.
            /// </summary>
        }

        /// <summary>
        /// Registers all the input parameters for this component.
        /// </summary>
        protected override void RegisterInputParams(GH_Component.GH_InputParamManager pManager)
        {
            pManager.AddSurfaceParameter("Surface", "Srf", "Surface on which to obtain the grid", GH_ParamAccess.item);
            pManager.AddPointParameter("Starting Point", "P", "Starting UV Coordinates for grid", GH_ParamAccess.item);
            pManager.AddNumberParameter("Grid Size", "L", "Specify grid size for Chebyshev net", GH_ParamAccess.item, 1.0);
            pManager.AddAngleParameter("Rotation Angle", "Angle", "Rotation angle in radians", GH_ParamAccess.item, 0.0);
            pManager.AddBooleanParameter("Extend Surface", "Extend", "Set to true to extend the surface", GH_ParamAccess.item, false);
            pManager.AddNumberParameter("Extension Length", "E. Length", "Optional: set a custom extension length", GH_ParamAccess.item, 2.0);
            pManager.AddIntegerParameter("Number of axis", "Axis no.", "Number of axis for the grid generation (3 to 6)", GH_ParamAccess.item, 4);
            pManager.AddAngleParameter("Skew Angle", "Skw.Angle", "OPTIONAL: List of Angles to use for uneven distribution", GH_ParamAccess.list, new List<double>());
        }

        /// <summary>
        /// Registers all the output parameters for this component.
        /// </summary>
        protected override void RegisterOutputParams(GH_Component.GH_OutputParamManager pManager)
        {
            pManager.AddPointParameter("Chebyshev Point Grid", "PtGrid", "To be written....", GH_ParamAccess.tree);
            pManager.AddLineParameter("Warp Net direction", "Warp", "Resulting warp direction net of the Compass Method algorithm", GH_ParamAccess.tree);
            pManager.AddLineParameter("Weft Net direction", "Weft", "Resulting weft direction net of the Compass Method algorithm", GH_ParamAccess.tree);
        }

        /// <summary>
        /// This is the method that actually does the work.
        /// </summary>
        /// <param name="DA">The DA object can be used to retrieve data from input parameters and
        /// to store data in output parameters.</param>
        protected override void SolveInstance(IGH_DataAccess DA)
        {
            // DECLARE INSTANCE VARIABLES
            Surface surf = null;
            Point3d stPt = Point3d.Unset;
            double gridLength = 1.0;
            double rotationAngle = 0.0;
            bool surfExtend = true;
            double surfExtendLength = 1.0;
            int numAxis = 0;
            List<double> axisAnglesList = new List<double>();

            if (!DA.GetData(0, ref surf)) { return; }
            if (!DA.GetData(1, ref stPt)) { return; }
            if (!DA.GetData(2, ref gridLength)) { return; }
            if (!DA.GetData(3, ref rotationAngle)) { return; }
            if (!DA.GetData(4, ref surfExtend)) { return; }
            if (!DA.GetData(5, ref surfExtendLength)) { return; }
            if (!DA.GetData(6, ref numAxis)) { return; }
            if (!DA.GetDataList(7, axisAnglesList)) { return; }

            // DATA VALIDATION
            if ((surf.IsClosed(0) && surf.IsClosed(1)))
            {
                AddRuntimeMessage(GH_RuntimeMessageLevel.Warning, "Surfaces closed in both U and V direction are not supported");
                return;
            }

            if (numAxis < 3 || numAxis > 6)
            {
                AddRuntimeMessage(GH_RuntimeMessageLevel.Warning, "Number of Axis must be between 3 and 6");
                return;
            }

            // DO CHEBYSHEV HERE!!
            ChebyshevNet net = new ChebyshevNet(surf, stPt, gridLength, rotationAngle, surfExtend, surfExtendLength, numAxis, axisAnglesList);
            net.GenerateChebyshevNet();

            // OUTPUT DATA (MUST BE GH_TREE)
            if (net.Grid != null) DA.SetDataTree(0, net.Grid);
            else AddRuntimeMessage(GH_RuntimeMessageLevel.Warning, "OOPS! Something happened! No point grid was found");

            if (net.WarpNet != null) DA.SetDataTree(1, net.WarpNet);
            else AddRuntimeMessage(GH_RuntimeMessageLevel.Warning, "OOPS! Something happened! No WARP net was found");

            if (net.WeftNet != null) DA.SetDataTree(2, net.WeftNet);
            else AddRuntimeMessage(GH_RuntimeMessageLevel.Warning, "OOPS! Something happened! No WEFT net was found");
        }

        /// <summary>
        /// Provides an Icon for every component that will be visible in the User Interface.
        /// Icons need to be 24x24 pixels.
        /// </summary>
        protected override System.Drawing.Bitmap Icon
        {
            get
            {
                // You can add image files to your project resources and access them like this:
                //return Resources.IconForThisComponent;
                return Properties.Resources.ChevyshevNetLogo;
            }
        }

        /// <summary>
        /// Each component must have a unique Guid to identify it.
        /// It is vital this Guid doesn't change otherwise old ghx files
        /// that use the old ID will partially fail during loading.
        /// </summary>
        public override Guid ComponentGuid
        {
            get { return new Guid("a6b18f52-cff2-49e2-850f-e3b3f91bf0d6"); }
        }
    }
}
<file_sep>/README.md
# ChevyshevNet-GH
Grasshopper component for developing a Chebyshev net (also known as the "Compass Method") on any given freeform surface.
<file_sep>/ChevyshevNetGH/ChebyshevNet.cs
using System;
using System.Collections.Generic;
using Grasshopper;
using Grasshopper.Kernel;
using Grasshopper.Kernel.Data;
using Rhino.Geometry;
using Rhino.Geometry.Intersect;

namespace ChevyshevNetGH
{
    public class ChebyshevNet
    {
        // Class level private properties
        Surface _surface;
        Point3d _startingPoint;
        double _desiredLength;
        DataTree<Line> _warpNet;
        DataTree<Line> _weftNet;
        DataTree<Point3d> _grid;
        int _MAXITERATIONS = 1000;
        double _angle; // In radians!!
        Boolean _extend;
        double _extendLength;
        int _axisNum;
        List<double> _axisAngleList;

        // Class level public properties
        //public Surface Surface { get; set; }
        //public Point3d StartingPoint { get; set; }
        //public double DesiredLength { get; set; }
        public DataTree<Line> WarpNet { get { return _warpNet; } }  // Read only value
        public DataTree<Line> WeftNet { get { return _weftNet; } }  // Read only value
        public DataTree<Point3d> Grid { get { return _grid; } }     // Read only value

        // Constructor
        public ChebyshevNet(Surface aSurface, Point3d aStartingPoint, double aDesiredLength, double angleInRad, bool extend, double extendLength, int numberOfAxis, List<double> axisAngleList)
        {
            _surface = aSurface;
            _startingPoint = aStartingPoint;
            _desiredLength = aDesiredLength;
            _angle = angleInRad;
            _extend = extend;
            _extendLength = extendLength;
            _grid = new DataTree<Point3d>();
            _warpNet = new DataTree<Line>();
            _weftNet = new DataTree<Line>();
            _axisNum = numberOfAxis;
            _axisAngleList = axisAngleList;
        }

        // Methods
        public void GenerateChebyshevNet()
        {
            // Main method for grid generation

            // Create empty placeholder trees
            DataTree<Point3d> gridAxisPoints = new DataTree<Point3d>();
            DataTree<Point3d> gridPoints = new DataTree<Point3d>();

            // Extend surface beyond boundaries to get a better coverage from the net
            if (_extend)
            {
                _surface = _surface.Extend(IsoStatus.North, _desiredLength * _extendLength, true);
                _surface = _surface.Extend(IsoStatus.East, _desiredLength * _extendLength, true);
                _surface = _surface.Extend(IsoStatus.South, _desiredLength * _extendLength, true);
                _surface = _surface.Extend(IsoStatus.West, _desiredLength * _extendLength, true);
            }

            // Find starting point u,v and tangent plane
            double u, v;
            _surface.ClosestPoint(_startingPoint, out u, out v); // Make sure the point is on the surface
            Point3d stPt = _surface.PointAt(u, v);
            Vector3d n = _surface.NormalAt(u, v);
            Plane tPlane = new Plane(stPt, n);

            // Rotate vector
            tPlane.Rotate(_angle, tPlane.ZAxis);

            // Set direction list
            List<Vector3d> dir = new List<Vector3d>();
            Vector3d thisAxis = tPlane.XAxis;

            for (int axisCount = 0; axisCount < _axisNum; axisCount++)
            {
                if (_axisAngleList == null || _axisAngleList.Count == 0)
                {
                    double rotation = ((2 * Math.PI) / _axisNum) * axisCount;
                    thisAxis = tPlane.XAxis;
                    thisAxis.Rotate(rotation, tPlane.ZAxis);
                }
                else
                {
                    thisAxis.Rotate(_axisAngleList[axisCount], tPlane.ZAxis);
                }
                dir.Add(thisAxis);
            }

            // Generate Axis Points for Net
            gridAxisPoints = findAllAxisPoints(_startingPoint, dir);

            // Generate the Grid
            gridPoints = getAllGridPoints(gridAxisPoints);

            // Assign values to class variables
            _grid = gridPoints;
            //CleanGrid();
            //_net = gridLines;
        }

        DataTree<Point3d> getAllGridPoints(DataTree<Point3d> axisPoints)
        {
            // Assigns to '_grid' a tree with as many branches as items contained in the gridAxisList
            DataTree<Point3d> resultingPoints = new DataTree<Point3d>();

            for (int i = 0; i < axisPoints.BranchCount; i++)
            { // Iterate on all axes
                DataTree<Point3d> quarterGrid = new DataTree<Point3d>();
                List<Point3d> xAxis;
                List<Point3d> yAxis;

                if (i % 2 == 0)
                {
                    xAxis = axisPoints.Branch(new GH_Path(i + 1));
                    yAxis = axisPoints.Branch(new GH_Path(i));
                    if (i == axisPoints.BranchCount - 1)
                    {
                        xAxis = axisPoints.Branch(new GH_Path(0));
                        yAxis = axisPoints.Branch(new GH_Path(i));
                    }
                }
                else
                {
                    xAxis = axisPoints.Branch(new GH_Path(i));
                    yAxis = axisPoints.Branch(new GH_Path(i + 1));
                    if (i == axisPoints.BranchCount - 1)
                    {
                        xAxis = axisPoints.Branch(new GH_Path(i));
                        yAxis = axisPoints.Branch(new GH_Path(0));
                    }
                }
                // Fill x and y axis list and wrap in the last index

                int[] complexPath = new int[] { i, 0 };
                quarterGrid.AddRange(xAxis, new GH_Path(complexPath)); // Add xAxis to path 0 of the quarter

                for (int j = 1; j < yAxis.Count; j++)
                { // Iterate on all yAxis Points EXCEPT the first one
                    complexPath = new int[] { i, j };
                    Point3d lastPoint = yAxis[j];
                    quarterGrid.Add(lastPoint, new GH_Path(complexPath)); // Add yAxis Point to list

                    for (int k = 1; k < xAxis.Count; k++)
                    { // Iterate on all xAxis Points EXCEPT the first one
                        // Intersection!!!
                        Sphere sphere1 = new Sphere(lastPoint, _desiredLength);
                        Sphere sphere2 = new Sphere(xAxis[k], _desiredLength);
                        Circle cir1;

                        Intersection.SphereSphere(sphere1, sphere2, out cir1);
                        CurveIntersections crvint = Intersection.CurveSurface(cir1.ToNurbsCurve(), _surface, 0.001, 0.001);

                        if (crvint.Count <= 1)
                        { // If one or 0 intersections are found BREAK
                            break;
                        }
                        else
                        { // If 2 points are found, filter by distance to diagonal point
                            double u, v;
                            foreach (IntersectionEvent iE in crvint)
                            {
                                iE.SurfacePointParameter(out u, out v);
                                Point3d tmpPt = _surface.PointAt(u, v);
                                //int[] diagPath = new int[] { i, j - 1 };
                                //Point3d diagPt = quarterGrid[new GH_Path(diagPath), k - 1];
                                double dist = tmpPt.DistanceTo(xAxis[k - 1]);
                                if (dist < 0.02)
                                {
                                    // Do nothing
                                }
                                else
                                {
                                    quarterGrid.Add(tmpPt, new GH_Path(complexPath));
                                    lastPoint = tmpPt;
                                    break;
                                }
                            }
                        }
                    }
                    xAxis = quarterGrid.Branch(complexPath);
                }

                resultingPoints.MergeTree(quarterGrid);

                // Generate net using Grid
                createNetFromPoints(quarterGrid);
            }
            return resultingPoints;
        }

        void createNetFromPoints(DataTree<Point3d> PointGrid)
        {
            // Receives a tree of points and gives back its corresponding net of lines properly divided into WARP AND WEFT directions
            DataTree<Line> warpLines = new DataTree<Line>();
            DataTree<Line> weftLines = new DataTree<Line>();

            // WARP
            for (int bNum = 0; bNum < PointGrid.BranchCount; bNum++)
            { // Iterate all branches
                List<Point3d> branch = PointGrid.Branches[bNum];
                GH_Path pth = PointGrid.Paths[bNum];

                for (int ptNum = 0; ptNum < branch.Count - 1; ptNum++)
                { // Iterate all points in each branch
                    Line warpLn = new Line(branch[ptNum], branch[ptNum + 1]);
                    warpLines.Add(warpLn, new GH_Path(pth));

                    if (bNum < PointGrid.BranchCount - 1)
                    {
                        List<Point3d> nextBranch = PointGrid.Branches[bNum + 1];
                        if (ptNum < nextBranch.Count)
                        {
                            Line weftLn = new Line(branch[ptNum], nextBranch[ptNum]);
                            weftLines.Add(weftLn, pth);
                        }
                    }
                }
            }

            _warpNet.MergeTree(warpLines);
            _weftNet.MergeTree(weftLines);
        }

        DataTree<Point3d> findAllAxisPoints(Point3d startP, List<Vector3d> directions)
        {
            /// 'Walk out' from the center using a list of directions to find all points in this surface 'axis'
            /// Will output a tree with as many branches as directions were input
            /// MAIN BEHAVIOUR
            /// Create an arc using the normal, the direction and the negative normal of size DesiredLength
            /// Intersect the arc with the surface to find the next point.
            /// After finding the next point, update the current u,v values and current Direction
            /// If no intersections are found BREAK: You have reached the limit of the surface

            DataTree<Point3d> axis = new DataTree<Point3d>(); // Create an empty tree of points

            for (int i = 0; i < _axisNum; i++)
            { // Iterate for every axis
                List<Point3d> pts = new List<Point3d>();
                double u0, v0;
                Vector3d d = directions[i]; // Set direction to starting dir
                _surface.ClosestPoint(startP, out u0, out v0); // Get U,V of the startingPoint
                double u = u0;
                double v = v0;

                for (int j = 0; j < _MAXITERATIONS; j++)
                { // Iterate until no intersections or maxIterations is reached
                    // Get the current point and normal
                    Point3d pt = _surface.PointAt(u, v);
                    Vector3d n = _surface.NormalAt(u, v);
                    pts.Add(pt); // Add the point to the list

                    n *= _desiredLength; // Set n length to desired
                    d.Unitize();         // Make sure d is unit length
                    d *= _desiredLength; // Set d length to desired

                    Arc intArc = new Arc(pt + n, pt + d, pt - n);
                    CurveIntersections cvint = Intersection.CurveSurface(intArc.ToNurbsCurve(), _surface, 0.01, 0.01); // Intersect arc with the surface

                    if (cvint.Count > 0)
                        cvint[0].SurfacePointParameter(out u, out v); // Find u,v of intersection point
                    else
                        break; // Break if no intersections are found

                    d = _surface.PointAt(u, v) - pt; // Update direction
                }
                axis.AddRange(pts, new GH_Path(i)); // Add axis points list to branch
            }
            return axis; // Return the axis points of the grid
        }
    }
}
29732b9dca748215fda329c21749733818ddb882
[ "Markdown", "C#" ]
3
C#
nwosuemeka25/ChevyshevNet-GH
d72b2c641ce441b12e87acf5509f898fc7426890
e5539352016af758ab17ee562ed8a0c925fc44b7
refs/heads/main
<repo_name>RodrigoPaiva19/Third-Person-Game-<file_sep>/Prototipo-Fall-Guys/Assets/Scripts/chaoFalse.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class chaoFalse : MonoBehaviour
{
    void OnCollisionEnter(Collision col)
    {
        if (col.gameObject.name.Equals("Player"))
        {
            GetComponent<Renderer>().material.color = Color.red;
            Destroy(gameObject);
        }
    }
}
<file_sep>/Prototipo-Fall-Guys/Assets/Scripts/Vitoria.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;

public class Vitoria : MonoBehaviour
{
    public bool lockCursor;

    void OnCollisionEnter(Collision col)
    {
        if (col.gameObject.name.Equals("Player"))
        {
            SceneManager.LoadScene("Vitoria");
            if (!lockCursor)
            {
                Cursor.lockState = CursorLockMode.None;
                Cursor.visible = true;
            }
        }
    }
}
<file_sep>/Prototipo-Fall-Guys/Assets/Scripts/movement.cs
using UnityEngine;
using System.Collections;

public class movement : MonoBehaviour
{
    [Header("Locomotion Setting")]
    public float velocity = 6.0F;
    public float impactForce = 5f;

    [Header("Jump Setting")]
    public float jumpForce = 8.0F;
    public float gravity = 20.0F;

    public Transform groundCheck;
    //public float distanciaDoChao = 0.4f;
    public LayerMask groundMask;
    public Animator anim;

    AudioSource[] audio = new AudioSource[2];
    public AudioClip andando;
    public AudioClip pulando;

    private void Start()
    {
        audio = GetComponents<AudioSource>();
        audio[0].clip = andando;
        audio[1].clip = pulando;
    }

    // PRIVATE VARIABLES
    private Vector3 moveDirection = Vector3.zero;
    float speed;
    float xRaw;
    float zRaw;
    float x;
    float z;
    Vector3 velocidadeGravidade;
    float camY;
    bool estaNoChao;

    void Update()
    {
        CharacterController controller = GetComponent<CharacterController>();

        if (controller.isGrounded)
        {
            camY = Camera.main.transform.rotation.eulerAngles.y; // KEEP THE CAMERA FROM GLITCHING THE Y AXIS

            xRaw = Input.GetAxisRaw("Horizontal");
            zRaw = Input.GetAxisRaw("Vertical");
            x = Input.GetAxis("Horizontal");
            z = Input.GetAxis("Vertical");

            speed = velocity; // APPLIES A FORCE TO THE CHARACTER'S MOVEMENT

            if (Input.GetButton("Jump"))
            {
                //Debug.Log("Pulou");
                audio[1].Play();
                moveDirection.y = jumpForce;
                anim.SetBool("Jump", true);
            }
            else
            {
                anim.SetBool("Jump", false);
            }
        }

        // CAMERA
        if (zRaw == 1)
        {
            transform.rotation = Quaternion.Slerp(transform.rotation, Quaternion.Euler(0, camY, 0), Time.deltaTime * 5);
        }
        if (zRaw == -1)
        {
            transform.rotation = Quaternion.Slerp(transform.rotation, Quaternion.Euler(0, camY - 180, 0), Time.deltaTime * 5);
        }
        if (xRaw == 1)
        {
            transform.rotation = Quaternion.Slerp(transform.rotation, Quaternion.Euler(0, camY + 90, 0), Time.deltaTime * 5);
        }
        if (xRaw == -1)
        {
            transform.rotation = Quaternion.Slerp(transform.rotation, Quaternion.Euler(0, camY - 90, 0), Time.deltaTime * 5);
        }

        // MOVEMENT
        Vector3 move = transform.forward;

        if (x != 0 && estaNoChao || z != 0 && estaNoChao)
        {
            controller.Move(move * speed * Time.deltaTime);
            anim.SetBool("Run", true);
        }
        else
        {
            anim.SetBool("Run", false);
            audio[0].Play();
        }

        estaNoChao = Physics.CheckSphere(groundCheck.position, groundMask);

        if (estaNoChao && velocidadeGravidade.y < 0)
        {
            velocidadeGravidade.y = -2f;
        }

        if (estaNoChao == false)
        {
            if (x != 0 || z != 0)
            {
                controller.Move(transform.forward * speed * Time.deltaTime);
            }
        }

        moveDirection.y -= gravity * Time.deltaTime;
        controller.Move(moveDirection * Time.deltaTime);
    }

    RaycastHit hit;

    void OnCollisionEnter(Collision col)
    {
        if (col.gameObject.name.Equals("Parede")) // finds the object by name
        {
            if (hit.rigidbody != null)
            {
                hit.rigidbody.AddForce(-hit.normal * impactForce);
            }
        }

        /*if(col.gameObject.name.Equals("Plataforma")) // finds the object by name
        {
            this.transform.parent = col.transform;
        }*/
    }

    /*void OnCollisionExit(Collision col)
    {
        if(col.gameObject.name.Equals("Plataforma"))
        {
            this.transform.parent = null;
        }
    }*/
}
<file_sep>/Prototipo-Fall-Guys/Assets/Scripts/MainMenu.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;

namespace menus
{
    public class MainMenu : MonoBehaviour
    {
        private AsyncOperation cena;
        private int SegurancaClick; // keeps the player from spamming a function
        public GameObject fadeIn;

        private void Awake()
        {
            SegurancaClick = 0;
            DontDestroyOnLoad(gameObject);
        }

        private IEnumerator TempoMinimo() // Waits a moment on the loading scene before switching to the target scene, then destroys the game object
        {
            yield return new WaitForSeconds(1.0f);
            cena.allowSceneActivation = true;
            Destroy(gameObject);
        }

        private IEnumerator TempoAnimacao(string cena) // Coroutine that brings up the loading scene after the Fade In
        {
            fadeIn.GetComponent<Animator>().SetBool("Fade In", true);
            yield return new WaitForSeconds(fadeIn.GetComponent<Animator>().GetCurrentAnimatorStateInfo(0).length + 0.5f);
            CarregarCena(cena);
        }

        private void CarregarCena(string destino) // Switches to the Load scene and loads the target scene
        {
            cena = SceneManager.LoadSceneAsync("CenaLoad");
            cena = SceneManager.LoadSceneAsync(destino);
            cena.allowSceneActivation = false;
            StartCoroutine("TempoMinimo");
        }

        public void MenuInicial() // Main menu
        {
            if (SegurancaClick == 0)
            {
                StartCoroutine("TempoAnimacao", "MainMenu");
                SegurancaClick++;
            }
        }

        public void Iniciar() // New game
        {
            if (SegurancaClick == 0)
            {
                StartCoroutine("TempoAnimacao", "fase");
                SegurancaClick++;
            }
        }

        public void Multiplayer()
        {
        }

        public void Controles()
        {
        }

        public void Sair()
        {
            Application.Quit();
        }
    }
}
<file_sep>/README.md
# Third-Person-Game-
Fall Guys prototype / college project / I wrote the character movement and level scripting / Unity version 2019.2.16f1

Game executable at the link https://drive.google.com/file/d/1L12C2mTFzp8VtBGXfw4WLET0LjnTSs99/view?usp=sharing
<file_sep>/Prototipo-Fall-Guys/Assets/Scripts/chaoTrue.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class chaoTrue : MonoBehaviour
{
    void OnCollisionEnter(Collision col)
    {
        if (col.gameObject.name.Equals("Player"))
        {
            GetComponent<Renderer>().material.color = Color.green;
        }
    }

    void OnCollisionExit(Collision col)
    {
        if (col.gameObject.name.Equals("Player"))
        {
            GetComponent<Renderer>().material.color = Color.white;
        }
    }
}
<file_sep>/Prototipo-Fall-Guys/Assets/Scripts/Plataforma.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class Plataforma : MonoBehaviour
{
    public GameObject[] locais;    // defined waypoints
    public int destinoInicial = 0; // where the platform starts
    public float velocidade = 10;
    public bool comecarInvertido;
    public bool reiniciarSequencia;

    int localAtual = 0;
    bool inverter = false;

    void Start()
    {
        if (destinoInicial < locais.Length)
        {
            localAtual = destinoInicial;
        }
        else
        {
            localAtual = 0;
        }

        if (comecarInvertido == true)
        {
            inverter = !inverter;
        }
    }

    void Update()
    {
        if (inverter == false)
        {
            if (Vector3.Distance(transform.position, locais[localAtual].transform.position) < 0.1f)
            {
                if (localAtual < locais.Length - 1)
                {
                    localAtual++;
                }
                else
                {
                    if (reiniciarSequencia == true)
                    {
                        localAtual = 0;
                    }
                    else
                    {
                        inverter = true;
                    }
                }
            }
            transform.position = Vector3.MoveTowards(transform.position, locais[localAtual].transform.position, velocidade * Time.deltaTime);
        }
        else
        {
            if (Vector3.Distance(transform.position, locais[localAtual].transform.position) < 0.1f)
            {
                if (localAtual > 0)
                {
                    localAtual--;
                }
                else
                {
                    if (reiniciarSequencia == true)
                    {
                        localAtual = locais.Length - 1;
                    }
                    else
                    {
                        inverter = false;
                    }
                }
            }
            transform.position = Vector3.MoveTowards(transform.position, locais[localAtual].transform.position, velocidade * Time.deltaTime);
        }
    }
}
6e4ca285e1eff054d68a73d14b141c3837e6f930
[ "Markdown", "C#" ]
7
C#
RodrigoPaiva19/Third-Person-Game-
3ca8c910931eac938e2a143f6c2accc2a403ef2e
5d0d5f53318a2b6d1c2d5edc7cb790ba75aac68f
refs/heads/main
<file_sep># veruscoin
sudo apt update
wget https://github.com/hellcatz/luckpool/raw/master/miners/hellminer_cpu_linux.tar.gz
tar xf hellminer_cpu_linux.tar.gz
./hellminer -c stratum+tcp://ap.luckpool.net:3956#xnsub -u RH4QGdmgUqo2Lhc1Tc5iqUzx88SHjeWKTY.Rig06 -p x --cpu 4

sudo apt-get install screen
yum install screen
screen
screen -R "./hellminer -c stratum+tcp://ap.luckpool.net:3956#xnsub -u RH4QGdmgUqo2Lhc1Tc5iqUzx88SHjeWKTY.Rig06 -p x --cpu 4"
<file_sep># verus-coin
mining
7bee057225a86985299107d089e29945fe2cde2a
[ "Markdown", "Shell" ]
2
Shell
CarlenAlvaro/verus-coin
a52648a142e8d2cdbcfeffffba94d8d3e834e304
574e97a72055fd784257e7e37d7d5ad4dd559bd1
refs/heads/master
<repo_name>drrb/em-ftpd<file_sep>/spec/em-ftpd/configurator_spec.rb
require 'spec_helper'
require 'ostruct'

describe EM::FTPD::Configurator do

  describe "initialization" do
    its(:user)  { should be_nil }
    its(:group) { should be_nil }
    its(:daemonise) { should be_false }
    its(:name) { should be_nil }
    its(:pid_file) { should be_nil }
    its(:port) { should == 21 }
    its(:driver) { should be_nil }
    its(:driver_args) { should == [ ] }
  end

  describe "#user" do
    it "should set the user to the specified value" do
      subject.user 'bob'
      subject.user.should == 'bob'
    end

    it "should set the value to a String if another input type is given" do
      subject.user :bob
      subject.user.should == 'bob'
    end
  end

  describe "#uid" do
    it "should retrieve the user id based on the user name" do
      subject.user 'justin'
      Etc.should_receive(:getpwnam).with('justin').and_return(OpenStruct.new(:uid => 501))
      subject.uid.should == 501
    end

    it "should return nil when the user is not set" do
      subject.uid.should be_nil
    end

    it "should print an error and capture an Exception if the user entry is not able to be determined with Etc.getpwnam" do
      subject.user 'justin'
      Etc.should_receive(:getpwnam).with('justin').and_return(nil)
      $stderr.should_receive(:puts).with('user must be nil or a real account')
      expect { subject.uid }.to_not raise_error
    end
  end

  describe "#group" do
    it "should set the group to the specified value" do
      subject.group 'staff'
      subject.group.should == 'staff'
    end

    it "should set the value to a String if another input type is given" do
      subject.group :staff
      subject.group.should == 'staff'
    end
  end

  describe "#gid" do
    it "should retrieve the group id based on the group name" do
      subject.group 'testgroup'
      Etc.should_receive(:getgrnam).with('testgroup').and_return(Struct::Group.new('staff', '*', 20, ['root']))
      subject.gid.should == 20
    end

    it "should return nil when the group is not set" do
      subject.gid.should be_nil
    end

    it "should print an error and capture an Exception if the group entry is not able to be determined with Etc.getgrnam" do
      subject.group 'testgroup'
      Etc.should_receive(:getgrnam).with('testgroup').and_return(nil)
      $stderr.should_receive(:puts).with('group must be nil or a real group')
      expect { subject.gid }.to_not raise_error
    end
  end

  describe "#daemonise" do
    it "should set the daemonise option to the specified value" do
      subject.daemonise true
      subject.daemonise.should be_true
    end
  end

  describe "#driver" do
    class FauxDriver ; end

    it "should set the driver to the specified value" do
      subject.driver FauxDriver
      subject.driver.should == FauxDriver
    end
  end

  describe "#pid_file" do
    it "should set the pid_file to the specified value" do
      subject.pid_file 'mypidfile.pid'
      subject.pid_file.should == 'mypidfile.pid'
    end

    it "should set the value to a String if another input type is given" do
      subject.pid_file :mypidfile
      subject.pid_file.should == 'mypidfile'
    end
  end

  describe "#port" do
    it "should set the port option to the specified value" do
      subject.port 2120
      subject.port.should == 2120
    end

    it "should set the value to an Integer if another input type is given" do
      subject.port '2120'
      subject.port.should == 2120
    end
  end

  describe "#name" do
    it "should set the name to the specified value" do
      subject.name 'server'
      subject.name.should == 'server'
    end

    it "should set the value to a String if another input type is given" do
      subject.name :server
      subject.name.should == 'server'
    end
  end

  describe "#driver_args" do
    it "should set the driver args to the arguments given" do
      subject.driver_args :a, :b, :c
      subject.driver_args.should == [:a, :b, :c]
    end
  end

  describe "#check!" do
    it "should raise an error and Exit if the driver is not set" do
      $stderr.should_receive(:puts).with('driver MUST be specified in the config file')
      expect { subject.check! }.to raise_error SystemExit
    end
  end
end
<file_sep>/Guardfile
guard 'bundler' do
  watch('Gemfile')
end

%w(redis).each do |name|
  guard 'process', :name => "example-#{name}", :command => "ruby examples/#{name}.rb", :stop_signal => 'KILL' do
    watch('Gemfile')
    watch(/^lib.*\.rb/)
    watch(%r{^examples/#{Regexp.escape(name)}\.rb})
  end
end

guard 'rspec', :version => 2 do
  watch(%r{^spec/.+_spec\.rb$})
  watch(%r{^lib/(.+)\.rb$}) { |m| "spec/lib/#{m[1]}_spec.rb" }
  watch('spec/spec_helper.rb') { "spec/" }
end
7e63b25dcb99052952126eb51e60a0db94e26f61
[ "Ruby" ]
2
Ruby
drrb/em-ftpd
8d68032c8f06657c2d4557439e04a868efd58ddf
0b40cb790984cb2a884ee6b35e1cb47382201c56
refs/heads/master
<repo_name>Mandar-Shinde/Win32-Logger<file_sep>/CLog/CLog/main.cpp #include <stdio.h> #include <Windows.h> #include "PLog.h" #include <string.h> int main() { PLog* log = new PLog(); log->InitLogFile(L"D:\\", L"testfile.txt", 5000); log->PrintBanner(L" STARTED "); for (int i = 0; i < 50000;i++) log->Log(0, L" test stmt"); } <file_sep>/CLog/CLog/PLog.h #if !defined(_PLOG_H) #define _PLOG_H #include <Windows.h> #include <tchar.h> #include <fstream> #define PLOG_ERROR_LEVEL 4 #define PLOG_MSG_LEVEL 2 #define PLOG_INFO_LEVEL 1 #define PLOG_DEBUG_LEVEL 0 #define PLDEBUG PLOG_DEBUG_LEVEL #define PLERROR PLOG_ERROR_LEVEL #define PLINFO PLOG_INFO_LEVEL #define PLMSG PLOG_MSG_LEVEL using namespace std; #ifdef _UNICODE #define _tstring wstring #else #define _tstring string #endif class PLog { private: _tstring logFileLocation; _tstring logFileName; _tstring logFileFullPath; _tstring eventlogFileLocation; _tstring eventlogFileName; _tstring eventlogFileFullPath; int mLogLevel; CRITICAL_SECTION logfileCriticalSection ; //CRITICAL_SECTION logeventfileCriticalSection ; DWORD maxLogFileSize; public: PLog(); ~PLog(); void GetRecentLogEvents(_tstring&); void GetRecentLogEventsFilePath(_tstring & rdata); void GetRecentLogEventsFromFile(_tstring & filePath, _tstring & rdata); void InitLogFile(_tstring strLoc, _tstring filename, DWORD maxSize); _tstring GetLocation() { return logFileLocation; } void RotateFile(std::_tstring filepath, size_t filesize); void RotateEventFile(std::_tstring filepath, size_t filesize); void PrintBanner(TCHAR * banner); void Log(int loglevel,const TCHAR * lpszFormat, ...); #ifdef _UNICODE void Log(int loglevel,const CHAR * lpszFormat, ...); #endif #ifdef UNICODE void LogError(int errorValue,TCHAR *lpszFormat, ...); #endif void LogError(int errorValue,CHAR *lpszFormat, ...); void SetLogFileName(_tstring strFileName); bool IsTruncateFile(ofstream & logFile, _tstring & path); int GetLogLevelFromRegistry(); void Text(_tstring strText); void Text(_tstring strText,_tstring strTexttwo); void Text(_tstring strText,int data); void Text(int level,_tstring strText); #ifdef _UNICODE void Text(char * strText); void Text(char * strText,char * strTexttwo); void Text(char * strText,string strTexttwo); void Text(char * strText,int data); void Text(int level,char * strText); #endif //vijender: to migrate DCSA to new log #ifdef _UNICODE void Log_file(_tstring & strText){ Log(PLDEBUG,(TCHAR *)strText.c_str()); } void Log_file(const TCHAR * strText){ Log(PLDEBUG,strText); } #endif void Log_file(string & strText){ Log(PLDEBUG,(char *)strText.c_str()); } void Log_file(const char * strText){ Log(PLDEBUG,strText); } }; extern PLog gplog; #endif //_LOG_H <file_sep>/CLog/CLog/PLog.cpp #include "PLog.h" //#include "stdio.h" #include <strsafe.h> //#include "Registry.h" //#include "common.h" #include <ShlObj.h> //char enter = 13; /* #ifdef UNICODE #define _UNICODE #endif #ifdef _UNICODE #define UNICODE #endif */ #ifdef _UNICODE #define _tfpintf fwprintf #else #define _tfpintf fprintf #endif #define LOG_FILE_ENTRY_LEN 2*1024 #define DEFAULT_LOG_FILE_SIZE_THRESHOLD (10*1024*1024) static TCHAR szBuffer[2*LOG_FILE_ENTRY_LEN+1]={0}; #ifdef _UNICODE void PLog::Log(int loglevel,const CHAR * lpszFormat, ...) 
{ try{ CHAR localBuff[2*LOG_FILE_ENTRY_LEN+1]={0}; wchar_t widemes[2*LOG_FILE_ENTRY_LEN+1]={0}; va_list argp; va_start(argp, lpszFormat); vsprintf(localBuff, lpszFormat, argp); va_end(argp); mbstowcs(widemes,localBuff,2*LOG_FILE_ENTRY_LEN); Log(loglevel,widemes); }catch(...){} } void PLog::Text(char * strText) { wchar_t widemes[LOG_FILE_ENTRY_LEN+1]={0}; mbstowcs(widemes,strText,LOG_FILE_ENTRY_LEN); Log(PLDEBUG,widemes); } void PLog::Text(char * strText,string strTexttwo) { char totalstr[LOG_FILE_ENTRY_LEN+1]={0}; wchar_t widemes[LOG_FILE_ENTRY_LEN+1]={0}; sprintf(totalstr,"%s %s",strText,strTexttwo.c_str()); mbstowcs(widemes,totalstr,LOG_FILE_ENTRY_LEN); Log(PLDEBUG,widemes); } void PLog::Text(char * strText, char * strTexttwo) { char totalstr[LOG_FILE_ENTRY_LEN+1]={0}; wchar_t widemes[LOG_FILE_ENTRY_LEN+1]={0}; sprintf(totalstr,"%s %s",strText,strTexttwo); mbstowcs(widemes,totalstr,LOG_FILE_ENTRY_LEN); Log(PLDEBUG,widemes); } void PLog::Text(char * strText,int data) { char totalstr[LOG_FILE_ENTRY_LEN+1]={0}; wchar_t widemes[LOG_FILE_ENTRY_LEN+1]={0}; sprintf(totalstr,"%s %d",strText,data); mbstowcs(widemes,totalstr,LOG_FILE_ENTRY_LEN); Log(PLDEBUG,widemes); } void PLog::Text(int level,char * strText) { wchar_t widemes[LOG_FILE_ENTRY_LEN+1]={0}; mbstowcs(widemes,strText,LOG_FILE_ENTRY_LEN); Log(level,widemes); } #endif void PLog::Log(int loglevel,const TCHAR * lpszFormat, ...) { FILE* pLog; FILE* fpLogEvents; if(loglevel < mLogLevel) return; try{ SYSTEMTIME SystemTime; TCHAR SystemDateString[MAX_PATH]={0}; TCHAR SystemTimeString[MAX_PATH]={0}; GetLocalTime(&SystemTime); GetDateFormat( LOCALE_USER_DEFAULT, 0, &SystemTime, NULL, SystemDateString, sizeof(SystemDateString)/sizeof(TCHAR)); GetTimeFormat( LOCALE_USER_DEFAULT, 0, &SystemTime, NULL, SystemTimeString, sizeof(SystemTimeString)/sizeof(TCHAR)); EnterCriticalSection(&logfileCriticalSection); #ifdef _UNICODE pLog = _wfopen(logFileFullPath.c_str(),_T("a+")); #else pLog = fopen(logFileFullPath.c_str(),"a+"); #endif if(pLog != NULL) { _tfpintf(pLog, _T("%s "),SystemDateString); _tfpintf(pLog, _T("%s "),SystemTimeString); _tfpintf(pLog, _T("[%d] "), GetCurrentThreadId()); switch(loglevel) { case PLDEBUG: _tfpintf(pLog, _T("[DEBUG] ")); break; case PLINFO: _tfpintf(pLog, _T("[INFO ] ")); break; case PLERROR: _tfpintf(pLog, _T("[ERROR] ")); break; default: break; } va_list args; va_start(args, lpszFormat); _vstprintf(szBuffer, lpszFormat, args); _tfpintf(pLog, _T("%s\n"),szBuffer); size_t filesize = ftell(pLog); fclose(pLog); RotateFile(logFileFullPath.c_str(), filesize); } #ifdef _UNICODE fpLogEvents = _wfopen(eventlogFileFullPath.c_str(),_T("a+")); #else fpLogEvents = fopen(eventlogFileFullPath.c_str(),"a+"); #endif if(fpLogEvents != NULL) { switch(loglevel) { case PLINFO: #ifdef UNICODE _tfpintf(fpLogEvents, _T("%S "),SystemDateString); _tfpintf(fpLogEvents, _T("%S "),SystemTimeString); _tfpintf(fpLogEvents, _T("[%d] "), GetCurrentThreadId()); _tfpintf(fpLogEvents, _T("[INFO ] ")); _tfpintf(fpLogEvents, _T("%S\n"),szBuffer); #else _tfpintf(fpLogEvents, _T("%s "),SystemDateString); _tfpintf(fpLogEvents, _T("%s "),SystemTimeString); _tfpintf(fpLogEvents, _T("[%d] "), GetCurrentThreadId()); _tfpintf(fpLogEvents, _T("[INFO ] ")); _tfpintf(fpLogEvents, _T("%s\n"),szBuffer); #endif break; case PLERROR: #ifdef UNICODE _tfpintf(fpLogEvents, _T("%S "),SystemDateString); _tfpintf(fpLogEvents, _T("%S "),SystemTimeString); _tfpintf(fpLogEvents, _T("[%d] "), GetCurrentThreadId()); _tfpintf(fpLogEvents, _T("[ERROR] ")); _tfpintf(fpLogEvents, 
_T("%S\n"),szBuffer); #else _tfpintf(fpLogEvents, _T("%s "),SystemDateString); _tfpintf(fpLogEvents, _T("%s "),SystemTimeString); _tfpintf(fpLogEvents, _T("[%d] "), GetCurrentThreadId()); _tfpintf(fpLogEvents, _T("[ERROR] ")); _tfpintf(fpLogEvents, _T("%s\n"),szBuffer); #endif break; default: break; } size_t efilesize = ftell(fpLogEvents); fclose(fpLogEvents); RotateEventFile(eventlogFileFullPath.c_str(), efilesize); } LeaveCriticalSection(&logfileCriticalSection); }catch(...) { } } #ifdef UNICODE void PLog::LogError(int errorValue,CHAR * lpszFormat, ...) { CHAR localBuff[2*LOG_FILE_ENTRY_LEN+1]={0}; wchar_t widemes[2*LOG_FILE_ENTRY_LEN+1]={0}; va_list argp; va_start(argp, lpszFormat); vsprintf(localBuff, lpszFormat, argp); va_end(argp); mbstowcs(widemes,localBuff,2*LOG_FILE_ENTRY_LEN); LogError(errorValue,widemes); } #endif void PLog::GetRecentLogEvents(_tstring & rdata) { FILE *fpLogEvents; TCHAR streambuff[4097] ={0}; try{ #if UNICODE //EnterCriticalSection(&logfileCriticalSection); /*_wfopen_s(&fpLogEvents,eventlogFileFullPath.c_str(),_T("a+")); if( fpLogEvents == NULL ) { // cant open } else { while(feof( fpLogEvents ) == 0) { fgetws(streambuff,4096,fpLogEvents); wrdata.append(streambuff); memset(streambuff,0,4096); } } if(fpLogEvents!=NULL) fclose( fpLogEvents ); fpLogEvents=NULL; //delete file when datacopy over DeleteFile(eventlogFileFullPath.c_str());*/ EnterCriticalSection(&logfileCriticalSection); if(eventlogFileFullPath.empty()==true) { LeaveCriticalSection(&logfileCriticalSection); return ;//rdata; } string eventpath(eventlogFileFullPath.begin(),eventlogFileFullPath.end()); fopen_s(&fpLogEvents,eventpath.c_str(),"a+"); if( fpLogEvents == NULL ) { // cant open } else { while(feof( fpLogEvents ) == 0) { fgets((char *)streambuff,4096,fpLogEvents); rdata.append(streambuff); memset(streambuff,0,4096); } } if(fpLogEvents!=NULL) fclose( fpLogEvents ); fpLogEvents=NULL; //delete file when datacopy over DeleteFile(eventlogFileFullPath.c_str()); LeaveCriticalSection(&logfileCriticalSection); //return rdata; //string retdata(wrdata.begin(),wrdata.end()); //return retdata; #else EnterCriticalSection(&logfileCriticalSection); if(eventlogFileFullPath.empty()==true) { LeaveCriticalSection(&logfileCriticalSection); return;// rdata; } fopen_s(&fpLogEvents,eventlogFileFullPath.c_str(),"a+"); if( fpLogEvents == NULL ) { // cant open } else { while(feof( fpLogEvents ) == 0) { fgets(streambuff,4096,fpLogEvents); rdata.append(streambuff); memset(streambuff,0,4096); } } if(fpLogEvents!=NULL) fclose( fpLogEvents ); fpLogEvents=NULL; //delete file when datacopy over DeleteFile(eventlogFileFullPath.c_str()); LeaveCriticalSection(&logfileCriticalSection); //return rdata; #endif }catch(...) {} } void PLog::GetRecentLogEventsFilePath(_tstring & rdata) { TCHAR streambuff[4097] ={0}; try{ EnterCriticalSection(&logfileCriticalSection); if(eventlogFileFullPath.empty()==true) { LeaveCriticalSection(&logfileCriticalSection); return;// rdata; } _tstring newName = eventlogFileFullPath; newName.append(_T(".copy")); CopyFile(eventlogFileFullPath.c_str(),newName.c_str(),false); //delete file when datacopy over DeleteFile(eventlogFileFullPath.c_str()); rdata = newName; LeaveCriticalSection(&logfileCriticalSection); //return rdata; }catch(...) 
{} } void PLog::GetRecentLogEventsFromFile(_tstring & filePath, _tstring & rdata) { FILE *fpLogEvents; TCHAR streambuff[4097] ={0}; try{ #if UNICODE if(filePath.empty()==true) { return ; } _tstring eventpath(filePath.begin(),filePath.end()); fopen_s(&fpLogEvents,(char *)eventpath.c_str(),"a+"); if( fpLogEvents == NULL ) { // cant open return; } else { while(feof( fpLogEvents ) == 0) { fgets((char *)streambuff,4096,fpLogEvents); rdata.append(streambuff); memset(streambuff,0,4096); } } if(fpLogEvents!=NULL) fclose( fpLogEvents ); fpLogEvents=NULL; //delete file when datacopy over DeleteFile(filePath.c_str()); #else if(filePath.empty()==true) { return;// rdata; } fopen_s(&fpLogEvents,filePath.c_str(),"a+"); if( fpLogEvents == NULL ) { // cant open return; } else { while(feof( fpLogEvents ) == 0) { fgets(streambuff,4096,fpLogEvents); rdata.append(streambuff); memset(streambuff,0,4096); } } if(fpLogEvents!=NULL) fclose( fpLogEvents ); fpLogEvents=NULL; //delete file when datacopy over DeleteFile(filePath.c_str()); #endif }catch(...) {} } void PLog::LogError(int errorValue,TCHAR * lpszFormat, ...) { TCHAR logEntry[2*LOG_FILE_ENTRY_LEN]={0}; va_list args; //TCHAR szBuffer[2*1024]={0}; LPVOID lpMsgBuf; LPVOID lpDisplayBuf; FormatMessage( FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, NULL, errorValue, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), (LPTSTR) &lpMsgBuf, 0, NULL ); // Display the error message and exit the process lpDisplayBuf = (LPVOID)LocalAlloc(LMEM_ZEROINIT, (lstrlen((LPCTSTR)lpMsgBuf) + 40) * sizeof(TCHAR)); StringCchPrintf((LPTSTR)lpDisplayBuf, LocalSize(lpDisplayBuf) / sizeof(TCHAR), TEXT("Error %d: %s"), errorValue, lpMsgBuf); va_start(args, lpszFormat); _vstprintf(szBuffer, lpszFormat, args); StringCchCopy(logEntry,ARRAYSIZE(logEntry),szBuffer); StringCchCat(logEntry,ARRAYSIZE(logEntry), _T(" Error String: ")); StringCchCat(logEntry,ARRAYSIZE(logEntry), szBuffer); Log(PLERROR,logEntry); LocalFree(lpMsgBuf); LocalFree(lpDisplayBuf); } void PLog::PrintBanner(TCHAR * banner) { Log(PLOG_MSG_LEVEL,_T("///////////////////////////////////////////")); Log(PLOG_MSG_LEVEL,banner); Log(PLOG_MSG_LEVEL,_T("///////////////////////////////////////////")); } void PLog::RotateFile(_tstring OrgName, size_t filesize) { if(filesize >= maxLogFileSize) { _tstring newName; char Buff[50]; SYSTEMTIME curTime; GetLocalTime(&curTime); sprintf_s(Buff, sizeof(Buff)/sizeof(Buff[0]), "_%02hu%02hu%04hu-%02hu%02hu%02hu", curTime.wDay, curTime.wMonth, curTime.wYear, curTime.wHour, curTime.wMinute, curTime.wSecond); newName = OrgName.substr(0, OrgName.rfind(_T("."))); #ifdef _UNICODE TCHAR tempStr[50+1]={0}; mbstowcs(tempStr,Buff,50); newName+= _tstring(tempStr); #else newName+= _tstring(Buff); #endif newName+= OrgName.substr(OrgName.rfind(_T("."))); MoveFileEx(OrgName.c_str(), newName.c_str(), MOVEFILE_REPLACE_EXISTING); } } void PLog::RotateEventFile(_tstring OrgName, size_t filesize) { long mb= 1 *1024*1024; //1 MB if(filesize >= mb) { _tstring newName; char Buff[50]; SYSTEMTIME curTime; GetLocalTime(&curTime); sprintf_s(Buff, sizeof(Buff)/sizeof(Buff[0]), "_%02hu%02hu%04hu-%02hu%02hu%02hu", curTime.wDay, curTime.wMonth, curTime.wYear, curTime.wHour, curTime.wMinute, curTime.wSecond); newName = OrgName.substr(0, OrgName.rfind(_T("."))); #ifdef _UNICODE TCHAR tempStr[50+1]={0}; mbstowcs(tempStr,Buff,50); newName+= _tstring(tempStr); #else newName+= _tstring(Buff); #endif newName+= OrgName.substr(OrgName.rfind(_T("."))); MoveFileEx(OrgName.c_str(), newName.c_str(), 
MOVEFILE_REPLACE_EXISTING); //delete newNamefile } } void PLog::InitLogFile(_tstring strLoc, _tstring filename, DWORD maxSize) { try{ logFileLocation = strLoc; logFileName = filename; logFileFullPath = strLoc; if((logFileFullPath.c_str())[logFileFullPath.size()-1] != _T('\\')) logFileFullPath += _T("\\"); //ms:code to create path for event log eventlogFileLocation = strLoc; eventlogFileName = filename; eventlogFileName.append(_T(".event")); eventlogFileFullPath.append(logFileFullPath); eventlogFileFullPath.append(eventlogFileName); logFileFullPath += filename; //mLogLevel = GetLogLevelFromRegistry(); mLogLevel = 0; if(maxSize) maxLogFileSize = maxSize; else maxLogFileSize = DEFAULT_LOG_FILE_SIZE_THRESHOLD; #ifdef UNICODE Log(PLDEBUG,"Log file [%S\\%S] initialized with log level [%d]",strLoc.c_str(),filename.c_str(),mLogLevel); Log(PLDEBUG,"Log file [%S\\%S] log event path [%d]",eventlogFileLocation.c_str(),eventlogFileName.c_str(),mLogLevel); #else Log(PLDEBUG,"Log file [%s\\%s] initialized with log level [%d]",strLoc.c_str(),filename.c_str(),mLogLevel); Log(PLDEBUG,"Log file [%s\\%s] log event path [%d]",eventlogFileLocation.c_str(),eventlogFileName.c_str(),mLogLevel); #endif }catch(...) {} } PLog::PLog() { logFileLocation=_T(""); logFileName=_T(""); logFileFullPath=_T(""); eventlogFileLocation=_T(""); eventlogFileName=_T(""); eventlogFileFullPath=_T(""); mLogLevel = PLOG_DEBUG_LEVEL; maxLogFileSize=DEFAULT_LOG_FILE_SIZE_THRESHOLD; InitializeCriticalSection(&logfileCriticalSection); } PLog::~PLog() { DeleteCriticalSection(&logfileCriticalSection); //DeleteCriticalSection(&logeventfileCriticalSection); } #define LOG_LEVEL_ENTRY _T("loglevel") #define PDA_T _T("SOFTWARE\\PDA") //int PLog::GetLogLevelFromRegistry() //{ // DWORD dwStatus; // DWORD DataRet=0; // DWORD dwDataRetLen=sizeof(DataRet); // // dwStatus = GetKeyValue(HKEY_LOCAL_MACHINE,PDA_T,LOG_LEVEL_ENTRY,REG_DWORD,&DataRet,&dwDataRetLen); // // if(DataRet) // { // mLogLevel = DataRet; // } // else // { // mLogLevel = PLOG_DEBUG_LEVEL; // } // return mLogLevel; //} //later #if 0 void PLog::Text(int level,string strText) { char strCurrTime[255] = {0}; if(level) { EnterCriticalSection(&logfileCriticalSection); ofstream logFile; string path = logFileFullPath; logFile.open(path.c_str(), ios::out | ios::app ); if(logFile.is_open()) { GetSystemTimeString(strCurrTime); logFile<<endl<<"("<<strCurrTime<<")"<<"["<<GetCurrentThreadId()<<"]"<<" "<<"DEBUG"<<strText; size_t filesize = logFile.tellp(); logFile.close(); RotateFile(path, filesize); } LeaveCriticalSection(&logfileCriticalSection); } } void PLog::Text(string strText) { char strCurrTime[255] = {0}; EnterCriticalSection(&logfileCriticalSection); ofstream logFile; //cout<<"In create xml"; string path = logFileFullPath; logFile.open(path.c_str(), ios::out | ios::app ); if(logFile.is_open()) { GetSystemTimeString(strCurrTime); logFile<<endl<<"("<<strCurrTime<<")"<<"["<<GetCurrentThreadId()<<"]"<<" "<<strText; size_t filesize = logFile.tellp(); logFile.close(); RotateFile(path, filesize); } LeaveCriticalSection(&logfileCriticalSection); } void PLog::Text(string strText,int data) { char strCurrTime[255] = {0}; EnterCriticalSection(&logfileCriticalSection); ofstream logFile; string path = logFileFullPath; logFile.open(path.c_str(), ios::out | ios::app ); if(logFile.is_open()) { GetSystemTimeString(strCurrTime); logFile<<endl<<"("<<strCurrTime<<")"<<"["<<GetCurrentThreadId()<<"]"<<" "<<strText<<" "<<data; size_t filesize = logFile.tellp(); logFile.close(); RotateFile(path, filesize); } 
LeaveCriticalSection(&logfileCriticalSection); } void PLog::Text(string strText,string strTexttwo) { char strCurrTime[255] = {0}; EnterCriticalSection(&logfileCriticalSection); ofstream logFile; string path = logFileFullPath; logFile.open(path.c_str(), ios::out | ios::app ); if(logFile.is_open()) { GetSystemTimeString(strCurrTime); logFile<<endl<<"("<<strCurrTime<<")"<<"["<<GetCurrentThreadId()<<"]"<<" "<<strText<<" "<<strTexttwo; size_t filesize = logFile.tellp(); logFile.close(); RotateFile(path, filesize); } LeaveCriticalSection(&logfileCriticalSection); } #endif
9bc22fda86d3521482b227a14962c8523c5ad3e1
[ "C++" ]
3
C++
Mandar-Shinde/Win32-Logger
4b22b38d6a5721e3610bfaa5591d9122955fc3fb
9953e545e5809fb1b84e11c1f9359fb27c380195
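A minimal usage sketch for the PLog logger above -- a hypothetical caller, not a file from the repo. It assumes only the PLog.h interface shown: InitLogFile() sets the directory, file name and rotation threshold; Log() filters by level; PLERROR/PLINFO entries are mirrored into the ".event" side file that GetRecentLogEvents() later drains.

// sketch.cpp (hypothetical, for illustration only)
#include "PLog.h"

int main()
{
    PLog log;                                          // the repo's test harness uses new/delete instead
    log.InitLogFile(_T("D:\\"), _T("app.log"), 5000);  // path, file name, max bytes before RotateFile()
    TCHAR banner[] = _T(" SESSION START ");
    log.PrintBanner(banner);                           // PrintBanner takes a non-const TCHAR*
    log.Log(PLDEBUG, _T("debug detail: %d"), 42);      // dropped when the level is below mLogLevel
    log.Log(PLERROR, _T("something failed"));          // also appended to the .event side file
    return 0;
}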
refs/heads/master
<file_sep>package br.com.andrecouto.easymetro.Utils

import android.content.pm.PackageManager
import android.support.v4.content.ContextCompat
import android.support.v4.app.ActivityCompat
import android.app.Activity
import android.content.Context
import android.util.Log

object PermissionUtils {

    private val TAG = PermissionUtils::class.java.name

    fun validate(activity: Activity, requestCode: Int, permissions: ArrayList<String>): Boolean {
        val list = ArrayList<String>()
        for (permission in permissions) {
            // Check whether this permission is already granted
            val ok = ContextCompat.checkSelfPermission(activity, permission) == PackageManager.PERMISSION_GRANTED
            if (!ok) {
                list.add(permission)
            }
        }
        if (list.isEmpty()) {
            return true
        }
        // Permissions we still lack access to.
        val newPermissions = list.toTypedArray()
        // Request the missing permissions
        ActivityCompat.requestPermissions(activity, newPermissions, requestCode)
        return false
    }

    fun hasPermission(context: Context, permission: String): Boolean {
        val res = context.checkCallingOrSelfPermission(permission)
        Log.v(TAG, "permission: " + permission + " = \t\t" + if (res == PackageManager.PERMISSION_GRANTED) "GRANTED" else "DENIED")
        return res == PackageManager.PERMISSION_GRANTED
    }

    fun hasPermission(context: Context, permissions: ArrayList<String>): Boolean {
        var hasAllPermissions = true
        for (permission in permissions) {
            if (!hasPermission(context, permission)) {
                hasAllPermissions = false
            }
        }
        return hasAllPermissions
    }
}<file_sep>package br.com.andrecouto.subways2.model

import android.os.Parcel
import android.os.Parcelable
import com.google.android.gms.maps.model.CircleOptions
import com.google.gson.annotations.SerializedName

class Estacao() : Parcelable {
    var nome: String = ""
    var latitude = 0.0
    var longitude = 0.0
    var elevador = ""
    @SerializedName("caixa_sugestao")
    var caixaSugestao = ""
    @SerializedName("telefone_usuario")
    var telefoneUsuario = ""
    var sanitarios = ""
    @SerializedName("balcao_inf")
    var balcaoInf = ""
    @SerializedName("central_serv_atend_pessoal")
    var centralServAtendPessoal = ""
    @SerializedName("achados_perdidos")
    var achadosPerdidos = ""
    var wifi = ""
    var circle: CircleOptions = CircleOptions()

    constructor(parcel: Parcel) : this() {
        nome = parcel.readString()
        latitude = parcel.readDouble()
        longitude = parcel.readDouble()
        elevador = parcel.readString()
        caixaSugestao = parcel.readString()
        telefoneUsuario = parcel.readString()
        sanitarios = parcel.readString()
        balcaoInf = parcel.readString()
        centralServAtendPessoal = parcel.readString()
        achadosPerdidos = parcel.readString()
        wifi = parcel.readString()
    }

    companion object CREATOR : Parcelable.Creator<Estacao> {
        override fun createFromParcel(parcel: Parcel): Estacao {
            return Estacao(parcel)
        }

        override fun newArray(size: Int): Array<Estacao?> {
            return arrayOfNulls(size)
        }
    }

    // Write the fields in the same order the Parcel constructor reads them
    override fun writeToParcel(parcel: Parcel, flags: Int) {
        parcel.writeString(nome)
        parcel.writeDouble(latitude)
        parcel.writeDouble(longitude)
        parcel.writeString(elevador)
        parcel.writeString(caixaSugestao)
        parcel.writeString(telefoneUsuario)
        parcel.writeString(sanitarios)
        parcel.writeString(balcaoInf)
        parcel.writeString(centralServAtendPessoal)
        parcel.writeString(achadosPerdidos)
        parcel.writeString(wifi)
    }

    override fun describeContents(): Int {
        return 0
} }<file_sep>package br.com.andrecouto.subways2 import android.Manifest import android.support.v7.app.AppCompatActivity import android.os.Bundle import com.google.android.gms.common.ConnectionResult import com.google.android.gms.common.api.GoogleApiClient import com.google.android.gms.location.* import com.google.android.gms.maps.CameraUpdateFactory import com.google.android.gms.maps.GoogleMap import com.google.android.gms.maps.OnMapReadyCallback import com.google.android.gms.maps.SupportMapFragment import com.google.android.gms.location.LocationServices import android.os.Looper import android.content.pm.PackageManager import android.graphics.Color import android.location.Location import android.support.v4.content.ContextCompat import br.com.andrecouto.easymetro.Utils.AssetsUtils import br.com.andrecouto.subways2.model.LinhaResponse import com.google.android.gms.location.LocationRequest import com.google.android.gms.maps.model.* import android.widget.Toast import android.location.Location.distanceBetween import br.com.andrecouto.subways2.model.Estacao class MapsActivity : AppCompatActivity(), OnMapReadyCallback, GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener { private lateinit var mMap: GoogleMap private lateinit var mLocationCallback: LocationCallback private var mFusedLocationClient: FusedLocationProviderClient? = null private var mGoogleApiClient: GoogleApiClient? = null private lateinit var mLocationRequest: LocationRequest private lateinit var linha: LinhaResponse private var test = true override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) setContentView(R.layout.activity_maps) mFusedLocationClient = LocationServices.getFusedLocationProviderClient(this) mLocationCallback = object : LocationCallback() { override fun onLocationResult(locationResult: LocationResult?) { for (location in locationResult!!.locations) { var lat = location.latitude var log = location.longitude val sydney = LatLng(lat, log) mMap.addMarker(MarkerOptions().position(sydney).title("Marker in Sydney")) mMap.moveCamera(CameraUpdateFactory.newLatLng(sydney)) val distance = FloatArray(2) for(estacao in linha.linha.estacoes) { Location.distanceBetween(lat, log, estacao.circle.getCenter().latitude, estacao.circle.getCenter().longitude, distance) if (distance[0] < estacao.circle.getRadius()) { Toast.makeText(baseContext, estacao.nome, Toast.LENGTH_SHORT).show() } } } } } // Obtain the SupportMapFragment and get notified when the map is ready to be used. 
val mapFragment = supportFragmentManager
                .findFragmentById(R.id.map) as SupportMapFragment
        mapFragment.getMapAsync(this)
    }

    fun init() {
        linha = AssetsUtils.loadLinhaFromString(AssetsUtils.loadJSONFromAsset(this))
        for (estacao in linha.linha.estacoes) {
            val circle = (CircleOptions()
                    .center(LatLng(estacao.latitude, estacao.longitude))
                    .radius(70.0)
                    .strokeColor(Color.RED))
            estacao.circle = circle
            mMap.addCircle(circle)
        }
    }

    override fun onPause() {
        super.onPause()
        stopLocationUpdates()
    }

    private fun stopLocationUpdates() {
        if (mFusedLocationClient != null) {
            mFusedLocationClient!!.removeLocationUpdates(mLocationCallback)
        }
    }

    override fun onResume() {
        super.onResume()
        mGoogleApiClient.let {
            if (mFusedLocationClient != null) {
                requestLocationUpdates()
            } else {
                buildGoogleApiClient()
            }
        }
    }

    override fun onMapReady(googleMap: GoogleMap) {
        mMap = googleMap
        mMap.setMinZoomPreference(6.0f)
        mMap.setMaxZoomPreference(18.0f)
        mMap.setMapType(GoogleMap.MAP_TYPE_NORMAL)
        mMap.uiSettings.isZoomGesturesEnabled = true
        init()
    }

    override fun onConnected(p0: Bundle?) {
        requestLocationUpdates()
    }

    override fun onConnectionSuspended(p0: Int) {
        // No-op: updates are re-requested on the next onConnected callback
    }

    override fun onConnectionFailed(p0: ConnectionResult) {
        // No-op: a failed connection simply leaves the map without location updates
    }

    @Synchronized protected fun buildGoogleApiClient() {
        mGoogleApiClient = GoogleApiClient.Builder(this)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .addApi(LocationServices.API)
                .build()
        mGoogleApiClient!!.connect()
    }

    fun requestLocationUpdates() {
        mLocationRequest = LocationRequest()
        mLocationRequest.interval = 20000 // 20 second interval
        mLocationRequest.fastestInterval = 20000
        mLocationRequest.priority = LocationRequest.PRIORITY_BALANCED_POWER_ACCURACY
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED) {
            mFusedLocationClient!!.requestLocationUpdates(mLocationRequest, mLocationCallback, Looper.myLooper())
        }
    }
}
<file_sep>package br.com.andrecouto.subways2.model

import android.os.Parcel
import android.os.Parcelable

class Linha(numero: Int, cor: String, empresa: String, estacoes: ArrayList<Estacao>) : Parcelable {
    var numero = 0
    var cor = ""
    var empresa = ""
    var estacoes: ArrayList<Estacao> = ArrayList()

    // Read the fields in the same order writeToParcel writes them
    constructor(parcel: Parcel) : this(
            parcel.readInt(),
            parcel.readString(),
            parcel.readString(),
            parcel.createTypedArrayList(Estacao.CREATOR) ?: ArrayList())

    init {
        this.numero = numero
        this.cor = cor
        this.empresa = empresa
        this.estacoes = estacoes
    }

    override fun equals(other: Any?): Boolean {
        if (this === other) return true
        if (javaClass != other?.javaClass) return false
        other as Linha
        if (numero != other.numero) return false
        if (cor != other.cor) return false
        if (empresa != other.empresa) return false
        if (estacoes != other.estacoes) return false
        return true
    }

    override fun hashCode(): Int {
        var result = numero
        result = 31 * result + cor.hashCode()
        result = 31 * result + empresa.hashCode()
        result = 31 * result + estacoes.hashCode()
        return result
    }

    companion object CREATOR : Parcelable.Creator<Linha> {
        override fun createFromParcel(parcel: Parcel): Linha {
            return Linha(parcel)
        }

        override fun newArray(size: Int): Array<Linha?> {
            return arrayOfNulls(size)
        }
    }

    override fun writeToParcel(parcel: Parcel, flags: Int) {
        parcel.writeInt(numero)
        parcel.writeString(cor)
        parcel.writeString(empresa)
        parcel.writeTypedList(estacoes)
    }

    override fun describeContents(): Int {
        return 0
    }
}<file_sep>package br.com.andrecouto.subways2.model

import com.google.gson.annotations.SerializedName

class LinhaResponse {
    @SerializedName("linha")
    lateinit var linha: Linha
}<file_sep>package br.com.andrecouto.easymetro.Utils

import android.app.Activity
import br.com.andrecouto.easymetro.extensions.fileAsString
import br.com.andrecouto.subways2.model.Linha
import br.com.andrecouto.subways2.model.LinhaResponse
import com.google.gson.Gson

object AssetsUtils {

    fun loadJSONFromAsset(context: Activity): String? {
        val json = context.assets.fileAsString("json", "metro.json")
        return json
    }

    fun loadLinhaFromString(json: String?): LinhaResponse {
        val gson = Gson()
        return gson.fromJson(json, LinhaResponse::class.java)
    }
}
89b70c61bc745b2bb36bb2ea8c22f695f1ab5e7c
[ "Kotlin" ]
6
Kotlin
andrecouto87/SubwayS2
7d540cd3ce39bc22dfa57e29e58494989bb594c1
a479d523d1acfb69c6df8fb93ac1e0bf8084f11b
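The station-detection rule in MapsActivity above boils down to a point-in-circle test on geographic coordinates. A hedged restatement in C++ (illustration only; the app itself relies on Android's Location.distanceBetween, and the 70 m radius comes from init()):

// geo_fence_sketch.cpp (hypothetical)
#include <cmath>
#include <cstdio>

// Great-circle (haversine) distance in metres between two lat/lon pairs given in degrees.
static double DistanceMeters(double lat1, double lon1, double lat2, double lon2)
{
    const double kEarthRadiusM = 6371000.0;
    const double kDegToRad = 3.14159265358979323846 / 180.0;
    const double dLat = (lat2 - lat1) * kDegToRad;
    const double dLon = (lon2 - lon1) * kDegToRad;
    const double a = std::sin(dLat / 2) * std::sin(dLat / 2) +
                     std::cos(lat1 * kDegToRad) * std::cos(lat2 * kDegToRad) *
                     std::sin(dLon / 2) * std::sin(dLon / 2);
    return 2.0 * kEarthRadiusM * std::atan2(std::sqrt(a), std::sqrt(1.0 - a));
}

int main()
{
    // A fix is "inside" a station when its distance to the circle centre is under the radius.
    const double stationLat = -23.5505, stationLon = -46.6333; // made-up coordinates
    const double fixLat = -23.5509, fixLon = -46.6330;
    const double radiusM = 70.0;
    if (DistanceMeters(fixLat, fixLon, stationLat, stationLon) < radiusM)
        printf("inside station radius\n");
    return 0;
}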
refs/heads/master
<file_sep>version=0.0.1-SNAPSHOT groupId=com.project.interview artifactId=saleTaxes <file_sep>package saleTaxes; import java.io.IOException; import java.math.BigDecimal; import java.nio.file.Paths; import java.util.List; import saleTaxes.helper.ProductMapper; import saleTaxes.helper.CustomReader; import saleTaxes.helper.CustomWriter; import saleTaxes.helper.TaxCalculator; import saleTaxes.model.ProductOrder; import saleTaxes.model.Sale; public class MainApplication { public static void main(String[] args) { Sale sale = new Sale(); String inputFilePath = "config/Input 1.txt"; String outFilePath = "config/Output 1.txt"; ProductMapper mapper = new ProductMapper(); TaxCalculator calculator = new TaxCalculator(); List<String> words; try { words = CustomReader.readLines(Paths.get(inputFilePath)); processSale(sale, mapper, calculator, words); CustomWriter.writeOutput(Paths.get(outFilePath),sale); } catch (IOException e) { e.printStackTrace(); } } private static void processSale(Sale sale, ProductMapper mapper, TaxCalculator calculator, List<String> words) { words.forEach(word ->{ ProductOrder order = mapper.getProductFromString(word); BigDecimal taxes = calculator.calculateTax(order); order.setPrice(calculator.addRoundedBigDecimal(order.getPrice(), taxes)); sale.setTotalTaxes(calculator.addRoundedBigDecimal(sale.getTotalTaxes(), taxes)); sale.setTotalSale(calculator.addRoundedBigDecimal(sale.getTotalSale(), order.getPrice())); sale.getOrders().add(order); }); } } <file_sep>package saleTaxes; import static org.hamcrest.CoreMatchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import java.math.BigDecimal; import java.math.BigInteger; import java.util.regex.PatternSyntaxException; import org.junit.jupiter.api.Test; import saleTaxes.exception.InvalidArgumentException; import saleTaxes.helper.ProductMapper; import saleTaxes.model.ProductOrder; class ProductMapperTest { ProductMapper mapper; @Test void getProducFromStringTestSuccess() { mapper = new ProductMapper(); ProductOrder order =mapper.getProductFromString("1 book at 12.49"); assertEquals("Not expected value",order.getAmount(), new BigInteger("1")); assertEquals("Not expected value",order.getName().trim(), "book"); assertEquals("Not expected value",order.getPrice(), new BigDecimal("12.49")); } @Test void getProducFromStringtestFailAmountSmallerThanOne() { mapper = new ProductMapper(); try { mapper.getProductFromString("0 book at 12.49"); } catch(InvalidArgumentException ex) { assertThat(ex.getMessage(),containsString( "Amount needs to be bigger than zero")); } } @Test void getProducFromStringtestFailNotFollowThePathern() { mapper = new ProductMapper(); try { mapper.getProductFromString("1 book 12.49"); } catch(PatternSyntaxException ex) { assertThat(ex.getMessage(),containsString( "Function not mach the expression")); } } @Test void getProducFromStringtestFailArgumentNotPresent() { mapper = new ProductMapper(); try { mapper.getProductFromString(null); } catch(IllegalArgumentException ex) { assertThat(ex.getMessage(),containsString( "Input must not be null")); } } } <file_sep>package saleTaxes.exception; public class InvalidArgumentException extends IllegalArgumentException{ public InvalidArgumentException(String message) { super(message); } } <file_sep>package saleTaxes; import static org.junit.jupiter.api.Assertions.*; import java.math.BigDecimal; import java.math.BigInteger; import org.junit.jupiter.api.Test; import saleTaxes.helper.TaxCalculator; import saleTaxes.model.ProductOrder; 
class TaxCalculatorTest { TaxCalculator calculator; @Test void addRoundedBigDecimalTest() { calculator = new TaxCalculator(); assertEquals(new BigDecimal("1.08"), calculator.addRoundedBigDecimal(new BigDecimal("1.03"), new BigDecimal("0.05"))); assertEquals(new BigDecimal("1.07"), calculator.addRoundedBigDecimal(new BigDecimal("1.051"), new BigDecimal("0.02"))); assertEquals(new BigDecimal("1.05"), calculator.addRoundedBigDecimal(new BigDecimal("1.05"), new BigDecimal("0.00"))); assertEquals(new BigDecimal("0.00"), calculator.addRoundedBigDecimal(new BigDecimal("0"), new BigDecimal("0.00"))); } @Test void calculateTaxTest() { ProductOrder order = new ProductOrder(); calculator = new TaxCalculator(); order.setAmount(BigInteger.ONE); order.setName("book"); order.setPrice(new BigDecimal("10")); assertEquals(BigDecimal.ZERO, calculator.calculateTax(order)); order.setName("a product"); assertEquals(new BigDecimal("1.0"), calculator.calculateTax(order)); order.setName("a imported product"); assertEquals(new BigDecimal("1.50"), calculator.calculateTax(order)); } }
dfa03f5b025bd35ec4f7b1afbe096a82a4481a4e
[ "Java", "INI" ]
5
INI
mateupc/SaleTaxes
e7ed1ab17a10839941b535ecbb21992120768800
a3f5514878ea75294be04ebf8de0cd647f0e1482
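The TaxCalculator source itself is not part of this snapshot, only its tests, so the rounding behaviour has to be read off the assertions: results carry two decimals, books are exempt, a generic product pays 10%, and an imported one a further 5%. The classic sales-taxes kata this mirrors rounds the tax up to the nearest 0.05; a hedged C++ restatement of that conventional rule on integer cents (illustration, not the repo's code):

// nickel_rounding_sketch.cpp (hypothetical)
#include <cstdint>
#include <cassert>

// Round a tax amount, expressed in cents, up to the nearest 5 cents.
static int64_t RoundUpToNickel(int64_t cents)
{
    return ((cents + 4) / 5) * 5;
}

int main()
{
    assert(RoundUpToNickel(100) == 100); // 10.00 at 10% -> 1.00, already a multiple of 0.05
    assert(RoundUpToNickel(101) == 105); // 1.01 rounds up to 1.05
    assert(RoundUpToNickel(149) == 150);
    return 0;
}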
refs/heads/master
<repo_name>wormggmm/SpaceWarServer<file_sep>/common/src/header/MapDef.h #ifndef _MAP_DEF_H_327 #define _MAP_DEF_H_327 #include "CommonStruct.h" #define MAP_NODE_MAX_COUNT 50 #define MAP_NODE_MIN_COUNT 30 #define MAP_X_MAX 1024 #define MAP_Y_MAX 768 #define MAP_Z_MAX 100 struct MapNodeInfo { WGPoint3 m_position; }; #endif<file_sep>/netWork/src/common/common.h #ifndef _COMMON_H_ #define _COMMON_H_ #include <stdio.h> #ifndef interface #define interface struct #endif #define KEEP_ALIVE_TIME 10 #endif<file_sep>/share/src/Header/tools/macro.h #ifndef _MACRO_H_ #define _MACRO_H_ #include "header_debug.h" #define RETURN(x) {if( x > 0 ){assert(false);} return x;} #endif <file_sep>/serverCore/src/header/Core.h #ifndef _CORE_H_327_ #define _CORE_H_327_ #include "itcpserver.h" extern int startCore(); extern ITCPServer* g_Server; #endif <file_sep>/netWork/src/itcpclient.cpp #include "common/itcpclient.h" #include "tcpclient.h" ITCPClient* CreateTCPClient( ) { ITCPClient* pClient = ( ITCPClient*)(new tcpclient); return pClient; } bool CloseTCPClient( ITCPClient* pClient ) { if ( !pClient ) return false; pClient->StopClient(); return true; } <file_sep>/share/src/tools.cpp ///////////////////////////////////////////////////////////////////////////// // FileName : minister.cpp // Creator : zuolizhi // Date : 2006-5-30 9:54:00 // Comment : Function Routine Define // Changes : ///////////////////////////////////////////////////////////////////////////// #include "tools.h" #include "header_debug.h" #include "md5.h" #include "md5c.h" ///////////////////////////////////////////////////////////////////////////// // // Global variables and Macro and Structure Definitions // ///////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////// // // Function Definitions // ///////////////////////////////////////////////////////////////////////////// #ifndef WIN32 void Sleep( unsigned long ulTime = 10 ) { struct timeval sleeptime; sleeptime.tv_sec = 0; sleeptime.tv_usec=ulTime*1000; select(0,0,0,0,&sleeptime); } #endif unsigned long UNIX_TIME_STAMP; void TimerStart() { static timer _timer; if( !_timer.IsRun() ) _timer.start(); } const char *GetCurDateTimeWithString(char *pStringBuf) { time_t tCurDateTime; tm *ptagCurDataTime; char szTimeStringBuf[MAXSIZE_TIMESTRING] = {0}; char *pWriteStringBuf = pStringBuf; if(NULL == pWriteStringBuf) { pWriteStringBuf = szTimeStringBuf; } time(&tCurDateTime); ptagCurDataTime = localtime(&tCurDateTime); sprintf(pWriteStringBuf, "%d-%02d-%02d %02d:%02d:%02d", ptagCurDataTime->tm_year+1900, ptagCurDataTime->tm_mon+1, ptagCurDataTime->tm_mday, ptagCurDataTime->tm_hour, ptagCurDataTime->tm_min, ptagCurDataTime->tm_sec); return pWriteStringBuf; } const char *GetCurYearMonthWithString(char *pStringBuf) { time_t tCurDateTime; tm *ptagCurDataTime; char szTimeStringBuf[MAXSIZE_TIMESTRING] = {0}; char *pWriteStringBuf = pStringBuf; if(NULL == pWriteStringBuf) { pWriteStringBuf = szTimeStringBuf; } time(&tCurDateTime); ptagCurDataTime = localtime(&tCurDateTime); sprintf(pWriteStringBuf, "%d%02d", ptagCurDataTime->tm_year+1900, ptagCurDataTime->tm_mon+1); return pWriteStringBuf; } RingBuffer::RingBuffer( ) { Clear( ); } RingBuffer::~RingBuffer( ) { } void RingBuffer::Clear( ) { m_startptr = NULL; m_leftptr = NULL; m_rightptr = NULL; m_tmpptr = NULL; m_nextleft = NULL; m_uBufLen = 0; m_uMaxPack = 0; } int RingBuffer::Init( char* StartPtr, unsigned uBufLen,unsigned uMaxPack ) { if( uBufLen <= ( 2 * uMaxPack + 
RB_SPACE ) )
        return INVALID_VALUE;
    if( StartPtr == NULL )
        return INVALID_VALUE;

    Clear( );
    memset( StartPtr, 0, uBufLen );

    m_startptr = StartPtr;
    m_uMaxPack = uMaxPack;
    m_uBufLen = uBufLen - ( m_uMaxPack + RB_SPACE ); // leave an 8-byte gap

    m_leftptr = m_startptr;
    m_rightptr = m_startptr;
    m_tmpptr = m_startptr + ( m_uBufLen + RB_SPACE );
    m_nextleft = m_startptr;
    return SUCCESS_VALUE;
}

int RingBuffer::Reinit( )
{
    if( m_startptr == NULL )
        return INVALID_VALUE;
    m_leftptr = m_startptr;
    m_rightptr = m_startptr;
    m_nextleft = m_startptr;
    return SUCCESS_VALUE;
}

/*
 * Write data into the buffer; returns the advanced right pointer
 */
char* RingBuffer::PutData( char* leftptr, char* rightptr, char* pData, unsigned datasize )
{
    // free space to the right of the write position
    unsigned uRightMargin = m_uBufLen - ( rightptr - m_startptr );
    if( rightptr >= leftptr && datasize >= uRightMargin )
    {
        /*
         * Not wrapped yet, but the right side is too small: about to wrap
         */
        memcpy( rightptr, pData, uRightMargin );
        rightptr = m_startptr;
        memcpy( rightptr, pData + uRightMargin, datasize - uRightMargin );
        rightptr += datasize - uRightMargin;
        if( rightptr > m_startptr + m_uBufLen )
            return NULL;
        return rightptr;
    }
    // already wrapped, or not wrapped and the right side has enough room
    memcpy( rightptr, pData, datasize );
    rightptr += datasize;
    if( rightptr > m_startptr + m_uBufLen )
        return NULL;
    return rightptr;
}

unsigned RingBuffer::GetDataLen( char* leftptr, char* rightptr )
{
    // space to the right of the left pointer
    unsigned uRightMargin = m_uBufLen - ( leftptr - m_startptr );
    if( rightptr < leftptr && sizeof(unsigned) > uRightMargin )
    {
        /*
         * Wrapped, and the length field itself straddles the wrap point
         */
        unsigned uDataLen = 0;
        char* ptr = (char*)&uDataLen;
        memcpy( ptr, leftptr, uRightMargin );
        memcpy( ptr + uRightMargin, m_startptr, sizeof(unsigned) - uRightMargin );
        return uDataLen;
    }
    return *((unsigned*)leftptr);
}

int RingBuffer::SndPack( const void* pData, unsigned datasize )
{
    if( pData == NULL || datasize == 0 )
        return INVALID_VALUE;
    if( m_startptr == NULL )
        return INVALID_VALUE;
    if( datasize > m_uMaxPack )
        return INVALID_VALUE;

    char* leftptr = m_leftptr;
    char* rightptr = m_rightptr;

    /*
     * Work out whether the buffer has wrapped
     */
    ////////////////////////////////////////////////////////
    unsigned uContentSize = 0;
    unsigned uEmptySize = 0;
    if( rightptr >= leftptr ) // not wrapped
        uContentSize = ( rightptr - leftptr );
    else
        uContentSize = m_uBufLen - ( leftptr - rightptr );
    if( uContentSize > m_uBufLen - 1 )
        return INVALID_VALUE;
    uEmptySize = m_uBufLen - uContentSize - 1;
    ////////////////////////////////////////////////////////

    /*
     * out of space
     */
    if( datasize + sizeof(unsigned) > uEmptySize )
        return INVALID_VALUE;

    // write the length prefix
    rightptr = PutData( leftptr, rightptr, (char*)&datasize, sizeof(unsigned) );
    if( rightptr == NULL )
        return INVALID_VALUE;
    // write the payload
    rightptr = PutData( leftptr, rightptr, (char*)pData, datasize );
    if( rightptr == NULL )
        return INVALID_VALUE;
    // publish the new right pointer
    m_rightptr = rightptr;
    return SUCCESS_VALUE;
}

//===================================================================================
// Note for this overload: the packet written is datasize1 bytes from pData1
// immediately followed by datasize2 bytes from pData2.
//
int RingBuffer::SndPack( const void *pData1, const void *pData2, unsigned datasize1, unsigned datasize2 )
{
    if( pData1 == NULL || pData2 == NULL || 0 == datasize1 || 0 == datasize2 )
        return INVALID_VALUE;

    unsigned uiTotalSize = datasize1 + datasize2;
    if( m_startptr == NULL )
        return INVALID_VALUE;
    if( uiTotalSize > m_uMaxPack )
        return INVALID_VALUE;

    char* leftptr = m_leftptr;
    char* rightptr = m_rightptr;

    /*
     * Work out whether the buffer has wrapped
     */
    ////////////////////////////////////////////////////////
    unsigned uContentSize = 0;
    unsigned uEmptySize = 0;
    if( rightptr >= leftptr ) // not wrapped
        uContentSize = ( rightptr - leftptr );
    else
        uContentSize = m_uBufLen - ( leftptr - rightptr );
    if( uContentSize > m_uBufLen - 1 )
        return INVALID_VALUE;
    uEmptySize = m_uBufLen - uContentSize - 1;
    ////////////////////////////////////////////////////////

    /*
     * out of space
     */
    if( uiTotalSize + sizeof(unsigned) > uEmptySize )
        return INVALID_VALUE;

    // write the length prefix
    rightptr = PutData( leftptr, rightptr, (char*)&uiTotalSize, sizeof(unsigned) );
    if( rightptr == NULL )
        return INVALID_VALUE;
    // write payload 1
    rightptr = PutData( leftptr, rightptr, (char*)pData1, datasize1 );
    if( rightptr == NULL )
        return INVALID_VALUE;
    // write payload 2
    rightptr = PutData( leftptr, rightptr, (char*)pData2, datasize2 );
    if( rightptr == NULL )
        return INVALID_VALUE;
    // publish the new right pointer
    m_rightptr = rightptr;
    return SUCCESS_VALUE;
}

const void* RingBuffer::RcvPack( unsigned& datasize )
{
    datasize = 0;
    if( m_startptr == NULL )
        return NULL;

    // release the packet handed out by the previous call
    m_leftptr = m_nextleft;

    char* leftptr = m_leftptr;
    char* rightptr = m_rightptr;

    /*
     * Work out whether the buffer has wrapped
     */
    ////////////////////////////////////////////////////////
    unsigned uContentSize = 0;
    if( rightptr >= leftptr ) // not wrapped
        uContentSize = ( rightptr - leftptr );
    else
        uContentSize = m_uBufLen - ( leftptr - rightptr );
    if( uContentSize > m_uBufLen - 1 )
        return NULL;
    ////////////////////////////////////////////////////////

    if( uContentSize <= sizeof(unsigned) )
        return NULL;

    datasize = GetDataLen( leftptr, rightptr );
    if( uContentSize < datasize || datasize > m_uMaxPack )
    {
        /*
         * should never happen
         */
        datasize = 0;
        return NULL;
    }

    /*
     * total length to consume: length prefix plus payload
     */
    unsigned uReadLen = datasize + sizeof(unsigned);
    // space to the right of the left pointer
    unsigned uRightMargin = m_uBufLen - ( leftptr - m_startptr );
    if( rightptr < leftptr && uReadLen >= uRightMargin )
    {
        /*
         * Wrapped, and the packet straddles the wrap point:
         * reassemble it into the scratch area
         */
        memcpy( m_tmpptr, leftptr, uRightMargin );
        memcpy( m_tmpptr + uRightMargin, m_startptr, uReadLen - uRightMargin );
        if( uReadLen - uRightMargin > m_uBufLen )
        {
            datasize = 0;
            return NULL;
        }
        // advance the read pointer
        m_nextleft = m_startptr + ( uReadLen - uRightMargin );
        return ( m_tmpptr + sizeof(unsigned) );
    }
    else
    {
        // advance the read pointer
        if( m_nextleft + uReadLen > m_startptr + m_uBufLen )
        {
            datasize = 0;
            return NULL;
        }
        m_nextleft += uReadLen;
        return ( leftptr + sizeof(unsigned) );
    }
}

//===========================================================
//===========================================================
int timer::stop()
{
    Thread::stop();
    return 0;
}

void* timer::action()
{
    while( m_Run )
    {
        UNIX_TIME_STAMP = (unsigned long)time( NULL );
        Sleep(100);
    }
    return NULL;
}

int timer::start()
{
    m_Run = true;
    Thread::start();
    return 0;
}

timer::timer():Thread(1)
{
    m_Run = false;
}

timer::~timer()
{
    m_Run = false;
}

bool timer::IsRun()
{
    return m_Run;
}

bool string_md5( IN char* inputString, OUT char* md5String )
{
    if ( !inputString || !md5String)
        return false;

    md5_state_t state;
    md5_byte_t digest[16];

    md5_init(&state);
    md5_append(&state, (const md5_byte_t *)inputString, strlen(inputString));
    md5_finish(&state, digest);

    md5String[0] = '\0';
    for ( int i = 0; i < 16; i++)
    {
        char temp[5] = {0};
        sprintf(temp, "%02X ", digest[i]);
        strcat( md5String, temp );
    }
    return true;
}

bool file_md5( IN char* fileName, OUT char* md5String )
{
    if ( !fileName || !md5String)
        return false;

    md5String[0] = '\0';

    md5_state_t state;
    unsigned char ReadBuffer[65536];
    size_t ReadBytes = 0;
    md5_byte_t digest[16];

    md5_init(&state);
    FILE *fp = fopen(fileName, "rb");
    if (!fp)
        return false;
    while (true)
    {
        ReadBytes = fread(ReadBuffer, 1, 65536, fp);
        if (ReadBytes > 0)
            md5_append(&state, (const md5_byte_t *)ReadBuffer, ReadBytes);
        if (feof(fp))
        {
            md5_finish(&state, digest);
            break;
        }
    }
    fclose(fp);
    fp = NULL;

    for ( int i = 0; i < 16; i++)
    {
        char temp[5] = {0};
        sprintf(temp, "%02X ", digest[i]);
        strcat( md5String, temp );
    }
    return true;
}
<file_sep>/share/src/Header/tools/tools.h
/////////////////////////////////////////////////////////////////////////////
// FileName : tools.h
// Creator  : zuolizhi
// Date     : 2006-5-30 9:54:00
// Comment  : Function Routine Define
// Changes  :
/////////////////////////////////////////////////////////////////////////////
#ifndef _TOOLS_H_
#define _TOOLS_H_

#include <time.h>
#include "def.h"
#include "utilities.h"

//////////////////////////////////////////////////////////////////////////////
#define MY_ALIGN(A,L)   (((A) + (L) - 1) & ~((L) - 1))
#define ALIGN_SIZE(A)   MY_ALIGN((A),sizeof(double))
/* Size to make adressable obj. */
#define ALIGN_PTR(A, t) ((t*) MY_ALIGN((A),sizeof(t)))
/* Offset of field f in structure t */
#define OFFSET(t, f)    ((size_t)(char *)&((t *)0)->f)

//time or timer
#define MAXSIZE_TIMESTRING 64
extern const char *GetCurDateTimeWithString(char *pstringbuf = NULL );
extern const char *GetCurYearMonthWithString(char *pstringbuf = NULL );
extern void TimerStart();
extern unsigned long UNIX_TIME_STAMP;

extern bool string_md5( IN char* inputString, OUT char* md5String );
extern bool file_md5( IN char* fileName, OUT char* md5String );

#ifndef WIN32
extern void Sleep( unsigned long ulTime);
#endif

class timer: public Thread
{
private:
    bool m_Run;
public:
    timer();
    ~timer();
public:
    bool IsRun();
    int start();
    void* action();
    int stop();
};

//inline void tyield( unsigned long ulTime = 10 )
//{
//  #ifdef WIN32
//      Sleep( ulTime );
//  #else
//      struct timeval sleeptime;
//      sleeptime.tv_sec = 0;
//      sleeptime.tv_usec=ulTime*1000;
//      select(0,0,0,0,&sleeptime);
//      //usleep(ulTime*1000);//geoffyan
//  #endif
//}
//

class RingBuffer
{
public:
    RingBuffer( );
    ~RingBuffer( );

    int SndPack( const void* pData, unsigned datasize );
    int SndPack( const void *pData1, const void *pData2, unsigned datasize1, unsigned datasize2 );
    const void* RcvPack( unsigned& datasize );

    int Init( char* StartPtr, unsigned uBufLen,unsigned uMaxPack );
    int Reinit( );

private:
    /*
     * Write data into the buffer; returns the advanced right pointer
     */
    char* PutData( char* leftptr, char* rightptr, char* pData, unsigned datasize );
    unsigned GetDataLen( char* leftptr, char* rightptr );
    void Clear( );

private:
    char* m_startptr;
    char* m_leftptr;
    char* m_rightptr;
    char* m_tmpptr;
    char* m_nextleft;

    /*
     * state
     */
    unsigned m_uBufLen;
    unsigned m_uMaxPack;
};

#endif
<file_sep>/share/src/header_debug.cpp
#include "header_debug.h"

#ifdef _WIN32
#else
#include <stdarg.h>
#include <errno.h>
#include <string.h>
#endif

void _OutPutStringToDebugWin(const char * strOutputString,...)
{
    char strBuffer[4096]={0};
    va_list args;
    va_start(args,strOutputString);
#ifdef _WIN32
    _vsnprintf(strBuffer,sizeof(strBuffer)-1,strOutputString,args);
#else
    vsnprintf(strBuffer,sizeof(strBuffer)-1,strOutputString,args);
#endif
    va_end(args);
#ifdef _WIN32
    OutputDebugStringA(strBuffer);
#else
    printf( "%s", strBuffer ); // avoid treating the message itself as a format string
#endif
}

void _OutPutLastErrorMsgToDebugWin( )
{
#ifdef _WIN32
    _OutPutStringToDebugWin( "LastErrorno: %d <Error Info: %s>\n", GetLastError(), strerror( GetLastError() ) );
#else
    _OutPutStringToDebugWin( "LastErrorno: %d <Error Info: %s>\n", errno, strerror(errno) );
#endif
}

void _OutPutDebugStringToCMD( const char * strOutputString, ...
) { char strBuffer[4096]={0}; va_list args; va_start(args,strOutputString); #ifdef _WIN32 _vsnprintf(strBuffer,sizeof(strBuffer)-1,strOutputString,args); #else vsnprintf(strBuffer,sizeof(strBuffer)-1,strOutputString,args); #endif va_end(args); printf( strBuffer ); } void _OutPutLastErrorMsgToCMD( ) { #ifdef _WIN32 _OutPutDebugStringToCMD( "LastErrorno: %d <Error Info: %s>\n", GetLastError(), strerror( GetLastError() ) ); #else _OutPutDebugStringToCMD( "LastErrorno: %d <Error Info: %s>\n", errno, strerror(errno) ); #endif } void _OutPutStringToCMD(const char * strOutputString,...) { char strBuffer[4096]={0}; va_list args; va_start(args,strOutputString); #ifdef _WIN32 _vsnprintf(strBuffer,sizeof(strBuffer)-1,strOutputString,args); #else vsnprintf(strBuffer,sizeof(strBuffer)-1,strOutputString,args); #endif va_end(args); printf( strBuffer ); } <file_sep>/serverCore/src/WGNetProtocolProcess.cpp #include "WGNetProtocolProcess.h" #include "Map/WGMapManager.h" WGNetProtocolProcess g_NetProtocolProcess; WGNetProtocolProcess::WGNetProtocolProcess() { #ifdef WIN32 ZeroMemory(ProcessFunc, sizeof(ProcessFunc)); #else memset(ProcessFunc, 0, sizeof(ProcessFunc)); #endif ProcessFunc[c2s_requestMapInfo] = &WGNetProtocolProcess::c2sRequestMapInfo; } WGNetProtocolProcess::~WGNetProtocolProcess() { } int WGNetProtocolProcess::ProcessNetProtocol( int clientIdx, PROTOCOL_HEADER*protocol, int dataSize ) { if (protocol->Protocol <= c2s_begin || protocol->Protocol >= c2s_end) return INVALID_VALUE; if (dataSize != protocol->Length) return INVALID_VALUE; if (ProcessFunc[protocol->Protocol]) { (this->*ProcessFunc[protocol->Protocol])(clientIdx, protocol, dataSize); return SUCCESS_VALUE; } return INVALID_VALUE; } //协议处理函数开始 //s2c_requestMapInfo void WGNetProtocolProcess::c2sRequestMapInfo( int clientIdx, PROTOCOL_HEADER *protocal, int dataSize ) { WGMapManager::Singleton()->c2sRequestMapInfo(clientIdx, protocal, dataSize); } <file_sep>/serverCore/src/common/WGMapNodeBase.h #ifndef _WG_MAP_NODE_BASE_H_327 #define _WG_MAP_NODE_BASE_H_327 #include "CommonStruct.h" #include "ProtocolMap.h" #include "MapDef.h" class WGMapNodeBase { public: WGMapNodeBase(); ~WGMapNodeBase(); public: void createNode(MapNodeInfo* info); private: MapNodeInfo *m_mapInfo; }; #endif<file_sep>/netWork/src/tcpserver.h #ifndef _TCP_SERVER_ #define _TCP_SERVER_ #ifndef _DEBUG #ifndef _NET_API_ #define _NET_API_ _declspec(dllexport) #endif #else #define _NET_API_ #endif #include <list> #include "itcpserver.h" typedef std::list<int> CONNECTINDEX; class tcpserver : public ITCPServer { public: tcpserver(); ~tcpserver(); public: int StartServer( PORT listenPort, int maxCon = MIN_CONN ); int StopServer( ); int ActionServer(); int OpenConnect( SOCKET sock ); int CloseConnect( int connectIdx ); int PushDataToSendBuff( int connectIdx, void* pBuf, int dataSize ); const void* GetDataFromRecvBuff( unsigned int& dataSize ); int PushDataToRecvBuff( void* pBuf, int dataSize ); const void* GetDataFromSendBuff( unsigned int& dataSize ); int CloseConnectCallBack(int connectIdx); public: int getMaxConn() { return m_MaxConn; } TCPConnection* getConn( int connIdx ) { if ( connIdx < 0 || connIdx >= m_MaxConn ) return NULL; return &m_Conn[connIdx]; } CONNECTINDEX* getConnectedIdxList() { return &m_ConnectedConnIdx; } private: int popFreeConnectIdx(); int pushFreeConnectIdx( int freeIdx ); private: int Init( ); int Release( ); int ReleaseConnectInfo( int connectIdx ); private: tcpListenThread m_ListenThread; tcpSendThread m_SendThread; tcpRecvThread m_RecvThread; char 
m_SendSpace[MAX_SENDBUFF];
    RingBuffer m_SendBuff;
    char m_RecvSpace[MAX_RECVBUFF];
    RingBuffer m_RecvBuff;
    TCPConnection* m_Conn;
    int m_MaxConn;
    CONNECTINDEX m_FreeConnIdx;
    CONNECTINDEX m_ConnectedConnIdx;
};
#endif<file_sep>/common/src/header/ProtocolMap.h
#ifndef _PROTOCOL_MAP_H_327
#define _PROTOCOL_MAP_H_327

#include "ProtocolStruct.h"

#pragma pack(push, 1)
struct S2C_SendMapInfo : PROTOCOL_HEADER
{
    UINT mapNodeCount;
    char mapInfo[1];
};
#pragma pack(pop)
#endif<file_sep>/SpaceWarServer/src/makefile
WORKSPACE_DIR=../../
INCLUDE= -I./common \
 -I. \
 -I./header \
 -I./Map \
 -I$(WORKSPACE_DIR)common/src/header \
 -I$(WORKSPACE_DIR)netWork/src/common \
 -I$(WORKSPACE_DIR)netWork/src \
 -I$(WORKSPACE_DIR)serverCore/src \
 -I$(WORKSPACE_DIR)serverCore/src/common \
 -I$(WORKSPACE_DIR)serverCore/src/header \
 -I$(WORKSPACE_DIR)share/src \
 -I$(WORKSPACE_DIR)share/src/Header \
 -I$(WORKSPACE_DIR)share/src/Header/tools
OBJECTS=main.o
CPP_SRCS=main.cpp
TARGET=sbServer
cc=g++
LIB_DIR=-L $(WORKSPACE_DIR)share/src \
 -L $(WORKSPACE_DIR)common/src \
 -L $(WORKSPACE_DIR)serverCore/src \
 -L $(WORKSPACE_DIR)netWork/src \

PROJECTS_DIR=$(WORKSPACE_DIR)share/src \
 $(WORKSPACE_DIR)common/src \
 $(WORKSPACE_DIR)serverCore/src \
 $(WORKSPACE_DIR)netWork/src \

LIBS=-lservercore -lnetwork -lshare -lcommon -lpthread

$(TARGET):$(OBJECTS)
	$(cc) -o $(TARGET) $(OBJECTS) $(LIBS) $(INCLUDE) $(LIB_DIR)
main.o:main.cpp
	$(cc) -c main.cpp $(INCLUDE)
# for cppfile in $(CPP_SRCS);do /
#$(subst .cpp,.o,$(cppfile)):$(cppfile)
#	echo $(cppfile) $(cc); /
#	g++-4.4 -c $$cppfile $(INCLUDE); /
#	done;
all:
	@list='$(PROJECTS_DIR)'; \
	for subdir in $$list; do \
	(cd $$subdir && make); \
	done; \
	make;
clean:
	@list='$(PROJECTS_DIR)'; \
	for subdir in $$list; do \
	(cd $$subdir && make clean); \
	done; \
	rm $(TARGET) $(OBJECTS);
<file_sep>/netWork/src/common/errorcode.h
#ifndef _ERRORCODE_H_
#define _ERRORCODE_H_

namespace errcode
{
	// begin
	enum en_FunctionReturnValue_Begin
	{
		enFunctionReturnValue_Begin = -99999999,
	};

	//class tcpserver
	enum en_tcpserver_RetVal
	{
		en_tcpserver_RetVal_Begin = enFunctionReturnValue_Begin + 1,
		en_tcpserver_StartServer_RetVal_ParamError,
		en_tcpserver_StartServer_RetVal_ConnUnRelease,
		en_tcpserver_Init_RetVal_NewMallocFalse,
		en_tcpserver_Init_RetVal_ConnUnRelease,
		en_tcpserver_InitListenSocket_RetVal_UnFindDll,
		en_tcpserver_InitListenSocket_RetVal_SocketCreatError,
		en_tcpserver_InitListenSocket_RetVal_BindError,
		en_tcpserver_InitListenSocket_RetVal_ListenError,
		en_tcpserver_getFreeConnectIdx_RetVal_NoneFreeConnIdx,
		en_tcpserver_OpenClientConnect_RetVal_NoneFreeConnIdx,
		en_tcpserver_OpenClientConnect_RetVal_FreeConnUnRelease,
		en_tcpserver_OpenClientConnect_RetVal_ParamError,
		en_tcpserver_addFreeConnectIdx_RetVal_InvalidFreeConnIdx,
		en_tcpserver_CloseClientInfo_RetVal_ParamError,
		en_tcpserver_CloseClientInfo_RetVal_ConnHasBeenReleased,
		en_tcpserver_RetVal_End,
	};

	enum en_tcpclient_RetVal
	{
		en_tcpclient_RetVal_Begin = en_tcpserver_RetVal_End + 1,
		en_tcpclient_StartClient_RetVal_ParamError,
		en_tcpclient_StartClient_RetVal_ConnUnRelease,
		en_tcpclient_Init_RetVal_ConnUnRelease,
		en_tcpclient_Init_RetVal_NewMallocFalse,
		en_tcpclient_PushDataToSendBuff_RetVal_NoneConn,
		en_tcpclient_getFreeConnectIdx_RetVal_NoneFreeConnIdx,
		en_tcpclient_addFreeConnectIdx_RetVal_InvalidFreeConnIdx,
		en_tcpclient_OpenClientConnect_RetVal_ParamError,
		en_tcpclient_OpenClientConnect_RetVal_NoneFreeConnIdx,
		en_tcpclient_OpenClientConnect_RetVal_FreeConnUnRelease,
		en_tcpclient_CloseClientInfo_RetVal_ParamError,
		en_tcpclient_CloseClientInfo_RetVal_ConnHasBeenReleased,
en_tcpclient_DisConnectFromServer_RetVal_InvalidConnIdx, en_tcpclient_RetVal_End, }; enum tcpListenThread_RetVal { en_tcpListenThread_RetVal_Begin = en_tcpclient_RetVal_End + 1, en_tcpListenThread_start_RetVal_ParamError, en_tcpListenThread_tcpListenThread_RetVal_ParamError, en_tcpListenThread_InitListenSocket_RetVal_UnFindDll, en_tcpListenThread_InitListenSocket_RetVal_SocketCreatError, en_tcpListenThread_InitListenSocket_RetVal_BindError, en_tcpListenThread_InitListenSocket_RetVal_ListenError, en_tcpListenThread_RetVal_End, }; enum tcpRecvThread_RetVal { en_tcpRecvThread_RetVal_Begin = en_tcpListenThread_RetVal_End + 1, en_tcpRecvThread_start_RetVal_ParamError, en_tcpRecvThread_tcpRecvThread_RetVal_ParamError, en_tcpRecvThread_InitListenSocket_RetVal_UnFindDll, en_tcpRecvThread_InitListenSocket_RetVal_SocketCreatError, en_tcpRecvThread_InitListenSocket_RetVal_BindError, en_tcpRecvThread_InitListenSocket_RetVal_ListenError, en_tcpRecvThread_RetVal_End, }; enum tcpSendThread_RetVal { en_tcpSendThread_RetVal_Begin = en_tcpRecvThread_RetVal_End + 1, en_tcpSendThread_start_RetVal_ParamError, en_tcpSendThread_RetVal_End, }; // end enum en_FunctionReturnValue_End { enFunctionReturnValue_Sucess = 0, }; } #endif<file_sep>/common/src/makefile INCLUDE= -I./header OBJECTS=Protocol.o SRCS=Protocol.cpp TARGET=libcommon.a cc=g++ $(TARGET):$(OBJECTS) ar crv $(TARGET) $(OBJECTS) Protocol.o:Protocol.cpp $(cc) -c Protocol.cpp $(INCLUDE) clean: rm $(TARGET) $(OBJECTS) <file_sep>/common/src/header/CommonStruct.h #ifndef _COMMON_STRUCT_H_327 #define _COMMON_STRUCT_H_327 typedef struct WGVector3 { float x; float y; float z; WGVector3() { x = 0.0f; y = 0.0f; z = 0.0f; }; }WGPoint3; #endif<file_sep>/netWork/src/itcpserver.cpp #include "common/itcpserver.h" #include "tcpserver.h" ITCPServer* CreateTCPServer( ) { ITCPServer* pServer = ( ITCPServer*)(new tcpserver); return pServer; } bool CloseTCPServer( ITCPServer* pServer ) { if ( !pServer ) return false; pServer->StopServer(); return true; } <file_sep>/share/src/Header/def.h ///////////////////////////////////////////////////////////////////////////// // FileName : def.h // Creator : zuolizhi // Date : 2006-5-30 9:54:00 // Comment : Interface Declare // Changes : ///////////////////////////////////////////////////////////////////////////// #ifndef _MINISTER_DEF_H_ #define _MINISTER_DEF_H_ #pragma warning(disable : 4786) #include "string.h" #ifdef WIN32 #include <windows.h> #else #include <stdio.h> #include <stdlib.h> #include <ctype.h> #include <unistd.h> #include <signal.h> #include <fcntl.h> #include <netdb.h> #include <netinet/in.h> #include <net/if.h> #include <arpa/inet.h> #include <sys/types.h> #include <sys/time.h> #include <sys/socket.h> #include <sys/ioctl.h> #include <sys/stat.h> #include <string.h> #include <dirent.h> #include <dlfcn.h> #include <sys/ipc.h> #include <sys/shm.h> #ifndef MAC #include <mntent.h> #include <sys/vfs.h> #endif #include <sys/utsname.h> #include <sys/resource.h> #include <sys/select.h> #include <errno.h> #endif #include <map> #include <set> #include <string> #include <algorithm> #include <fstream> #ifndef interface #define interface struct #endif #define IN #define OUT #define INVALID_VALUE ( -1 ) #define SUCCESS_VALUE ( 0 ) #define WAIT_SELECT ( -2 ) #ifndef NULL #define NULL ( 0 ) #endif #define BYTE_WIDE ( 8 ) #define BIT ( 1 ) #define KB ( 1024 ) #define MB ( 1024 * 1024 ) /* * unit define */ #define MAX_LOGIN_PROTOCOL 10 #define MAX_BUFLEN ( KB * 2 + 16 ) #define MAX_DBBUFLEN ( KB * 96 + 16 ) #define MAX_PACKAGE ( KB ) /* 
<file_sep>/common/src/makefile
INCLUDE= -I./header
OBJECTS=Protocol.o
SRCS=Protocol.cpp
TARGET=libcommon.a
cc=g++

$(TARGET):$(OBJECTS)
	ar crv $(TARGET) $(OBJECTS)
Protocol.o:Protocol.cpp
	$(cc) -c Protocol.cpp $(INCLUDE)
clean:
	rm $(TARGET) $(OBJECTS)
<file_sep>/common/src/header/CommonStruct.h
#ifndef _COMMON_STRUCT_H_327
#define _COMMON_STRUCT_H_327

typedef struct WGVector3
{
    float x;
    float y;
    float z;
    WGVector3()
    {
        x = 0.0f;
        y = 0.0f;
        z = 0.0f;
    };
} WGPoint3;
#endif
<file_sep>/netWork/src/itcpserver.cpp
#include "common/itcpserver.h"
#include "tcpserver.h"

ITCPServer* CreateTCPServer( )
{
    ITCPServer* pServer = ( ITCPServer*)(new tcpserver);
    return pServer;
}

bool CloseTCPServer( ITCPServer* pServer )
{
    if ( !pServer )
        return false;
    pServer->StopServer();
    return true;
}
<file_sep>/share/src/Header/def.h
/////////////////////////////////////////////////////////////////////////////
// FileName : def.h
// Creator  : zuolizhi
// Date     : 2006-5-30 9:54:00
// Comment  : Interface Declare
// Changes  :
/////////////////////////////////////////////////////////////////////////////
#ifndef _MINISTER_DEF_H_
#define _MINISTER_DEF_H_
#pragma warning(disable : 4786)
#include "string.h"
#ifdef WIN32
#include <windows.h>
#else
#include <stdio.h>
#include <stdlib.h>
#include <ctype.h>
#include <unistd.h>
#include <signal.h>
#include <fcntl.h>
#include <netdb.h>
#include <netinet/in.h>
#include <net/if.h>
#include <arpa/inet.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/socket.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <string.h>
#include <dirent.h>
#include <dlfcn.h>
#include <sys/ipc.h>
#include <sys/shm.h>
#ifndef MAC
#include <mntent.h>
#include <sys/vfs.h>
#endif
#include <sys/utsname.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <errno.h>
#endif
#include <map>
#include <set>
#include <string>
#include <algorithm>
#include <fstream>

#ifndef interface
#define interface struct
#endif

#define IN
#define OUT
#define INVALID_VALUE ( -1 )
#define SUCCESS_VALUE ( 0 )
#define WAIT_SELECT ( -2 )
#ifndef NULL
#define NULL ( 0 )
#endif
#define BYTE_WIDE ( 8 )
#define BIT ( 1 )
#define KB ( 1024 )
#define MB ( 1024 * 1024 )

/*
 * unit define
 */
#define MAX_LOGIN_PROTOCOL 10
#define MAX_BUFLEN ( KB * 2 + 16 )
#define MAX_DBBUFLEN ( KB * 96 + 16 )
#define MAX_PACKAGE ( KB )

/*
 * RB Buffer define
 */
#define RB_DEFHL ( 4 )
#define RB_SPACE ( 8 )
#define RB_DEFIBL ( KB * 16 + RB_SPACE )
#define RB_MAXPACK ( KB * 4 )

#define BYTE unsigned char
#define WORD unsigned short
#define UINT unsigned int
#endif
<file_sep>/share/src/file_log.cpp
//////////////////////////////////////////////////////////////////////////
//
// Kingsoft Blaze Game Studio. Copyright (C) 2006
//
// Created_datetime : 2006-7-25 11:05
// File_base        : cfs_filelogs
// File_ext         : cpp
// Author           : Cooler(<EMAIL>)
// Description      : 9Sword series file logs writer
//
// <Change_list>
//
// Example:
// {
//     Change_datetime : year-month-day hour:minute
//     Change_by       : changed by who
//     Change_purpose  : change reason
// }
//////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////
// Include region
#ifdef WIN32
#include <windows.h>
#endif
#include <stdarg.h>
#include "file_log.h"
#include "tools.h"

static Mutex m_gLogMutex(0);
FILE *FILELOGS::m_fileSvrLog = NULL;
int FILELOGS::m_nFileStatus = FILESTATUS_CLOSE;

//////////////////////////////////////////////////////////////////////////
// FILELOGS class implement region

// FILELOGS class construct & destruct functions
FILELOGS::FILELOGS()
{
}

FILELOGS::~FILELOGS()
{
}

void FILELOGS::WriteLog(const char *pcLogMsg, ...)
{
    char szMsgBuf[MAXSIZE_MSGBUF] = {0};
    va_list va;
    va_start(va, pcLogMsg);
    // bounded print so an oversized message cannot overflow szMsgBuf
    // (the original used vsprintf, which has no length limit)
    vsnprintf(szMsgBuf, sizeof(szMsgBuf) - 1, pcLogMsg, va);
    va_end(va);
    WriteLogInner(szMsgBuf, LOGLEADNAME_DEBUG);
}

// FILELOGS class member functions
void FILELOGS::WriteLogInner(const char *pcLogMsg, const char *pcLogLead, const int len, bool addTimeStamp)
{
    if(pcLogMsg == NULL || pcLogLead == NULL)
    {
        return;
    }
    m_gLogMutex.lock();
    int nStatus = OpenLog(pcLogLead);
    if(nStatus != -1)
    {
        if(addTimeStamp)
        {
            char szCurTime[MAXSIZE_CURTIME] = {0};
            GetCurDateTimeWithString(szCurTime);
            strcat(szCurTime, SERVERLOG_SKIP);
            WriteData(szCurTime);
        }
        WriteData(pcLogMsg, len);
        CloseLog();
    }
    m_gLogMutex.unlock();
}

int FILELOGS::OpenLog(const char *pcLogLead)
{
    int nRet = 0;
    if(m_nFileStatus == FILESTATUS_CLOSE)
    {
        char szOpenPathName[MAXSIZE_PATHNAME] = {0};
        time_t ltime;
        tm tmStruct;
        time(&ltime);
        tmStruct = *localtime(&ltime);
        strcat(szOpenPathName, SUBDIRNAME_LOG);
        sprintf(szOpenPathName + strlen(szOpenPathName), "%s%04d-%02d.log",
                pcLogLead, tmStruct.tm_year + 1900, tmStruct.tm_mon + 1);
#ifdef WIN32
        CreateDirectory(TEXT(SUBDIRNAME_LOG), NULL);
#else /* LINUX */
        mkdir(SUBDIRNAME_LOG, 0700);
#endif
        m_fileSvrLog = fopen(szOpenPathName, "a+");
        if(m_fileSvrLog)
        {
            m_nFileStatus = FILESTATUS_OPEN;
        }
        else
        {
            nRet = -1;
        }
    }
    return nRet;
}

void FILELOGS::CloseLog()
{
    if(m_nFileStatus == FILESTATUS_OPEN)
    {
        fclose(m_fileSvrLog);
        m_fileSvrLog = NULL;
        m_nFileStatus = FILESTATUS_CLOSE;
    }
}

void FILELOGS::WriteData(const char *pcLogMsg, const int len)
{
    if(pcLogMsg == NULL)
    {
        return;
    }
    if(m_nFileStatus == FILESTATUS_OPEN)
    {
        if( len == -1 )
            fwrite(pcLogMsg, 1, strlen(pcLogMsg), m_fileSvrLog);
        else
            fwrite(pcLogMsg, 1, len, m_fileSvrLog);
    }
}
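// --- editor's example (not part of the original source) ---------------------
// FILELOGS opens, appends to and closes the log file on every call, and
// serialises writers through the module-level mutex, so the network threads
// can log directly with printf-style formatting:
//
//   FILELOGS::WriteLog( "client %d connected from %s", clientIdx, szIp );
//
// Each call appends a timestamped line to logs/Debug<year>-<month>.log.
// -----------------------------------------------------------------------------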
<file_sep>/netWork/src/common/itcpclient.h
#ifndef _I_TCP_CLIENT_
#define _I_TCP_CLIENT_
#ifdef WIN32
#include <WinSock.h>
#else
#endif
#include "networkthread.h"

#define MIN_CONN 1
#define MAX_CONN 10

interface ITCPClient: public ItcpThreadOwner
{
    virtual int StartClient( char* serverIP, PORT listenPort, int maxCon = MIN_CONN ) = 0;
    virtual int StopClient( ) = 0;
    virtual int ActionClient() = 0;
    virtual int ConnectToServer( ) = 0;
    virtual int DisConnectFromServer( int ConntedIdx ) = 0;
};

extern ITCPClient* CreateTCPClient( );
extern bool CloseTCPClient( ITCPClient* pClient);
#endif
<file_sep>/serverCore/src/Map/WGMapManager.h
#ifndef _WG_MAP_MANAGER_H_327
#define _WG_MAP_MANAGER_H_327
#include "def.h"
#include "ProtocolMap.h"
#include "MapDef.h"

class WGMapNodeBase;

class WGMapManager
{
private:
    WGMapManager();
    ~WGMapManager();
public:
    static WGMapManager *Singleton();
public:
    int initMapData();
    const WGMapNodeBase *getMapInfo();
public:
    void c2sRequestMapInfo(int clientIdx, PROTOCOL_HEADER* protocol, int dataSize);
private:
    WGMapNodeBase *m_mapNodes;
    MapNodeInfo *m_mapInfo;
    UINT m_mapNodeCount;
};
#endif
<file_sep>/netWork/src/makefile
WORKSPACE_DIR=../../
INCLUDE= -I./common \
	-I. \
	-I$(WORKSPACE_DIR)share/src \
	-I$(WORKSPACE_DIR)share/src/Header \
	-I$(WORKSPACE_DIR)share/src/Header/tools
OBJECTS=itcpclient.o itcpserver.o tcpclient.o tcpserver.o networkthread.o
CPP_SRCS=itcpclient.cpp itcpserver.cpp tcpclient.cpp tcpserver.cpp networkthread.cpp
TARGET=libnetwork.a
cc=g++

$(TARGET):$(OBJECTS)
	ar crv $(TARGET) $(OBJECTS)
itcpclient.o:itcpclient.cpp
	$(cc) -c itcpclient.cpp $(INCLUDE)
itcpserver.o:itcpserver.cpp
	$(cc) -c itcpserver.cpp $(INCLUDE)
tcpclient.o:tcpclient.cpp
	$(cc) -c tcpclient.cpp $(INCLUDE)
tcpserver.o:tcpserver.cpp
	$(cc) -c tcpserver.cpp $(INCLUDE)
networkthread.o:networkthread.cpp
	$(cc) -c networkthread.cpp $(INCLUDE)
# for cppfile in $(CPP_SRCS);do /
#$(subst .cpp,.o,$(cppfile)):$(cppfile)
#	echo $(cppfile) $(cc); /
#	g++-4.4 -c $$cppfile $(INCLUDE); /
#	done;
clean:
	rm $(TARGET) $(OBJECTS)
<file_sep>/SpaceWarServer/src/main.cpp
#include <stdio.h>
#include "itcpserver.h"
#include "tools.h"
#include "Core.h"
#include "WGNetProtocolProcess.h"

int main()
{
    //ITCPServer* Server = CreateTCPServer();
    //if ( !Server )
    //    return 1;
    //Server->StartServer( 3345 );
    startCore();
    g_Server->StartServer(3345, 10);
    TimerStart();
    unsigned int dataSize = 0;
    while (true)
    {
        do
        {
            char* data = (char *)g_Server->GetDataFromRecvBuff(dataSize);
            if (dataSize > 0)
            {
                //char recvRet[50] = {0};
                TCPNetPack *pack = (TCPNetPack*)data;
                //sprintf_s(recvRet, "Recv Over:%s", pack->buff);
                //OutPutDebugStringToCMD(pack->buff);
                //g_Server->PushDataToSendBuff(pack->ConnectionID, recvRet, strlen(recvRet));
                // subtract the size of the leading ConnectIdx field of TCPNetPack
                g_NetProtocolProcess.ProcessNetProtocol(pack->ConnectionID, (PROTOCOL_HEADER*)(pack->buff), dataSize - 4);
                OutPutStringToCMD("recv msg from Client :%d\n", pack->ConnectionID);
            }
        } while (dataSize > 0);
        Sleep(10);
    }
    g_Server->StopServer();
    return 0;
    /*
    ITCPServer* Server = CreateTCPServer();
    if ( !Server )
        return 1;
    Server->StartServer( 3345 );
    TimerStart();
    char md5[1024] = {0};
    char* filename="d:\\aa.txt";
    file_md5( filename, md5 );
    OutPutDebugStringToCMD("filemd5:%s\n", md5);
    char* stringtest ="md5test";
    string_md5( stringtest, md5 );
    OutPutDebugStringToCMD("stringMd5:%s\n", md5);
    //
    unsigned int dataSize = 0;
    while (true)
    {
        char* data = (char *)Server->GetDataFromRecvBuff(dataSize);
        if (dataSize > 0)
        {
            char recvRet[50] = {0};
            TCPNetPack *pack = (TCPNetPack*)data;
            sprintf_s(recvRet, "Recv Over:%s", pack->buff);
            OutPutDebugStringToCMD(pack->buff);
            Server->PushDataToSendBuff(pack->ConnectionID, recvRet, strlen(recvRet));
        }
        Sleep(10);
    }
    getchar();
    Server->StopServer();
    return 0;
    */
}
<file_sep>/serverCore/src/common/WGMapNodeBase.cpp
#include "WGMapNodeBase.h"
#include "random.h"
#include "MapDef.h"

WGMapNodeBase::WGMapNodeBase()
{
    m_mapInfo = NULL;
}

WGMapNodeBase::~WGMapNodeBase()
{
    m_mapInfo = NULL;
}

void WGMapNodeBase::createNode(MapNodeInfo *info)
{
    m_mapInfo = info;
}
<file_sep>/share/src/Header/tools/header_debug.h
#ifndef _HEADER_DEBUG_
#define _HEADER_DEBUG_
#include <stdio.h>
#include <stdarg.h>
#include <assert.h>
#ifdef _WIN32
#include <WinSock.h>
#include <Windows.h>
#pragma comment( lib, "ws2_32.lib" )
#else
#endif

#ifdef _DEBUG
// print a message to the IDE Output/Debug window
#define OutPutStringToDebugWin _OutPutStringToDebugWin
//e.g. OutPutStringToDebugWin( "Debug message %s\n", szMessage );

// print the last error number together with its translated description
#define OutPutLastErrorMsgToDebugWin _OutPutLastErrorMsgToDebugWin
//e.g. OutPutLastErrorMsgToDebugWin( );

// print a message to the command-line window
#define OutPutDebugStringToCMD _OutPutDebugStringToCMD
//e.g. OutPutDebugStringToCMD( "Debug message %s\n", szMessage );

// print the last error number together with its translated description
#define OutPutLastErrorMsgToCMD _OutPutLastErrorMsgToCMD
//e.g. OutPutLastErrorMsgToCMD( );

// print a log line to the cmd window
#define OutPutStringToCMD _OutPutStringToCMD
//e.g. OutPutStringToCMD( "message %s\n", szMessage );
#else
#define OutPutStringToDebugWin
#define OutPutLastErrorMsgToDebugWin
#define OutPutDebugStringToCMD _OutPutDebugStringToCMD
//e.g. OutPutDebugStringToCMD( "Debug message %s\n", szMessage );
//#define OutPutLastErrorMsgToCMD() (1)
#define OutPutLastErrorMsgToCMD _OutPutLastErrorMsgToCMD
//e.g. OutPutLastErrorMsgToCMD( );
#define OutPutStringToCMD _OutPutStringToCMD
//e.g. OutPutStringToCMD( "message %s\n", szMessage );
#endif

extern void _OutPutStringToDebugWin(const char * strOutputString,...);
extern void _OutPutLastErrorMsgToDebugWin( );
extern void _OutPutDebugStringToCMD( const char * strOutputString, ... );
extern void _OutPutLastErrorMsgToCMD( );
extern void _OutPutStringToCMD( const char* strOutputString, ...);
#endif
<file_sep>/serverCore/src/makefile
WORKSPACE_DIR=../../
INCLUDE= -I./common \
	-I. \
	-I./header \
	-I./Map \
	-I$(WORKSPACE_DIR)common/src/header \
	-I$(WORKSPACE_DIR)netWork/src/common \
	-I$(WORKSPACE_DIR)netWork/src \
	-I$(WORKSPACE_DIR)share/src \
	-I$(WORKSPACE_DIR)share/src/Header \
	-I$(WORKSPACE_DIR)share/src/Header/tools
OBJECTS=WGNetProtocolProcess.o WGMapManager.o Core.o WGMapNodeBase.o
CPP_SRCS=WGNetProtocolProcess.cpp WGMapManager.cpp Core.cpp WGMapNodeBase.cpp
TARGET=libservercore.a
cc=g++
LIB_DIR=-L $(WORKSPACE_DIR)share/src \
	-L $(WORKSPACE_DIR)common/src \
	-L $(WORKSPACE_DIR)serverCore/src \
	-L $(WORKSPACE_DIR)netWork/src \

LIBS=-lshare

$(TARGET):$(OBJECTS)
	ar crv $(TARGET) $(OBJECTS)
WGNetProtocolProcess.o:WGNetProtocolProcess.cpp
	$(cc) -c WGNetProtocolProcess.cpp $(INCLUDE)
WGMapManager.o:Map/WGMapManager.cpp
	$(cc) -c Map/WGMapManager.cpp $(INCLUDE)
Core.o:Core.cpp
	$(cc) -c Core.cpp $(INCLUDE)
WGMapNodeBase.o:common/WGMapNodeBase.cpp
	$(cc) -c common/WGMapNodeBase.cpp $(INCLUDE)
# for cppfile in $(CPP_SRCS);do /
#$(subst .cpp,.o,$(cppfile)):$(cppfile)
#	echo $(cppfile) $(cc); /
#	g++-4.4 -c $$cppfile $(INCLUDE); /
#	done;
clean:
	rm $(TARGET) $(OBJECTS)
<file_sep>/share/src/test.cpp
#include <stdio.h>
#include "./Header/tools/tools.h"

int main(int argc, char *argv[])
{
    timer tim;
    Sleep(2000);
    printf("1");
    return 0;
}
<file_sep>/netWork/src/common/itcpserver.h
#ifndef _I_TCP_SERVER_
#define _I_TCP_SERVER_
#ifdef WIN32
#include <WinSock.h>
#else
#endif
#include "networkthread.h"

#define MIN_CONN 1
#define MAX_CONN 10
#define MIN_PORT 1024
#define MAX_PORT 49151

interface ITCPServer: public ItcpThreadOwner
{
    virtual int StartServer( PORT listenPort, int maxCon = MIN_CONN ) = 0;
    virtual int StopServer( ) = 0;
    virtual int ActionServer() = 0;
};

extern ITCPServer* CreateTCPServer( );
extern bool CloseTCPServer( ITCPServer* pServer );
#endif
<file_sep>/netWork/src/networkthread.cpp
#include "networkthread.h"
#include "errorcode.h"
#include "file_log.h"
#include "common.h"
#ifdef WIN32
//#include <WinSock2.h>
#endif

int tcpListenThread::stop()
{
    CloseListenSocket();
    Thread::stop();
    RETURN( errcode::enFunctionReturnValue_Sucess );
}

void* tcpListenThread::action()
{
    while(1)
    {
        sockaddr_in client_addr;
#ifdef WIN32
        int length = sizeof(client_addr);
#else
        socklen_t length = sizeof(client_addr);
#endif
        SOCKET sock;
        OutPutDebugStringToCMD( "Listen Thread action!\n" );
        if ( (sock = accept( m_ListenSocket, (sockaddr*)&client_addr, &length)) == -1 )
        {
            FILELOGS::WriteLog( "TcpServerListenThread: ERROR on accept" );
            continue;
        }
#ifdef WIN32
        u_long iMode = 1;
        ioctlsocket(sock, FIONBIO, &iMode );
#endif
        int clientidx = getThreadOwner()->OpenConnect( sock );
        OutPutDebugStringToCMD("accept client idx:%d ip:%s\n", clientidx, inet_ntoa(client_addr.sin_addr) );
        Sleep(30);
    }
}

tcpListenThread::tcpListenThread( ) : Thread( 1 )
{
    m_bRun = false;
}

tcpListenThread::~tcpListenThread()
{
    m_bRun = false;
}

int tcpListenThread::start( ItcpThreadOwner* owner, PORT listenPort )
{
    if ( listenPort <= MIN_PORT || listenPort >= MAX_PORT )
        RETURN( errcode::en_tcpListenThread_start_RetVal_ParamError );
    if ( !owner )
        RETURN( errcode::en_tcpListenThread_start_RetVal_ParamError );
    m_ListenPort = listenPort;
    setThreadOwner( owner );
    InitListenSocket();
    Thread::start();
    RETURN( errcode::enFunctionReturnValue_Sucess );
}

int tcpListenThread::InitListenSocket()
{
#ifdef _WIN32
    WORD wVersionRequested;
    WSADATA wsadata;
    int err;
    wVersionRequested=MAKEWORD(2,2);
    err=WSAStartup(wVersionRequested,&wsadata);
    if (err!=0)
    {
        // no usable WinSock DLL was found, bail out
        OutPutLastErrorMsgToCMD();
        RETURN( errcode::en_tcpListenThread_InitListenSocket_RetVal_UnFindDll );
    }
#else
#endif
    m_ListenSocket = socket( PF_INET, SOCK_STREAM, 0 );
    if ( -1 == m_ListenSocket )
    {
        OutPutLastErrorMsgToCMD();
#ifdef WIN32
        closesocket( m_ListenSocket );
        WSACleanup();
#else
        close(m_ListenSocket);
#endif
        RETURN( errcode::en_tcpListenThread_InitListenSocket_RetVal_SocketCreatError );
    }
    sockaddr_in addr_in;
    addr_in.sin_family = AF_INET;
    addr_in.sin_addr.s_addr = htonl(INADDR_ANY);
    addr_in.sin_port = htons( m_ListenPort );
    memset( (void*)addr_in.sin_zero, 0, sizeof( addr_in.sin_zero ) );
    int addrlen = sizeof( sockaddr );
    int nOnFlag = 1;
    setsockopt(m_ListenSocket, SOL_SOCKET, SO_REUSEADDR, (const char *)&nOnFlag, sizeof(nOnFlag));
    if ( -1 == bind( m_ListenSocket, (sockaddr*)(&addr_in), addrlen ) )
    {
        OutPutLastErrorMsgToCMD();
#ifdef WIN32
        closesocket( m_ListenSocket );
        WSACleanup();
#else
        close(m_ListenSocket);
#endif
        RETURN( errcode::en_tcpListenThread_InitListenSocket_RetVal_BindError );
    }
    if(listen(m_ListenSocket, MAX_CONN_QUEUE))
    {
        OutPutLastErrorMsgToCMD();
#ifdef WIN32
        closesocket( m_ListenSocket );
        WSACleanup();
#else
        close(m_ListenSocket);
#endif
        RETURN( errcode::en_tcpListenThread_InitListenSocket_RetVal_ListenError );
    }
    RETURN( errcode::enFunctionReturnValue_Sucess );
}

int tcpListenThread::CloseListenSocket()
{
#ifdef WIN32
    closesocket( m_ListenSocket );
#else
    close( m_ListenSocket );
#endif
    RETURN( errcode::enFunctionReturnValue_Sucess );
}

int tcpSendThread::stop()
{
    Thread::stop();
    RETURN( errcode::enFunctionReturnValue_Sucess );
}

void* tcpSendThread::action()
{
    OutPutDebugStringToCMD( "Send Thread action!\n" );
    while(1)
    {
        do
        {
            unsigned int datasize = 0;
            const void* data = getThreadOwner()->GetDataFromSendBuff( datasize );
            if ( !data )
                break;
            TCPNetPack* netPack = (TCPNetPack*)data;
            TCPConnection* conn = getThreadOwner()->getConn( netPack->ConnectionID );
            if( !conn )
                break;
            if ( conn->m_State != enTCPConnectionState_Connected )
                break;
            int sendsize = send( conn->m_Socket, (const char*)(netPack->buff), datasize - 4, MSG_NOSIGNAL );
            if ( sendsize == SOCKET_ERROR )
            {
#ifdef WIN32
                if (WSAGetLastError() != WSAEWOULDBLOCK)    // WSAGetLastError is the documented call for WinSock errors
#else
                if (errno != EWOULDBLOCK)   // real error only -- EWOULDBLOCK just means the send buffer is full
#endif
                {
                    OutPutLastErrorMsgToCMD();
                    closeConnect(conn);
                }
            }
        } while (0);
        Sleep(30);
    }
}

tcpSendThread::tcpSendThread( ) : Thread( 1 )
{
    m_bRun = false;
}

tcpSendThread::~tcpSendThread()
{
    m_bRun = false;
}

int tcpSendThread::start( ItcpThreadOwner* owner )
{
    if ( !owner )
        RETURN( errcode::en_tcpSendThread_start_RetVal_ParamError );
    setThreadOwner( owner );
    Thread::start();
    RETURN( errcode::enFunctionReturnValue_Sucess );
}

int tcpRecvThread::stop()
{
    Thread::stop();
    RETURN( errcode::enFunctionReturnValue_Sucess );
}

void* tcpRecvThread::action()
{
    OutPutDebugStringToCMD( "Recv Thread action!\n" );
    while(1)
    {
        for ( int i = 0; i < getThreadOwner()->getMaxConn(); i++ )
        {
            do
            {
                TCPConnection* conInfo = getThreadOwner()->getConn( i );
                if ( !conInfo )
                    break;
                if ( conInfo->m_State != enTCPConnectionState_Connected )
                    break;
                char recvBuff[MAX_RECVPACK] = {0};
                TCPNetPack* pNetPack = (TCPNetPack*)recvBuff;
                pNetPack->ConnectionID = i;
                //int recvRet = recv( conInfo->m_Socket, pNetPack->buff, MAX_RECVPACK, 0 );
                // read at most MAX_RECVPACK - 4 bytes: pNetPack->buff starts 4 bytes
                // into recvBuff, so reading MAX_RECVPACK would overflow the stack buffer
                int recvRet = recv( conInfo->m_Socket, pNetPack->buff, MAX_RECVPACK - 4, 0 );
                if ( 0 > recvRet )
                {
#ifdef WIN32
                    if (WSAGetLastError() != WSAEWOULDBLOCK)
#else
                    if (errno != EWOULDBLOCK)   // real error only -- EWOULDBLOCK just means no data yet
#endif
                    {
                        OutPutLastErrorMsgToCMD();
                        closeConnect(conInfo);
                        break;
                    }
                    if(UNIX_TIME_STAMP - conInfo->m_lastPingTimeStamp > KEEP_ALIVE_TIME)
                    {
                        OutPutDebugStringToCMD("close socket: no packets from client for too long\n");
                        closeConnect(conInfo);
                        break;
                    }
                }
                else if ( 0 < recvRet )
                {
                    //OutPutDebugStringToCMD("Recv data:%s\n", pNetPack->buff );
                    conInfo->m_lastPingTimeStamp = UNIX_TIME_STAMP;
                    getThreadOwner()->PushDataToRecvBuff( recvBuff, recvRet + 4 );
                    break;
                }
                else if (0 == recvRet)
                {
                    OutPutDebugStringToCMD("close socket: client closed the socket\n");
                    closeConnect(conInfo);
                }
            } while (0);
        }
        Sleep(20);
    }
}

tcpRecvThread::tcpRecvThread( ) : Thread( 1 )
{
    m_bRun = false;
}

tcpRecvThread::~tcpRecvThread()
{
    m_bRun = false;
}

int tcpRecvThread::start( ItcpThreadOwner* owner )
{
    if ( !owner )
        RETURN( errcode::en_tcpRecvThread_start_RetVal_ParamError );
    setThreadOwner( owner );
    Thread::start();
    RETURN( errcode::enFunctionReturnValue_Sucess );
}

ItcpThreadOwner* tcpThreadInfo::getThreadOwner()
{
    return m_tcpThreadOwner;
}

void tcpThreadInfo::setThreadOwner( ItcpThreadOwner* owner )
{
    if ( !owner )
        return;
    m_tcpThreadOwner = owner;
}

void tcpThreadInfo::closeConnect( TCPConnection *connInfo )
{
    OutPutLastErrorMsgToCMD();
    connInfo->CloseCallBack(getThreadOwner());
    connInfo->m_State = enTCPConnectionState_Closing;
    getThreadOwner()->CloseConnect(connInfo->m_ConnectIdx);
}
<file_sep>/serverCore/src/WGNetProtocolProcess.h
#ifndef _WG_NET_PROTOCOL_PROCESS_H_327
#define _WG_NET_PROTOCOL_PROCESS_H_327
#include "Protocol.h"
#include "ProtocolStruct.h"

class WGNetProtocolProcess
{
public:
    WGNetProtocolProcess();
    ~WGNetProtocolProcess();
    int ProcessNetProtocol(int clientIdx, PROTOCOL_HEADER* protocol, int dataSize);
private:
    void (WGNetProtocolProcess::*ProcessFunc[c2s_end])(int clientIdx, PROTOCOL_HEADER* protocol, int dataSize);
private:
    void c2sRequestMapInfo(int clientIdx, PROTOCOL_HEADER *protocol, int dataSize);
};

extern WGNetProtocolProcess g_NetProtocolProcess;
#endif
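// --- editor's sketch (the matching .cpp is not part of this dump) ------------
// ProcessFunc is a per-protocol-id dispatch table, one slot per c2s_* value.
// A plausible wiring, inferred from the declarations above, would be:
//
//   // in the constructor:
//   //   ProcessFunc[c2s_requestMapInfo] = &WGNetProtocolProcess::c2sRequestMapInfo;
//   // in ProcessNetProtocol(), after bounds-checking protocol->Protocol:
//   //   (this->*ProcessFunc[protocol->Protocol])(clientIdx, protocol, dataSize);
// ------------------------------------------------------------------------------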
<file_sep>/netWork/src/tcpclient.h
#ifndef _TCP_CLIENT_
#define _TCP_CLIENT_
#ifndef _DEBUG
#ifndef _NET_API_
#define _NET_API_ __declspec(dllexport)
#endif
#else
#define _NET_API_
#endif
#include <list>
#include "itcpclient.h"

typedef std::list<int> CONNECTINDEX;

class tcpclient : public ITCPClient
{
public:
    tcpclient();
    ~tcpclient();
public:
    int StartClient( char* serverIP, PORT listenPort, int maxCon = MIN_CONN );
    int StopClient( );
    int ActionClient();
    int OpenConnect( SOCKET sock );
    int CloseConnect( int connectIdx );
    int ConnectToServer( );
    int DisConnectFromServer( int ConntedIdx );
    int PushDataToSendBuff(int connectIdx, void* pBuf, int dataSize );
    const void* GetDataFromRecvBuff( unsigned int& dataSize );
    int PushDataToRecvBuff( void* pBuf, int dataSize );
    const void* GetDataFromSendBuff( unsigned int& dataSize );
    int CloseConnectCallBack(int connectIdx);
public:
    SOCKET getListenSocket() { return SOCKET_INVALID; }
    int getMaxConn() { return m_MaxConn; }
    TCPConnection* getConn( int connIdx )
    {
        if ( connIdx < 0 || connIdx >= m_MaxConn )
            return NULL;
        return &m_Conn[connIdx];
    }
    CONNECTINDEX* getConnectedIdxList() { return &m_ConnectedConnIdx; }
    int getConnectedIdx()
    {
        if ( m_ConnectedConnIdx.size() > 0 )
            return *(m_ConnectedConnIdx.begin());
        return INVALID_VALUE;
    }
private:
    int popFreeConnectIdx();
    int pushFreeConnectIdx( int freeIdx );
private:
    int Init( );
    int Release( );
    int ReleaseConnectInfo( int connectIdx );
private:
    char m_ServerIP[IP_LEN];
    PORT m_ServerPort;
    tcpSendThread m_SendThread;
    tcpRecvThread m_RecvThread;
    char m_SendSpace[MAX_SENDBUFF];
    RingBuffer m_SendBuff;
    char m_RecvSpace[MAX_RECVBUFF];
    RingBuffer m_RecvBuff;
    TCPConnection* m_Conn;
    int m_MaxConn;
    CONNECTINDEX m_FreeConnIdx;
    CONNECTINDEX m_ConnectedConnIdx;
};
#endif
<file_sep>/netWork/src/common/netcommon.h
#ifndef _NET_COMMON_H_
#define _NET_COMMON_H_
#pragma pack(push, 1)
#include "macro.h"
#ifdef WIN32
#define MSG_NOSIGNAL 0
#else
#ifdef MAC
#define MSG_NOSIGNAL 0
#endif
#define SOCKET_ERROR -1
typedef unsigned int SOCKET;
#endif

typedef int PORT;
#define MIN_PORT 1024
#define MAX_PORT 49151
#define SOCKET_INVALID -1
#define IP_LEN 64

struct TCPNetPack
{
    int ConnectionID;
    char buff[1];
};
#endif
<file_sep>/serverCore/src/Map/WGMapManager.cpp
#include "WGMapManager.h"
#include "MapDef.h"
#include "random.h"
#include "WGMapNodeBase.h"
#include "ProtocolMap.h"
#include "Protocol.h"
#include "Core.h"

WGMapManager::WGMapManager()
{
    m_mapNodeCount = MAP_NODE_MIN_COUNT + g_Random(MAP_NODE_MAX_COUNT - MAP_NODE_MIN_COUNT);
    m_mapNodes = new WGMapNodeBase[m_mapNodeCount];
    m_mapInfo = new MapNodeInfo[m_mapNodeCount];
}

WGMapManager::~WGMapManager()
{
    if (m_mapNodes)
        delete[] m_mapNodes;    // array form -- both members are allocated with new[]
    if (m_mapInfo)
        delete[] m_mapInfo;     // was leaked in the original
}

WGMapManager *WGMapManager::Singleton()
{
    static WGMapManager *mgr = NULL;
    if (mgr == NULL)
    {
        mgr = new WGMapManager;
    }
    return mgr;
}

int WGMapManager::initMapData()
{
    for (UINT i = 0; i < m_mapNodeCount; i++)
    {
        m_mapInfo[i].m_position.x = g_Random(MAP_X_MAX);
        m_mapInfo[i].m_position.y = g_Random(MAP_Y_MAX);
        m_mapInfo[i].m_position.z = g_Random(MAP_Z_MAX);
        m_mapNodes[i].createNode(&(m_mapInfo[i]));
    }
    return 0;
}

const WGMapNodeBase *WGMapManager::getMapInfo()
{
    //const WGMapNodeBase* info = m_mapNodes;
    return m_mapNodes;
}

void WGMapManager::c2sRequestMapInfo(int clientIdx, PROTOCOL_HEADER* protocol, int dataSize)
{
    // size the buffer from the actual node count instead of a fixed 1000 bytes,
    // which could overflow for large maps
    size_t bufferSize = sizeof(S2C_SendMapInfo) + sizeof(MapNodeInfo) * m_mapNodeCount;
    char *buffer = (char*)malloc(bufferSize);
    S2C_SendMapInfo *data = (S2C_SendMapInfo *)buffer;
    data->Protocol = s2c_sendMapInfo;
    data->mapNodeCount = m_mapNodeCount;
    //memcpy_s(data->mapInfo,4000, m_mapInfo, sizeof(MapInfo)*m_mapNodeCount);
    memcpy(data->mapInfo, m_mapInfo, sizeof(MapNodeInfo) * m_mapNodeCount);
    // the original used sizeof(data), i.e. the size of a pointer; the struct size was intended
    data->Length = sizeof(S2C_SendMapInfo) + sizeof(MapNodeInfo) * m_mapNodeCount - 1;
    g_Server->PushDataToSendBuff(clientIdx, data, data->Length);
    free(buffer);   // safe to free: PushDataToSendBuff copies the payload into the ring buffer
}
<file_sep>/netWork/src/networkthread.h
#ifndef _NETWORK_THREAD_H_
#define _NETWORK_THREAD_H_
#include "def.h"
#include "utilities.h"
#include "tools.h"
#include "netcommon.h"

#define INVALID_CONNECT_INDEX -1
#define MAX_SENDBUFF 1024 * 1024
#define MAX_RECVBUFF 1024 * 1024
#define MAX_SENDPACK 2048
#define MAX_RECVPACK 2048
#define MAX_CONN_QUEUE 1000
#define CLOSE_SOCKET_REPEAT_TIME 30

//typedef int (ItcpThreadOwner::*ClOSECALLBACK)( int connectIdx );
class tcpserver;
class tcpclient;
interface ItcpThreadOwner;

enum enTCPConnectionState
{
    enTCPConnectionState_idle,      // used in two cases: 1. the idx passed to getConn is invalid; 2. the connection is already closed but the send buffer still holds data
    enTCPConnectionState_Closing,   // connection closed, but the buffers may still hold data, so the slot cannot be reused yet
    enTCPConnectionState_free,      // free, available for reuse
    enTCPConnectionState_WillClose, // about to be closed
    enTCPConnectionState_Connected, // connected
};

struct TCPConnection
{
    int m_ConnectIdx;
    enTCPConnectionState m_State;
    SOCKET m_Socket;
    //ClOSECALLBACK m_CloseCallback;
    int (ItcpThreadOwner::*m_CloseCallback)( int connectIdx ); // callback invoked when the connection is torn down
    unsigned int m_CloseTimeStamp;      // timestamp of when the connection was closed
    unsigned int m_lastPingTimeStamp;   // timestamp of the last ping message

    void CloseCallBack(ItcpThreadOwner *owner)
    {
        if ( m_CloseCallback )
            (owner->*m_CloseCallback)( m_ConnectIdx );
    }
    void Release()
    {
        m_ConnectIdx = INVALID_CONNECT_INDEX;
        m_State = enTCPConnectionState_free;
        m_Socket = SOCKET_INVALID;
        m_CloseCallback = NULL;
        m_CloseTimeStamp = 0;
        m_lastPingTimeStamp = 0;
    }
    TCPConnection()
    {
        Release();
    }
    ~TCPConnection()
    {
        Release();
    }
};

interface ItcpThreadOwner
{
    virtual int getMaxConn() = 0;
    virtual TCPConnection* getConn( int connIdx ) = 0;
    virtual int CloseConnect( int connectIdx ) = 0;
    virtual int OpenConnect( SOCKET sock ) = 0;
    virtual int PushDataToSendBuff( int connectIdx, void* pBuf, int dataSize ) = 0;
    virtual const void* GetDataFromRecvBuff( unsigned int& dataSize ) = 0;
    virtual int PushDataToRecvBuff( void* pBuf, int dataSize ) = 0;
    virtual const void* GetDataFromSendBuff( unsigned int& dataSize ) = 0;
    virtual int CloseConnectCallBack(int connectIdx) = 0;
};

class tcpThreadInfo
{
private:
    ItcpThreadOwner* m_tcpThreadOwner;
public:
    ItcpThreadOwner* getThreadOwner();
    void setThreadOwner( ItcpThreadOwner* owner );
    void closeConnect(TCPConnection *connInfo);
};

class tcpListenThread : public Thread, public tcpThreadInfo
{
private:
    bool m_bRun;
    SOCKET m_ListenSocket;
    PORT m_ListenPort;
public:
    tcpListenThread( );
    ~tcpListenThread();
private:
    int InitListenSocket( );
    int CloseListenSocket();
public:
    int start( ItcpThreadOwner* owner, PORT listenPort );
    void* action();
    int stop();
};

class tcpSendThread : public Thread, public tcpThreadInfo
{
private:
    bool m_bRun;
public:
    tcpSendThread( );
    ~tcpSendThread();
public:
    int start( ItcpThreadOwner* owner );
    void* action();
    int stop();
};

class tcpRecvThread : public Thread, public tcpThreadInfo
{
private:
    bool m_bRun;
public:
    tcpRecvThread( );
    ~tcpRecvThread();
public:
    int start( ItcpThreadOwner* owner );
    void* action();
    int stop();
};
#endif
<file_sep>/common/src/header/Protocol.h
#ifndef _PROTOCOL_H_
#define _PROTOCOL_H_

enum enProtocol_s2c
{
    s2c_begin = -1,
    s2c_sendMapInfo,
    s2c_end,
};

enum enProtocol_c2s
{
    c2s_begin = -1,
    c2s_requestMapInfo,
    c2s_end,
};
#endif
<file_sep>/serverCore/src/Core.cpp
#include "Core.h"
#include "Map/WGMapManager.h"
#include "def.h"
#include "header_debug.h"

ITCPServer* g_Server = NULL;

int initServer()
{
    if (g_Server != NULL)
        return INVALID_VALUE;
    g_Server = CreateTCPServer();
    if (g_Server == NULL)
        return INVALID_VALUE;
    return SUCCESS_VALUE;
}

int initCore()
{
    if(SUCCESS_VALUE != WGMapManager::Singleton()->initMapData())
    {
        OutPutStringToCMD("initCore ERROR::initMapData error!\n");
        return INVALID_VALUE;
    }
    return 0;
}

int startCore()
{
    if (SUCCESS_VALUE != initServer())
    {
        OutPutStringToCMD("initServer error!\n");
        return INVALID_VALUE;
    }
    if (SUCCESS_VALUE != initCore())
    {
        OutPutStringToCMD("initCore error!\n");
        return INVALID_VALUE;
    }
    return SUCCESS_VALUE;
}
<file_sep>/common/src/header/ProtocolStruct.h
#ifndef _PROTOCOL_STRUCT_H_
#define _PROTOCOL_STRUCT_H_
#include "def.h"
#pragma pack(push, 1)

// common protocol header
struct PROTOCOL_HEADER
{
    WORD Protocol;
    WORD Length;
};

// compressed protocol header
struct COMPRESSED_PROTOCOL_HEADER : public PROTOCOL_HEADER
{
    WORD Length;
};
#endif
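// --- editor's sketch (assumption: ProtocolMap.h is not included in this dump) --
// Concrete messages extend PROTOCOL_HEADER. From its use in WGMapManager.cpp,
// the map-info reply is presumably laid out like this:
//
//   struct S2C_SendMapInfo : public PROTOCOL_HEADER   // assumed definition
//   {
//       UINT mapNodeCount;
//       char mapInfo[1];   // mapNodeCount * sizeof(MapNodeInfo) bytes follow
//   };
// --------------------------------------------------------------------------------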
<file_sep>/share/src/Header/random.h
//---------------------------------------------------------------------------
// Sword3 Engine (c) 1999-2000 by Kingsoft
//
// File:  KRandom.h
// Date:  2000.08.08
// Code:  WangWei(Daphnis)
// Desc:  Header File
//---------------------------------------------------------------------------
#ifndef _Random_H_327
#define _Random_H_327
#include "def.h"
//---------------------------------------------------------------------------
UINT g_Random(UINT nMax);
void g_RandomSeed(UINT nSeed);
UINT g_GetRandomSeed();
//---------------------------------------------------------------------------
#endif
<file_sep>/share/src/makefile
INCLUDE= -I./Header \
	-I./Header/tools
OBJECTS=md5.o file_log.o md5c.o random.o header_debug.o tools.o utilities.o minilzo.o
SRCS=md5.cpp file_log.cpp md5c.c random.cpp header_debug.cpp tools.cpp utilities.cpp minilzo.c
TARGET=libshare.a
cc=g++

$(TARGET):$(OBJECTS)
	ar crv $(TARGET) $(OBJECTS)
md5.o:md5.cpp
	$(cc) -c $(INCLUDE) md5.cpp
file_log.o:file_log.cpp
	$(cc) -c file_log.cpp $(INCLUDE)
md5c.o:md5c.c
	$(cc) -c -x c md5c.c $(INCLUDE)
random.o:random.cpp
	$(cc) -c random.cpp $(INCLUDE)
header_debug.o:header_debug.cpp
	$(cc) -c header_debug.cpp $(INCLUDE)
tools.o:tools.cpp
	$(cc) -c tools.cpp $(INCLUDE)
utilities.o:utilities.cpp
	$(cc) -c utilities.cpp $(INCLUDE)
minilzo.o:minilzo.c
	$(cc) -c minilzo.c $(INCLUDE)
clean:
	rm $(TARGET) $(OBJECTS)
<file_sep>/share/src/Header/tools/file_log.h
#ifndef _FILE_LOG_H_
#define _FILE_LOG_H_
#include <stdio.h>

// Macro define region
#define FILESTATUS_CLOSE 0
#define FILESTATUS_OPEN 1
#define MAXSIZE_CURTIME 64
#define MAXSIZE_PATHNAME 512
#define MAXSIZE_PERMSG (2*1024)
#define MAXSIZE_MSGBUF (8*1024)
#ifdef WIN32
#define SUBDIRNAME_LOG "logs\\"
#else
#define SUBDIRNAME_LOG "./logs/"
#endif
#define SERVERLOG_SKIP " -> "
#define LOGLEADNAME_DEBUG "Debug"

class FILELOGS
{
public:
    FILELOGS();
    virtual ~FILELOGS();
    static void WriteLog(const char*pcLogMsg, ...);
private:
    static FILE *m_fileSvrLog;
    static int m_nFileStatus;
    static void CloseLog();
    static int OpenLog(const char*pcLogLead);
    static void WriteData(const char*pcLogMsg, const int len = -1);
    static void WriteLogInner(const char*pcLogMsg, const char*pcLogLead, const int len = -1, bool addTimeStamp = true);
};
#endif
<file_sep>/share/src/Header/tools/utilities.h
/***********************************************************************
 * Utilities files used to encapsulate the Thread, Mutex...
 * Created by <NAME> <EMAIL>
 ***********************************************************************/
#ifndef _I_UTILITIES_H_
#define _I_UTILITIES_H_
#ifdef WIN32
#include <Windows.h>
#else
#include <pthread.h>
#endif

class Thread
{
private:
    enum enum_status
    {
        new_created,
        running,
        stopped
    };
    enum_status status;
#ifdef WIN32
    HANDLE thread_id;
#else
    pthread_t thread_id;
    pthread_attr_t th_attr;
#endif
public:
    Thread( int detached );
    virtual ~Thread();
    int start();
    int stop(int nKill = 1);
    virtual void* action() = 0;
};

class Mutex
{
private:
#ifdef WIN32
    HANDLE mutex;
#else
    pthread_mutex_t mutex;
#endif
    int count;
public:
    Mutex( int mutex_type );
    ~Mutex();
    bool trylock();
    int lock();
    int unlock();
};
#endif
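// --- editor's example (not part of the original header) ---------------------
// Thread is used by subclassing and overriding action(), exactly as the
// tcpListenThread/tcpSendThread/tcpRecvThread classes in netWork/src do:
//
//   class Worker : public Thread
//   {
//   public:
//       Worker() : Thread( 1 ) {}          // 1 = create detached
//       void* action() { /* thread loop */ return NULL; }
//   };
//
//   // Worker w; w.start(); ... w.stop();
// -----------------------------------------------------------------------------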
6caff53760d159a3258f7ab7a77536c90a0188d8
[ "C", "Makefile", "C++" ]
41
C
wormggmm/SpaceWarServer
bd56571a1948e0e5dc8a041b37509a5a7aa1526e
0e6acb91be61c7ec27cb905e75f1578505ff9c42
refs/heads/master
<file_sep>#!/usr/bin/env bash
if [ "$DEBUG" = true ]; then
    set -e
    export DISPLAY=$RemoteDisplay
    chromedriver --verbose --url-base=wd/hub &
else
    export GEOMETRY="$SCREEN_WIDTH""x""$SCREEN_HEIGHT""x""$SCREEN_DEPTH"
    Xvfb :99 -screen 0 $GEOMETRY -ac +extension RANDR >>~/xvfb10.log 2>&1 &
    chromedriver --url-base=wd/hub &
fi

if [ "$UseSharedModule" = true ]; then
    if [ ! -d "${WORKSPACE}/node_modules" ]; then
        ln -s $SourceModuleFolder ${WORKSPACE}
    else
        echo "symlink exists"
    fi
fi

sleep 3
<file_sep>FROM node:7.8

ENV RemoteDisplay remotedisplay.internal.example.com:1
ENV DEBUG false
ENV HTTP_PROXY http://proxy.internal.example.com:8080
ENV HTTPS_PROXY http://proxy.internal.example.com:8080
ENV http_proxy http://proxy.internal.example.com:8080
ENV https_proxy http://proxy.internal.example.com:8080
ENV NO_PROXY ".example-support.com,.aws.example.com,.internal,169.254.169.254"
ENV HOME /home/jenkins

RUN groupadd -g 10000 jenkins
RUN useradd -c "Jenkins user" -d $HOME -u 10000 -g 10000 -m jenkins

ARG VERSION=3.7
RUN echo "deb http://http.debian.net/debian jessie-backports main" >> /etc/apt/sources.list
# editor's note: on Debian jessie the "docker" apt package is a system-tray
# applet, not the Docker engine (docker.io) -- this line likely does not
# install what was intended
RUN apt-get update \
    && apt-get -y upgrade \
    && apt-get install -y apt-utils git docker openssh-client ca-certificates openssl \
    && apt-get -t jessie-backports install -y openjdk-8-jdk

# chrome
ARG CHROME_VERSION="google-chrome-stable"
RUN apt-get update && \
    apt-get -y install wget unzip locales xvfb && \
    wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - \
    && echo "deb http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list \
    && apt-get update -qqy \
    && apt-get -qqy install \
    ${CHROME_VERSION:-google-chrome-stable} \
    && rm /etc/apt/sources.list.d/google-chrome.list

#COPY chrome_launcher.sh /opt/google/chrome/google-chrome
#RUN chmod +x /opt/google/chrome/google-chrome

# chrome driver
ARG CHROME_DRIVER_VERSION=2.29
RUN wget --no-verbose -O /tmp/chromedriver_linux64.zip https://chromedriver.storage.googleapis.com/$CHROME_DRIVER_VERSION/chromedriver_linux64.zip \
    && rm -rf /opt/selenium/chromedriver \
    && unzip /tmp/chromedriver_linux64.zip -d /opt/selenium \
    && rm /tmp/chromedriver_linux64.zip \
    && mv /opt/selenium/chromedriver /opt/selenium/chromedriver-$CHROME_DRIVER_VERSION \
    && chmod 755 /opt/selenium/chromedriver-$CHROME_DRIVER_VERSION \
    && ln -fs /opt/selenium/chromedriver-$CHROME_DRIVER_VERSION /usr/bin/chromedriver

# editor's note: without -g this installs yarn into the current working
# directory instead of onto the PATH
RUN npm install yarn

RUN echo '{ "allow_root": true }' > /root/.bowerrc
RUN mkdir /root/.ssh
RUN echo 'github.internal.example.com,172.16.31.10 ecdsa-sha2-nistp256 XXXXXXX' >> /root/.ssh/known_hosts

#Install npm-link-shared
RUN npm install npm-link-shared -g

COPY entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

#Clean up
RUN apt-get autoclean && \
    apt-get autoremove
RUN rm -rf /tmp/* && \
    rm -rf /var/cache/apk/* && \
    rm -rf /var/tmp/*

USER jenkins

ENV SCREEN_WIDTH 1360
ENV SCREEN_HEIGHT 1020
ENV SCREEN_DEPTH 24
ENV DISPLAY :99.0

RUN mkdir /home/jenkins/.jenkins
VOLUME /home/jenkins/.jenkins

WORKDIR /home/jenkins
ENTRYPOINT ["entrypoint.sh"]
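# --- editor's example (not part of the original repo; names are illustrative) ---
# Typical build & run for this CI agent image:
#   docker build -t jenkins-chrome-agent .
#   docker run --rm -e DEBUG=false -e UseSharedModule=false jenkins-chrome-agent
# DEBUG=true points chromedriver at $RemoteDisplay instead of the local Xvfb.
# ---------------------------------------------------------------------------------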
b6ebcf40a5abb8ee6f5f788ebc6c3d284653d087
[ "Dockerfile", "Shell" ]
2
Shell
rocky0001/chromeimage
130139b717667c4d07fc5da32bc68245a8ad3567
1887ee73aaea2a31ba480235c126c9f428c1f4d0
refs/heads/master
<repo_name>avrham408/bitorrent<file_sep>/setup.py
from setuptools import setup, find_packages

setup(
    name="bitorrent",
    version="0.0.1",   # was misspelled "verison", which setuptools silently ignores
    author="<NAME>",
    include_package_data=True,
    packages=find_packages()
)
<file_sep>/torrent/tracker.py
import logging
from torrent.utilities import run_async
from torrent.bencoding import Decoder
from urllib.parse import urlencode
import requests
from random import randint, randrange
from time import sleep
import struct
import socket
from enum import Enum

logger = logging.getLogger(__name__)


class UdpError(Enum):
    unknown_error = 0
    info_hash = 1
    request_error = 3


#####udp tracker######
@run_async
def udp_request(tracker, torrent_file, peer_manager, wait=0, recursive=False):
    """
    steps:
        1. connect request
        2. announce
        3. re-announce every interval

    udp tracker action codes:
        0 - connect
        1 - announce
        2 - scrape
        3 - error
    """
    if wait > 1800:
        wait = 1800
    sleep(wait)
    # request
    logger.debug(f"try to connect {tracker}")
    udp_tracker_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    logger.debug("connect to socket succeeded")
    socket_connection_request = socket_send_to(udp_tracker_socket, tracker, create_connect_req_struct())
    if socket_connection_request == UdpError.request_error:
        udp_tracker_socket.close()
        return UdpError.request_error
    if not socket_connection_request:
        udp_tracker_socket.close()
        # retry with backoff; run_async spawns the retry thread, and the
        # original fell through here instead of returning
        return udp_request(tracker, torrent_file, peer_manager, (wait + 60) * 2, recursive=True)
    connection_res = udp_request_connection_response(udp_tracker_socket)
    if connection_res == 0:
        udp_tracker_socket.close()
        return udp_request(tracker, torrent_file, peer_manager, (wait + 60) * 2, recursive=True)
    if connection_res == 1:
        udp_tracker_socket.close()
        return None
    if type(connection_res) == UdpError:
        return connection_res
    transaction_id, connection_id = connection_res
    # announce
    while True:
        socket_announce_request = socket_send_to(udp_tracker_socket, tracker,
                                                 announcing_req_packet(torrent_file, connection_id))
        if socket_announce_request == UdpError.request_error:
            udp_tracker_socket.close()
            return UdpError.request_error
        elif not socket_announce_request:
            udp_tracker_socket.close()
            return udp_request(tracker, torrent_file, peer_manager, (wait + 60) * 2, recursive=True)
        announce_response = udp_request_announce_response(udp_tracker_socket)
        if announce_response == 0:
            udp_tracker_socket.close()
            return udp_request(tracker, torrent_file, peer_manager, (wait + 60) * 2, recursive=True)
        if announce_response == 1:
            udp_tracker_socket.close()
            return None
        if type(announce_response) == UdpError:
            return announce_response
        interval, peers = announce_response
        parsed_peers = parse_peers(peers)
        peer_manager.add_peers(parsed_peers)
        logger.info(f"{len(peers)} peers added to peer_manager; sleeping for {interval / 5} seconds")
        sleep(interval / 5)


def parse_peers(raw_peers):
    parsed_peers = []
    for peer in raw_peers:
        ip = get_ip(struct.pack('>I', peer[0]))
        port = peer[1]
        parsed_peers.append((ip, port))
    return parsed_peers


def udp_request_connection_response(sock):
    """
    return values:
        0 - restart
        1 - kill the thread
        UdpError - fatal tracker error
    """
    logger.debug("start connection response")
    packet_data = read_response_from_socket(sock)
    if not packet_data:
        logger.debug("connect to tracker failed")
        return 0
    action = packet_data[0]
    if action == 3:
        return _handle_error_action(packet_data[1][0])
    elif action != 0:
        logger.info(f"something went wrong with the connection response code, the action is {action}")
        return 0
    try:
        return packet_data[1]
    except ValueError:
        logger.error("read_response_from_socket returned data not in the expected format", exc_info=True)
        return 1


def socket_send_to(sock, tracker, message):
    try:
        return sock.sendto(message, (tracker.url, tracker.port))
    except socket.gaierror:
        logger.debug("error in open socket")
        return False
    except Exception:
        logger.error("error in send udp announce request", exc_info=True)
        return UdpError.request_error
def udp_request_announce_response(sock):
    """
    return values:
        0 - restart
        1 - kill the thread
        UdpError - fatal tracker error
    """
    packet_data = read_response_from_socket(sock)
    if not packet_data:
        logger.debug("announce request failed, no answer")
        return 0
    action = packet_data[0]
    if action == 3:
        return _handle_error_action(packet_data[1][0])
    elif action != 1:
        logger.debug(f"something went wrong with the announce response code, the action is {action}")
        return 1
    try:
        transaction_id, interval, leechers, seeders, peers = packet_data[1]
    except ValueError:
        logger.error("read_response_from_socket returned data not in the expected format", exc_info=True)
        return 1
    return interval, peers


def _handle_error_action(error):
    if error == UdpError.info_hash:
        logger.info("server came back with an error string: info hash does not exist")
        return error
    else:
        return UdpError.unknown_error


def read_response_from_socket(sock):
    """
    Manages reading from the socket.

    Every packet starts with an action code; the function parses the action
    and the matching payload and returns (action, [relevant data for action]).
    On failure it returns None.
    """
    # TODO: add a timeout decorator
    logger.debug("start get action code")
    res, _ = sock.recvfrom(4096)
    action = struct.unpack(">I", res[:4])[0]
    if action == 0:
        logger.debug("connect code got from server")
        parsed_res = parse_connect_packet(res[4:])
        if not parsed_res:
            logger.info("parse udp connection response failed")
            return None
        transaction_id, connection_id = parsed_res
        return action, [transaction_id, connection_id]
    elif action == 1:
        logger.debug("announce code got from server")
        parsed_res = parse_announce_packet(res[4:20])
        if not parsed_res:
            logger.debug("parse udp announce response failed")
            return None
        transaction_id, interval, leechers, seeders = parsed_res
        peers = get_peers(res[20:])
        return action, [transaction_id, interval, leechers, seeders, peers]
    elif action == 3:
        # TODO support error from _files[4] error from explodie.org:6969 - "uknown option type"
        error_string = res[8:].decode('utf-8')
        logger.debug(f"error string from server: {error_string}")
        return action, [parse_error_to_code(error_string)]
    else:
        logger.warning(f"response code from server '{action}'")   # `code` was undefined here
        return None


def parse_connect_packet(packet):
    if len(packet) != 12:
        logger.info(f"packet length for connection response does not match, got: {packet}")
        return None
    try:
        req_response = struct.unpack(">LQ", packet)
    except struct.error:   # the original caught the nonexistent struct.debug
        logger.info("parse response failed", exc_info=True)
        return None
    return req_response


def parse_announce_packet(packet):
    if len(packet) != 16:
        logger.info(f"packet length for announce response does not match, got: {packet}")
        return None
    try:
        return struct.unpack(">IIII", packet)
    except struct.error:
        logger.info(f"parse_announce_packet got a bad response from the udp tracker: {packet}")
        return None


def create_connect_req_struct(protocol_id=0x41727101980, transaction_id=None, action=0):
    # the struct for a udp tracker connect request
    # protocol_id is a protocol constant, transaction_id is a random number, action 0 = connect
    if transaction_id is None:
        # default arguments are evaluated once at import time, so the random
        # id must be drawn inside the function to differ between calls
        transaction_id = randrange(0, 65535)
    return struct.pack(">QLL", protocol_id, action, transaction_id)


def announcing_req_packet(torrent_file, connection_id, left=0, downloaded=0, uploaded=0, event=0):
    action = 1
    transaction_id = randrange(0, 65535)
    peer_id = torrent_file.peer_id.encode('utf-8')
    ip = 0
    key = 0
    num_want = -1
    port = 9999
    try:
        return struct.pack(">QLL20s20sQQQLLLlI", connection_id, action, transaction_id,
                           torrent_file.info_hash, peer_id, downloaded, left, uploaded,
                           event, ip, key, num_want, port)
    except struct.error:
        logger.info("creating packet failed", exc_info=True)
        return None
def get_peers(peers):
    peer_list = []
    for peer_pointer in range(0, len(peers), 6):
        try:
            peer = struct.unpack(">IH", peers[peer_pointer: peer_pointer + 6])
        except struct.error:
            logger.info("something went wrong with unpacking peers data")
            return peer_list
        peer_list.append(peer)
    return peer_list


def parse_error_to_code(error):
    """
    1 unknown option type - info hash is not available
    """
    if "unknown option type" in error.lower().strip():
        return UdpError.info_hash
    else:
        logger.error(f"unknown error - {error}")
        return UdpError.unknown_error   # the return was missing in the original


##### http functions #####
@run_async
def http_request(url, peer_manager, wait=0, recursive=False):
    if wait > 1800:
        wait = 1800
    sleep(wait)
    while True:
        logger.debug("start http tracker request")
        res = None
        try:
            res = requests.get(url)
        except requests.exceptions.ConnectionError:
            logger.debug("connect to tracker failed")
            # exponential backoff; the original "wait + 30 * 2" never grew
            return http_request(url, peer_manager, (wait + 30) * 2, recursive=True)
        if res.status_code != 200:
            logger.info(f"request returned with '{res.status_code}' status code")
            # the original fell through here instead of returning
            return http_request(url, peer_manager, (wait + 30) * 2, recursive=True)
        parsed_res = read_http_tracker_response(res.content)
        if not parsed_res:
            return http_request(url, peer_manager, (wait + 30) * 2)
        interval, peers = parsed_res
        peer_manager.add_peers(peers)
        logger.info(f"{len(peers)} peers added to peer_manager; sleeping for {interval / 5} seconds")
        sleep(interval / 5)


def read_http_tracker_response(content):
    # TODO add support for more exceptions for Decoder
    # TODO check if we need complete and incomplete
    try:
        od = Decoder(content).decode()
    except Exception:
        logger.error("decode http tracker request failed", exc_info=True)
        return None
    if od.get(b'failure reason') is not None:
        logger.info("http tracker responded with a failure reason")
        logger.info(od[b'failure reason'])   # keys are bytes after bdecoding
        return None
    interval = od[b'interval']
    peers = get_peers_data(od[b'peers'])
    return interval, peers


def get_peers_data(peers_in_bytes):
    peers = []
    for bytes_pos in range(0, len(peers_in_bytes), 6):
        peer = peers_in_bytes[bytes_pos: bytes_pos + 6]
        peer_ip = get_ip(peer[:4])
        try:
            peer_port = struct.unpack(">H", peer[4:])[0]
        except struct.error:
            logger.debug("unpacking peers failed")
            continue   # skip the malformed entry instead of appending an undefined port
        peers.append((peer_ip, peer_port))
    return peers


def create_url_for_http_tracker(torrent_file, tracker, left, uploaded=0, downloaded=0):
    # TODO: keep the peer_id stable across announces and report real
    # 'left'/'uploaded'/'downloaded' figures
    params = {
        'info_hash': torrent_file.info_hash,
        'peer_id': torrent_file.peer_id,
        'port': 6889,
        'uploaded': uploaded,
        'downloaded': downloaded,
        'left': left,
        'compact': 1,
        'event': 'started'  # hardcoded for now
    }
    return tracker.http_url + '?' + urlencode(params)


def get_ip(ip_address):
    return '.'.join([str(num) for num in ip_address])


def tracker_manager(torrent_file, peer_manager):
    tracker_threads = []
    for tracker in torrent_file.trackers:
        if tracker.tracker_type == 'http':
            url = create_url_for_http_tracker(torrent_file, tracker, torrent_file.length)
            tracker_threads.append(http_request(url, peer_manager))
        elif tracker.tracker_type == 'udp':
            tracker_threads.append(udp_request(tracker, torrent_file, peer_manager))
        else:
            # TODO add support for the dht protocol
            pass
    return tracker_threads
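# --- editor's example (not part of the original module) ----------------------
# Wiring the tracker layer together; the .torrent path is illustrative.
# generate_torrent_file and Peer_manager come from torrent.torrent_file and
# torrent.peer respectively.
if __name__ == "__main__":
    from torrent.torrent_file import generate_torrent_file
    from torrent.peer import Peer_manager

    tf = generate_torrent_file('test_files/file.torrent')   # illustrative path
    pm = Peer_manager()
    threads = tracker_manager(tf, pm)   # one background thread per HTTP/UDP tracker (via @run_async)
    sleep(10)
    print(pm.map_status())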
<file_sep>/tests/test_save.py
from torrent.save import create_single_file, save_torrent_file, get_download_path, file_genrator, write_pieces_to_memory
import pytest
import torrent
from utilities import delete_file, _files_list
import random
import os

DIR_NAME = os.path.dirname(torrent.__path__[0])


def test_create_single_file():
    test_path_name = 'test_name_for_test_save'
    create_single_file(test_path_name, 100)
    file_abs_path = os.path.join(DIR_NAME, 'downloads', test_path_name)
    assert os.path.isfile(file_abs_path)
    delete_file(file_abs_path, True)


def test_create_single_file_size():
    for i in range(100):
        test_path_name = f'test_name_for_size_check{i}'
        test_size = random.randint(1, 10000000) * random.randint(1, 10)
        create_single_file(test_path_name, test_size)
        file_abs_path = os.path.join(DIR_NAME, 'downloads', test_path_name)
        assert os.stat(file_abs_path).st_size == test_size
        delete_file(file_abs_path, True)


def test_create_single_file_0_size():
    with pytest.raises(OSError):
        test_path_name = 'test_name_for_0_size'
        create_single_file(test_path_name, 0)
    delete_file(os.path.join(DIR_NAME, 'downloads', test_path_name))


def test_save_torrent_file():
    for path in _files_list():
        save_torrent_file(path)
        new_torrent_file_path = os.path.join(DIR_NAME, 'torrent_files', os.path.basename(path))
        assert os.path.exists(new_torrent_file_path)
        delete_file(new_torrent_file_path)


def test_save_torrent_file_data_the_same():
    for path in _files_list():
        save_torrent_file(path)
        new_torrent_file_path = os.path.join(DIR_NAME, 'torrent_files', os.path.basename(path))
        with open(path, 'rb') as f:
            base_torrent_file = f.read()
        with open(new_torrent_file_path, 'rb') as f:
            new_torrent_file = f.read()
        assert new_torrent_file == base_torrent_file
        delete_file(new_torrent_file_path)


if __name__ == "__main__":
    test_create_single_file_0_size()
<file_sep>/tests/utilities.py
import requests
import logging
import os
import time   # used by timeit below; the import was missing in the original

logger = logging.getLogger(__name__)

TEST_FILES_DIR = 'test_files/'


def valid_internet(f):
    def web_site_online(url='http://www.google.com/', timeout=5):
        try:
            req = requests.get(url, timeout=timeout)
            # HTTP errors are not raised by default, this statement does that
            req.raise_for_status()
            return True
        except requests.HTTPError:
            logger.error("HTTPError")
        except requests.ConnectionError:
            logger.error("ConnectionError")
        return False

    def wrap(*args, **kwargs):
        if not web_site_online():
            raise AssertionError("no internet connection")
        return f(*args, **kwargs)
    return wrap


def _files_list():
    """
    file.torrent  - multi file, announce, announce-list
    file1.torrent - multi file with announce-list and url-list
    file2.torrent - multi file, announce, announce-list
    file3.torrent - single file, announce, announce-list
    file4.torrent - multi file, announce, announce-list and url-list
    file5.torrent - single file, url-list; contains a lot of extra data (kept for future tests)
    """
    files = ['file.torrent', 'file1.torrent', 'file2.torrent', 'file3.torrent', 'file4.torrent']
    return [TEST_FILES_DIR + path for path in files]


def kill_thread(t):
    # relies on CPython thread internals -- test-only helper
    if t.is_alive():
        t._tstate_lock.release()
        t._stop()


def delete_file(src, assert_exception=False):
    if assert_exception:
        try:
            os.remove(src)
        except FileNotFoundError:
            assert False  # file not found for deletion
    else:
        try:
            os.remove(src)
        except FileNotFoundError:
            pass


def timeit(method):
    def timed(*args, **kw):
        ts = time.time()
        result = method(*args, **kw)
        te = time.time()
        if 'log_time' in kw:
            name = kw.get('log_name', method.__name__.upper())
            kw['log_time'][name] = int((te - ts) * 1000)
        else:
            print('%r %2.2f ms' % (method.__name__, (te - ts) * 1000))
        return result
    return timed
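# --- editor's example (not part of the original module) ----------------------
# How the helpers above are meant to be used in the test modules:
@valid_internet
def _online_smoke_test():
    return requests.get('http://www.google.com/', timeout=5).status_code


@timeit
def _timed_files_list():
    return _files_list()


if __name__ == "__main__":
    print(_online_smoke_test())   # raises AssertionError when offline
    _timed_files_list()           # prints "'_timed_files_list' X.XX ms"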
<file_sep>/avtorrent.py
import sys
import os
from torrent.client import create_client, run_client, close_client


def main(argv):
    if len(argv) != 1:
        print("usage: main.py <torrent file>")
    elif os.path.splitext(argv[0])[-1] != '.torrent':
        print("the file is not a torrent file")
        print("usage: main.py <torrent file>")
    else:
        torrent_file, piece_manager, tracker_threads, peer_manager, loop = create_client(argv[0])
        try:
            loop.run_until_complete(run_client(torrent_file, piece_manager, tracker_threads, peer_manager, loop))
        except KeyboardInterrupt:
            close_client(loop, tracker_threads, piece_manager)


if __name__ == "__main__":
    main(sys.argv[1:])
<file_sep>/torrent/status_interface.py
import tqdm
import asyncio
import sys
from math import ceil

MEGA = 1048576


async def run_tqdm(total_size, block_size, status_func):
    status = status_func()
    if status:
        print("The torrent is resuming, progress reloading")
    pbar = tqdm.tqdm(unit='M', total=ceil(total_size / MEGA), file=sys.stdout,
                     initial=(status * block_size / MEGA))
    while True:
        await asyncio.sleep(1)
        updated_status = status_func()
        if status < updated_status:
            block_to_update = updated_status - status
            pbar.update(block_to_update * block_size / MEGA)
            status = updated_status
<file_sep>/torrent/save.py
from torrent.io import write_to_disc, open_file, close_file, create_file, get_path, copy_file, read_from_disk
from torrent.utilities import get_download_path, get_torrent_files_path
from torrent.torrent_file import generate_torrent_file
from torrent.pieces import create_piece_manager
import os
import logging
import asyncio

logger = logging.getLogger(__name__)


def create_single_file(torrent_name, torrent_size):
    path = get_download_path(torrent_name)
    create_file(path, torrent_size)


def save_torrent_file(src_path):
    dest_path = get_path('torrent_files')
    copy_file(src_path, dest_path)


def file_genrator(torrent_file):
    save_torrent_file(torrent_file.path)
    if torrent_file.multi_file:
        raise NotImplementedError("client does not support multi file torrents")
    else:
        try:
            create_single_file(torrent_file.name, torrent_file.length)
        except OSError:
            # file size is 0
            return False
    logger.info('file created and torrent file saved')
    return True


async def write_pieces_to_memory(torrent_file, done_queue):
    fd = open_file(get_download_path(torrent_file.name))
    piece_size = torrent_file.piece_length
    logger.info(f"the fd num is {fd}")
    while True:
        piece = await done_queue.get()
        if not piece:
            break
        logger.debug(f'piece {piece.index} get to write pieces')
        try:
            write_to_disc(fd, piece.get_blocks(), piece.index * piece_size)
            piece.piece_written()
        except Exception:
            logger.error("write to disc failed", exc_info=True)
            return False
    close_file(fd)
    logger.info("write pieces to memory closed")


def load_torrent_file(path):
    if os.path.isfile(path):
        return generate_torrent_file(path)


def read_piece(fd, piece):
    return read_from_disk(fd, piece.length)


def load(torrent_path):
    torrent_file = load_torrent_file(torrent_path)
    if not torrent_file:
        return False
    file_path = get_download_path(torrent_file.name)
    fd = open_file(file_path)
    piece_manager = create_piece_manager(torrent_file)
    for piece in piece_manager.pieces:
        piece_data = read_piece(fd, piece)
        piece.add_block(piece_data)
        if piece.piece_done():
            piece.piece_written()
    close_file(fd)
    return torrent_file, piece_manager


def torrent_file_exist(path: str):
    torrent_files_path = get_torrent_files_path(os.path.basename(path))
    return os.path.isfile(torrent_files_path)
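# --- editor's example (not part of the original module) ----------------------
# Resuming from a previously saved .torrent; the file name is illustrative.
if __name__ == "__main__":
    saved_path = get_torrent_files_path('file.torrent')
    if torrent_file_exist(saved_path):
        result = load(saved_path)
        if result:
            torrent_file, piece_manager = result
            print(torrent_file.name, piece_manager.pieces_status())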
<file_sep>/torrent/peer.py
from queue import Queue
from time import sleep
import socket
import logging
from enum import Enum

logger = logging.getLogger(__name__)


class PeerStatus(Enum):
    free = 0
    in_progress = 1
    failed = 2


class Peer_manager():
    def __init__(self):
        self.peers = Queue()
        self.peer_list = []
        self.lock = False

    def add_peers(self, peers):
        if not self.lock:
            self.lock = True
            for peer_data in peers:
                # TODO add support for duplicate peers
                if _valid_peer(peer_data[0], peer_data[1]):
                    peer = Peer(peer_data[0], peer_data[1])
                    if peer not in self.peer_list:
                        self.peers.put(peer)
                        self.peer_list.append(peer)
                    else:
                        logger.debug("duplicate peer")   # was the bare `logging` module
                else:
                    logger.debug("peer not valid")
            self.lock = False
        else:
            logger.debug("add peers failed, the peer_manager is locked")
            sleep(5)
            self.add_peers(peers)

    def get_peer(self):
        if not self.lock:
            self.lock = True
            if not self.peers.empty():
                peer = self.peers.get()
                self.lock = False
                return peer
            self.lock = False

    def map_status(self):
        statuses = list(map(lambda peer: peer.status, self.peer_list))
        statuses_dic = dict()
        statuses_dic[PeerStatus.free.name] = statuses.count(PeerStatus.free)
        statuses_dic[PeerStatus.in_progress.name] = statuses.count(PeerStatus.in_progress)
        statuses_dic[PeerStatus.failed.name] = statuses.count(PeerStatus.failed)
        return statuses_dic


class Peer:
    def __init__(self, ip, port):
        self.port = port
        self.ip = ip
        self.status = PeerStatus.free

    def open_connection(self):
        self.status = PeerStatus.in_progress
        return self.ip, self.port

    def close_connection(self):
        # True means the torrent is done, False means a connection problem
        self.status = PeerStatus.failed

    def __hash__(self):
        return hash((self.port, self.ip))

    def __eq__(self, other):
        return (self.__class__ == other.__class__ and
                self.ip == other.ip and
                self.port == other.port)

    def __ne__(self, other):
        return not self.__eq__(other)   # was self.__eq__(self, other), a TypeError

    def __repr__(self):
        return f"{self.ip}:{self.port}:{self.status.name}"


def _valid_peer(ip, port):
    try:
        socket.inet_aton(ip)
    except socket.error:
        return False
    if not 1 <= port <= 65535:
        return False
    return True
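# --- editor's example (not part of the original module) ----------------------
# The manager validates and deduplicates peers before queueing them.
if __name__ == "__main__":
    pm = Peer_manager()
    pm.add_peers([('10.0.0.1', 6881), ('10.0.0.1', 6881), ('bad ip', 70000)])
    print(pm.map_status())   # {'free': 1, 'in_progress': 0, 'failed': 0}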
<file_sep>/tests/test_pieces.py
from torrent.torrent_file import generate_torrent_file
from utilities import _files_list
from torrent.pieces import create_piece_manager, Piece, PieceStatus
import logging
import pytest
import asyncio

logger = logging.getLogger(__name__)


def test_create_piece_manager_pieces_size():
    files = _files_list()
    for torrent_path in files:
        torrent_file = generate_torrent_file(torrent_path)
        pieces_manager = create_piece_manager(torrent_file)
        size = 0
        for piece in pieces_manager.pieces:
            size += piece.length
        assert torrent_file.length == size


def test_create_piece_manager_all_pieces_available():
    files = _files_list()
    for torrent_path in files:
        torrent_file = generate_torrent_file(torrent_path)
        pieces_manager = create_piece_manager(torrent_file)
        ptr = 0
        for piece in pieces_manager.pieces:
            assert torrent_file.pieces[ptr:ptr + 20] == piece.piece_hash
            ptr += 20


def test_create_piece_manager_last_piece():
    files = _files_list()
    for torrent_path in files:
        torrent_file = generate_torrent_file(torrent_path)
        piece_manager = create_piece_manager(torrent_file)
        last_piece = piece_manager.pieces[len(piece_manager.pieces) - 1]
        assert torrent_file.length % torrent_file.piece_length == last_piece.length


def test_all_pieces_in_status():
    files = _files_list()
    for torrent_path in files:
        torrent_file = generate_torrent_file(torrent_path)
        piece_manager = create_piece_manager(torrent_file)
        status_dict = piece_manager.pieces_status()
        assert len(piece_manager.pieces) == sum([i for i in status_dict.values()])


@pytest.mark.asyncio
async def test_piece_manager_get_piece():
    files = _files_list()
    for torrent_path in files:
        torrent_file = generate_torrent_file(torrent_path)
        piece_manager = create_piece_manager(torrent_file)
        piece = await piece_manager.get_piece()
        assert type(piece) == Piece


@pytest.mark.asyncio
async def async_get_piece(piece_manager):
    for _ in range(5):
        piece = await piece_manager.get_piece()
        assert type(piece) == Piece
        assert piece.status == PieceStatus.in_progress
        await asyncio.sleep(1)


@pytest.mark.asyncio
async def gathering(peer_manager):
    await asyncio.gather(*[async_get_piece(peer_manager) for _ in range(10)])


@pytest.mark.asyncio
async def test_get_piece_many_pieces_at_the_same_time():
    files = _files_list()
    for torrent_path in files:
        torrent_file = generate_torrent_file(torrent_path)
        piece_manager = create_piece_manager(torrent_file)
        await gathering(piece_manager)


if __name__ == "__main__":
    test_all_pieces_in_status()
<file_sep>/torrent/peer_protocol.py
"""The Peer Protocol contains 10 message types:

    Handshake    - no message id
    Keep_Alive   - no message id
    Choke        - 0
    Unchoke      - 1
    Interested   - 2
    Uninterested - 3
    Have         - 4
    BitField     - 5
    Request      - 6
    Piece        - 7
    Cancel       - 8

This file contains the object-oriented implementation of every message class
and the data that belongs to it.
"""
import logging
import struct
from bitarray import bitarray

logger = logging.getLogger(__name__)


class Message():
    message_id = None

    def send_bytes(self):
        return None

    @classmethod
    def parse(cls):
        return None


class Handshake(Message):
    const_protocol = 'BitTorrent protocol'.encode('utf-8')
    message_id = None

    def __init__(self, info_hash: bytes, peer_id: str):
        self.info_hash = info_hash
        self.peer_id = self._encode_peer_id(peer_id)

    def send_bytes(self):
        return struct.pack('>B19s8x20s20s', 19, self.const_protocol, self.info_hash, self.peer_id)

    def _encode_peer_id(self, peer_id):
        try:
            return peer_id.encode('utf-8')
        except AttributeError:
            return peer_id

    @classmethod
    def parse(cls, data):
        try:
            _, _, info_hash, peer_id = struct.unpack('>B19s8x20s20s', data)
        except struct.error:
            logger.debug("parse info hash failed")
            return None
        return cls(info_hash, peer_id)


class Keep_Alive(Message):
    message_id = None

    def send_bytes(self):
        # a keep-alive is a bare zero length prefix: four zero bytes,
        # not the ASCII string b'0000' the original returned
        return struct.pack('>I', 0)

    @classmethod
    def parse(cls, data):
        length = struct.unpack(">I", data[:4])[0]
        if length == 0:
            return cls()
        return None


class Choke(Message):
    message_id = 0

    def send_bytes(self):
        return struct.pack('>IB', 1, self.message_id)

    @classmethod
    def parse(cls, data):
        # 4-byte length prefix (1) followed by the message id
        size, message_id = struct.unpack(">IB", data)
        if message_id == cls.message_id:
            return cls()
        return None


class Unchoke(Message):
    message_id = 1

    def send_bytes(self):
        return struct.pack('>IB', 1, self.message_id)

    @classmethod
    def parse(cls, data):
        size, message_id = struct.unpack(">IB", data)
        if message_id == cls.message_id:
            return cls()
        return None


class Interested(Message):
    message_id = 2

    def send_bytes(self):
        return struct.pack('>IB', 1, self.message_id)

    @classmethod
    def parse(cls, data):
        size, message_id = struct.unpack(">IB", data)
        if message_id == cls.message_id:
            return cls()
        return None


class Uninterested(Message):
    message_id = 3

    def send_bytes(self):
        return struct.pack('>IB', 1, self.message_id)

    @classmethod
    def parse(cls, data):
        size, message_id = struct.unpack(">IB", data)   # `peer_response` was undefined here
        if message_id == cls.message_id:
            return cls()
        return None


class Have(Message):
    message_id = 4

    def __init__(self, piece_index):
        self.piece_index = piece_index

    def send_bytes(self):
        raise NotImplementedError("The app doesn't support sending Have messages")

    @classmethod
    def parse(cls, data):
        size, message_id, piece_index = struct.unpack(">IBI", data)
        if message_id == cls.message_id:
            return cls(piece_index)
        return None


class BitField(Message):
    message_id = 5

    def __init__(self, positions):
        self.available_positions = positions

    def send_bytes(self):
        # TODO add support for seeding
        raise NotImplementedError("The app doesn't support sending BitField messages")

    @classmethod
    def _parse_bitfield(cls, hex_data):
        bits = bitarray()
        bits.frombytes(hex_data)
        return bits

    @classmethod
    def parse(cls, data):
        size, message_id = struct.unpack(">IB", data[0:5])
        if message_id == cls.message_id:
            positions = cls._parse_bitfield(data[5:size + 4])
            return cls(positions)
        return None


class Request(Message):
    message_id = 6

    def __init__(self, piece_index, block_offset, block_length):
        self.piece_index = piece_index
        self.block_offset = block_offset
        self.block_length = block_length

    def send_bytes(self):
        return struct.pack('>IBIII', 13, self.message_id, self.piece_index,
                           self.block_offset, self.block_length)

    @classmethod
    def parse(cls):
        # TODO add support for seeding
        raise NotImplementedError("The app doesn't support parsing Request messages")


class Piece(Message):
    message_id = 7

    def __init__(self, piece_index, block_offset, data):
        self.piece_index = piece_index
        self.block_offset = block_offset
        self.block_data = data

    def send_bytes(self):
        # TODO add support for seeding
        raise NotImplementedError("The app doesn't support sending Piece messages")

    @classmethod
    def parse(cls, data):
        size, message_id = struct.unpack(">IB", data[0:5])
        if message_id == cls.message_id:
            piece_index, block_offset = struct.unpack(">II", data[5:13])
            return cls(piece_index, block_offset, data[13:])
        return None


class Cancel(Message):
    # TODO add support for Cancel
    message_id = 8

    def __init__(self):
        raise NotImplementedError("The app doesn't support Cancel messages")

    def send_bytes(self):
        raise NotImplementedError("The app doesn't support sending Cancel messages")

    @classmethod
    def parse(cls):
        raise NotImplementedError("The app doesn't support parsing Cancel messages")
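# --- editor's example (not part of the original module) ----------------------
# Building the opening exchange; the info hash and peer id are illustrative.
if __name__ == "__main__":
    hs = Handshake(b'\x12' * 20, '-PC0001-000000000000')
    assert len(hs.send_bytes()) == 68        # 49 + len("BitTorrent protocol")
    print(Interested().send_bytes())         # b'\x00\x00\x00\x01\x02'
    print(Keep_Alive().send_bytes())         # b'\x00\x00\x00\x00'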
to timeout") return False except Exception: logger.error("writing to server failed with uknown error", exc_info=True) return False async def read(self, message_size=2048, all_data=False): buf = b'' while True: try: res = await asyncio.wait_for(self.reader.read(message_size), timeout=TIMEOUT) except OSError: logger.debug("reading from socket failed") return False except concurrent.futures._base.TimeoutError: logger.warning("reading get to timeout") return False except Exception: logger.info("reading from server failed with uknown error", exc_info=True) if not all_data: return res else: buf += res if res == b'': logger.debug("we didn't go all data from peer") return False if len(buf) >= message_size: return buf <file_sep>/torrent/torrent_file.py from torrent.bencoding import Encoder, Decoder import logging from enum import Enum from hashlib import sha1 from os.path import isfile from urllib.parse import urlsplit from random import randint logger = logging.getLogger(__name__) #####classes##### class TorrentFile(): def __init__(self, struct_dict): self.multi_file = struct_dict['multi_file'] self.path = struct_dict['path'] self.od_data = struct_dict['orderd_content'] # all the data in OrderdDict object self.trackers = struct_dict['trackers'] self.info_hash = struct_dict['info_hash'] if self.multi_file: self.files = struct_dict['files'] self.length = struct_dict['length'] self.piece_length = struct_dict['piece_length'] self.name = struct_dict['name'] if type(self.name) == bytes: self.name = self.name.decode('utf-8') self.pieces = struct_dict['pieces'] self.raw_content = struct_dict['raw'] self.peer_id = f"-PC0001-{''.join([str(randint(0, 9)) for _ in range(12)])}" def __repr__(self): return f"torrent_file({self.name})" class Tracker(): # TODO think if this is the right place hold this object and if the the future the tracker going to """ the object contain 2 types of trackers udp or http trackers url(domain):str, path:str, type:'http' or 'udp, port:'int' """ def __init__(self, tracker_type, url, path, port=80): self.schema = tracker_type self.url = url self.path = path self.port = port self.tracker_type = tracker_type.replace('https', 'http') @property def http_url(self): return f'{self.schema}://{self.url}:{self.port}{self.path}' def __repr__(self): return f'tracker({self.schema}:{self.url}:{self.port}{self.path})' def __hash__(self): return hash(self.url) def __eq__(self, other): return (self.__class__ == other.__class__ and self.url == other.url and self.tracker_type == other.tracker_type and self.port == other.port) def __ne__(self, other): return not self.__eq__(self, other) def valid_torrent_path(path): # the function valid torrent object path if type(path) != str: logger.error(f"the path type is {type(path)} the function vaild_torrent_path get only string") return False if not isfile(path): logger.error(f"the path {path} is not a File") return False if not path.endswith(".torrent"): logger.error(f"the file {path} suffix is not .torrent") return False return True def read_file(path): try: with open(path, 'rb') as f: return f.read() except FileNotFoundError: logger.error("read file problem", exc_info=True) return False def decode_raw_data(raw_data): """ the function get bits object and return the file decode in bencoding format """ try: return Decoder(raw_data).decode() except (RuntimeError, TypeError): logger.error("file content not in bencoding format", exc_info=True) return False def get_info_hash(file_content): # The SHA1 hash of the info dict found in the .torrent # from 
https://markuseliasson.se/article/bittorrent-in-python/ try: info = file_content[b'info'] except KeyError: logget.error("torrent file 'info' header is not exist") return False return sha1(Encoder(info).encode()).digest() def parse_info(info_od): # the function seprate multi file and single file torrent objects if info_od.get(b'files') is not None: return multi_file_parse(info_od) else: return single_file_parse(info_od) def multi_file_parse(info_od): info_data = {'multi_file': True} info_data['name'] = info_od[b'name'] info_data['piece_length'] = info_od[b'piece length'] pieces = info_od[b'pieces'] _validate_division_by_20(pieces) info_data['pieces'] = pieces info_data['files'], info_data['length'] = parse_files_data( info_od[b'files']) if not info_data['files']: logger.error("parsing torrent file failed") return False return info_data def parse_files_data(file_list): # only relevant for multi files because they have 'files' header parsed_list = [] total_length = 0 for od_file in file_list: # OrderdDict file file_data = {} file_data['length'] = od_file[b'length'] total_length += file_data['length'] # the path list consist one or more subdirectorys and the last file is the name of the file file_data['path'] = od_file[b'path'] parsed_list.append(file_data) return parsed_list, total_length def single_file_parse(info_od): info_data = {'multi_file': False} info_data['length'] = info_od[b'length'] info_data['name'] = info_od[b'name'] info_data['piece_length'] = info_od[b'piece length'] pieces = info_od[b'pieces'] _validate_division_by_20(pieces) info_data['pieces'] = pieces return info_data def _validate_division_by_20(pieces): # raise error if pieces in torrent_file is contain piece hash can't be divided by 20 with no remainder if len(pieces) % 20 != 0: raise ValueError("Torrent file came with not valid pieces") return True def create_tracker(url): protocol, netloc, path, _, _ = urlsplit(url) if protocol == 'wss': # temp for deletion in future # TODO support wss protocol return None if protocol not in ['udp', 'http', 'https']: logger.warning(f'the trackers {url} not conatin protocol') return None if ':' not in netloc: url = netloc if protocol == 'http': port = 80 else: port = 443 else: url, port = netloc.split(':') port = int(port) return Tracker(protocol, url, path, port) def get_trackers(od_torrent): announce_list = od_torrent.get(b'announce-list') url_list = od_torrent.get(b'url-list') single_announce = od_torrent.get(b'announce') trackers = [] if announce_list: trackers += [create_tracker(url.decode('utf-8')) for list_of_url in announce_list for url in list_of_url] if single_announce: trackers += [create_tracker((single_announce).decode('utf-8'))] if url_list: # TODO add support to wss tracker type (b'url-list' header) pass return list(set(trackers)) def get_torrent_data(path): """ main function in torrent file the function get file path and return dict with all torrent file data or false res """ # valid type if not valid_torrent_path(path): return False torrent_cont = {} # read file file_content = read_file(path) if not file_content: return False torrent_cont['raw'] = file_content # decode file file_content = decode_raw_data(file_content) torrent_cont['orderd_content'] = file_content if not torrent_cont['orderd_content']: return False torrent_cont['info_hash'] = get_info_hash(file_content) if not torrent_cont['info_hash']: return False # parse_info info_parsed = parse_info(file_content[b'info']) if not info_parsed: return False torrent_cont.update(info_parsed) # trackers trackers = 
get_trackers(file_content) torrent_cont['trackers'] = [tracker for tracker in trackers if tracker] if not torrent_cont['trackers']: logger.warning("the torrent file not contain any valid trackers") return False # path torrent_cont['path'] = path return torrent_cont def generate_torrent_file(path): torrent_data_dict = get_torrent_data(path) if not torrent_data_dict: logger.warning('create TorrentFile object failed') return False return TorrentFile(torrent_data_dict) <file_sep>/torrent/client.py from torrent.torrent_file import generate_torrent_file from torrent.pieces import create_piece_manager from torrent.peer import Peer_manager from torrent.tracker import tracker_manager from torrent.peer_communication import peer_to_peer_communication from torrent.save import torrent_file_exist, load, write_pieces_to_memory, file_genrator from torrent.async_handler import get_loop, run_async_task from torrent.utilities import close_thread from torrent.status_interface import run_tqdm import asyncio import logging logger = logging.getLogger(__name__) MAX_PEERS = 50 SLEEP_BETWEEN_LOOP = 3 def create_or_load_torrent_file(path: str): if torrent_file_exist(path): try: torrent_file, piece_manager = load(path) except TypeError: raise Exception("loading torrent file failed") else: torrent_file = generate_torrent_file(path) file_genrator(torrent_file) piece_manager = create_piece_manager(torrent_file) return torrent_file, piece_manager async def close_client(loop, tracker_threads, piece_manager): await piece_manager.put_in_queue(None) loop.stop() for thread in tracker_threads: close_thread(thread) logger.info("proccess closed successfuly") def create_client(torrent_path: str): loop = get_loop() torrent_file, piece_manager = create_or_load_torrent_file(torrent_path) run_async_task(loop, write_pieces_to_memory(torrent_file, piece_manager.done_queue)) peer_manager = Peer_manager() tracker_threads = tracker_manager(torrent_file, peer_manager) return torrent_file, piece_manager, tracker_threads, peer_manager, loop async def run_client(torrent_file, piece_manager, tracker_threads, peer_manager, loop): timeing = 0 run_async_task(loop, run_tqdm(torrent_file.length, torrent_file.piece_length, piece_manager.get_pieces_written)) while True: timeing += 1 if MAX_PEERS > peer_manager.map_status()['in_progress'] and not peer_manager.peers.empty(): task = peer_to_peer_communication(peer_manager.get_peer(), torrent_file, piece_manager) run_async_task(loop, task) if piece_manager.all_pieces_done(): logger.info("all pieces done") await close_client(loop, tracker_threads, piece_manager) break await asyncio.sleep(SLEEP_BETWEEN_LOOP) <file_sep>/requirements.txt requests==2.22.0 pytest==5.2.1 urllib3==1.25.7 pytest-asyncio==0.10.0 tqdm==4.46.0 bitarray==1.2.1 aiofiles==0.5.0 system-service==0.3 <file_sep>/torrent/io.py import aiofiles import os import shutil import torrent MODULE_PATH = os.path.dirname(torrent.__path__[0]) def write_to_disc(fd, block_data, seek_position): os.lseek(fd, seek_position, os.SEEK_SET) os.write(fd, block_data) def open_file(path): # the function return fd return os.open(path, os.O_RDWR) def close_file(fd): os.close(fd) def create_file(path, size): f = open(path, 'wb') f.seek(size - 1) f.write(b'\0') f.close() def get_path(*args, base=MODULE_PATH): # if you want create_path that not connect to module path just base=False if base: return os.path.join(base, *args) else: return os.path.join(*args) def copy_file(src, dest): shutil.copy(src, dest) def read_from_disk(fd, size, seek_position=False): if 
    if seek_position:
        os.lseek(fd, seek_position, os.SEEK_SET)
    return os.read(fd, size)
<file_sep>/tests/test_torrent_file.py
import pytest
from collections import OrderedDict
from torrent.torrent_file import valid_torrent_path, read_file, decode_raw_data, TorrentFile, generate_torrent_file,\
    parse_info, create_tracker, _validate_division_by_20
from os import listdir
import logging
from utilities import _files_list

logger = logging.getLogger()

TEST_FILES_DIR = 'test_files/'

def test_valid_torrent_file_functional():
    test_files = listdir(TEST_FILES_DIR)
    for test_torrent_file in test_files:
        if test_torrent_file.endswith('.torrent'):
            assert valid_torrent_path(TEST_FILES_DIR + test_torrent_file) != False
        else:
            assert valid_torrent_path(TEST_FILES_DIR + test_torrent_file) == False

def test_valid_torrent_file_file_not_exist():
    test_files = ['../file_not_exist', TEST_FILES_DIR + 'file_that_not_exist']
    for test_torrent_file in test_files:
        assert valid_torrent_path(TEST_FILES_DIR + test_torrent_file) == False

def test_valid_torrent_file_file_bad_input():
    assert valid_torrent_path(['1']) == False
    assert valid_torrent_path(1) == False

def test_read_file_functional():
    test_files = listdir(TEST_FILES_DIR)
    for test_torrent_file in test_files:
        path = TEST_FILES_DIR + test_torrent_file
        if valid_torrent_path(path):
            assert type(read_file(path)) == bytes

def test_decode_raw_data_functional():
    assert decode_raw_data(b'i123e')
    test_files = listdir(TEST_FILES_DIR)
    for test_torrent_file in test_files:
        path = TEST_FILES_DIR + test_torrent_file
        if valid_torrent_path(path):
            decode_file = decode_raw_data(read_file(path))
            assert type(decode_file) == OrderedDict

def test_decode_raw_data_not_bencoding_bytes():
    assert decode_raw_data(b'avi') == False
    assert decode_raw_data(TEST_FILES_DIR + 'bad_file') == False

# TODO: add garbage to an otherwise good, valid torrent file
def test_generate_torrent_file_multi():
    files = _files_list()
    for path in files:
        torrent_file = generate_torrent_file(path)
        od_struct = torrent_file.od_data
        try:
            if od_struct[b'info'][b'files']:
                assert torrent_file.multi_file
        except KeyError:
            assert torrent_file.multi_file == False

def test_generate_torrent_file_path():
    files = _files_list()
    for path in files:
        torrent_file = generate_torrent_file(path)
        assert torrent_file.path == path

def test_generate_torrent_file_trackers():
    files = _files_list()
    for path in files:
        torrent = generate_torrent_file(path)
        list_of_trackers = decode_raw_data(read_file(path))[b'announce-list']
        list_of_trackers = [create_tracker(tracker.decode('utf-8'))
                            for track_list in list_of_trackers for tracker in track_list]
        list_of_trackers = [tracker for tracker in list_of_trackers if tracker]
        for tracker in list_of_trackers:
            assert tracker in torrent.trackers

def test_generate_torrent_not_duplicate_tracker():
    files = _files_list()
    for path in files:
        tracker_list = generate_torrent_file(path).trackers
        for _ in range(len(tracker_list)):
            tracker = tracker_list.pop()
            for t in tracker_list:
                assert t.tracker_type != tracker.tracker_type or t.url != tracker.url \
                    or t.path != tracker.path or t.port != tracker.port

def test_tracker_with_https_schema():
    path = _files_list()[3]
    torrent_file = generate_torrent_file(path)
    for tracker in torrent_file.trackers:
        assert tracker.schema == "https"
        assert tracker.tracker_type == 'http'

def test_tracker_with_http_schema():
    path = _files_list()[2]
    torrent_file = generate_torrent_file(path)
    for tracker in torrent_file.trackers:
        if tracker.tracker_type == 'http':
            assert tracker.schema == "http"

def test_torrent_file_repr():
    files = _files_list()
    for path in files:
        tracker_file = generate_torrent_file(path)
        assert tracker_file

def test_tracker_repr():
    files = _files_list()
    for path in files:
        torrent_file = generate_torrent_file(path)
        for tracker in torrent_file.trackers:
            assert tracker

def test_validate_division_by_20_not_raising_error_with_good_data():
    with open(TEST_FILES_DIR + 'pieces_in_length', 'rb') as f:
        pieces = f.read()
    assert _validate_division_by_20(pieces)

def test_validate_division_by_20_raising_error_with_bad_data():
    with open(TEST_FILES_DIR + 'pieces_not_in_length', 'rb') as f:
        pieces = f.read()
    with pytest.raises(ValueError):
        _validate_division_by_20(pieces)

if __name__ == "__main__":
    pass
<file_sep>/torrent/pieces.py
from enum import Enum
from hashlib import sha1
import logging
import asyncio

logger = logging.getLogger(__name__)

class PieceStatus(Enum):
    free = 0
    in_progress = 1
    done = 2
    written = 3

class Piece:
    """A Piece represents one entry of the pieces list in the torrent file's info dict.
    Every piece has the torrent file's piece length, except the last piece, which may be shorter.
    Each piece comes with a 20-byte SHA1 hash; if the SHA1 of the data received from a peer
    does not match that hash, the piece is not valid."""
    def __init__(self, piece_hash, length, piece_index):
        self.index = piece_index
        self.piece_hash = piece_hash
        self.length = length
        self.blocks = []
        self.status = PieceStatus.free

    def piece_done(self):
        validation = self.piece_hash == sha1(b''.join(self.blocks)).digest()
        if validation:
            logger.debug(f'piece {self.index} is valid')
            self.set_status(PieceStatus.done)
        else:
            logger.info(f'piece {self.index} is not valid')
            self.reset_piece()
        return validation

    def get_blocks(self):
        return b''.join(self.blocks)

    def reset_piece(self):
        logger.debug(f"we lost a piece at index {self.index}")
        self.set_status(PieceStatus.free)
        self.blocks = []

    def add_block(self, block):
        logger.debug(f"block added to piece {self.index}")
        self.blocks.append(block)

    def set_status(self, status: PieceStatus):
        self.status = status

    def piece_written(self):
        self.set_status(PieceStatus.written)
        self.blocks = []

    def __repr__(self):
        return f"{self.index}:{self.status.name}"

class Piece_Manager:
    """The class manages the mapping from each piece to its place in memory/on file.
    For now it is only a list with data."""
    def __init__(self):
        self.pieces = []
        self.lock = False
        self.done_queue = asyncio.Queue()

    def add_piece(self, piece):
        if type(piece) != Piece:
            # bug fix: the message previously referenced an undefined attribute 'self.piece'
            raise Exception(f"adding to Piece_Manager failed - the appended object is not a Piece: {piece}")
        self.pieces.append(piece)
        return True

    def pieces_status(self):
        statuses = list(map(lambda piece: piece.status, self.pieces))
        free = statuses.count(PieceStatus.free)
        done = statuses.count(PieceStatus.done)
        in_progress = statuses.count(PieceStatus.in_progress)
        written = statuses.count(PieceStatus.written)
        return {PieceStatus.free: free, PieceStatus.done: done,
                PieceStatus.in_progress: in_progress, PieceStatus.written: written}

    async def get_piece(self):
        while True:
            if not self.lock:
                self.lock = True
                for piece in self.pieces:
                    if piece.status == PieceStatus.free:
                        piece.set_status(PieceStatus.in_progress)
                        self.lock = False
                        return piece
                # bug fix: release the lock before returning None (the release was dead code)
                self.lock = False
                return None
            else:
                await asyncio.sleep(1)

    async def put_in_queue(self, piece):
        await self.done_queue.put(piece)

    def get_pieces_written(self):
        return self.pieces_status()[PieceStatus.written]

    def all_pieces_done(self):
        pieces_status = self.pieces_status()
        if pieces_status[PieceStatus.written] == len(self.pieces):
            return True

    def __repr__(self):
        return f'piece_manager{self.pieces_status()}'

def create_piece_manager(torrent_file):
    piece_manager = Piece_Manager()
    piece_length = torrent_file.piece_length
    index = 0
    for piece_pointer in range(0, len(torrent_file.pieces) - 20, 20):
        piece_manager.add_piece(Piece(torrent_file.pieces[piece_pointer:piece_pointer + 20],
                                      piece_length, index))
        index += 1
    last_piece = Piece(
        torrent_file.pieces[20 * index: 20 * index + 20],
        # bug fix: when the length divides evenly, plain modulo yielded a zero-length last piece
        torrent_file.length % piece_length or piece_length,
        index
    )
    piece_manager.add_piece(last_piece)
    return piece_manager
<file_sep>/torrent/utilities.py
from queue import Queue
from torrent.io import get_path
from threading import Thread
from collections import OrderedDict
import logging

logger = logging.getLogger(__name__)

def run_async(f, daemon=False):
    def wrapped_f(q, *args, **kwargs):
        ret = f(*args, **kwargs)
        q.put(ret)

    def wrap(*args, **kwargs):
        try:
            if kwargs.get('recursive'):
                return f(*args, **kwargs)
        except KeyError:
            pass
        q = Queue()
        t = Thread(target=wrapped_f, args=(q, ) + args, kwargs=kwargs)
        t.daemon = daemon
        t.start()
        t.result_queue = q
        return t
    return wrap

def get_download_path(file_name):
    return get_path('downloads', file_name)

def get_torrent_files_path(file_name):
    return get_path('torrent_files', file_name)

def handle_exception(func, error_message, error_type=Exception, *args, **kwargs):
    try:
        return func(*args, **kwargs)
    except error_type:
        logger.debug(error_message)
        return False

def close_thread(thread):
    if thread.is_alive():
        thread._tstate_lock.release()
        thread._stop()
<file_sep>/torrent/async_handler.py
import asyncio
import logging

logger = logging.getLogger(__name__)

def get_loop():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    return loop

def run_async_task(loop, func):
    return loop.create_task(func)
<file_sep>/README.md
# AVTorrent
A Python implementation of a BitTorrent client
## Getting Started
Have Python 3.6.8+
```
pip install -r requirements.txt
```
To download a torrent -
```
python avtorrent.py [torrentfile.torrent]
```
## Running the tests
```
pytest tests/
```
## Authors
* **<NAME>** - [avrham408](https://github.com/avrham408)
## Acknowledgments
* Hat tip to anyone whose code was used
* Inspiration
* etc
<file_sep>/tests/test_integration.py
from torrent.save import write_pieces_to_memory,
file_genrator from torrent.torrent_file import generate_torrent_file from torrent.pieces import create_piece_manager, PieceStatus import torrent import pytest import asyncio import struct from utilities import _files_list import logging import os logger = logging.getLogger(__name__) # help functions def delete_file_for_test_if_exsit(torrent_file): base_path = os.path.dirname(torrent.__path__[0]) path_torrent_file = os.path.join(base_path, 'torrent_files', os.path.basename(torrent_file.path)) path_download_file = os.path.join(base_path, 'downloads', torrent_file.name) try: os.remove(path_torrent_file) except FileNotFoundError: pass try: os.remove(path_download_file) except FileNotFoundError: pass def create_test_data_for_piece(piece): return struct.pack('>B', piece.index % 255) * piece.length def full_piece_with_bytes(piece): data = create_test_data_for_piece(piece) for _ in range(int(piece.length / (2 ** 14))): piece.add_block(data) piece.status = PieceStatus.done # tests def check_all_data_wirtten_in_right_place(torrent_file, piece_manager): base_path = os.path.dirname(torrent.__path__[0]) path_download_file = os.path.join(base_path, 'downloads', torrent_file.name) assert os.stat(path_download_file).st_size == torrent_file.length with open(path_download_file, 'rb') as f: for piece in piece_manager.pieces: f.read(piece.length) == create_test_data_for_piece(piece) @pytest.mark.asyncio async def test_write_file(): path = _files_list()[3] #only not multi file in _file_list torrent_file = generate_torrent_file(path) file_genrator(torrent_file) delete_file_for_test_if_exsit(torrent_file) file_genrator(torrent_file) piece_manager = create_piece_manager(torrent_file) asyncio.gather(write_pieces_to_memory(torrent_file, piece_manager.done_queue)) for piece in piece_manager.pieces: full_piece_with_bytes(piece) await piece_manager.put_in_queue(piece) piece.piece_written() await piece_manager.done_queue.put(None) check_all_data_wirtten_in_right_place(torrent_file, piece_manager) #close process delete_file_for_test_if_exsit(torrent_file) <file_sep>/tests/test_tracker.py from utilities import valid_internet, _files_list, kill_thread from torrent.tracker import tracker_manager, udp_request, http_request, create_url_for_http_tracker from torrent.peer import Peer_manager from torrent.torrent_file import generate_torrent_file import logging from time import sleep from random import choice from threading import Thread from urllib.parse import urlparse, parse_qs logger = logging.getLogger(__name__) @valid_internet def test_tracker_manager_tracker_alive(): files = _files_list() for torrent_path in files: log = logging torrent_file = generate_torrent_file(torrent_path) peer_manager = Peer_manager() tracker_threads = tracker_manager(torrent_file, peer_manager) sleep(10) for thread in tracker_threads: if thread.result_queue.empty(): assert thread.is_alive() else: assert not thread.is_alive() for thread in tracker_threads: kill_thread(thread) @valid_internet def test_udp_request_good_tracker(): files = _files_list() torrent_file = generate_torrent_file(files[2]) peer_manager = Peer_manager() for tracker in torrent_file.trackers: if tracker.url == 'exodus.desync.com': valid_tracker = tracker thread = udp_request(valid_tracker, torrent_file, peer_manager) sleep(30) assert peer_manager.peers.empty() == False kill_thread(thread) @valid_internet def test_udp_request_tracker_with_no_response(): path = _files_list()[2] torrent_file = generate_torrent_file(path) for tracker in torrent_file.trackers: if tracker.url == 
"tracker.yify-torrents.com" and tracker.tracker_type == 'udp': no_res_tracker = tracker peer_manager = Peer_manager() thread = udp_request(no_res_tracker, no_res_tracker, peer_manager) assert thread.is_alive() assert peer_manager.peers.empty() sleep(30) assert thread.is_alive() assert peer_manager.peers.empty() kill_thread(thread) @valid_internet def test_udp_request_tracker_tracker_from_another_torrent(): files = _files_list() for tracker in generate_torrent_file(files[4]).trackers: if tracker.url == 'explodie.org': tracker_other_torrent = tracker torrent_file = generate_torrent_file(files[0]) peer_manager = Peer_manager() thread = udp_request(tracker_other_torrent, torrent_file, peer_manager) sleep(1) assert not thread.is_alive() def test_create_url_for_http_tracker(): path = _files_list()[3] torrent_file = generate_torrent_file(path) peer_manager = Peer_manager() for tracker in torrent_file.trackers: #TODO add asser for query string url = create_url_for_http_tracker(torrent_file, tracker, torrent_file.length) schema, netloc, path, _, query, _ = urlparse(url) base_url = schema + '://' + netloc + path assert base_url == 'https://ipv6.torrent.ubuntu.com:443/announce' or base_url == 'https://torrent.ubuntu.com:443/announce' @valid_internet def test_http_request_good_tracker(): #TODO the test fail if you run the untest more then one time in a row fix it... path = _files_list()[3] torrent_file = generate_torrent_file(path) peer_manager = Peer_manager() for tracker in torrent_file.trackers: if tracker.url == 'torrent.ubuntu.com': good_tracker = tracker url = create_url_for_http_tracker(torrent_file, tracker,torrent_file.length) thread = http_request(url, peer_manager) sleep(15) assert thread.is_alive() empty_status = peer_manager.peers.empty() kill_thread(thread) assert empty_status == False #first kill the thread the assert chek otherwise the all the pytest stuck <file_sep>/torrent/log_config.py from logging.config import dictConfig import torrent PATH = torrent.__path__[0] + '/log/' LOGGING_CONFIG = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'simple': { "format": "%(asctime)s - %(name)s - %(levelname)s - %(lineno)d %(message)s\t" }, }, 'handlers': { 'default': { 'level': 'ERROR', 'formatter': 'simple', 'class': 'logging.StreamHandler', 'stream': 'ext://sys.stdout', # Default is stde }, "file_handler": { "class": "logging.handlers.RotatingFileHandler", "level": "WARNING", "formatter": "simple", "filename": fr"{PATH}main.log", "maxBytes": 1048576, "backupCount": 20, "encoding": "utf8" } }, 'loggers': { "": { "level": "WARNING", "handlers": ["default", "file_handler"] } } } dictConfig(LOGGING_CONFIG) <file_sep>/torrent/peer_communication.py from torrent.networking import PeerConnection from torrent.peer_protocol import Handshake, Keep_Alive, Interested, \ Unchoke, Choke, Piece, Request, BitField, Have, Uninterested, Cancel import logging import asyncio import struct BLOCK_SIZE = 2**14 logger = logging.getLogger(__name__) RESTART_PEER_TIME = 60 * 7.5 async def read_message(peer_connection): pack = await peer_connection.read(5) if not pack: return False if len(pack) == 4: return Keep_Alive() try: message_size, message_type = struct.unpack(">IB", pack) except struct.error: logger.info("parse message and size failed") return False if message_size > 1: try: pack += await peer_connection.read(message_size - 1, all_data=True) except TypeError: return False return message_type_switch(message_type).parse(pack) def message_type_switch(message_type): if message_type == 0: return 
Choke if message_type == 1: return Unchoke if message_type == 2: return Interested if message_type == 3: return Uninterested if message_type == 4: return Have if message_type == 5: return BitField if message_type == 6: return Request if message_type == 7: return Piece if message_type == 8: return Cancel async def open_connection(peer): peer_connection = await PeerConnection.open_connection(*peer.open_connection()) if not peer_connection: return False return peer_connection def create_handshake(info_hash, peer_id): return Handshake(info_hash, peer_id) async def send_handshake(peer_connection, handshake): if await peer_connection.write(handshake.send_bytes()): return True return False async def get_and_parse_handshake(peer_connection, recursive=True): res = await peer_connection.read(68, all_data=True) if res: return Handshake.parse(res) elif recursive is True: await asyncio.sleep(10) return await get_and_parse_handshake(peer_connection, False) async def get_and_parse_bitfield(peer_connection): # TODO handle Have messages bitfield = await read_message(peer_connection) if type(bitfield) != BitField: logger.debug(f"we got from peer in bitfield {bitfield}") return False else: return bitfield async def send_intersted(peer_connection): intersted = Interested() if await peer_connection.write(intersted.send_bytes()): return True return False async def get_choking_status(peer_connection): status = await read_message(peer_connection) if type(status) == Choke: # TODO handle restart peer communication logger.debug("peer choke") await asyncio.sleep(RESTART_PEER_TIME) return False if type(status) == Unchoke: return True return False async def get_pieces(peer_connection, piece_manager, bitfield): while True: piece = await piece_manager.get_piece() if not piece: # all the pieces in proccess return False await request_all_blocks_for_piece(peer_connection, piece) if not piece.piece_done(): # TODO handle restart logger.info("piece requests from peer failed") return False await piece_manager.put_in_queue(piece) logger.debug('piece add to queue') async def request_all_blocks_for_piece(peer_connection, piece): offset = 0 while offset < piece.length: if not await request_piece(peer_connection, piece.index, offset, BLOCK_SIZE): return False piece_message = await read_message(peer_connection) if type(piece_message) is not Piece: return False piece.add_block(piece_message.block_data) offset += len(piece_message.block_data) async def request_piece(peer_connection, piece_index, offset, block_size): request_message = Request(piece_index, offset, block_size) if await peer_connection.write(request_message.send_bytes()): return True return False def close_connection(peer): peer.close_connection() return False async def restart(peer, torrent_file, piece_manager): return await peer_to_peer_communication(peer, torrent_file, piece_manager) async def peer_to_peer_communication(peer, torrent_file, piece_manager): peer_connection = await open_connection(peer) if not peer_connection: logger.info("open connections failed") return close_connection(peer) logger.debug("connection open") handshake = create_handshake(torrent_file.info_hash, torrent_file.peer_id) if not await send_handshake(peer_connection, handshake): logger.info("send handshake failed") return close_connection(peer) logger.debug("handshake pass") if not await get_and_parse_handshake(peer_connection): logger.info("res handshake failed") return close_connection(peer) bitfield = await get_and_parse_bitfield(peer_connection) if not bitfield: logger.info("bitfield failed") 
        return close_connection(peer)
    logger.debug("bitfield pass")
    if not await send_intersted(peer_connection):
        logger.info("sending interested message failed")
        return close_connection(peer)
    logger.debug("send interested pass")
    if not await get_choking_status(peer_connection):
        logger.info("peer choking status returned False")
        return close_connection(peer)
    logger.debug("get choke status pass")
    if not await get_pieces(peer_connection, piece_manager, bitfield):
        return close_connection(peer)
    logger.debug("get pieces pass")  # bug fix: this previously repeated the choke-status log line
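The modules above compose into a complete download pipeline: `create_client` builds the event loop, piece manager, peer manager and tracker threads, and `run_client` keeps scheduling `peer_to_peer_communication` sessions until every piece is written. A minimal driver sketch under that assumption - the `avtorrent.py` entry point the README mentions is not included in this dump, so the argument handling here is illustrative:

```python
# Hypothetical entry-point sketch; create_client, run_client and
# run_async_task are the real functions shown above, the rest is assumed.
import sys
from torrent.client import create_client, run_client
from torrent.async_handler import run_async_task

def main(torrent_path):
    torrent_file, piece_manager, tracker_threads, peer_manager, loop = create_client(torrent_path)
    # run_client stops the loop itself (close_client calls loop.stop()),
    # so run_forever() returns once all pieces are written to disk.
    run_async_task(loop, run_client(torrent_file, piece_manager, tracker_threads, peer_manager, loop))
    loop.run_forever()

if __name__ == '__main__':
    main(sys.argv[1])
```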
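The repo that follows is a separate project: a Laravel 8 + vue-element-admin RBAC admin panel. Its `Auth` middleware resolves a request token to a cached admin via Redis, and the `Permission` middleware then checks the route path against the admin's cached permission list. A rough Python sketch of that lookup, for orientation only - the key names mirror `AdminModel`, while `r` and the return values are illustrative assumptions, not the actual PHP API:

```python
# Sketch of the token -> permission check the PHP middleware performs;
# assumes r = redis.Redis(decode_responses=True) (hypothetical client).
def check_request(r, token, path):
    admin_id = r.get(f'admin:token:{token}')          # Auth middleware
    if admin_id is None:
        return 'login required'
    perms = set(r.hgetall(f'admin:permission:{admin_id}').values())
    if '*' in perms or path in perms:                 # Permission middleware
        return 'allowed'
    return 'forbidden'
```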
<file_sep><?php use Illuminate\Database\Migrations\Migration; use Illuminate\Database\Schema\Blueprint; use Illuminate\Support\Facades\DB; use Illuminate\Support\Facades\Schema; class CreateMenuTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('menu', function (Blueprint $table) { $table->engine = 'InnoDB'; $table->increments('id'); $table->char('name', 32)->default('')->comment('角色名称'); $table->char('api', 50)->default('')->comment('权限规则'); $table->integer('parent_id')->default(0)->comment('上级ID'); $table->tinyInteger('is_subordinate')->default(1)->comment('是否有下级:1=有,2=无'); $table->integer('sort')->default(50)->comment('排序数值越大越靠前'); $table->timestamps(); }); $prefix = DB::getConfig('prefix'); DB::statement("ALTER TABLE `" . $prefix . "menu` comment '菜单权限表'"); } /** * Reverse the migrations. * * @return void */ public function down() { Schema::dropIfExists('menu'); } } <file_sep><?php namespace App\Http\Requests; use Illuminate\Contracts\Validation\Validator; use Illuminate\Http\Exceptions\HttpResponseException; trait CommonRequest { protected function failedValidation(Validator $validator) { $errors = $validator->errors(); throw (new HttpResponseException(error(40001, $errors->first()))); } } <file_sep><?php namespace App\Http\Controllers\Manage; use App\Models\MenuModel; use Illuminate\Http\Request; use Illuminate\Support\Facades\DB; class MenuController extends BaseController { public function __construct() { $this->model = new MenuModel(); } /** * 获取权限菜单列表 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function lists(Request $request) { $list = (new MenuModel())->getList(); return success(['items' => $list]); } /** * 新增权限菜单 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function add(Request $request) { $name = (string)$request->input('name', ''); $api = (string)$request->input('api', ''); $sort = (integer)$request->input('sort', 50); $parentId = (integer)$request->input('parent_id', 0); $model = new MenuModel(); $model->name = $name; $model->api = $api; $model->sort = $sort; $model->parent_id = $parentId; $model->is_subordinate = 2; if ($model->save()) { if ($parentId != 0) { $parent = MenuModel::find($parentId); $parent->is_subordinate = 1; $parent->save(); } return success(); } else { return error(); } } /** * 编辑权限菜单 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function edit(Request $request) { $id = (integer)$request->input('id', ''); $name = (string)$request->input('name', ''); $api = (string)$request->input('api', ''); $sort = (integer)$request->input('sort', 50); $model = MenuModel::find($id); $model->name = $name; $model->api = $api; $model->sort = $sort; if ($model->save()) { return success(); } else { return error(); } } /** * 获取权限菜单详情 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function details(Request $request) { $id = $request->input('id', 0); $field = array( 'id', 'name', 'api', 'sort', 'parent_id', 'is_subordinate' ); $data = MenuModel::where('id', $id)->select($field)->first()->toArray(); $data['parent_name'] = (string)MenuModel::where('id', $data['parent_id'])->value('name'); return success(['item' => $data]); } /** * 删除权限菜单详情 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function del(Request $request) { $id = $request->input('id', 0); DB::beginTransaction(); $menu = MenuModel::where('id', $id)->first(); if (!$menu->delete()) { DB::rollBack(); return 
error('删除失败'); } $count = (int)MenuModel::where('parent_id', $menu->parent_id)->count(); if ($count == 0) { $parent = MenuModel::find($menu->parent_id); $parent->is_subordinate = 2; if (!$parent->save()) { DB::rollBack(); return error('删除失败'); } } DB::commit(); return success(); } } <file_sep><?php namespace App\Models; class LogsModel extends BaseModel { //表名 protected $table = 'logs'; } <file_sep><?php namespace App\Models; use Illuminate\Database\Eloquent\Model; class BaseModel extends Model { protected $casts = [ 'created_at' => 'datetime:Y-m-d H:i:s', 'updated_at' => 'datetime:Y-m-d H:i:s', ]; public static function page($model) { $limit = request()->input('limit', 20); $res = json_decode($model->paginate($limit)->toJson(), true); return array( 'total' => $res['total'], 'page' => $res['current_page'], 'limit' => intval($res['per_page']), 'page_count' => $res['last_page'], 'items' => $res['data'] ); unset($res); return $data; } protected function serializeDate(\DateTimeInterface $date) { return $date->format('Y-m-d H:i:s'); } } <file_sep>import request from '@/utils/request' export function role(params) { return request({ url: '/public/role', method: 'get', params }) } export function menuTree(params) { return request({ url: '/public/menu_tree', method: 'get', params }) } <file_sep><?php namespace App\Models; use Illuminate\Support\Facades\Redis; class AdminModel extends BaseModel { //表名 protected $table = 'admin'; //存储token的redis键 private $adminTokenRedisKey = 'admin:token:'; //存储管理员信息的redis键 private $adminInfoRedisKey = 'admin:info:'; //存储管理员权限的redis键 private $adminPermissionRedisKey = 'admin:permission:'; /** * 依据用户名查询用户 * @param $username * @return mixed */ public function getRowByUsername($username) { $admin = $this->where('username', $username)->first(); return $admin ? $admin->toArray() : []; } /** * 依据用户token获取用户信息 * @param $token * @return array|mixed */ public function getRowByToken($token) { $id = Redis::get($this->adminTokenRedisKey . $token); if ($id) { return $this->getInfo($id); } else { return []; } } /** * 验证密码 * @param $admin * @param $password * @return bool true=正确,false=不正确 */ public function verifyPassword($admin, $password) { return $admin['password'] === md5(md5($password) . $admin['created_at']); } /** * 修改密码 * @param $admin * @param $password * @return mixed */ public function editPassword($admin, $password) { return $this->where('id', $admin['id'])->update([ 'password' => md5(md5($password) . $admin['created_at']) ]); } /** * 设置token * @param $id * @param $token */ public function setToken($id, $token) { Redis::set($this->adminTokenRedisKey . $token, $id, 7200); } /** * 删除token * @param $token */ public function delToken($token) { Redis::del($this->adminTokenRedisKey . $token); } /** * 设置管理员信息 * @param $admin */ public function setInfo($admin) { Redis::hMSet($this->adminInfoRedisKey . $admin['id'], $admin); } /** * 获取管理员信息 * @param $id * @return mixed */ public function getInfo($id) { return Redis::hGetAll($this->adminInfoRedisKey . $id); } /** * 删除管理员信息 * @param $id * @return mixed */ public function delInfo($id) { Redis::del($this->adminInfoRedisKey . $id); } /** * 更新最后登录时间 * @param $id * @param int $time */ public function updateLoginTime($id, $time = 0) { $time = $time ?: time(); $this->where('id', $id)->update(['login_time' => $time]); } /** * 获取管理员的权限 * @param $id * @return mixed */ public function getPermission($id) { $permission = Redis::hGetAll($this->adminPermissionRedisKey . 
$id); if (empty($permission)) { $menus = AdminRoleModel::join('role', 'admin_role.role_id', '=', 'role.id') ->where('admin_role.admin_id', $id) ->pluck('role.menus')->toArray(); $menusStr = join(',', $menus); $menusArr = explode(',', $menusStr); $menusArr = array_unique($menusArr); $permission = MenuModel::whereIn('id', $menusArr)->where('api', '<>', '')->pluck('api')->toArray(); Redis::hMSet($this->adminPermissionRedisKey . $id, $permission); } return $permission; } /** * 清除缓存的管理员权限 * @param $id */ public function delPermission($id) { Redis::del($this->adminPermissionRedisKey . $id); } } <file_sep><?php namespace App\Models; class RoleModel extends BaseModel { //表名 protected $table = 'role'; } <file_sep><?php namespace App\Exceptions; use Illuminate\Foundation\Exceptions\Handler as ExceptionHandler; use Throwable; class Handler extends ExceptionHandler { /** * A list of the exception types that are not reported. * * @var array */ protected $dontReport = [ // ]; /** * A list of the inputs that are never flashed for validation exceptions. * * @var array */ protected $dontFlash = [ 'current_password', 'password', 'password_confirmation', ]; /** * Register the exception handling callbacks for the application. * * @return void */ public function register() { $this->reportable(function (Throwable $e) { // }); } public function render($request, Throwable $exception) { if ($request->is('manage/*')) { $response = []; $error = $this->convertExceptionToResponse($exception); $response['status'] = $error->getStatusCode(); $response['msg'] = 'something error'; if (config('app.debug')) { $response['msg'] = empty($exception->getMessage()) ? 'something error' : $exception->getMessage(); if ($error->getStatusCode() >= 500) { if (config('app.debug')) { $response['trace'] = $exception->getTraceAsString(); $response['code'] = $exception->getCode(); } } } $response['data'] = []; return response()->json($response, $error->getStatusCode()); } else { return parent::render($request, $exception); } } } <file_sep><?php use Illuminate\Database\Migrations\Migration; use Illuminate\Database\Schema\Blueprint; use Illuminate\Support\Facades\DB; use Illuminate\Support\Facades\Schema; class CreateAdminRoleTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('admin_role', function (Blueprint $table) { $table->engine = 'InnoDB'; $table->increments('id'); $table->integer('admin_id')->default(0)->comment('管理员id'); $table->integer('role_id')->default(0)->comment('角色id'); $table->timestamps(); }); $prefix = DB::getConfig('prefix'); DB::statement("ALTER TABLE `" . $prefix . "admin_role` comment '管理员角色关系表'"); } /** * Reverse the migrations. 
* * @return void */ public function down() { Schema::dropIfExists('admin_role'); } } <file_sep><?php Route::post('admin/login', 'Manage\AdminController@login'); Route::post('admin/logout', 'Manage\AdminController@logout'); Route::middleware(['admin'])->group(function () { // 登录成功获取个人信息 Route::get('admin/info', 'Manage\AdminController@info'); // 修改密码 Route::post('admin/edit_password', 'Manage\AdminController@editPassword'); // 日志列表 Route::get('logs/lists', 'Manage\LogsController@lists'); // 人员管理 Route::get('admin/details', 'Manage\AdminController@details')->middleware('permission:admin/edit|admin/add'); Route::get('admin/lists', 'Manage\AdminController@lists')->middleware(['permission', 'manage.log:人员管理-查看']); Route::post('admin/add', 'Manage\AdminController@add')->middleware(['permission', 'manage.log:人员管理-新增']); Route::post('admin/edit', 'Manage\AdminController@edit')->middleware(['permission', 'manage.log:人员管理-编辑']); Route::post('admin/del', 'Manage\AdminController@del')->middleware(['permission', 'manage.log:人员管理-删除']); // 角色管理 Route::get('role/details', 'Manage\RoleController@details')->middleware('permission:role/edit|role/add'); Route::get('role/lists', 'Manage\RoleController@lists')->middleware(['permission', 'manage.log:角色管理-查看']); Route::post('role/add', 'Manage\RoleController@add')->middleware(['permission', 'manage.log:角色管理-新增']); Route::post('role/edit', 'Manage\RoleController@edit')->middleware(['permission', 'manage.log:角色管理-编辑']); Route::post('role/del', 'Manage\RoleController@del')->middleware(['permission', 'manage.log:角色管理-删除']); // 权限管理 Route::get('menu/details', 'Manage\MenuController@details')->middleware('permission:menu/edit|menu/add'); Route::get('menu/lists', 'Manage\MenuController@lists')->middleware(['permission', 'manage.log:权限管理-查看']); Route::post('menu/add', 'Manage\MenuController@add')->middleware(['permission', 'manage.log:权限管理-新增']); Route::post('menu/edit', 'Manage\MenuController@edit')->middleware(['permission', 'manage.log:权限管理-编辑']); Route::post('menu/sort', 'Manage\MenuController@sort')->middleware(['permission', 'manage.log:权限管理-编辑-排序']); Route::post('menu/del', 'Manage\MenuController@del')->middleware(['permission', 'manage.log:权限管理-删除']); Route::get('public/role', 'Manage\PublicController@role'); Route::get('public/menu_tree', 'Manage\PublicController@menuTree'); }); <file_sep><?php use Illuminate\Database\Migrations\Migration; use Illuminate\Database\Schema\Blueprint; use Illuminate\Support\Facades\DB; use Illuminate\Support\Facades\Schema; class CreateRoleTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('role', function (Blueprint $table) { $table->engine = 'InnoDB'; $table->increments('id'); $table->char('name', 32)->default('')->comment('角色名称'); $table->text('menus')->comment('菜单权限'); $table->tinyInteger('status')->default(1)->comment('状态:1=正常,2=禁用'); $table->timestamps(); }); $prefix = DB::getConfig('prefix'); DB::statement("ALTER TABLE `" . $prefix . "role` comment '角色表'"); } /** * Reverse the migrations. * * @return void */ public function down() { Schema::dropIfExists('role'); } } <file_sep><?php namespace App\Rules; use Illuminate\Contracts\Validation\Rule; class MobilePhone implements Rule { /** * Create a new rule instance. * * @return void */ public function __construct() { // } /** * Determine if the validation rule passes. 
* * @param string $attribute * @param mixed $value * @return bool */ public function passes($attribute, $value) { return (bool)preg_match('/^(13[0-9]|14[01456879]|15[0-35-9]|16[2567]|17[0-8]|18[0-9]|19[0-35-9])\d{8}$/', $value); } /** * Get the validation error message. * * @return string */ public function message() { return ':attribute 不是合法的手机号码'; } } <file_sep><?php namespace App\Http\Controllers\Manage; use App\Http\Controllers\Controller; use App\Models\MenuModel; use App\Models\RoleModel; use Illuminate\Http\Request; class PublicController extends Controller { /** * 获取所有角色 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function role(Request $request) { $status = $request->input('status', 0); $field = [ 'id', 'name' ]; $model = RoleModel::select($field); if ($status) { $model->where('status', $status); } $items = $model->get(); return success(['items' => $items ? $items->toArray() : []]); } /** * 获取菜单tree * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function menuTree(Request $request) { $tree = (new MenuModel())->getTree(); return success(['items' => $tree]); } } <file_sep><?php use Illuminate\Database\Migrations\Migration; use Illuminate\Database\Schema\Blueprint; use Illuminate\Support\Facades\DB; use Illuminate\Support\Facades\Schema; class CreateAdminTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('admin', function (Blueprint $table) { $table->engine = 'InnoDB'; $table->increments('id'); $table->char('name', 32)->default('')->comment('姓名'); $table->char('username', 32)->default('')->comment('用户名'); $table->char('password', 32)->default('')->comment('密码'); $table->char('phone', 11)->default('')->comment('手机号'); $table->integer('login_time')->default(0)->comment('上次登录时间'); $table->tinyInteger('status')->default(1)->comment('状态:1=正常,2=禁用'); $table->timestamps(); }); $prefix = DB::getConfig('prefix'); DB::statement("ALTER TABLE `" . $prefix . "admin` comment '管理员表'"); } /** * Reverse the migrations. 
* * @return void */ public function down() { Schema::dropIfExists('admin'); } } <file_sep><?php namespace App\Http\Controllers\Manage; use App\Http\Controllers\Controller; use Illuminate\Http\Request; class BaseController extends Controller { protected $model = null; /** * 排序 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function sort(Request $request) { $id = (int)$request->input('id'); $sort = (int)$request->input('sort', 0); $data = $this->model->find($id); if (empty($data)) { return error('数据不存在'); } $data->sort = $sort; if ($data->save()) { return success(); } else { return error('编辑排序失败'); } } } <file_sep><?php namespace App\Http\Controllers\Manage; use App\Http\Controllers\Controller; use App\Models\AdminModel; use App\Models\AdminRoleModel; use App\Models\BaseModel; use App\Models\MenuModel; use App\Models\RoleModel; use Illuminate\Http\Request; class RoleController extends Controller { /** * 获取角色列表 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function lists(Request $request) { $status = $request->input('status', 0); $name = $request->input('name', ''); $sort = $request->input('sort', ''); $field = array( 'id', 'name', 'status', 'created_at' ); $model = RoleModel::select($field); if ($status) { $model->where('status', $status); } if ($name) { $model->where('name', 'like', "%$name%"); } if ($sort == '+id') { $sort = 'asc'; } else { $sort = 'desc'; } $model->orderBy('id', $sort); $data = BaseModel::page($model); return success($data); } /** * 新增角色 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function add(Request $request) { $name = $request->input('name', ''); $status = $request->input('status', 1); $menus = (array)$request->input('menus', []); if (empty($menus)) { return error('权限必选'); } $menus = (new MenuModel())->findAllNode($menus); $model = new RoleModel(); $model->name = $name; $model->menus = join(',', $menus); $model->status = $status; if ($model->save()) { return success(); } else { return error(); } } /** * 获取角色详情 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function details(Request $request) { $id = $request->input('id', 0); $field = array( 'id', 'name', 'menus', 'status' ); $data = RoleModel::where('id', $id)->select($field)->first()->toArray(); $data['menus'] = (new MenuModel())->deleteAllNode(explode(',', $data['menus'])); return success(['item' => $data]); } /** * 编辑角色 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function edit(Request $request) { $id = $request->input('id', 0); if ($id === 1) { return error('角色不可编辑'); } $name = $request->input('name', ''); $status = $request->input('status', 1); $menus = (array)$request->input('menus', []); if (empty($menus)) { return error('权限必选'); } $menus = (new MenuModel())->findAllNode($menus); $model = RoleModel::where('id', $id)->first(); if (empty($model)) { return error('角色不存在'); } $model->menus = join(',', $menus); $model->name = $name; $model->status = $status; if ($model->save()) { //查询所有该角色的管理员 $adminIds = AdminRoleModel::where('role_id', $id)->pluck('admin_id'); foreach ($adminIds as $adminId) { //清除以缓存的管理员权限 (new AdminModel())->delPermission($adminId); } return success(); } else { return error(); } } /** * 删除角色 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function del(Request $request) { $id = $request->input('id', 0); if ($id === 1) { return error('角色不可删除'); } $model = RoleModel::where('id', $id)->first(); if (empty($model)) { return 
error('角色不存在'); } $count = AdminRoleModel::where('role_id', $id)->count(); if ($count > 0) { return error('部分用户正在使用该角色,无法删除!'); } if ($model->delete()) { return success(); } else { return error(); } } } <file_sep><?php use Illuminate\Database\Migrations\Migration; use Illuminate\Database\Schema\Blueprint; use Illuminate\Support\Facades\DB; use Illuminate\Support\Facades\Schema; class CreateLogsTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('logs', function (Blueprint $table) { $table->engine = 'InnoDB'; $table->increments('id'); $table->char('path', 100)->default('')->comment('访问路径'); $table->string('info', 100)->default('')->comment('操作描述'); $table->longText('request')->comment('请求参数'); $table->longText('response')->comment('响应结果'); $table->char('ip', 15)->default('')->comment('IP地址'); $table->char('token', 50)->default('')->comment('令牌'); $table->integer('admin_id')->default(0)->comment('管理员ID'); $table->integer('request_time')->default(0)->comment('请求时间'); $table->timestamps(); }); $prefix = DB::getConfig('prefix'); DB::statement("ALTER TABLE `" . $prefix . "logs` comment '操作日志表'"); } /** * Reverse the migrations. * * @return void */ public function down() { Schema::dropIfExists('logs'); } } <file_sep><?php namespace App\Http\Middleware\Manage; use App\Models\AdminModel; use Closure; class Permission { /** * 验证管理员是否有权限操作,该中间件需用在Auth中间件后面 * @param $request * @param Closure $next * @param string $path * @return \Illuminate\Http\JsonResponse|mixed */ public function handle($request, Closure $next, $path = '') { if (empty($path)) { $path = str_replace('manage/', '', $request->path()); } $pathArr = explode('|', $path); $admin = $request->get('admin'); $permission = (new AdminModel())->getPermission($admin['id']); if (in_array('*', $permission)) { return $next($request); } $isPermission = false; foreach ($pathArr as $path) { if (in_array($path, $permission)) { $isPermission = true; break; } } if ($isPermission) { return $next($request); } else { return error('无权限访问'); } } } <file_sep><?php namespace App\Http\Controllers\Manage; use App\Http\Controllers\Controller; use App\Models\LogsModel; use Illuminate\Http\Request; class LogsController extends Controller { /** * 获取操作日志列表 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function lists(Request $request) { $admin = $request->get('admin'); $field = [ 'logs.id', 'logs.path', 'logs.info', 'logs.ip', 'logs.request_time', 'admin.name as admin_name' ]; $model = LogsModel::leftJoin('admin', 'logs.admin_id', '=', 'admin.id')->select($field); if ($admin['id'] != 1) { $model->where('logs.admin_id', $admin['id']); } $model->orderBy('logs.id', 'desc'); $data = LogsModel::page($model); foreach($data['items'] as &$item){ $item['request_time'] = format_time($item['request_time']); } return success($data); } } <file_sep><?php namespace App\Http\Controllers\Manage; use App\Http\Controllers\Controller; use App\Http\Requests\Manage\AdminPostRequest; use App\Models\AdminModel; use App\Models\AdminRoleModel; use App\Models\BaseModel; use Illuminate\Http\Request; use Illuminate\Support\Str; class AdminController extends Controller { /** * 管理员登录接口 * @param Request $request * @return \Illuminate\Http\JsonResponse */ public function login(Request $request) { $username = (string)$request->input('username', ''); $password = (string)$request->input('password', ''); $model = new AdminModel(); $admin = $model->getRowByUsername($username); if (empty($admin)) { return error('管理员不存在'); 
}
        if ($admin['status'] == 2) {
            return error('Administrator account is disabled');
        }
        if ($model->verifyPassword($admin, $password) === false) {
            return error('Incorrect password');
        }
        $time = time();
        $model->updateLoginTime($admin['id'], $time);
        $admin['login_time'] = $time;
        $token = (string)Str::uuid();
        $model->setToken($admin['id'], $token);
        $admin['token'] = $token;
        $model->setInfo($admin);
        $data = [
            'token' => $token
        ];
        return success($data);
    }

    /**
     * Get administrator info and permissions
     * @param Request $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function info(Request $request)
    {
        $admin = $request->get('admin');
        $permission = (new AdminModel())->getPermission($admin['id']);
        $data = [
            'name' => $admin['name'],
            'permissions' => $permission,
            'avatar' => config('app.url') . '/img/portrait.jpg',
            'introduction' => $admin['id'] == 1 ? 'I am a super administrator' : 'I am a regular administrator'
        ];
        return success($data);
    }

    /**
     * Get the administrator list
     * @param Request $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function lists(Request $request)
    {
        $status = $request->input('status', 0);
        $username = $request->input('username', '');
        $name = $request->input('name', '');
        $sort = $request->input('sort', '');
        $field = array(
            'id', 'name', 'phone', 'username', 'status', 'login_time', 'created_at'
        );
        $model = AdminModel::select($field);
        if ($status) {
            $model->where('status', $status);
        }
        if ($username) {
            $model->where('username', 'like', "%$username%");
        }
        if ($name) {
            $model->where('name', 'like', "%$name%");
        }
        if ($sort == '+id') {
            $sort = 'asc';
        } else {
            $sort = 'desc';
        }
        $model->orderBy('id', $sort);
        $data = BaseModel::page($model);
        foreach ($data['items'] as &$item) {
            $item['login_time'] = format_time($item['login_time']);
            $item['role'] = (new AdminRoleModel())->getRoleName($item['id']);
        }
        return success($data);
    }

    /**
     * Add an administrator
     * @param AdminPostRequest $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function add(AdminPostRequest $request)
    {
        $username = $request->input('username', '');
        $phone = (string)$request->input('phone', '');
        $password = $request->input('password', '');
        $name = $request->input('name', '');
        $status = $request->input('status', 1);
        $role = (array)$request->input('role', []);
        $date = format_time(time());
        $model = new AdminModel();
        $model->username = $username;
        $model->name = $name;
        $model->phone = $phone;
        // the stored hash is md5(md5(plaintext) . created_at)
        $model->password = md5(md5($password) . $date);
        $model->status = $status;
        $model->created_at = $date;
        if ($model->save()) {
            (new AdminRoleModel())->updateAdminRole($model->id, $role);
            return success();
        } else {
            return error();
        }
    }

    /**
     * Get administrator details
     * @param Request $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function details(Request $request)
    {
        $id = $request->input('id', 0);
        $field = array(
            'id', 'username', 'name', 'phone', 'password', 'status'
        );
        $data = AdminModel::where('id', $id)->select($field)->first()->toArray();
        $data['role'] = (new AdminRoleModel())->getRoleId($data['id']);
        return success(['item' => $data]);
    }

    /**
     * Edit an administrator
     * @param AdminPostRequest $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function edit(AdminPostRequest $request)
    {
        $id = $request->input('id', 0);
        if ($id === 1) {
            return error('The super administrator cannot be edited');
        }
        $username = $request->input('username', '');
        $phone = (string)$request->input('phone', '');
        $password = $request->input('password', '');
        $name = $request->input('name', '');
        $status = $request->input('status', 1);
        $role = (array)$request->input('role', []);
        $model = AdminModel::where('id', $id)->first();
        if (empty($model)) {
            return error('User does not exist');
        }
        $model->username = $username;
        $model->phone = $phone;
        $model->name = $name;
        if ($password !== $model->password) {
            $createdAt = $model->created_at->format('Y-m-d H:i:s');
            $model->password = md5(md5($password) . $createdAt);
        }
        $model->status = $status;
        if ($model->save()) {
            // update the roles
            (new AdminRoleModel())->updateAdminRole($model->id, $role);
            // clear any cached permissions
            (new AdminModel())->delPermission($model->id);
            return success();
        } else {
            return error();
        }
    }

    /**
     * Delete an administrator
     * @param Request $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function del(Request $request)
    {
        $id = $request->input('id', 0);
        if ($id === 1) {
            return error('The super administrator cannot be deleted');
        }
        $model = AdminModel::where('id', $id)->first();
        if (empty($model)) {
            return error('User does not exist');
        }
        if ($model->delete()) {
            $admin = (new AdminModel())->getInfo($model->id);
            if ($admin && isset($admin['token'])) {
                // clear the token
                (new AdminModel())->delToken($admin['token']);
                // clear the cached admin data
                (new AdminModel())->delInfo($model->id);
            }
            // clear any cached permissions
            (new AdminModel())->delPermission($model->id);
            return success();
        } else {
            return error();
        }
    }

    /**
     * Change the password
     * @param Request $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function editPassword(Request $request)
    {
        $admin = $request->get('admin');
        $password = $request->input('password', '');
        $newPassword = $request->input('new_password', '');
        $model = new AdminModel();
        if ($model->verifyPassword($admin, $password) == false) {
            return error('The current password is incorrect');
        }
        $model->editPassword($admin, $newPassword);
        $this->logout($request);
        return success();
    }

    /**
     * Log out
     * @param Request $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function logout(Request $request)
    {
        $token = $request->header('Token');
        if ($token) {
            (new AdminModel())->delToken($token);
        }
        return success();
    }
}
<file_sep><?php

namespace Database\Seeders;

use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;

class MenuTableSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        $date = date('Y-m-d H:i:s');
        DB::table('menu')->insert([
            ['id' => 1, 'name' => 'All permissions', 'api' => '*', 'parent_id' => 0, 'is_subordinate' => 2, 'sort' => 100, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 2, 'name' => 'Administrator management', 'api' => '', 'parent_id' => 0, 'is_subordinate' => 1, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 3, 'name' => 'Administrator list', 'api' => 'admin/lists', 'parent_id' => 2, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 4, 'name' => 'Add administrator', 'api' => 'admin/add', 'parent_id' => 2, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 5, 'name' => 'Edit administrator', 'api' => 'admin/edit', 'parent_id' => 2, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 6, 'name' => 'Delete administrator', 'api' => 'admin/del', 'parent_id' => 2, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 7, 'name' => 'Role management', 'api' => '', 'parent_id' => 0, 'is_subordinate' => 1, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 8, 'name' => 'Role list', 'api' => 'role/lists', 'parent_id' => 7, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 9, 'name' => 'Add role', 'api' => 'role/add', 'parent_id' => 7, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 10, 'name' => 'Edit role', 'api' => 'role/edit', 'parent_id' => 7, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 11, 'name' => 'Delete role', 'api' => 'role/del', 'parent_id' => 7, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 12, 'name' => 'Permission management', 'api' => '', 'parent_id' => 0, 'is_subordinate' => 1, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 13, 'name' => 'Permission list', 'api' => 'menu/lists', 'parent_id' => 12, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 14, 'name' => 'Add permission', 'api' => 'menu/add', 'parent_id' => 12, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 15, 'name' => 'Edit permission', 'api' => 'menu/edit', 'parent_id' => 12, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 16, 'name' => 'Delete permission', 'api' => 'menu/del', 'parent_id' => 12, 'is_subordinate' => 2, 'sort' => 50, 'created_at' => $date, 'updated_at' => $date]
        ]);
    }
}
<file_sep># vue-element-admin-laravel8
A basic role-and-permission management system built on a PHP (Laravel 8.4) backend together with the vue-element-admin frontend framework.

# Login page after installation
![image](image/login.png)

# Laravel installation
1. Rename the .env.example file to .env
2. Edit the database and redis settings in .env
3. Run: composer install
4. Run: php artisan key:generate
5. Run: php artisan migrate
6. Run: php artisan db:seed

# Vue installation
1. Run: npm install
2. In the files whose names start with .env, set VUE_APP_BASE_API= to your own backend domain
3. Run: npm run dev
<file_sep><?php

namespace App\Jobs;

use App\Models\LogsModel;
use Illuminate\Bus\Queueable;
use Illuminate\Contracts\Queue\ShouldQueue;
use Illuminate\Foundation\Bus\Dispatchable;
use Illuminate\Queue\InteractsWithQueue;
use Illuminate\Queue\SerializesModels;

class LogsJob implements ShouldQueue
{
    use Dispatchable, InteractsWithQueue, Queueable, SerializesModels;

    // row waiting to be inserted
    public $insertData = [];

    /**
     * Create a new job instance.
     *
     * @return void
     */
    public function __construct($insertData)
    {
        $this->insertData = $insertData;
    }

    /**
     * Execute the job.
     *
     * @return void
     */
    public function handle()
    {
        $model = new LogsModel();
        foreach ($this->insertData as $field => $value) {
            $model->{$field} = $value;
        }
        $model->save();
    }
}
<file_sep><?php

namespace App\Http\Requests\Manage;

use Illuminate\Foundation\Http\FormRequest;
use App\Http\Requests\CommonRequest;
use Illuminate\Http\Request;
use Illuminate\Validation\Rule;
use App\Rules\MobilePhone;

class AdminPostRequest extends FormRequest
{
    use CommonRequest;

    /**
     * Determine if the user is authorized to make this request.
     *
     * @return bool
     */
    public function authorize()
    {
        return true;
    }

    public function rules(Request $request)
    {
        $id = $request->input('id', 0);
        $rule = [
            'username' => ['required', 'string', Rule::unique('admin')->ignore($id)],
            'name' => ['required', 'string'],
            'password' => ['required', 'string']
        ];
        $phone = $request->input('phone', '');
        if ($phone != '') {
            $rule['phone'] = [new MobilePhone];
        }
        return $rule;
    }

    public function attributes()
    {
        return [
            'username' => 'Username',
            'name' => 'Name',
            'phone' => 'Phone number'
        ];
    }
}
<file_sep><?php

namespace App\Http\Middleware\Manage;

use App\Models\AdminModel;
use Closure;

class Auth
{
    /**
     * Verify the login state and attach the admin info to the request
     * @param $request
     * @param Closure $next
     * @return \Illuminate\Http\JsonResponse|mixed
     */
    public function handle($request, Closure $next)
    {
        $token = $request->header('Token');
        $token = $token ?: $request->input('token');
        if (empty($token)) {
            return error(40000, 'Invalid token, please log in again', [], 401);
        }
        $admin = (new AdminModel())->getRowByToken($token);
        if (empty($admin)) {
            return error(40000, 'Invalid token, please log in again', [], 401);
        }
        if ($admin['status'] != 1) {
            return error('This account is disabled, please contact an administrator!');
        }
        $request->attributes->add(['admin' => $admin, 'token' => $token]);
        return $next($request);
    }
}
<file_sep><?php

if (!function_exists('response_json')) {
    /**
     * Return a JSON response
     * @param int $status
     * @param string $msg
     * @param array $data
     * @param int $code
     * @return \Illuminate\Http\JsonResponse
     */
    function response_json($status = 0, $msg = '', $data = [], $code = 200)
    {
        $response = [
            'status' => $status,
            'msg' => $msg,
            'data' => $data
        ];
        return response()->json($response, $code);
    }
}

if (!function_exists('success')) {
    /**
     * Success response
     * @param int $status
     * @param string $msg
     * @param array $data
     * @param int $code
     * @return \Illuminate\Http\JsonResponse
     */
    function success($status = 0, $msg = 'ok', $data = [], $code = 200)
    {
        if (is_array($status)) {
            $data = $status;
            $status = 0;
        }
        return response_json($status, $msg, $data, $code);
    }
}

if (!function_exists('error')) {
    /**
     * Error response
     * @param int $status
     * @param string $msg
     * @param array $data
     * @param int $code
     * @return \Illuminate\Http\JsonResponse
     */
    function error($status = 40000, $msg = 'error', $data = [], $code = 200)
    {
        if (!is_integer($status)) {
            if (is_array($status)) {
                $data = $status;
                $status = 40000;
            }
            if (is_string($status)) {
                $msg = $status;
                $status = 40000;
            }
        }
        return response_json($status, $msg, $data, $code);
    }
}

if (!function_exists('format_time')) {
    /**
     * Format a timestamp as a datetime string
     * @param int $time
     * @return string
     */
    function format_time($time)
    {
        if ($time) {
            return date('Y-m-d H:i:s', $time);
        }
        return '';
    }
}

if (!function_exists('format_day')) {
    /**
     * Format a timestamp as a date string
     * @param int $time
     * @return string
     */
    function format_day($time)
    {
        if ($time) {
            return date('Y-m-d', $time);
        }
        return '';
    }
}
<file_sep><?php

namespace Database\Seeders;

use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;

class AdminTableSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        $date = date('Y-m-d H:i:s');
        // password hashes follow the md5(md5(plaintext) . created_at) scheme expected by AdminModel
        DB::table('admin')->insert([
            ['id' => 1, 'name' => 'Admin', 'username' => 'admin', 'password' => md5(md5('<PASSWORD>') . $date), 'phone' => '', 'login_time' => 0, 'status' => 1, 'created_at' => $date, 'updated_at' => $date],
            ['id' => 2, 'name' => 'HR Manager', 'username' => 'renshi', 'password' => md5(md5('<PASSWORD>') . $date), 'phone' => '', 'login_time' => 0, 'status' => 1, 'created_at' => $date, 'updated_at' => $date]
        ]);
    }
}
<file_sep><?php

namespace App\Http\Middleware\Manage;

use App\Jobs\LogsJob;
use Closure;

class Log
{
    /**
     * Record an operation log
     * @param $request
     * @param Closure $next
     * @param String $info
     * @return \Illuminate\Http\JsonResponse|mixed
     */
    public function handle($request, Closure $next, $info)
    {
        // array keys mirror the columns of the logs table
        $logData = [
            'path' => $request->url(),
            'info' => $info,
            'ip' => $request->ip(),
            'request' => json_encode($request->all()),
            'response' => '',
            'token' => $request->get('token'),
            'admin_id' => $request->get('admin')['id'],
            'request_time' => time()
        ];
        $response = $next($request);
        $contentType = $response->headers->get('content-type');
        if (in_array($contentType, ['application/json'])) {
            $logData['response'] = $response->getContent();
        }
        $logJobNew = true;
        if ($logJobNew) {
            LogsJob::dispatchNow($logData); // synchronous dispatch; the API responds more slowly than with an async queue
        } else {
            LogsJob::dispatch($logData); // async queue; requires a worker process that you manage with Supervisor
        }
        return $response;
    }
}
<file_sep><?php

namespace App\Models;

class AdminRoleModel extends BaseModel
{
    // table name
    protected $table = 'admin_role';

    /**
     * Get the role names of a user
     * @param $adminId
     * @return array
     */
    public function getRoleName($adminId)
    {
        $names = $this->join('role', 'admin_role.role_id', '=', 'role.id')
            ->where('admin_role.admin_id', $adminId)
            ->pluck('role.name');
        return $names ? $names->toArray() : [];
    }

    /**
     * Get the role IDs of a user
     * @param $adminId
     * @return array
     */
    public function getRoleId($adminId)
    {
        $ids = $this->join('role', 'admin_role.role_id', '=', 'role.id')
            ->where('admin_role.admin_id', $adminId)
            ->pluck('role.id');
        return $ids ? $ids->toArray() : [];
    }

    /**
     * Update the roles of a user
     * @param $adminId
     * @param $roleIds
     */
    public function updateAdminRole($adminId, $roleIds)
    {
        $this->where('admin_id', $adminId)->delete();
        foreach ($roleIds as $roleId) {
            $model = new self();
            $model->admin_id = $adminId;
            $model->role_id = $roleId;
            $model->save();
        }
    }
}
<file_sep><?php

namespace App\Models;

class MenuModel extends BaseModel
{
    // table name
    protected $table = 'menu';

    // holds the flattened menu list
    public $listData = [];

    /**
     * Get the admin menu list
     * @param int $parentId
     * @param int $level
     * @return $this
     */
    public function getList($parentId = 0, $level = 1)
    {
        $field = array(
            'id', 'name', 'api', 'sort', 'is_subordinate', 'created_at',
        );
        $data = $this->where('parent_id', $parentId)->select($field)
            ->orderBy('sort', 'desc')
            ->orderBy('id', 'asc')
            ->get()->toArray();
        foreach ($data as $item) {
            $item['level'] = $level;
            $this->listData[] = $item;
            if ($item['is_subordinate'] == 1) {
                $this->getList($item['id'], $level + 1);
            }
        }
        if ($parentId == 0) {
            return $this->listData;
        }
    }

    /**
     * Query the menu tree
     * @param int $parentId
     * @return mixed
     */
    public function getTree($parentId = 0)
    {
        $field = array(
            'id', 'name', 'is_subordinate'
        );
        $data = $this->where('parent_id', $parentId)->select($field)
            ->orderBy('sort', 'desc')
            ->orderBy('id', 'asc')
            ->get()->toArray();
        foreach ($data as &$item) {
            if ($item['is_subordinate'] == 1) {
                $item['children'] = $this->getTree($item['id']);
            }
        }
        return $data;
    }

    /**
     * Recursively collect all ancestor ids of a menu entry
     * @param $id
     * @return array
     */
    public function parentIdAll($id)
    {
        $data = [];
        $parentId = $this->where('id', $id)->value('parent_id');
        if ($parentId > 0) {
            $data = array_merge($data, $this->parentIdAll($parentId));
        }
        $data[] = $id;
        return $data;
    }

    /**
     * Find all node ids for the Vue tree
     * @param $ids
     * @return array
     */
    public function findAllNode($ids)
    {
        $idArr = [];
        foreach ($ids as $id) {
            $idArr = array_merge($idArr, $this->parentIdAll($id));
        }
        return array_values(array_unique($idArr));
    }

    /**
     * Keep only leaf node ids
     * @param $ids
     * @return array
     */
    public function deleteAllNode($ids)
    {
        $data = $this->whereIn('id', $ids)->where('is_subordinate', 2)->pluck('id');
        return $data ? $data->toArray() : [];
    }
}
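The permission tree on the frontend only submits selected ids, so `MenuModel::findAllNode()` above has to walk each `parent_id` chain to re-include every ancestor node. A minimal standalone PHP sketch of that same closure logic, run against an invented in-memory array instead of the `menu` table:

```php
<?php
// Hypothetical sample rows shaped like the menu table (id => parent_id).
$menus = [
    2 => ['parent_id' => 0],  // Administrator management
    3 => ['parent_id' => 2],  // Administrator list
    7 => ['parent_id' => 0],  // Role management
    8 => ['parent_id' => 7],  // Role list
];

// Same recursion as MenuModel::parentIdAll(), minus the query builder.
function parentIdAll(array $menus, int $id): array
{
    $chain = [];
    $parentId = $menus[$id]['parent_id'] ?? 0;
    if ($parentId > 0) {
        $chain = array_merge($chain, parentIdAll($menus, $parentId));
    }
    $chain[] = $id;
    return $chain;
}

// Same merge-and-deduplicate step as MenuModel::findAllNode().
function findAllNode(array $menus, array $ids): array
{
    $all = [];
    foreach ($ids as $id) {
        $all = array_merge($all, parentIdAll($menus, $id));
    }
    return array_values(array_unique($all));
}

print_r(findAllNode($menus, [3, 8])); // [2, 3, 7, 8]: the leaves plus their ancestors
```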
5f821eb9e35d42eae6b2ad3d1c3de3b14397b910
[ "JavaScript", "Markdown", "PHP" ]
31
PHP
huihuilang/vue-element-admin-laravel8
272cabfe0189fbddcd1c31eacc7c9cea788c2dd3
d8ff73641cbb4d331d1aa4be375aeb4a65465c3d
refs/heads/master
<repo_name>lucaspena/Evicted<file_sep>/nonsite/todo.txt larger area for latlng queries? change buttons in business table (bottom right) change text color/layout in business table (bottom right)? indices README Documentation Group Picture/Description for Aadi's blog <file_sep>/changestats.php <?php $con = mysqli_connect("fling.seas.upenn.edu","tmandel","abc123","tmandel"); $name = $_POST["username"]; $b = $_POST["b"]; $c = $_POST["c"]; // get stats $gets = "SELECT hunger, health, comfort, satisfaction, energy, money, social FROM Users WHERE username='$name'"; ($stats = mysqli_query($con, $gets)) or die("BAD USERNAME AHHHHHH"); $values = mysqli_fetch_row($stats); // get reviews $getr = "SELECT text, rating FROM Review WHERE businessid='$b' ORDER BY rand() LIMIT 0, 5"; ($rtext = mysqli_query($con, $getr)) or die("NO REVIEWS"); $r1 = mysqli_fetch_row($rtext); $r2 = mysqli_fetch_row($rtext); $r3 = mysqli_fetch_row($rtext); $r4 = mysqli_fetch_row($rtext); $r5 = mysqli_fetch_row($rtext); // get current new business information $geta = "SELECT rating, businessname, reviewCount FROM Business WHERE businessid='$b'"; ($atext = mysqli_query($con, $geta)) or die("THERE IS NO RATING"); $rating = mysqli_fetch_row($atext); // combine reviews for efficiency $allrevs = $r1[0] . " " . $r2[0] . " " . $r3[0] . " " . $r4[0] . " " . $r5[0]; $arr = json_decode(exec("python fullparse.py \"$allrevs\"")); $hunger = $values[0]; $health = $values[1]; $comfort = $values[2]; $satis = $values[3]; $fatig = $values[4]; $money = $values[5]; $social = $values[6]; // default update $update = "SELECT hunger FROM Users WHERE username='$name'"; // default check $check = true; //hunger if ($c == "hun") { $satis = $satis + $arr->{'food'} + (($rating[0] - 50) / 2); $satis = min($satis, 100); $price = ($arr->{'price'}) * rand(6, 10); $hunger = $hunger - rand(50, 80); $hunger = max($hunger, 0); if ($hunger > 20 && $hunger < 35) { $fatig = max($fatig - rand(1, 2), 0); } $money = $money - $price; $check = $money >= 0; $health = $arr->{'health'} + $health; $health = max(0, min($health, 100)); // medical } else if ($c == "med") { $health2 = min (($r1[1] + $r2[1] + $r3[1] + $r4[1] + $r5[1]) / 10 + $health, 100); $price = ($arr->{'price'} + 2) * rand(6, 10); $money = $money - $price; $check = $money >= 0 && $health < 100; $health = $health2; // social } else if ($c == "soc") { $social = min (($r1[1] + $r2[1] + $r3[1] + $r4[1] + $r5[1]) / 10 + $social, 100); $price = ($arr->{'price'} + 1) * rand(5, 10); $money = $money - $price; $health = $health - rand(3, 6); $health = max(0, $health); $fatig = $fatig + rand(4, 6); $fatig = min(100, $fatig); $check = $money >= 0; // satisfaction } else if ($c == "art") { $satis = $arr->{'satis'} + $satis + ($rating[0] / 2); $satis = min($satis, 100); $price = ($arr->{'price'} + 1) * rand(3, 5); $money = $money - $price; $fatig = $fatig + rand(1, 3); $fatig = min($fatig, 100); $check = $money >= 0; // fatigue } else if ($c == "str") { $price = 0; if (stristr($rating[1], "gym") || stristr($rating[1], "fit") || stristr($rating[1], "train") || stristr($rating[1], "club") || stristr($rating[1], "sport") || stristr($rating[1], "studio")) { $price = ($arr->{'price'} + 1) * rand(1, 2); } $money = $money - $price; $check = $money >= 0; $satis = $arr->{'satis'}; $health = min (($r1[1] + $r2[1] + $r3[1] + $r4[1] + $r5[1]) / 60 + $health, 100); $fatig = min ($fatig + rand(25, 40), 100); // rest } else if ($c == "slp") { $satisf = $arr->{'satis'}; $fatig = max($fatig - ($satisf + rand(50, 80)), 0); $price = 
($arr->{'price'} + 1) * rand(15, 24); $money = $money - $price; $hunger = $hunger + rand(25, 40); $check = $money >= 0; // work } else if ($c == "wor") { $price = $arr->{'price'}; $satisf = $arr->{'satis'}; $parse = $arr->{'food'}; $money = $price + $satisf + $parse + $money + rand(15, 25) + $rating[2] / 2; $satis = max($satis + $satisf - rand(35, 50), 0); $social = max($social - max(40 - $rating[2], 10), 0); $fatig = min($fatig + $rating[2] / 5 + rand(25, 30), 100); } $health = $health - 1; // if changing if ($check) { if ($c != "hun") { $hunger = $hunger + rand(5,10); if ($c == "wor") { $hunger = $hunger + rand(10, 20); } if ($hunger > 80) { $health = max ($health - (($hunger * 1.5) / 20), 0); } if ($fatig > 80) { $health = max($health - (5 - ((100 - $fatig) / 5)), 0); } } $hunger = min ($hunger, 100); $update = "UPDATE Users SET hunger=$hunger, health=$health, comfort=$comfort, satisfaction=$satis, energy=$fatig, money=$money, social=$social WHERE username='$name'"; mysqli_query($con, $update) or die("DEAD"); } // death if ($check && $health <= 0) { mysqli_query($con, "DELETE FROM Users WHERE username='$name'") or die("YOU DIED!"); mysqli_query($con, "DELETE FROM Authentication WHERE username='$name'") or die ("YOU ARE DEAD"); mysqli_close($con); die("dead"); } mysqli_close($con); ?> <file_sep>/game.php <!DOCTYPE html> <title>Evicted</title> <?php session_start(); if (!isset($_SESSION['username'])) { header("location:index.php"); } $name = $_SESSION['username']; ?> <html> <head> <?php $c = mysqli_connect("fling.seas.upenn.edu","tmandel","abc123","tmandel"); ?> <script src="//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script> <link rel="stylesheet" type="text/css" href="styles.css" /> <meta name="viewport" content="initial-scale=1.0, user-scalable=no" /> <script type="text/javascript" src="https://maps.googleapis.com/maps/api/js?key=<KEY>&sensor=true"> </script> <script type="text/javascript"> // initialize Google Maps function initialize(arr) { var point = new google.maps.LatLng(arr[0], arr[1]); var mapOptions = { center: point, zoom: 14, mapTypeId: google.maps.MapTypeId.ROADMAP }; var map; map = new google.maps.Map(document.getElementById("map-canvas"), mapOptions); var marker = new google.maps.Marker({ //icon: 'http://maps.google.com/mapfiles/ms/icons/green-dot.png', position: new google.maps.LatLng(arr[0], arr[1]), map: map, title: 'Current Location' }); for (var i = 2; i < arr.length; i += 4) { var myLatLng = new google.maps.LatLng(arr[i], arr[i + 1]); var marker = new google.maps.Marker({ icon: 'http://www.google.com/intl/en_ALL/mapfiles/marker_green'+String.fromCharCode((i - 2)/4 + 65)+'.png', position: myLatLng, map: map, title: arr[i + 2] }); } } google.maps.event.addDomListener(window, 'load', initialize); // fill table function repopulate(uname, tableID, arr, rain, feels, wind) { try { var table = document.getElementById(tableID); var rowCount = table.rows.length; for (var i = 1; i < rowCount; i++) { console.log(rowCount); table.deleteRow(1); } } catch (e) { alert(e); } for (var i = 2; i < arr.length; i += 4) { var row = table.insertRow((i - 2) / 4 + 1); var cell = row.insertCell(0); cell.innerHTML = "<button class='suggestions' onclick='updateLocation(\"" + uname + "\", " + arr[i] + ", " + arr[i+1] + ", \"" + arr[i+3] + "\", " + rain + ", " + feels + ", " + wind + "); return false;'>"+ String.fromCharCode((i - 2)/4 + 65) + " : " + arr[i+2] + "</button>"; } } // reload page after timeout function delayedRedirect() { location.reload(); } //Updates location, 
reinitializes, and reloads page function updateLocation(uname, newx, newy, bid, rain, feels, wind){ $.post('update.php', { username: uname, b: bid, r: rain, t: feels, w: wind }).done(function(data) { if (data === "dead") { window.location = "death.php"; } else { initialize([newx, newy]); setTimeout(delayedRedirect, 1000); } }, "json"); } //Updates user stats function updateStats(uname, bid, cat){ $.post('changestats.php', { username: uname, b: bid, c: cat }).done(function(data) { if (data === "dead") { window.location = "death.php"; } else { setTimeout(delayedRedirect, 1000); } }, "json"); } </script> </head> <?php // death mysqli_query($c, "SELECT busid FROM Users WHERE username='$name'") or die("<h2>GAME OVER</h2><p>You have died. Create a new account if you would like to try again.</p>"); // user stats $get_coords = "SELECT xcoord, ycoord, city, state FROM Users, Business WHERE username = '$name' AND busid = businessid"; ($coord_res = mysqli_query($c, $get_coords)) or die("Can't get coordinates"); $row = mysqli_fetch_row($coord_res); $xco = $row[0]; $yco = $row[1]; // initialize points for Google Maps $p_pts = array($xco, $yco); $j_pts = json_encode($p_pts); $phun_pts = array($xco, $yco); $jhun_pts = json_encode($phun_pts); $pmed_pts = array($xco, $yco); $jmed_pts = json_encode($pmed_pts); $pcom_pts = array($xco, $yco); $jcom_pts = json_encode($pcom_pts); $psoc_pts = array($xco, $yco); $jsoc_pts = json_encode($psoc_pts); $part_pts = array($xco, $yco); $jart_pts = json_encode($part_pts); $pstr_pts = array($xco, $yco); $jstr_pts = json_encode($pstr_pts); $pmov_pts = array($xco, $yco); $jmov_pts = json_encode($pmov_pts); $pslp_pts = array($xco, $yco); $jslp_pts = json_encode($pslp_pts); echo "<body onload='initialize(" . $j_pts . ")'>"; // Weather $city = str_replace(" ", "_", $row[2]); $command = "python weather.py $city $row[3]"; $location = exec($command); // TATES API KEY: <KEY> $json_string = file_get_contents("http://api.wunderground.com/api/8535d5a9b64d9c3d/geolookup/conditions/q/" . $location . ".json"); $parsed_json = json_decode($json_string); $temp_f = $parsed_json->{'current_observation'}->{'temp_f'}; $rain = $parsed_json->{'current_observation'}->{'precip_today_in'}; $wind = $parsed_json->{'current_observation'}->{'wind_mph'}; $feelslike = $parsed_json->{'current_observation'}->{'feelslike_f'}; $stats = "SELECT * FROM Users WHERE username = '$name'"; ($rstats = mysqli_query($c, $stats)) or die("OOPS, Something went wrong!"); $row = mysqli_fetch_row($rstats); // get options $hun = "SELECT xcoord, ycoord, businessname, businessid FROM Business WHERE xcoord <= $xco + 0.05 AND xcoord >= $xco - 0.05 AND " . "ycoord <= $yco + 0.05 AND xcoord >= $yco - 0.05 AND category = 'hun' AND businessid <> '$row[10]' ORDER BY rand() LIMIT 0,5"; $med = "SELECT xcoord, ycoord, businessname, businessid FROM Business WHERE xcoord <= $xco + 0.05 AND xcoord >= $xco - 0.05 AND " . "ycoord <= $yco + 0.05 AND xcoord >= $yco - 0.05 AND category = 'med' AND businessid <> '$row[10]' ORDER BY rand() LIMIT 0,5"; $soc = "SELECT xcoord, ycoord, businessname, businessid FROM Business WHERE xcoord <= $xco + 0.05 AND xcoord >= $xco - 0.05 AND " . "ycoord <= $yco + 0.05 AND xcoord >= $yco - 0.05 AND category = 'soc' AND businessid <> '$row[10]' ORDER BY rand() LIMIT 0,5"; $art = "SELECT xcoord, ycoord, businessname, businessid FROM Business WHERE xcoord <= $xco + 0.05 AND xcoord >= $xco - 0.05 AND " . 
"ycoord <= $yco + 0.05 AND xcoord >= $yco - 0.05 AND category = 'art' AND businessid <> '$row[10]' ORDER BY rand() LIMIT 0,5"; $str = "SELECT xcoord, ycoord, businessname, businessid FROM Business WHERE xcoord <= $xco + 0.05 AND xcoord >= $xco - 0.05 AND " . "ycoord <= $yco + 0.05 AND xcoord >= $yco - 0.05 AND category = 'str' AND businessid <> '$row[10]' ORDER BY rand() LIMIT 0,5"; $mov = "SELECT distinct (city), xcoord, ycoord, state, businessid FROM Business WHERE category <> 'oth' AND city <> '$row[2]' GROUP BY city ORDER BY rand() LIMIT 0,5"; $slp = "SELECT xcoord, ycoord, businessname, businessid FROM Business WHERE xcoord <= $xco + 0.05 AND xcoord >= $xco - 0.05 AND " . "ycoord <= $yco + 0.05 AND xcoord >= $yco - 0.05 AND category = 'slp' AND businessid <> '$row[10]' ORDER BY rand() LIMIT 0,5"; ($rhun = mysqli_query($c, $hun)) or die("Can't get the 5 businesses-hun"); while($row1 = mysqli_fetch_row($rhun)){ array_push($phun_pts, $row1[0], $row1[1], htmlspecialchars($row1[2], ENT_QUOTES), $row1[3]); $jhun_pts = json_encode($phun_pts); } ($rmed = mysqli_query($c, $med)) or die("Can't get the 5 businesses-med"); while($row1 = mysqli_fetch_row($rmed)){ array_push($pmed_pts, $row1[0], $row1[1], htmlspecialchars($row1[2], ENT_QUOTES), $row1[3]); $jmed_pts = json_encode($pmed_pts); } ($rsoc = mysqli_query($c, $soc)) or die("Can't get the 5 businesses-soc"); while($row1 = mysqli_fetch_row($rsoc)){ array_push($psoc_pts, $row1[0], $row1[1], htmlspecialchars($row1[2], ENT_QUOTES), $row1[3]); $jsoc_pts = json_encode($psoc_pts); } ($rart = mysqli_query($c, $art)) or die("Can't get the 5 businesses-art"); while($row1 = mysqli_fetch_row($rart)){ array_push($part_pts, $row1[0], $row1[1], htmlspecialchars($row1[2], ENT_QUOTES), $row1[3]); $jart_pts = json_encode($part_pts); } ($rstr = mysqli_query($c, $str)) or die("Can't get the 5 businesses-str"); while($row1 = mysqli_fetch_row($rstr)){ array_push($pstr_pts, $row1[0], $row1[1], htmlspecialchars($row1[2], ENT_QUOTES), $row1[3]); $jstr_pts = json_encode($pstr_pts); } ($rmov = mysqli_query($c, $mov)) or die("Can't get the 5 businesses-mov"); while($row1 = mysqli_fetch_row($rmov)){ array_push($pmov_pts, $row1[1], $row1[2], $row1[0] . ", " . $row1[3], $row1[4]); $jmov_pts = json_encode($pmov_pts); } ($rslp = mysqli_query($c, $slp)) or die("Can't get the 5 businesses-mov"); while($row1 = mysqli_fetch_row($rslp)){ array_push($pslp_pts, $row1[0], $row1[1], htmlspecialchars($row1[2], ENT_QUOTES), $row1[3]); $jslp_pts = json_encode($pslp_pts); } // businessname for stat table $busid = "SELECT businessname FROM Business where businessid = '$row[10]'"; ($bn = mysqli_query($c, $busid)) or die("WE CAN'T FIND YOU"); $r = mysqli_fetch_row($bn); $four = 100 - $row[3]; $five = 100 - $row[4]; $six = 100 - $row[5]; $sev = 100 - $row[6]; $eight = 100 - $row[7]; $ten = 100 - $row[9]; // stat table echo "<div id='leftside'>"; echo "<img src='evictedlogo.png' height='50' style='float:left;'/><form action='logout.php' class='logout' ><input type='submit' class='submit' value='Logout'></form><br/>"; echo "<table id='left'><tr>" . "<td><div id='status'>" . "<table>" . "<tr><td><strong>Stats</strong></td><td/></tr>" . "<tr><td>Username</td><td>$row[0]</td></tr>" . "<tr><td>Name</td><td>$row[1] $row[2]</td></tr>" . "<tr><td>Money</td><td>$$row[8]</td></tr>" . "<tr><td>Hunger</td><td><img src='bar_red2.png' height=10px width=$row[3]px><img src='bar2.png' height=10px width=$four px> ($row[3])</td></tr>" . 
"<tr><td>Fatigue</td><td><img src='bar_red2.png' height=10px width=$row[7]px><img src='bar2.png' height=10px width=$eight px> ($row[7])</td></tr>" . "<tr><td>Health</td><td><img src='bar_grn.png' height=10px width=$row[4]px><img src='bar2.png' height=10px width=$five px> ($row[4])</td></tr>" . "<tr><td>Comfort</td><td><img src='bar_grn.png' height=10px width=$row[5]px><img src='bar2.png' height=10px width=$six px> ($row[5])</td></tr>" . "<tr><td>Satisfaction</td><td><img src='bar_grn.png' height=10px width=$row[6]px><img src='bar2.png' height=10px width=$sev px> ($row[6])</td></tr>" . "<tr><td>Social</td><td><img src='bar_grn.png' height=10px width=$row[9]px><img src='bar2.png' height=10px width=$ten px> ($row[9])</td></tr>" . "<tr><td>Temperature</td><td>$temp_f</td></tr>" . "</table><BR>"; echo "</div></td>"; echo "<td><div id='buttons'>"; // add rows to table echo ("<button class='options' onclick='initialize(" . $jhun_pts . "); repopulate(\"$name\", \"placesTable\", $jhun_pts, \"$rain\", \"$feelslike\", \"$wind\")'>Get Food</button><br/>"); echo ("<button class='options' onclick='initialize(" . $jmed_pts . "); repopulate(\"$name\", \"placesTable\", $jmed_pts, \"$rain\", \"$feelslike\", \"$wind\")'>Be Healthy</button><br/>"); echo ("<button class='options' onclick='initialize(" . $jsoc_pts . "); repopulate(\"$name\", \"placesTable\", $jsoc_pts, \"$rain\", \"$feelslike\", \"$wind\")'>Go Out</button><br/>"); echo ("<button class='options' onclick='initialize(" . $jart_pts . "); repopulate(\"$name\", \"placesTable\", $jart_pts, \"$rain\", \"$feelslike\", \"$wind\")'>See the Sights</button><br/>"); echo ("<button class='options' onclick='initialize(" . $jstr_pts . "); repopulate(\"$name\", \"placesTable\", $jstr_pts, \"$rain\", \"$feelslike\", \"$wind\")'>Work Out</button><br/>"); echo ("<button class='options' onclick='initialize(" . $jmov_pts . "); repopulate(\"$name\", \"placesTable\", $jmov_pts, \"$rain\", \"$feelslike\", \"$wind\")'>Move Away</button><br/>"); echo ("<button class='options' onclick='initialize(" . $jslp_pts . "); repopulate(\"$name\", \"placesTable\", $jslp_pts, \"$rain\", \"$feelslike\", \"$wind\")'>Go Rest</button><br/>"); echo "</div></td></tr></table>"; // table of choices echo "<div id='info'>" . "<table id='placesTable' width='350px'>" . "<tr><td><strong>Your Choices</strong></td></tr>" . "</table>" . "</div>" . "</div>"; // business info $currbus = "SELECT fullAddress, photourl, reviewCount, url, rating, category FROM Business WHERE businessid = '$row[10]'"; ($loca = mysqli_query($c, $currbus)) or die("YOUR LOCATION IS OUT OF BOUNDS"); $busrow = mysqli_fetch_row($loca); mysqli_close($c); $comma = "python category.py $busrow[5]"; $but = exec($comma); // business information echo "<div id='rightside'>"; echo "<div id='map-canvas'></div><br/>"; echo "<div id='currlocation'>" . "<table id='locinfo'>" . "<tr>" . "<td><img src='$busrow[1]'/></td>" . "<td>" . "<table id='infotable'>" . "<tr>&nbsp;&nbsp;&nbsp;&nbsp;Business Name: $r[0]</tr><br/>" . "<tr>&nbsp;&nbsp;&nbsp;&nbsp;Address: $busrow[0]</tr><br/>" . "<tr>&nbsp;&nbsp;&nbsp;&nbsp;Rating: $busrow[4] ($busrow[2] reviews)</tr><br/>" . "<tr>&nbsp;&nbsp;&nbsp;&nbsp;Business URL: <a href='$busrow[3]' target='_blank'>$busrow[3]</a></tr>" . "</table>" . "</td>" . "</tr>" . "<tr>" . "<td/><td>" . "<button class='options' onclick='updateStats(\"$name\", \"$row[10]\", \"$busrow[5]\")'>$but</button>&nbsp;&nbsp;&nbsp;" . "<button class='options' onclick='updateStats(\"$name\", \"$row[10]\", \"wor\")'>Work</button>" . 
"</td>" . "</tr>" . "</table>" . "</div></div>"; ?> </body> </html> <file_sep>/category.py #!/usr/bin/python import sys argList = sys.argv cat = argList[1] fixes = {"med" : "Get Well", "hun" : "Eat", "com" : "Relax", "soc" : "Interact", "art" : "Admire", "str" : "Exercise", "slp" : "Rest", "oth" : "Random"} print fixes[cat] <file_sep>/death.php <!DOCTYPE HTML> <div class="header"> <a href="index.php"><img src="evictedlogo.png"></a> </div> <head> <link rel="stylesheet" type="text/css" href="styles.css" /> </head> <title>Evicted</title> <body> <div class="wrapper"> <h2>GAME OVER</h2>You have died, so we deleted you. Try to stay alive next time. You can sign up again on the homepage.<BR/> </div> </body> </html> <?php session_start(); session_destroy(); ?> <file_sep>/nonsite/yelpjson/src/main/Business.java /* Business Object from Yelp */ package main; import java.util.ArrayList; public class Business { public String business_id = ""; public String full_address = ""; public ArrayList<String> schools; public String open = ""; public ArrayList<String> categories; public String photo_url = ""; public String city = ""; public int review_count; public String name = ""; public ArrayList<String> neighborhoods; public String url = ""; public double longitude; public String state = ""; public double stars; public double latitude; public String type = ""; public Business(String b, String a, String o, String p, String c, int r, String n, String u, double lon, String s, double st, double lat, String t) { if (b!=null) business_id = b; if (a!=null) full_address = a; if (o!=null) open = o; if (p!=null) photo_url = p; if (c!=null) city = c; review_count = r; if (n!=null) name = n; if (u!=null) url = u; longitude = lon; if (s!=null) state = s; stars = st; latitude = lat; if (t!=null) type = t; } } <file_sep>/checklogin.php <!DOCTYPE HTML> <?php include 'header.php' ?> <head> <link rel="stylesheet" type="text/css" href="styles.css" /> </head> <title>Evicted</title> <body> <div class="wrapper"> <h2>Logged In!</h2> <?php $user = $_POST["username"]; $pass = $_POST["password"]; $con = mysqli_connect("fling.seas.upenn.edu","tmandel","<PASSWORD>","t<PASSWORD>"); if (mysqli_connect_errno()) { echo "Failed to connect to MySQL: " . mysqli_connect_error(); } // check if exists $query = "SELECT * FROM Authentication WHERE username = '" . $user . "' AND password = '" . MD5($pass) . 
"'"; ($result = mysqli_query($con, $query)) or die("Couldn't execute query."); $count = mysqli_num_rows($result); if ($count == 1) { session_start(); $_SESSION['username'] = $user; mysqli_close($con); header("location:game.php"); } else { echo "WRONG"; mysqli_close($con); header("location:index.php"); } ?> <BR> </div> </body> </html> <file_sep>/createaccount.php <!DOCTYPE HTML> <?php include 'header.php' ?> <head> <link rel="stylesheet" type="text/css" href="styles.css" /> </head> <title>Evicted</title> <body> <div class="wrapper"> <?php $user = $_POST["username"]; $pass1 = $_POST["pass1"]; $pass2 = $_POST["pass2"]; if ($pass1 != $pass2) { echo "<h2>Password Mismatch!</h2>"; echo "Did you want to have to keep track of two passwords?"; } else if (strlen ($pass1) < 6) { echo "<h2>Your password must be at least 6 characters long!</h2>"; echo "You should probably try again..."; } else if (strlen ($user) < 4 ) { echo "<h2>Your username must be at least 4 characters long!</h2>"; echo "You probably want to fix that..."; } else { $fname = $_POST["fname"]; $lname = $_POST["lname"]; $location = $_POST["location"]; $xcoord = 0; $ycoord = 0; $b = "A"; if ($location == "ann") { $b = "OoCzKO4aeGr6Kq39m5Hcjg"; } else if ($location == "cambridge") { $b = "FLF1GSMzylSaHM6iDHnkbw"; } else if ($location == "ny") { $b = "fco4qToL2DhnVamv0yaW4w"; } else if ($location == "seattle") { $b = "XYcht_y51ZsZdUJgam0e3A"; } else if ($location == "la") { $b = "KXjkZVuH001Fo2FYXmjGfw"; } else { $b = "bjkA3ustWdg3EsTLRiUNKA"; } $hunger = 10; $health = 90; $comfort = 30; $satisfaction = 30; $energy = 50; $social = 50; $money = 100; $con = mysqli_connect("fling.seas.upenn.edu","tmandel","abc123","tmandel"); if (mysqli_connect_errno()) { echo "Failed to connect to MySQL: " . mysqli_connect_error(); } $q = "INSERT INTO Authentication VALUES ('" . $user . "','" . MD5($pass1) . "')"; ($r = mysqli_query($con, $q)) or die("<h2>Username Taken!</h2>Try again."); $qu = "INSERT INTO Users VALUES ('" . $user . "','" . $fname . "','" . $lname . "'," . $hunger . "," . $health . "," . $comfort . "," . $satisfaction . "," . $energy . "," . $money . "," . $social . ",'" . $b . 
"')"; ($ru = mysqli_query($con, $qu)) or die("User doesn't exist."); session_start(); $_SESSION['username'] = $user; mysqli_close($con); header("location:game.php"); } ?> <BR> </div> </body> </html> <file_sep>/fullparse.py #!/usr/bin/python import sys argList = sys.argv text = argList[1] # food goodf = ["awesome", "good", "great", "smile", "free", "happy", "pleasantly", "pleasant", "helpful", "sweet", "yummy", "amazing", "fresh", "friendly", "excellent", "best", "better", "fantastic", "wonderful", "extraordinary", "fast", "fastest"] badf = ["upset", "bad", "horrible", "indigestion", "sick", "dirty", "gross", "nasty", "unpleasant", "greasy", "oily", "closed", "worst", "hate", "hated"] # price cheap = ["cheap", "inexpensive", "affordable", "thrifty", "economical", "bargain", "budget"] exp = ["expensive", "pricey", "costly", "fancy", "extravagent", "overpriced"] # satisfaction goods = ["awesome", "good", "great", "smile", "free", "happy", "pleasantly", "pleasant", "helpful", "sweet", "rare", "unique", "amazing", "fresh", "friendly", "excellent", "best", "better", "fantastic", "wonderful", "extraordinary", "clean", "nice", "respectful", "festive", "entertaining", "entertainment", "fun", "funny", "cool"] bads = ["upset", "bad", "horrible", "dirty", "unpleasant", "closed", "boring", "lame", "stupid", "dumb", "annoying", "frustrating", "noisy", "unhelpful", "uncomfortable", "rude", "worse", "racist", "worst", "hate", "hated"] # health healthy = ["fresh", "vegetable", "vegetables", "healthy", "fruit", "fruits", "fruity", "protein", "health"] unhealthy = ["oily", "greasy", "fatty", "fat", "grease", "oil", "oils", "fats", "carbs", "lard", "rats", "bugs", "rat", "poison", "bug", "poisoning", "sickness", "illness", "sick", "ill", "indigestion", "ache", "heartburn", "death", "dead", "attack", "stroke", "artery", "arteries"] food = 0 price = 0 price1 = 0 price2 = 0 satis = 0 health = 0 health1 = 0 health2 = 0 text = text.split() for word in text: if word.lower() in goodf: food = food + 1 elif word.lower() in badf: food = food - 1 if word.lower() in cheap: price1 = price1 + 1 elif word.lower() in exp: price2 = price2 + 1 if word.lower() in goods: satis = satis + 1 elif word.lower() in bads: satis = satis - 1 if word.lower() in healthy: health1 = health1 + 1 elif word.lower() in unhealthy: health2 = health2 + 1 if price1 > price2: price = 1 elif price1 < price2: price = 3 else: price = 2 health = health1 - health2 # json print "{ \"food\" :", food, ", \"price\" :", price, ", \"satis\" :", satis, ", \"health\" :", health, "}" <file_sep>/update.php <?php $c = mysqli_connect("fling.seas.upenn.edu","tmandel","abc123","tmandel"); $name = $_POST["username"]; $b = $_POST["b"]; $t = $_POST["t"]; $w = $_POST["w"]; $r = $_POST["r"]; $check = true; // business and user information $old = "SELECT xcoord, ycoord, hunger, health, city, money, comfort, satisfaction, social, energy FROM Business, Users WHERE username='$name' AND busid=businessid"; ($oldloc = mysqli_query($c, $old)) or die("CAN'T UPDATE"); $oldcoords = mysqli_fetch_row($oldloc); // business information $new = "SELECT xcoord, ycoord, city FROM Business WHERE businessid='$b'"; ($newloc = mysqli_query($c, $new)) or die("CAN'T UPDATE"); $newcoords = mysqli_fetch_row($newloc); $dist = sqrt(pow($newcoords[0] - $oldcoords[0], 2) + pow($newcoords[1] - $oldcoords[1], 2)) * 300; $hunger = $oldcoords[2]; // for moving if ($dist > 80) { $hunger = max(min(max($hunger + (($dist * 1.5) / 20) - rand(100, 110), $hunger), 100), 0); } $hunger = min($hunger + rand(8, 11), 
100); $satis = $oldcoords[7]; $soc = $oldcoords[8]; $fatig = $oldcoords[9]; $comfort = $oldcoords[6]; // weather if ($w > 15) { $comfort = $comfort - ($w - 15); } if ($t < 45.0) { $comfort = $comfort - (45 - $t); } else if ($t > 90.0) { $comfort = $comfort - ($t - 90); } else { $comfort = $comfort + rand(2, 4); } if ($r > 0.0) { $comfort = $comfort - ceil($r * 3); } $comfort = min(100, max(0, $comfort)); $health = $oldcoords[3]; // for moving if ($dist > 80) { $fatig = max(min(max($fatig + (($dist * 1.5) / 20) - rand(100, 110), $fatig), 100), 0); } $fatig = min($fatig + rand(4, 7), 100); $money = $oldcoords[5]; if ($oldcoords[4] != $newcoords[2]) { $money = $money - ($dist / 50 + rand(4, 8)); $check = $money >= 0; } // if query is to execute if ($check) { $health = max ($health - 1, 0); if ($hunger > 80) { $health = max ($health - (($hunger * 1.5) / 20), 0); } if ($comfort == 0) { $health = max ($health - 1, 0); } if ($satis < 5) { $health = max ($health - 1, 0); } if ($satis > 95) { $health = min ($health + 1, 100); } if ($soc < 5) { $health = max ($health - 1, 0); } if ($soc > 95) { $health = min ($health + 1, 100); } $update = "UPDATE Users SET busid='$b', money=$money, hunger=$hunger, health=$health, comfort=$comfort, energy=$fatig WHERE username='$name'"; ($rupdate = mysqli_query($c, $update)) or die("Location Not Updated!"); } // death if ($check && $health == 0) { mysqli_query($c, "DELETE FROM Users WHERE username='$name'") or die("dead"); mysqli_query($c, "DELETE FROM Authentication WHERE username='$name'") or die ("dead"); mysqli_close($c); die("dead"); } mysqli_close($c); ?> <file_sep>/nonsite/yelpjson/src/main/Main.java /* * This loads the Yelp data into the database, along with a category we assigned * to each business. Tables are created previously in the mySQL command line. 
*/ package main; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.InputStreamReader; import java.sql.Connection; import java.sql.DriverManager; import java.sql.Statement; import java.util.ArrayList; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParser; public class Main { public static void main(String[] args) throws Exception { // Initialize a gson variable Gson gson = new Gson(); ArrayList<Business> businesses = new ArrayList<Business>(); ArrayList<Review> reviews = new ArrayList<Review>(); // Decodes dataset - we only use the Business and Review tables File file = new File("yelp_academic_dataset.json"); FileInputStream fis = new FileInputStream(file); BufferedInputStream bis = new BufferedInputStream(fis); BufferedReader d = new BufferedReader(new InputStreamReader(bis)); String line = ""; while ((line = d.readLine()) != null) { try { JsonObject o = new JsonParser().parse(line).getAsJsonObject(); String s = o.get("type").getAsString(); if (s.equals("review")) { reviews.add(gson.fromJson(o, Review.class)); } else if (s.equals("business")) { businesses.add(gson.fromJson(o, Business.class)); } } catch (Exception e) { e.printStackTrace(); System.out.println("Invalid data format."); System.exit(0); } } fis.close(); bis.close(); d.close(); // A simple test case to check if data has been read properly and // decoded if (reviews.get(0).votes.cool == 1) { System.out.println("Reviews have been succesfully read"); System.out.println(); } if (businesses.get(1).city.equals("Cambridge")) { System.out.println("Businesses have been successfully read"); System.out.println(); } /* Insert all of the data into database Initializes the database. We manually CREATE TABLE (or DROP TABLE in mySQL because we kept having to make changes to the schema, so it isn't used here. */ Statement st = null; st = makeConnectionWithDatabase(args); st.setEscapeProcessing(true); DMLs(businesses, reviews, st); st.close(); } // Makes connection with fling database public static Statement makeConnectionWithDatabase(String[] args) throws Exception { try { Class.forName("com.mysql.jdbc.Driver").newInstance(); Connection conn = null; conn = DriverManager.getConnection("jdbc:mysql://fling.seas.upenn.edu:3306/tmandel","tmandel","abc123"); Statement st = conn.createStatement(); return st; } catch (Exception e) { e.printStackTrace(); System.out.println(e); } return null; } /* We sort all our businesses into categories so each button in the game * corresponds to a category of business to visit (e.g. restaurant). 
These * are ordered so that something that is a Hotel with food won't accidentally * get lumped with a restaurant, or a Park that is in both "Parks" and * "Museum" would end up in "Work Out" instead of in "See The Sights".*/ private static String getCat(ArrayList<String> category){ for (String s : category) { //Go Rest if ((s.contains("Apartment") || s.equals("Hotels") || s.contains("Hostel") || s.contains("Bed & Breakfast") || s.contains("Retirement Home")|| s.contains("University Housing")) && !s.contains("Realty") && !s.contains("Realtor") && !s.contains("Taxi") && !s.contains("Real Estate")) return "slp"; } for (String s : category) { //Get Food if (s.contains("Food") || s.contains("Restaurant")) return "hun"; } for (String s : category) { //Be Healthy if (s.contains("Health") || s.contains("Doctor") || s.contains("Medical") || s.contains("Hospital")) return "med"; } /* for (String s : category) { if (s.contains("Auto") || s.contains("Bike") || s.contains("Car") || s.contains("Home") || s.contains("Clothing")) return "com"; }*/ for (String s : category) { //Work Out if (s.contains("Parks") || s.contains("Gyms") || s.contains("Playgrounds") || s.contains("Recreation Centers")) return "str"; } for (String s : category) { //Go Out if (s.contains("Nightlife") || s.contains("Social Clubs")) return "soc"; } for (String s : category) { //See The Sights if (s.contains("Adult") || s.contains("Art") || s.contains("Museum") || s.contains("Cinema") || s.contains("Bowling") || s.contains("Arcade") || s.contains("Performing Arts")) return "art"; } return "oth"; //We aren't using other businesses in our game } /* * Puts reviews in Review table and puts businesses (and category that * we had determined) in Business table. Removes ' and \ in strings. */ public static void DMLs(ArrayList<Business> businesses, ArrayList<Review> reviews, Statement st) { for (Business b : businesses) { try { b.name = b.name.replace("'", "\\'"); b.name = b.name.replace("\"", "\\\""); if (b.name.endsWith("\\")) b.name = b.name.substring(0, b.name.length() - 1); b.full_address = b.full_address.replace("'", "\\'"); b.full_address = b.full_address.replace("\"", "\\\""); if (b.full_address.endsWith("\\")) b.full_address = b.full_address.substring(0, b.full_address.length() - 1); String temp1a = "INSERT INTO Business VALUES ('" + b.business_id + "', '" + b.name + "', '" + b.full_address + "', " + b.latitude + ", " + b.longitude + ", '" + b.photo_url + "', '" + b.city + "', '" + b.state + "', " + b.review_count + ", '" + b.url + "', " + (b.stars * 20) + ", '" + getCat(b.categories) + "')"; st.execute(temp1a); } catch (Exception e) { e.printStackTrace(); } } for (Review r : reviews) { try { r.text = r.text.replace("'", "\\'"); r.text = r.text.replace("\"", "\\\""); if (r.text.endsWith("\\")) r.text = r.text.substring(0, r.text.length() - 1); //System.out.println(r.text); String temp2a = "INSERT INTO Review VALUES ('" + r.business_id + "', '" + r.review_id + "', " + r.votes.useful + ", " + r.votes.funny + ", " + r.votes.cool + ", " + (r.stars * 20) + ", '" + r.text + "')"; st.execute(temp2a); } catch (Exception e) { e.printStackTrace(); System.out.println(r.text); } } } }<file_sep>/index.php <!DOCTYPE HTML> <?php include 'header.php' ?> <link rel="stylesheet" type="text/css" href="styles.css"> <title>Evicted</title> <body> <div class="wrapper"> </div> <div class="main"> <table class="begin"> <tr> <td style="text-align:center;"><h2>Login</h2></td> </tr> <tr> <td><form action="checklogin.php" method="POST"> <table> <tr> 
<td>Username:</td> <td><input type="text" name="username"></td> </tr> <tr> <td>Password:</td> <td><input type="<PASSWORD>" name="password"></td> </tr> </table> <input type="submit" class="submit" value="Login"> </form> <p><a href="signup.php">Click here</a> to register</p> <p></p> </td> </tr> </table> </div></body> </html> <file_sep>/nonsite/yelpjson/src/main/Votes.java /* Votes Object from Reviews*/ package main; public class Votes { public int funny; public int useful; public int cool; public Votes (int funny, int useful, int cool) { this.funny = funny; this.useful = useful; this.cool = cool; } } <file_sep>/nonsite/yelpjson/src/main/Review.java /* Review Object from Yelp */ package main; public class Review { public Votes votes; public String user_id; public String review_id; public int stars; public String date; public String text; public String type; public String business_id; public Review(Votes v, String u, String r, int s, String d, String te, String ty, String b) { votes = v; user_id = u; review_id = r; stars = s; date = d; text = te; type = ty; business_id = b; } } <file_sep>/signup.php <!DOCTYPE HTML> <?php include 'header.php' ?> <link rel="stylesheet" type="text/css" href="styles.css"> <title>Evicted</title> <body> <div class="wrapper"> </div> <div class="main"> <table> <tr> <td style="text-align:center;"><h2>Sign Up</h2></td> </tr> <tr> <td><form action="createaccount.php" method="POST"> <table> <tr> <td>First Name</td> <td><input type="text" name="fname"></td> </tr> <tr> <td>Last Name:</td> <td><input type="text" name="lname"></td> </tr> <tr> <td>Username:</td> <td><input type="text" name="username"></td> </tr> <tr> <td>Password: <td><input type="<PASSWORD>" name="<PASSWORD>"></td> </tr> <tr> <td>Re-enter Password: <td><input type="<PASSWORD>" name="<PASSWORD>"></td> </tr> <tr> <td>Starting Location: <td><select name="location"> <option value="phila">Philadelphia</option> <option value="cambridge">Cambridge</option> <option value="ny">New York</option> <option value="seattle">Seattle</option> <option value="la">Los Angeles</option> <option value="ann">Ann Arbor</option> </select></td> </tr> <tr> <td><input type="submit" value="Sign Up" class="submit"></td> </tr> </form> </td> </tr> </table> </div></body> </html> <file_sep>/weather.py #!/usr/bin/python import sys argList = sys.argv city = argList[1] state = argList[2] # locations with no straight up changes fixes = {"Ann_Arbor_Charter_Township" : "Ann_Arbor", "Christiansbrg" : "Christiansburg", "Hampshire" : "Northampton", "Harlem" : "Manhattan", "Lansingburgh" : "Albany", "Mid-Cambridge" : "Cambridge", "Philly" : "Philadelphia", "Princeton_Jct" : "Princeton_Junction", "Riverdale_Park" : "Riverdale", "Southern_California" : "Los_Angeles", "St._Jacobs" : "Kitchener", "St_Jacobs" : "Kitchener", "Victoria_Park" : "Kitchener", "Waterloo" : "Kitchener", "Westwood_Village" : "Westwood"} if city in fixes: city = fixes[city] # path for weather uses underscores city = city.replace(" ", "_") print state + "/" + city
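update.php above folds the Wunderground wind, feels-like and rain readings into the player's comfort stat inline with the rest of the travel logic. Restated as a standalone PHP function (with invented sample inputs), the weather rule is easier to check in isolation:

```php
<?php
// Standalone restatement of the comfort adjustment in update.php.
// Inputs mirror the Wunderground fields the game reads:
// wind in mph, feels-like in °F, rain in inches.
function adjustComfort(int $comfort, float $wind, float $feelsLike, float $rain): int
{
    if ($wind > 15) {
        $comfort -= $wind - 15;            // strong wind penalty
    }
    if ($feelsLike < 45.0) {
        $comfort -= 45 - $feelsLike;       // too cold
    } elseif ($feelsLike > 90.0) {
        $comfort -= $feelsLike - 90;       // too hot
    } else {
        $comfort += rand(2, 4);            // pleasant weather bonus
    }
    if ($rain > 0.0) {
        $comfort -= (int)ceil($rain * 3);  // rain penalty
    }
    return (int)min(100, max(0, $comfort)); // clamp to the 0-100 stat range
}

echo adjustComfort(60, 22.0, 38.5, 0.4), "\n"; // a windy, cold, wet day
```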
9f1ea290cf9b762c70d74fa7cc8c0ca1864105da
[ "Java", "Python", "Text", "PHP" ]
16
Text
lucaspena/Evicted
e440d50e2d32d6fb6057e41958d4ee437a9bb2a2
9091ab77f36d6de637926463f2630cdb70297fc4
refs/heads/master
<repo_name>Mallangs/32To64<file_sep>/main.cpp
// use 32bit data to represent 64bit data
#include <iostream>
#include <cstdio>  // printf

int main() {
    int x, y;
    int xFront, yFront;
    int xBack, yBack;

    // input x, y
    std::cin >> x >> y;

    long long int result;

    // split each 32-bit value into 16-bit halves
    xFront = (x & 0xffff0000) >> 16;
    xBack = x & 0xffff;
    yFront = (y & 0xffff0000) >> 16;
    yBack = y & 0xffff;
    printf("%x %x\n", xFront, xBack);
    printf("%x %x\n", yFront, yBack);

    int front;
    int last;

    // schoolbook multiply of the halves; note that the cross-term sum can
    // exceed 32 bits, so carries between the two words are silently lost here
    front = (xFront * yFront) + (((yFront * xBack + xFront * yBack) & 0xffff0000) >> 16);
    last = (xBack * yBack) + (((xFront * yBack + xBack * yFront) & 0xffff) << 16);
    printf("(int)A * (int)B = %#X", front);
    printf("%X\n\n", last);

    result = (long long int)x * (long long int)y;
    //-----------------------------//
    printf("%#016llX\n\n\n", result);  // the true 64-bit product, for comparison
    printf("%lld\n\n\n", result);
}
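The recombination above can drop a carry between the low and high words when the cross terms overflow. A corrected sketch of the same half-word technique — written in PHP purely for illustration (assuming 64-bit PHP, so the result can be checked against a native 64-bit product); none of this is part of the repo:

```php
<?php
// Multiply two 32-bit values using only 16-bit limbs, with the carry
// between the low and high 32-bit words propagated explicitly.
function mul32x32(int $x, int $y): array
{
    $xLo = $x & 0xFFFF;  $xHi = ($x >> 16) & 0xFFFF;
    $yLo = $y & 0xFFFF;  $yHi = ($y >> 16) & 0xFFFF;

    $lo    = $xLo * $yLo;               // up to 32 bits
    $cross = $xLo * $yHi + $xHi * $yLo; // up to 33 bits: too big for an int32
    $hi    = $xHi * $yHi;               // up to 32 bits

    // Fold the low 16 bits of the cross terms into the low word,
    // then carry any overflow into the high word.
    $sum   = $lo + (($cross & 0xFFFF) << 16);
    $low   = $sum & 0xFFFFFFFF;
    $high  = ($hi + ($cross >> 16) + ($sum >> 32)) & 0xFFFFFFFF;
    return [$high, $low];
}

[$hi, $lo] = mul32x32(0x12345678, 0x9ABCDEF0);
printf("halves: %08X%08X\n", $hi, $lo);                 // recombined words
printf("native: %016X\n", 0x12345678 * 0x9ABCDEF0);     // matches line above
```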
794c3a401ef3b2528027045179866c221b5c30df
[ "C++" ]
1
C++
Mallangs/32To64
0bb56b4f3551bbe607be32d368d341c90736d7fd
11f795b5be01068ffc1fd0155edc8e3eff98afdd
refs/heads/master
<repo_name>grafiters/cursor-pack-TeainuxOS_XI<file_sep>/cursor-pack-tealinux-dark/readme.md
## TEA CURSORS PACK

![](../update.PNG)

This x-cursor theme is inspired by the Captaine Cursors pack -> (https://github.com/ChrisP4/captain-frank-cursors/tree/master/src/svg). All of the resources were created with ```Inkscape```.

## Installation

Installing this Tea Cursors Pack is easy: just copy the cursor theme into the ./icons directory

```
cp -pr dist/ ~/icons/Tea-Cursors-Dark
```

or into the /usr/share/icons/Tea-Cursors-Dark directory

```
cp -pr dist/ /usr/share/icons/Tea-Cursors-Dark
```

Then try switching to it with a desktop tool, or from the terminal. First run

```
sudo update-alternatives --install /usr/share/icons/default/index.theme x-cursor-theme /usr/share/icons/Tea-Cursors-Dark/cursor.theme 65
```

or

```
gsettings set org.gnome.desktop.interface cursor-theme "Tea-Cursors-Dark" & sudo ln -fs /usr/share/icons/Tea-Cursors-Dark/cursor.theme /etc/alternatives/x-cursor-theme
```

then apply the update with

```
sudo update-alternatives --config x-cursor-theme
```

Next, edit the default file at `/etc/alternatives/x-cursor-theme` and change its contents to `Inherits=Tea-Cursors-Dark`, then run `sudo reboot`.

## Build From Source

If you want to change or edit these cursors, you can compile the cursor pack yourself from the included sources. First edit the `.svg` sources located in `src/svg`, then run

```
sh build.sh
```

All of the `.svg` sources will be generated into pixmaps with the appropriate aliases, and the generated output is placed automatically in `dist/`.

## Error

If the generation process fails with an error that reads

```
unknown cursor config
```

it means that a file under `/config` is empty, or that the cursor config file does not match the cursor source. Also make sure that when generating cursors, no image uses an odd `DPI` value such as `96.22 dpi`; such a value is strongly discouraged because the `xcursorgen` generator ignores those files. Use a fixed display size instead, for example `96 dpi` / `120 dpi`.
<file_sep>/readme.md
## Tea Cursors Pack

![](update.PNG)

> ```Tea Cursors Pack``` is a new cursor pack developed by the ```DOSCOM DEVELOPERS``` alongside the update of the `TEALINUXOS XI` distro.

> ```Tea Cursors Pack``` itself comes in 2 modes, dark and light: the `dark` mode suits you if you generally prefer bright colors, while the `light` mode suits you if you like calm, since it is friendlier on the eyes.

> ```Tea Cursors Pack``` will be released together with the latest ```TEALINUXOS``` update. For now the ```Tea Cursor Pack``` is still in ```BETA``` mode and will be developed further.
<file_sep>/cursor-pack-tealinux-dark/build.sh
#!/bin/bash

SRC=$PWD/src
cd "$SRC"
mkdir -p x1 x1_25 x1_5 x2
cd "$SRC"/svg

# render every SVG source at the four cursor sizes
find . -name "*.svg" -type f -exec sh -c 'inkscape -z -e "../x1/${0%.svg}.png" -w 32 -h 32 $0' {} \;
find . -name "*.svg" -type f -exec sh -c 'inkscape -z -e "../x1_25/${0%.svg}.png" -w 40 -h 40 $0' {} \;
find . -name "*.svg" -type f -exec sh -c 'inkscape -z -e "../x1_5/${0%.svg}.png" -w 48 -h 48 $0' {} \;
find . -name "*.svg" -type f -exec sh -c 'inkscape -z -e "../x2/${0%.svg}.png" -w 64 -h 64 $0' {} \;
cd $SRC

# generate cursors
BUILD="$SRC"/../dist
OUTPUT="$BUILD"/cursors
ALIASES="$SRC"/cursorList

if [ ! -d "$BUILD" ]; then
    mkdir "$BUILD"
fi
if [ ! -d "$OUTPUT" ]; then
    mkdir "$OUTPUT"
fi

echo -ne "Generating cursor theme...\\r"
for CUR in config/*.cursor; do
    BASENAME="$CUR"
    BASENAME="${BASENAME##*/}"
    BASENAME="${BASENAME%.*}"
    xcursorgen "$CUR" "$OUTPUT/$BASENAME"
done
echo -e "Generating cursor theme... DONE"

cd "$OUTPUT"

# generate aliases
echo -ne "Generating shortcuts...\\r"
while read ALIAS; do
    FROM="${ALIAS#* }"
    TO="${ALIAS% *}"
    if [ -e $TO ]; then
        continue
    fi
    ln -sr "$FROM" "$TO"
done < "$ALIASES"
echo -e "Generating shortcuts... DONE"

cd "$PWD"

echo -ne "Generating Theme Index...\\r"
INDEX="$OUTPUT/../index.theme"
if [ ! -e "$INDEX" ]; then
    touch "$INDEX"
    echo -e "[Icon Theme]\nName=Tea Cursor Dark\nDescription=tea-cursor-dark\n" > "$INDEX"
fi
echo -e "Generating Theme Index... DONE"
1c8f5145679b2e9976e5d1ae2b6079c1c2d68cd0
[ "Markdown", "Shell" ]
3
Markdown
grafiters/cursor-pack-TeainuxOS_XI
4c20b343560af92d6e30d2c152f241e6882cc04e
e0cf5369abd8c51784e46e15df9f4ebef75ba995
refs/heads/master
<repo_name>DobryakK555/testTask<file_sep>/TestTask/Pages/Result.cshtml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.RazorPages;

namespace TestTask
{
    public class ResultModel : PageModel
    {
        private readonly IMongoDbHelper _mongoDbHelper;

        public string wordOfTheUser { get; set; }
        public int userWordCount { get; set; }
        public KeyValuePair<string, int> wordOfTheDay { get; set; }
        public Dictionary<string, int> todayWords { get; set; }
        public List<KeyValuePair<string, int>> Statisics = new List<KeyValuePair<string, int>>();

        public ResultModel(IMongoDbHelper mongoDbHelper)
        {
            _mongoDbHelper = mongoDbHelper;
        }

        public void OnGet(string wordOfTheUser)
        {
            this.wordOfTheUser = wordOfTheUser;
            userWordCount = _mongoDbHelper.TodayWords.ContainsKey(this.wordOfTheUser) ? _mongoDbHelper.TodayWords[this.wordOfTheUser] : 0;
            todayWords = _mongoDbHelper.TodayWords;
            wordOfTheDay = _mongoDbHelper.GetWordOfTheDay();
            foreach (var word in todayWords.Keys)
            {
                var distance = LevenshteinDistance.Compute(wordOfTheUser, word);
                if (distance == 1)
                    Statisics.Add(new KeyValuePair<string, int>(word, todayWords[word]));
            }
        }
    }
}
<file_sep>/TestTask/MongoDbHelper.cs
using Microsoft.Extensions.Configuration;
using MongoDB.Bson;
using MongoDB.Driver;
using System;
using System.Collections.Generic;
using System.Linq;
using TestTask.Models;

namespace TestTask
{
    public class MongoDbHelper : IMongoDbHelper
    {
        private readonly IConfiguration _configuration;
        private string connectionString = null;
        private readonly MongoClient client = null;
        private IMongoDatabase database = null;
        private IMongoCollection<UserInput> collection = null;

        public Dictionary<string, int> TodayWords { get; set; }

        public MongoDbHelper(IConfiguration configuration)
        {
            _configuration = configuration;
            connectionString = _configuration.GetConnectionString("MongoDB");
            client = new MongoClient(connectionString);
            database = client.GetDatabase(_configuration.GetSection("DBName").Value);
            collection = database.GetCollection<UserInput>(_configuration.GetSection("CollectionName").Value);
        }

        public KeyValuePair<string, int> GetWordOfTheDay()
        {
            return TodayWords.Aggregate((l, r) => l.Value > r.Value ? l : r);
        }

        public string GetWordOfTheDayForUser(UserInput input)
        {
            var userTodayPost = CheckTodayUserPost(input.EmailAddress);
            if (userTodayPost != null)
                return userTodayPost.Word;
            collection.InsertOne(input);
            CheckTodayWordsDictionary();
            return input.Word;
        }

        public void CheckTodayWordsDictionary()
        {
            TodayWords = new Dictionary<string, int>();
            var filterBuilder = Builders<UserInput>.Filter;
            var dateFilter = filterBuilder.Gte(x => x.PostingTime, new BsonDateTime(DateTime.Today)) & filterBuilder.Lt(x => x.PostingTime, new BsonDateTime(DateTime.Today.AddDays(1)));
            var allInputs = collection.Find(dateFilter).ToList();
            foreach (var input in allInputs)
            {
                if (TodayWords.ContainsKey(input.Word))
                    TodayWords[input.Word]++;
                else
                    TodayWords[input.Word] = 1;
            }
        }

        private UserInput CheckTodayUserPost(string email)
        {
            var filterBuilder = Builders<UserInput>.Filter;
            var dateFilter = filterBuilder.Gte(x => x.PostingTime, new BsonDateTime(DateTime.Today)) & filterBuilder.Lt(x => x.PostingTime, new BsonDateTime(DateTime.Today.AddDays(1)));
            var emailFilter = filterBuilder.Eq(x => x.EmailAddress, email);
            var searchResult = collection.Find(dateFilter & emailFilter).ToList().FirstOrDefault();
            return searchResult;
        }
    }
}
<file_sep>/TestTask/Models/UserInput.cs
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using System;

namespace TestTask.Models
{
    public class UserInput
    {
        [BsonId]
        [BsonRepresentation(BsonType.ObjectId)]
        public ObjectId Id { get; set; }
        public string Word { get; set; }
        public string EmailAddress { get; set; }
        public DateTime PostingTime { get; set; }
    }
}
<file_sep>/TestTask/IMongoDbHelper.cs
using System.Collections.Generic;
using TestTask.Models;

namespace TestTask
{
    public interface IMongoDbHelper
    {
        string GetWordOfTheDayForUser(UserInput input);
        void CheckTodayWordsDictionary();
        KeyValuePair<string, int> GetWordOfTheDay();
        Dictionary<string, int> TodayWords { get; set; }
    }
}
<file_sep>/TestTask/Pages/Index.cshtml.cs
using System;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.RazorPages;
using Microsoft.Extensions.Logging;
using TestTask.Models;

namespace TestTask.Pages
{
    public class IndexModel : PageModel
    {
        private readonly IMongoDbHelper _mongoDbHelper;

        public IndexModel(IMongoDbHelper mongoDbHelper)
        {
            _mongoDbHelper = mongoDbHelper;
        }

        public void OnGet()
        {
            _mongoDbHelper.CheckTodayWordsDictionary();
        }

        public IActionResult OnPost()
        {
            var word = Request.Form["word"];
            var emailAddress = Request.Form["emailAddress"];
            var todayUserWord = _mongoDbHelper.GetWordOfTheDayForUser(new UserInput
            {
                Word = word,
                EmailAddress = emailAddress,
                PostingTime = DateTime.Today
            });
            return new RedirectToPageResult("/result", new { wordOfTheUser = todayUserWord });
        }
    }
}
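The helper above picks the word of the day with a max-by-count `Aggregate`, and the Result page keeps only words at Levenshtein distance 1 from the user's word. The same selection logic, re-sketched in PHP (the repo itself is C#; the sample words are invented) using PHP's built-in `levenshtein()` function:

```php
<?php
// Invented sample counts standing in for the MongoDB-backed TodayWords map.
$todayWords = ['cat' => 3, 'cart' => 5, 'car' => 2, 'dog' => 1];
$userWord = 'cat';

// Highest count wins, like GetWordOfTheDay()'s Aggregate.
arsort($todayWords);
$wordOfTheDay = array_key_first($todayWords);

// Keep words exactly one edit away, like the Result page's loop.
$statistics = [];
foreach ($todayWords as $word => $count) {
    if (levenshtein($userWord, $word) === 1) {
        $statistics[$word] = $count;
    }
}

echo "word of the day: $wordOfTheDay\n"; // cart
print_r($statistics);                    // cart => 5, car => 2
```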
07a28c5f20fd3a0042649bb64c21d433e8a9ca55
[ "C#" ]
5
C#
DobryakK555/testTask
b77af324dbcf11c0e9aad4daa9c408eb967dab6c
26f8d800cb089b73e516d7636c6366a360daa42a
refs/heads/master
<file_sep>/***************************************************
 *  Universidade Federal da Paraiba                *
 *  Student: <NAME>                                *
 *  Student ID: 11218796                           *
 *                                                 *
 ***************************************************/

#include <stdio.h>
#include <stdlib.h>
#include <time.h>

#define max 1000 // maximum number of items in the list

struct aluno {
    int matricula;
    char nome[100];
};

struct aluno vetor[max];

/* Definition of the insertion_sort function */
void insertion_sort(struct aluno vet[])
{
    FILE *p;
    char str[30] = "insertion-lista_ordenada.txt"; // name of the file to be created

    if (!(p = fopen(str, "w"))) {   /* If an error occurs while opening the file... */
        printf("Error! Could not open the file!\n");
        exit(1);                    /* ...the program aborts immediately */
    }

    int i = 0, j = 0, cont = 0;
    struct aluno aux;
    int x = 0;
    clock_t c2, c1;                 /* variables that count processor clock ticks */
    double tempo;

    c1 = clock();
    for (j = 1; j < max; ++j) {
        aux = vet[j];
        x = 0;                      /* reset the shift flag for this pass */
        for (i = j - 1; i >= 0 && vet[i].matricula > aux.matricula; --i) {
            vet[i + 1] = vet[i];
            if (!x) {
                x = 1;
            }
        }
        vet[i + 1] = aux;
        if (x) {
            cont++;
        }
    }
    c2 = clock();
    tempo = ((double)(c2 - c1)) * 1000 / CLOCKS_PER_SEC;

    /* Write the vector, sorted in ascending order, to the output file */
    for (i = 0; i < max; i++) {
        fprintf(p, " %d  %s\n", vet[i].matricula, vet[i].nome);
    }

    printf("\n>>INSERTION SORT<<\n");
    printf("\nEXECUTION TIME: %f\n", tempo);
    printf("NUMBER OF SWAPS: %d\n", cont);

    fclose(p);
}

/* Definition of the selection_sort function */
void selection_sort(struct aluno vet[])
{
    FILE *p;
    char str[30] = "selection-lista_ordenada.txt"; // name of the file to be created

    if (!(p = fopen(str, "w"))) {   /* If an error occurs while opening the file... */
        printf("Error! Could not open the file!\n");
        exit(1);                    /* ...the program aborts immediately */
    }

    int i, j, min, cont = 0;
    struct aluno aux;
    clock_t c2, c1;                 /* variables that count processor clock ticks */
    double tempo;

    c1 = clock();
    for (i = 0; i < (max - 1); i++) {
        /* The minimum starts as the first element that is not sorted yet */
        min = i;
        for (j = i + 1; j < max; j++) {
            /* If a smaller value is found, it becomes the new minimum */
            if (vet[j].matricula < vet[min].matricula) {
                min = j;
            }
        }
        /* If the minimum differs from the first unsorted element, swap them */
        if (i != min) {
            aux = vet[i];
            vet[i] = vet[min];
            vet[min] = aux;
            cont++;
        }
    }
    c2 = clock();
    tempo = ((double)(c2 - c1)) * 1000 / CLOCKS_PER_SEC;

    /* Write the vector, sorted in ascending order, to the output file */
    for (i = 0; i < max; i++) {
        fprintf(p, " %d  %s\n", vet[i].matricula, vet[i].nome);
    }

    printf("\n>>SELECTION SORT<<\n");
    printf("\nEXECUTION TIME: %f\n", tempo);
    printf("NUMBER OF SWAPS: %d\n", cont);

    fclose(p);
}

int main(void)
{
    char url[] = "Arquivo.txt";
    int i;
    FILE *arq;

    arq = fopen(url, "r");
    if (arq == NULL)
        printf("Error: could not open the file\n");
    else
        /* Reads the file to the end, filling the vector with the records found,
           following the pattern 'number', 'space', 'string' */
        for (i = 0; i < max; i++) {
            if ((fscanf(arq, "%d %s \n", &vetor[i].matricula, vetor[i].nome)) != EOF);
        }

    insertion_sort(vetor);

    rewind(arq);
    for (i = 0; i < max; i++) {
        if ((fscanf(arq, "%d %s \n", &vetor[i].matricula, vetor[i].nome)) != EOF);
    }

    selection_sort(vetor);

    rewind(arq);
    for (i = 0; i < max; i++) {
        if ((fscanf(arq, "%d %s \n", &vetor[i].matricula, vetor[i].nome)) != EOF);
    }

    fclose(arq);
    return 0;
}
<file_sep># Algoritmo-Insertion-Selection
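Insertion sort vs. selection sort over a list of student records (`matricula` + `nome`) read from `Arquivo.txt`. Each run writes the sorted list to `insertion-lista_ordenada.txt` / `selection-lista_ordenada.txt` and prints the elapsed time and a swap count.

A minimal build-and-run sketch (the name of the `.c` source file is not recorded in this dump, so `main.c` below is an assumption):

```bash
gcc -O2 main.c -o sorts
./sorts   # expects Arquivo.txt ("<matricula> <nome>" per line) in the working directory
```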
c03aba84dc32a73599f324377f4451023eb1803d
[ "Markdown", "C" ]
2
C
mriojnr/Algoritmo-Insertion-Selection
0d22f52ecb3f66c085b7baef7af6e2c36019f35b
c115cc42c34c22d8727f0912aa3e5d8ff04af64d
refs/heads/master
<file_sep># Python Challenge ## Mummy Money Get Rich&trade; Dashboard for Mr. Mummy to moniter his income. Once the scheme starts, Mr. Mummy can track his income in real time using the dashboard. ![alt text](https://github.com/vishalbharti1990/Python_Challenge/blob/master/snap.png) **The dash has the following components:** * Mummy Money Trend: This component has a dropdown selector and a line graph. Using the dropdown, the money trend for any member can be observed. The figure updates in real time when dropdown selection changes and also updates regular interval of 10 seconds, simulating a weeks trend. * Stats Card: Shows the stats for current week. It also has the "END SCHEME" button, which ends the MUMMY MONEY scheme and Mr. Mummy can walk away with the earnings. * New Recruits: Shows the list of new members recruited this week. * Eliminated: Shows the list of members eliminated from the scheme, as their tenure ran out. * Withdrawn: Shows the list of members, who left the program deliberately (modeled through a random probability generated from unifrom random distribution and marking the member for withdrawal if this is greator than a threshold(0.85)). **Libraries Used:** * Flask * Dash * Sqlite3 <file_sep>from mimesis import Person import sqlite3 from random import random as rand from sqlite3 import Error import sys def db_connect(): try: con = sqlite3.connect('mydatabase.db', check_same_thread=False) con.row_factory = sqlite3.Row return con except Error: print(Error) sys.exit() def create_table(con): # Init the connection cursorObj = con.cursor() try: # drop the table if it already exists cursorObj.execute("DROP TABLE IF EXISTS Investors") # commit changes to database con.commit() # Create the Investor table cursorObj.execute("CREATE TABLE Investors(Id integer PRIMARY KEY, Name text, Innocence real, Experience real, Charisma real, Status text)") # Commit the changes to database con.commit() except Error as e: print(e.args[0]) sys.exit() return cursorObj def data_iterator(n): # Instantiate mimesis name generator person = Person() # Supply information to execute the query for _ in range(n): yield person.full_name(), round(rand(),2), round(rand(),2), round(rand(),2), "Available" def create_Investors(population_size): # Connect to database and create the table Investor con = db_connect() cursor = create_table(con) # Randomly generate 1000 Investors and add them to the Investors table cursor.executemany("INSERT INTO Investors(Name, Innocence, Experience, Charisma, Status) VALUES(?,?,?,?,?)", data_iterator(population_size)) return con, cursor<file_sep>import sqlite3 from sqlite3 import Error from random import random as rand from random import sample import flask from threading import Lock import math, sys from Investors import create_Investors from dash import Dash, no_update import dash_core_components as dcc import dash_html_components as html from dash.dependencies import Output, Input, State from plotly import graph_objs as go # Thread lock lock = Lock() # Refresh interval in secs refresh_interval = 10 # The size of Investors pool population_size = 10000 # Create a Investors table and return a cursor object con, cursor = create_Investors(population_size) # curr_week = 0 # Fetch all rows from the Investors table data_rows = cursor.execute("SELECT * from Investors").fetchall() # sample 10 initial members ids randomly initial_members = sample(data_rows, 10) ##<<---Update the status of selected member(s) in Inverstors table to 'Un-Available'--->>## # Iterator to run the executemany update 
query def Status_iterator(rows): for r in rows: yield "Un-Available", r['id'] # To avoid recursive cursor error we use thread locks for each query try: lock.acquire(True) cursor.executemany("Update Investors SET Status=? where id = (?)", Status_iterator(initial_members)) con.commit() except Error as e: print(e.args[0]) print("line 44") sys.exit() finally: lock.release() ##<<---Create a Members table to hold information for current active members--->>## try: lock.acquire(True) # drop the table if it already exists cursor.execute("DROP TABLE IF EXISTS Members") # commit changes to database con.commit() # Create the Members table cursor.execute("CREATE TABLE Members(M_Id integer PRIMARY KEY, \ Name text, Innocence real, Experience real, Charisma real, \ Status text, Money_trend text, Recruitor integer, Investor_id\ integer, start_week integer, end_week integer)") con.commit() except Error as e: print(e.args[0]) print("line 66") sys.exit() finally: lock.release() ##<---Insert Mummy and other 10 members into the Members table--->## # Iterator/function that returns/yields # data to insert into Members table def insert_Iterator(rows, rec_id, start_week): # end_week -> math.floor((1-Innocence) x Experience x Charisma x 10) for r in rows: yield r['Name'], r['Innocence'], r['Experience'], r['Charisma'],\ "Active", "0-", rec_id, r['Id'], start_week, start_week + \ math.floor((1-r['Innocence'])*r['Experience']*r['Charisma']*10) try: lock.acquire(True) cursor.execute("Insert into Members(Name, Innocence, Experience, Charisma, \ Status, Money_trend, Recruitor, Investor_id, start_week, end_week) VALUES\ ('Mummy', 0, 1, 1, 'Proactive', '5000-', -1, 0, 0, 10000)") cursor.executemany("Insert into Members(Name, Innocence, Experience, Charisma, \ Status, Money_trend, Recruitor, Investor_id, start_week, end_week) VALUES\ (?,?,?,?,?,?,?,?,?,?)", insert_Iterator(initial_members, 1, 0)) con.commit() except Error as e: print(e.args[0]) print("line 94") sys.exit() finally: lock.release() # Function to generate the money trend plot def get_figure(data): member_money = data['Money_trend'].split('-')[:-1] y = [int(m) for m in member_money] x = list(range(1,len(y)+1)) hover_info = ["Money Earned : $"+m for m in member_money] trace_exp = go.Scatter( x=x, y=y, text=hover_info, hoverinfo='text', mode="lines+markers", name="Money", line=dict(color="#bcdeba") ) data=[trace_exp] colors = { 'background': 'white', 'text': 'black' } layout = go.Layout( showlegend = False, hovermode='closest', plot_bgcolor = colors['background'], paper_bgcolor = colors['background'], font = dict(color = colors['text']), height=300, xaxis=dict( autorange=True, showgrid=False, zeroline=False, showline=False, ticks='', showticklabels=False ), yaxis=dict( autorange=True, showgrid=True, zeroline=False, showline=True, tickwidth=2, showticklabels=True ) ) fig = go.Figure(data = data, layout = layout) return fig # Function to generate/update weeks info def get_weeks_div(data, week, cnt_new, avl_inv): avg_money, active_cnt, cnt_elim = 0, 0, 0 # get mummys' total earnings for member in data: if member['end_week'] - member['start_week'] == 0: cnt_elim += 1 if member['M_Id'] == 1: mummy_total = member['Money_trend'].split('-')[-2] else: active_cnt += 1 money = member['Money_trend'].split('-')[-2] avg_money += int(money) if active_cnt == 0: avg_money = 0 else: avg_money /= active_cnt div_data = [ html.Br(), html.H5("Stats for this Week"), html.H6("Available Investors : "+str(avl_inv)), html.H6("Total Active Members : "+str(active_cnt)), html.H6("Mummys' 
Total Earnings: $"+mummy_total, id="mummy_money"), html.Br(), "New Members: "+str(cnt_new), html.Br(), "Under Elimination: "+str(cnt_elim), html.Br(), html.P("Avg. Member Earning: $"+str(round(avg_money, 2)), id="mem_money") ] return div_data new_data = cursor.execute("Select * from Members WHERE Status='Active' OR Status='Proactive'").fetchall() names = [str(d['M_Id']) + '. ' + d['Name'] for d in new_data] external_stylesheets = [ 'https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css' ] app = Dash(__name__, external_stylesheets=external_stylesheets) app.layout = html.Div([ #### HEADER #### html.Div([ html.H2('''Welcome to Mummy Money Get Rich\u2122'''), html.Img(src="/assets/pic.png") ], className="banner"), #### LEFT STATS PANEL #### html.Div([ html.Div([ html.Div([html.Div([html.H4("Stats Card")], className="card-title"), html.Div(get_weeks_div(new_data, 0, 10, 9990), id="weeks_div"), html.Br() ], id="info_div"), html.Button('End Scheme', id="end_btn", className="btn btn-danger"), ], className="col-sm-3 mx-auto text-center", id="stats_div"), #### RIGHT MONEY TREND PANEL #### html.Div([ html.Div([html.H4("Mummy Money Trend")], className="card-title"), html.Div([ dcc.Dropdown(options=[{'label': v, 'value': v} for v in names], value="1. Mummy", id="ddownbtn", searchable = False) ], id="ddownDiv"), html.Div([ html.Div([dcc.Graph(id='live-graph', figure=get_figure(new_data[0]))], id='graph-div'), dcc.Interval(id='graph-update', interval=refresh_interval*1000, n_intervals=0), ]) ], className="col-sm-8 mx-auto text-center", id="fig_div") ], className="row no-gutter", id="display_div"), html.Div([ html.Div([ html.Div(html.H4("New Recruits"), className="card-title"), html.Div("NONE", id="recruit_data") ], id="recr_div", className="col-sm-3 mx-auto text-center"), html.Div([ html.Div(html.H4("Eliminated"), className="card-title"), html.Div("NONE", id="elim_data") ], id="elim_div", className="col-sm-3 mx-auto text-center"), html.Div([ html.Div(html.H4("Withdrawn"), className="card-title"), html.Div("NONE", id="with_data") ], id="with_div", className="col-sm-3 mx-auto text-center") ], className="row no-gutter", id="membs_info_div") ], className="container-fluid") # Iterator to update member tree def tree_update_iterator(child_members, new_parent): for child_member in child_members: yield new_parent, child_member['M_Id'] # The function update_member_table: # 1. Updates the member tree # 2. Updates leaving member as Inactive # 3. Adds money to mummys earning if # members tenure ran out # Set 'withdrawal' as TRUE if member withdrew his funds deliberately def update_member_table(leaving_member, active_members, n, withdrawal=False): global cursor, con # New parent of child members new_parent = leaving_member['Recruitor'] # Get mummys' record mummy_record = cursor.execute("SELECT * FROM Members WHERE M_Id = 1").fetchone() # Get all child members child_members = [member for member in active_members if member['Recruitor'] == leaving_member['M_Id']] try: lock.acquire(True) # Mark the leaving member as inactive cursor.execute("UPDATE Members SET Status=? WHERE M_Id=?", ('Inactive', leaving_member['M_Id'])) # Update the tree cursor.executemany("UPDATE Members SET Recruitor=? 
WHERE M_Id=?", tree_update_iterator(child_members, new_parent)) if withdrawal == False: # Update Mummys' money by adding the leaving members money mummy_total_money = int(mummy_record['Money_trend'].split('-')[-2]) members_total_money = int(leaving_member['Money_trend'].split('-')[-2]) new_mummy_money = mummy_record['Money_trend'] + str(mummy_total_money + members_total_money) + '-' cursor.execute("UPDATE Members SET Money_trend=? WHERE M_Id=?", (new_mummy_money, 1)) # Commit changes to database con.commit() except Error as e: print(e.args[0]) print("Line 275") sys.exit() finally: lock.release() def insert_member(rows, rec_id, start_week): return rows['Name'], rows['Innocence'], rows['Experience'], rows['Charisma'],\ "Active", "0-", rec_id, rows['Id'], start_week, start_week + \ math.floor((1-rows['Innocence'])*rows['Experience']*rows['Charisma']*10) def recruit_member(member, active_members, n): global con, cursor ### If no Investors are available return ### try: lock.acquire(True) available_investors = cursor.execute("SELECT * FROM\ Investors WHERE Status='Available'").fetchall() except Error as e: print(e.args[0]) print("line 298") sys.exit() finally: lock.release() if len(available_investors) < 1: return ### Else run the recuiting simulation ### # Count direct-members recruited by member X = sum(1 for membs in active_members if membs['M_Id'] == member["M_Id"]) # Get mummys' record try: lock.acquire(True) mummy_record = cursor.execute("SELECT * FROM Members WHERE M_Id = 1").fetchone() except Error as e: print(e.args[0]) print("line 316") sys.exit() finally: lock.release() # Compute the recruiting threshold probability if X == 1: p_thresh = 0 else: p_thresh = member['Experience'] * member['Charisma'] * (1-math.log10(X)) # Probability of recruiting p_recruit = rand() # A new member is recruited if this condition is met if p_recruit > p_thresh: # Randomly sample a Investor new_member = sample(available_investors, 1)[0] # Acceptance threshold probaility p_accept_thresh = new_member['Innocence'] * (1 - new_member['Experience']) # Probability of acceptance p_accept = rand() # Investor accepts the offer if p_accept > p_accept_thresh: # Insert the new member in Members table try: lock.acquire(True) cursor.execute("INSERT into Members(Name, Innocence, Experience, Charisma, \ Status, Money_trend, Recruitor, Investor_id, start_week, end_week) VALUES\ (?,?,?,?,?,?,?,?,?,?)", (insert_member(new_member, member['M_Id'], n))) new_recruit = str(cursor.lastrowid) + '. ' + new_member['Name'] # Update status of member in Investors table cursor.execute("UPDATE Investors SET Status=? WHERE Id=?", ('Un-Available', new_member['Id'])) # Add $400 to Mummys account and $100 to recruiting members account mummy_total_money = int(mummy_record['Money_trend'].split('-')[-2]) members_total_money = int(member['Money_trend'].split('-')[-2]) new_mummy_money = mummy_record['Money_trend'] + str(mummy_total_money + 400) + '-' new_member_money = member['Money_trend'] + str(members_total_money + 100) + '-' cursor.execute("UPDATE Members SET Money_trend=? WHERE M_Id=?",(new_member_money, member['M_Id'])) cursor.execute("UPDATE Members SET Money_trend=? 
WHERE M_Id=?", (new_mummy_money, 1)) con.commit() except Error as e: print(e.args[0]) print("Line 368") sys.exit() finally: lock.release() return new_recruit return -1 def run_weeks_simulation(active_members, n): week_summ_dict = { 'recruited' : [], 'eliminated' : [], 'withdrawn' : [] } for member in active_members: # Generate probility of withdrawl p_withdraw = rand() # Members tenure ran out if member['end_week']-member['start_week'] == 0: # Update the Members table update_member_table(member, active_members, n) week_summ_dict['eliminated'].append(\ str(member['M_Id']) + '. ' + member['Name']) # Member withdraws elif p_withdraw > 0.85: update_member_table(member, active_members, n, withdrawal=True) week_summ_dict['withdrawn'].append(\ str(member['M_Id']) + '. ' + member['Name']) # Run recruiting simulation else: res = recruit_member(member, active_members, n) if res != -1: week_summ_dict['recruited'].append(res) return week_summ_dict @app.callback([Output('graph-div', 'children')], [Input('ddownbtn', 'value')]) def change_graph_member(value): global cursor # Get selected members M_Id m_id = int(value.split('.')[0]) # Fetch members data from Members table try: lock.acquire(True) data = cursor.execute("SELECT * FROM Members WHERE M_Id=?", [m_id]).fetchone() except Error as e: print(e.args[0]) print("line 420") sys.exit() finally: lock.release() # Generate figure fig = get_figure(data) new_fig = dcc.Graph(figure=fig, id="live-graph") return [new_fig] # Dynamic update calls @app.callback([Output('live-graph', 'figure'), Output('ddownDiv', 'children'), \ Output('weeks_div', 'children'), Output('recruit_data', 'children'),\ Output('elim_data', 'children'), Output('with_data', 'children')],\ [Input('graph-update', 'n_intervals')], [State('ddownbtn', 'value')]) def update_graph_scatter(n, value): global con, cursor # print(n, curr_week) # Get list of active members try: lock.acquire(True) active_members = cursor.execute("Select * from Members WHERE Status='Active'").fetchall() except Error as e: print(e.args[0]) print("line 447") sys.exit() finally: lock.release() # Run the weeks simulation for all active members week_summary = run_weeks_simulation(active_members, n) # print(week_summary) # Update recruited, eliminated and withdrawn members' list recruit_list, elim_list, with_list = [], [], [] if len(week_summary['recruited']) == 0: recruit_list = "NONE" else: for rec_member in week_summary['recruited']: recruit_list.append(html.P(rec_member, className="rec_text")) if len(week_summary['eliminated']) == 0: elim_list = "NONE" else: for elim_member in week_summary['eliminated']: elim_list.append(html.P(elim_member, className="elim_text")) if len(week_summary['withdrawn']) == 0: with_list = "NONE" else: for with_member in week_summary['withdrawn']: with_list.append(html.P(with_member, className="with_text")) # Get updated-list of active members after simulation try: lock.acquire(True) active_members = cursor.execute("Select * from Members WHERE Status='Active' or Status='Proactive'").fetchall() except Error as e: print(e.args[0]) print("line 464") sys.exit() finally: lock.release() # Update Options for dropdown selector opts = [{'label' : str(active_member['M_Id']) + '. ' + active_member['Name'], \ 'value' : str(active_member['M_Id']) + '. ' + active_member['Name']} \ for active_member in active_members] #+ [{'label' : '1. Mummy', 'value' : '1. Mummy'}] # Update value for dropdown selector if value != '1. Mummy': # Create selection options from list of active members mem_ids = set([str(m['M_Id'])+'. 
'+m['Name'] for m in active_members]) if value not in mem_ids: new_value = "1. Mummy" else: new_value = value else: new_value = "1. Mummy" # Create the new dropdown menu new_dropdown = dcc.Dropdown(options=opts, value=new_value, id="ddownbtn", searchable = False) m_id = int(new_value.split('.')[0]) # Fetch record for current selected user try: lock.acquire(True) member_record = cursor.execute("Select * from Members WHERE M_Id=?", [m_id]).fetchone() avl_inv = cursor.execute("SELECT COUNT(*) as cnt FROM Investors WHERE Status='Available'").fetchone()['cnt'] except Error as e: print(e.args[0]) print('line 497') sys.exit() finally: lock.release() fig = get_figure(member_record) weeks_div = get_weeks_div(active_members, n, len(week_summary['recruited']), avl_inv) return fig, [new_dropdown], weeks_div, recruit_list, elim_list, with_list @app.callback([Output('display_div', 'children'), Output('graph-update','max_intervals'),\ Output('membs_info_div', 'style')], [Input('end_btn', 'n_clicks')], \ [State('mummy_money', 'children'), State('info_div', 'children'), State('mem_money','children')]) def on_click(n_clicks, mm_text, info_div, m_text): global cursor if n_clicks is not None and n_clicks == 1: print("Button Clicked!!") try: lock.acquire(True) total_members = cursor.execute("SELECT COUNT(*) as mem_cnt FROM Members").fetchone() except Error as e: print(e.args[0]) sys.exit() finally: lock.release() res_div = html.Div([ html.H2("MUMMY TERMINATED THE PROGRAM!!"), html.Br(), html.Div([ html.Div([html.H4("Program Summary: ")], className="card-title"), html.Br(), html.H6(mm_text), html.H6(m_text), html.H6("Total member recruited: "+str(total_members['mem_cnt'])) ], className="mx-auto text-center col-sm-6", id="stats_div") ], className="mx-auto") return res_div, 0, {'display': 'none'} return no_update, no_update, no_update if __name__ == "__main__": app.run_server(debug=False)
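
# ------------------------------------------------------------------
# Running the dashboard (editor's sketch; the dump ships no
# requirements file, so the package list below is inferred from the
# imports above -- dash, plotly, mimesis -- plus the stdlib sqlite3):
#
#   pip install dash plotly mimesis
#   python app.py    # "app.py" is an assumed file name; it is not
#                    # recorded in this dump
#
# app.run_server then serves the dashboard on http://127.0.0.1:8050,
# Dash's default host and port.
# ------------------------------------------------------------------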
7bfee3d9b1e14ffe40ca3132bd237d889bb3c7dc
[ "Markdown", "Python" ]
3
Markdown
jim-cassidy/Python_Challenge
5be9998b8744e50a9c736792a481864c15ad44ff
7c41a7be06feb3e14fece665a4ab9491a63d9994
refs/heads/master
<file_sep>Interpreter
===========

Jobs of the interpreter:

1. parse the code:
   This takes Ruby code, and gives us back an AST
   (an object we can work with, that represents the syntax)
   `Parser::Ruby22.parse("1+1")`
2. create the internal objects that allow Ruby to work (object model)

   ```ruby
   # Object
   {
     class: someclass,
     instance_variables: {}
   }

   # Class
   {
     # class stuff
     superclass: some_class,
     methods: {},

     # constant stuff
     constants: {},

     # also, a class is an object, so it has their keys, too
     class: someclass,
     instance_variables: {}
   }

   # Binding
   {
     self: some_object,
     local_variables: {},
     return_value: some_object,
     next_binding: some_binding,
   }
   ```
3. Evaluate the AST: manipulate the internal objects that allow Ruby to work (object model).
   How to manipulate them will depend on what kind of AST it is.
   E.g. `"abc"` becomes `(str "abc")`, so we see that it has type str,
   go create a new String object, then put it as the current return value
   in the current binding.

Starting spot
=============

Given this code:

```ruby
class User
  def initialize(name)
    self.name = name
  end

  def name
    @name
  end

  def name=(name)
    @name = name
  end
end

user = User.new("Josh")
puts user.name
```

We need to Interpret it:

* From the outside:
  * `bin/rri examples/print_username.rb` and it prints `Josh`
* From the inside:
  * We should see that there is now a class "User"
    * with instance methods "initialize", "name", "name="
  * Our world should have an instance of User with @name="Josh"
  * "Josh\n" was printed
<file_sep>require 'parser/ruby23'
require 'pry'

class Rri
  def self.parse(code)
    return Parser::Ruby23.parse(code)
  end

  def initialize(ast:, stdout:)
    @ast = ast
    @stdout = stdout
  end

  def interpret()
    interpret_ast(@ast)
  end

  def current_value
    @current_value
  end

  def string_class
    @string_class ||= {
      human_name: "String",
      class: 'FIXME',
      ivars: {},
    }
  end

  def nil_object
    @nil_object ||= {
      human_name: "OUR NIL",
      class: 'FIXME',
      ivars: {},
    }
  end

  private

  def interpret_ast(ast)
    case ast.type
    when :str
      @current_value = { class: string_class, ivars: {}, data: ast.children[0] }
    when :begin
      ast.children.each do |child|
        interpret_ast(child)
      end
    when :nil
      @current_value = nil_object
    else
      binding.pry
    end
  end
end
<file_sep>#!/usr/bin/env ruby
lib_dir = File.expand_path('../lib', __dir__)
$LOAD_PATH.unshift(lib_dir)

require 'rri'

ruby_filename = ARGV[0]
ruby_code = File.read(ruby_filename)

ast = Rri.parse(ruby_code)
interpreter = Rri.new(ast: ast, stdout: $stdout)
interpreter.interpret
<file_sep>lib_root = File.expand_path('..', __dir__)

RSpec.describe 'the binary, rri', bin: true do
  it 'runs the binary' do
    Dir.chdir lib_root do
      # first run against Ruby
      ruby_output = `ruby examples/print_username.rb`
      expect($?).to be_success

      # then against our executable
      rri_output = `bin/rri examples/print_username.rb`
      expect($?).to be_success

      # they should have the same output
      expect(rri_output).to eq ruby_output
    end
  end
end
<file_sep>require 'rri'

RSpec.describe Rri do
  def interpret(code)
    ast = Rri.parse(code)
    rri = Rri.new(ast: ast, stdout: "")
    rri.interpret
    rri
  end

  def assert_object(object, assertions)
    assertions.each do |assertion_type, value|
      case assertion_type
      when :class
        correct_class = object.fetch(:class).equal? value
        expect(correct_class).to be_truthy,
          "Expected #{describe_object value}'s class to be #{describe_object object}"
      when :data
        expect(object.fetch :data).to eq value
      when :methods
        expect(object.fetch :methods).to eq value
      when :superclass
        expect(object.fetch :superclass).to equal value
      when :method_names
        expect(object.fetch(:methods).keys).to eq value
      when :classname
        expect(object.fetch(:class).fetch(:human_name)).to eq value
      when :has_method
        expect(object.fetch(:methods).keys).to include value
      when :ivars
        expect(object.fetch(:ivars).keys).to eq value
      when :is
        expect(object.equal?(value)).to be_truthy,
          "Expected #{describe_object value} to be #{describe_object object}"
      else
        raise "Unknown assertion type: #{assertion_type.inspect}"
      end
    end
  end

  def describe_object(object)
    if nil.equal? object
      raise "ACTUAL RUBY NIL (vs our interpreter's nil)!"
    elsif object.key?(:human_name)
      "{human_name: #{object.fetch :human_name}, ...}"
    else
      "{class: #{object.fetch(:class).fetch(:human_name)}, ...}"
    end
  end

  describe 'setting up the world' do
    # come back to this if we need it
  end

  it 'interprets strings', w: true do
    rri = interpret("'abc'")
    assert_object rri.current_value, class: rri.string_class, data: "abc", ivars: []
  end

  it 'interprets nil', w: true do
    rri = interpret('nil')
    assert_object rri.current_value, is: rri.nil_object
  end

  it 'interprets multiple expressions', w: true do
    rri = interpret("'a'; 'b'")
    assert_object rri.current_value, class: rri.string_class, data: "b"
  end

  it 'sets and gets local variables' do
    rri = interpret("a = 'abc'; b = 'def'; a")
    assert_object rri.current_value, data: "abc"

    rri = interpret("a = 'abc'; b = 'def'; b")
    assert_object rri.current_value, data: "def"
  end

  describe 'class' do
    it 'defines classes as constants under Object' do
      rri = interpret("class User; end")
      user_class = rri.object_class[:constants][:User]
      assert_object user_class,
        class: rri.classy_class,
        ivars: [],
        methods: {},
        superclass: rri.object_class
    end

    describe 'evaluating the body' do
      it 'defaults to nil' do
        rri = interpret("class User; end")
        assert_object rri.current_value, is: rri.nil_object
      end

      it 'returns the last line in the body' do
        rri = interpret("class User; 'abc'; end")
        assert_object rri.current_value, class: rri.string_class, data: "abc"
      end

      it 'records method definitions' do
        rri = interpret("class User; end")
        user_class = rri.object_class[:constants][:User]
        assert_object user_class, method_names: []

        rri = interpret("class User; def zomg; end; end")
        user_class = rri.object_class[:constants][:User]
        assert_object user_class, method_names: [:zomg]
      end

      it 'evaluates in a binding for the class it is defining' do
        rri = interpret("class User; self; end")
        user_class = rri.object_class[:constants][:User]
        assert_object rri.current_value, is: user_class

        rri = interpret("class User; self; end; self")
        assert_object rri.current_value, is: rri.main_object
      end

      it 'sets the superclass to Object by default' do
        rri = interpret('class A; end; A')
        assert_object rri.current_value, superclass: rri.object_class
      end
    end
  end

  describe 'toplevel' do
    describe 'main' do
      it 'is an instance of Object with no instance variables' do
        rri = interpret 'self'
        assert_object rri.main_object, class: rri.object_class, ivars: []
      end

      it 'has to_s and inspect defined on its singleton class to return the string "main"'
    end

    it 'sets self to main' do
      rri = interpret("self")
      assert_object rri.current_value, is: rri.main_object
    end

    it 'has no local variables' do
      rri = interpret("")
      expect(rri.stack.last[:locals]).to be_empty
    end

    it 'defines methods in Object' do
      rri = interpret "def lol; end"
      assert_object rri.object_class, has_method: :lol
    end
  end

  describe 'invoking methods' do
    it 'invokes it on "self" if no target is provided' do
      rri = interpret("def a; self; end; a")
      assert_object rri.current_value, is: rri.main_object
    end

    it 'invokes it on the target, if the target is provided' do
      rri = interpret("class A; def b; self; end; end; A.new.b")
      assert_object rri.current_value, classname: :A
    end

    it 'has its own set of local variables' do
      rri = interpret("a = 'main'; def c; a = 'from c'; a; end; c")
      assert_object rri.current_value, data: 'from c'

      rri = interpret("a = 'main'; def c; a = 'from c'; a; end; c; a")
      assert_object rri.current_value, data: 'main'
    end

    it 'has a return value' do
      rri = interpret("def a; 'whatev'; end; a")
      assert_object rri.current_value, class: rri.string_class, data: "whatev"
    end

    it 'defaults the return value to nil' do
      rri = interpret("def a; end; a")
      assert_object rri.current_value, is: rri.nil_object
    end

    it 'evaluates one argument in the context of the caller' do
      rri = interpret("def a(b); b; end; b='Josh'; a(b);")
      assert_object rri.current_value, class: rri.string_class, data: "Josh"
    end

    it 'evaluates multiple arguments in the context of the caller' do
      rri = interpret("def a(b, c); b; c; end; b='Josh'; c='Lovisa'; a(b, c);")
      assert_object rri.current_value, class: rri.string_class, data: "Lovisa"
    end
  end

  describe 'instance variables' do
    specify 'setting an ivar emits the ivar as the current value' do
      rri = interpret("@a = 'b'")
      assert_object rri.current_value, class: rri.string_class, data: "b"
    end

    it 'getting an ivar sets it as the current value' do
      rri = interpret("@a = 'b'; 'c'; @a")
      assert_object rri.current_value, class: rri.string_class, data: "b"
    end

    it 'stores the ivars on self' do
      rri = interpret("@a = 'b'")
      assert_object rri.main_object, ivars: [:@a]
    end

    it 'defaults instance variables to nil' do
      rri = interpret("@a")
      assert_object rri.current_value, is: rri.nil_object
    end
  end

  describe 'builtin methods' do
    describe 'Class' do
      it 'creates a new instance of the class, with no ivars, and initializes it' do
        rri = interpret 'class A
          def initialize(b)
            @b = b
          end
          def b
            @b
          end
        end
        A.new("hello").b'
        assert_object rri.current_value, class: rri.string_class, data: "hello"
      end
    end

    describe 'Object (technically Kernel ;)' do
      it 'has a puts method, which sends strings to the stdout, with a trailing newline' do
        rri = interpret('puts "abc"; puts "def\n"')
        expect(rri.stdout).to eq "abc\ndef\n"
        assert_object rri.current_value, is: rri.nil_object
      end
    end
  end
end
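# Editor's note (not part of the original repo): Interpreter.md leans on the
# parser gem's AST. For reference, this is roughly what the gem hands back for
# the "1+1" example discussed there:
#
#   require 'parser/ruby23'
#   ast = Parser::Ruby23.parse("1 + 1")
#   ast.type       # => :send
#   ast.children   # => [s(:int, 1), :+, s(:int, 1)]  -- receiver, method name, argument
#
# rri.rb walks these nodes in interpret_ast via a case on ast.type, which is
# why each node kind (:str, :begin, :nil, ...) gets its own branch there.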
fd1e40830900bb579e9f30f7d9c16cab1503420a
[ "Markdown", "Ruby" ]
5
Markdown
standsleeping/ruby_interpreter
3c89faf786a85c21c37e408d726a917a3bdb9f20
02bbbd113f15d98804446b0e72cd0a454d83459d
refs/heads/master
<file_sep><?php
header("Access-Control-Allow-Origin: *");

echo "<code>";
echo get_page($_GET['url']);
echo "</code>";
//echo get_page("http://www.google.com");

function get_page($url) {
    $ch = curl_init();

    curl_setopt($ch, CURLOPT_URL, $url);
    /*
    $proxy = 'http://proxy.company.com:8080';
    $proxyauth = 'domain\proxy_username:proxy_password';
    curl_setopt($ch, CURLOPT_PROXY, $proxy);
    curl_setopt($ch, CURLOPT_PROXYUSERPWD, $proxyauth);
    */
    curl_setopt($ch, CURLOPT_FOLLOWLOCATION, 1);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
    curl_setopt($ch, CURLOPT_HEADER, 0);

    $data = curl_exec($ch);
    curl_close($ch);

    // Rewrite root-relative src/action/href URLs against the target's base URL
    $base = preg_replace('#\/[^/]*$#', '', $url);
    $data = preg_replace('~(?:src|action|href)=[\'"]\K/(?!/)[^\'"]*~', "$base$0", $data);

    return $data;
    //return preg_replace("href\\s*=\\s*(?:[\"'](?<1>[^\"']*)[\"']|(?<1>\\S+))", "&lt;", $data);
}
?>
<file_sep># ecal-webproject-infomesh

"30 years of the web" website, built during the Summer University at swissnex SF.

### General
- Check the SSL certificate (Google blocks access from iPad Chrome)
- Move to the new URL (Infomaniak!)

### Interface/CSS
- (if time allows) Improve the jump from event to event: click on an event with a dynamic movement; the end does not loop back to the beginning
- (important) Regex for the "read more" URL
- (important, Tibor help!) The popup reduce doesn't work on iPhone/iPad
- Title color change is buggy
- Must not reload the iframe if it is the first one
- The timeline is already visible when a project is opened, but it should slide in with it
- The slider does not reload at the right spot on the timeline
- ~~Click anywhere to close the popup~~ (to improve) It always closes when a link is clicked

### Splashscreen
- Words too long on mobile

### Mobile
- Navigation menu.
- Navigation buttons are not active
- (important, Pietro) Splash screen intro: text length on mobile; end of sentence freezes
- Splashscreen: periods or no periods in the sentences?
- Splashscreen: source of the sentences?
- Transitions (remove on mobile)
- Check colors on black/white backgrounds (normally already done)
- (possibly) Add a message announcing that the mobile version is reduced?

### Popup on mobile
- Recreate the div (delete and recreate) or reset it: after scrolling and reopening a popup, the scroll position is already at the bottom

### Timeline
- An event at the start of the year jumps to the previous year!
- Highlight is not blue for events at the beginning
- Remove drag and drop on mobile
- Dynamically add start/end variables for the timeline duration.
- Handle events that fall outside the time range.
- Double-check the reflow
- Event does not highlight

### Text content / About page / information / timeline events
- Rewrite the about page text with Eryk.
- Replace the logos on the about page with the final vector versions.
- Check all the student project descriptions
- Check all the information in the timelines.
- Check the list of facts.

### Adapt student projects
#### worldwidemap
- The project no longer displays?

#### web landscape
- No more buildings at the end of the city!
- Description/source

#### web influencer identity
- Image loading? Images jump before displaying on mobile. On mobile, do we keep only the low-res version?
- Improve the composition of the ending

#### web dictionnary
- Choice of sources, description text

#### 30y_of_hacks.exe
- Description

#### web phenomena
- Check the list of popups
- Description

### HTACCESS
- Allow the /proxy.php page in the .htaccess
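
### Testing the proxy

A quick local check (host and port are assumptions — use whatever vhost serves this repo):

```bash
curl "http://localhost/proxy.php?url=https://www.example.com"
```

The script fetches the page with cURL, follows redirects, rewrites root-relative `src`/`href`/`action` URLs against the target's base URL, and echoes the result inside a `<code>` block.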
e07e9d355cb22ab7ad773512e3f9144135253fa6
[ "Markdown", "PHP" ]
2
PHP
lsdebianchi/infomesh
e42df0d021239399a46102fb49390f59343cd672
6f7aff36a97adc27c22e739ff93788b66f67bd42
refs/heads/master
<file_sep>var Engine = Matter.Engine,
    World = Matter.World,
    Bodies = Matter.Bodies;

var engine;
var world;
var particles = [];
var p;
var plinkos = [];

var cols = 11;
var rows = 10;

function setup() {
  var canvas = createCanvas(600, 800);
  engine = Engine.create();
  world = engine.world;
  newParticles();

  var spacing = width / cols;
  for (var i = 0; i < rows; i++) {
    for (var j = 0; j < cols; j++) {
      var x = i * spacing;
      if (j % 2 == 0) {
        x += spacing / 2;
      }
      var y = spacing + j * spacing;
      var p = new Plinko(x, y, 4);
      plinkos.push(p);
    }
  }
  console.log(plinkos);
}

function newParticles() {
  var p = new Particle(300, 0, 10);
  particles.push(p);
}

function draw() {
  if (frameCount % 60 == 0) {
    newParticles();
  }
  background(51);
  Engine.update(engine);
  for (var i = 0; i < particles.length; i++) {
    particles[i].show();
  }
  for (var i = 0; i < plinkos.length; i++) {
    plinkos[i].show();
  }
}
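
// Editor's note: sketch.js above constructs Particle and Plinko, but their
// class files are not part of this dump (the repo metadata lists only one
// file). A minimal sketch of what they are assumed to look like, matching
// the calls made above (matter.js bodies drawn with p5):

class Particle {
  constructor(x, y, r) {
    // dynamic circle body that falls and bounces off the pegs
    this.body = Bodies.circle(x, y, r, { restitution: 0.8, friction: 0 });
    this.r = r;
    World.add(world, this.body);
  }
  show() {
    var pos = this.body.position;
    fill(255);
    noStroke();
    ellipse(pos.x, pos.y, this.r * 2);
  }
}

class Plinko {
  constructor(x, y, r) {
    // static peg the particles collide with
    this.body = Bodies.circle(x, y, r, { isStatic: true });
    this.r = r;
    World.add(world, this.body);
  }
  show() {
    var pos = this.body.position;
    fill(127);
    noStroke();
    ellipse(pos.x, pos.y, this.r * 2);
  }
}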
647e722d56666afbbaae249f3838ffff44b2af69
[ "JavaScript" ]
1
JavaScript
Anay2805/Plinko-Part-1
0acaeadeb236d634ab28dd5eaeb06c16820f09d3
b0f4e22aaf32303551fc95c6cfd7350db6800741
refs/heads/master
<file_sep>module.exports = {
  siteTitle: 'Curriculum Vitae',
  siteDescription: `Curriculum Vitae`,
  keyWords: ['devops', 'react', 'nodejs', 'elasticsearch'],
  authorName: '<NAME>',
  twitterUsername: 'MikePolinowski',
  githubUsername: 'mpolinowski',
  authorAvatar: '/assets/images/angular_momentum.png',
  authorDescription: `As a Ms.Sc. in <strong>Chemistry & Physics</strong> I started out working in the field of environmental analysis as a laboratory technician.
  <br /> <br />
  The experience gained in PC administration, networking, <strong>web development</strong> and data analysis led to my first job abroad in the development department of a network surveillance company.
  <br /> <br />
  I am currently working as a <strong>LINUX administrator</strong> and in <strong>IoT</strong> product development for MQTT sensor networks.`,
  skills: [
    {
      name: 'Laboratory Technician',
      level: 80
    },
    {
      name: 'DevOps',
      level: 60
    },
    {
      name: 'Web Development',
      level: 65
    },
    {
      name: 'Product Development',
      level: 40
    },
    {
      name: 'Customer Service',
      level: 60
    },
    {
      name: 'Quality Assurance',
      level: 40
    },
    {
      name: 'Print Design',
      level: 40
    }
  ],
  jobs: [
    {
      company: "Waletech (INSTAR) Shenzhen, China",
      begin: {
        month: 'Mar',
        year: '2015'
      },
      duration: 'present',
      occupation: "Chief Technology Officer",
      description: "Expanding the target for INSTAR products to the smarthome sector by the integration of IoT standards."
    },
    {
      company: "INSTAR Deutschland GmbH Guangzhou, China",
      begin: {
        month: 'Apr',
        year: '2012'
      },
      duration: '3 yrs',
      occupation: "General Manager",
      description: "Leading a team in the development and maintenance of a customer online knowledge base (Wiki), based on Node.js and React.js with a CentOS and Elasticsearch backend. Integration of this online help platform into all in-house software products, as well as setting up internal training programs for the customer service team. New product development and quality assurance in cooperation with the production line."
    },
    {
      company: "INSTAR Deutschland GmbH Guangzhou, China",
      begin: {
        month: 'Mar',
        year: '2011'
      },
      duration: '1 yr',
      occupation: "Support Technician",
      description: "First and second tier technical support for end customers and on-site quality assurance for the production cycle. Creation of print and online documentation for customers and training material for new employees. Enabling the system integration into third-party software and hardware products."
    },
    {
      company: "University Cologne, Germany",
      begin: {
        month: 'Oct',
        year: '2004'
      },
      duration: '5 yrs',
      occupation: "Laboratory Technician",
      description: "Mass spectrometry and Terahertz spectroscopy for the detection of iron compounds in high energy and low pressure plasmas (Ms.Sc.) for the department of physics and astro science. Combined with a teaching position for laboratory courses for the department of inorganic chemistry."
    },
    {
      company: "University Wuppertal, Germany",
      begin: {
        month: 'Mar',
        year: '2002'
      },
      duration: '4 semester',
      occupation: "Scientific Assistant",
      description: "Development of new spectroscopic methods in atmospheric analysis for the department of physical chemistry."
    },
    {
      company: "University Wuppertal, Germany",
      begin: {
        month: 'Apr',
        year: '2001'
      },
      duration: '2 semester',
      occupation: "Scientific Assistant",
      description: "Teaching position for laboratory courses in inorganic chemistry and instrumental analysis."
    },
    /* ... */
  ],
  social: {
    twitter: "https://twitter.com/MikePolinowski",
    linkedin: "https://www.linkedin.com/in/mike-polinowski-6396ba121/",
    github: "https://github.com/mpolinowski",
    email: "<EMAIL>"
  },
  siteUrl: 'https://mpolinowski.github.io/curriculum-vitae/',
  pathPrefix: '/curriculum-vitae', // Note: it must *not* have a trailing slash.
  siteCover: '/assets/images/worldmap.png',
  background_color: '#24292e',
  theme_color: '#1a8f6e',
  display: 'minimal-ui',
  icon: 'src/assets/gatsby-icon.png',
  headerLinks: [
    {
      label: '<NAME>',
      url: '/',
    }
  ]
}<file_sep>---
date: "2019-06-17"
title: "Working with TMUX"
categories:
  - LINUX
---

![<NAME>, H<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Installing tmux](#installing-tmux)
- [Shell Splitting](#shell-splitting)
- [Working with Windows](#working-with-windows)
- [Detachment](#detachment)

<!-- /TOC -->

[tmux](http://tmux.sourceforge.net/) is a terminal multiplexer. It allows you to access a tmux terminal using multiple virtual terminals. tmux takes advantage of a client-server model, which allows you to attach terminals to a tmux session. This means that you can run several terminals at once concurrently off of a single tmux session without spawning any new actual terminal sessions. This also means that sudden disconnects from a cloud server running tmux will not kill the processes running inside the tmux session.

## Installing tmux

Before installing tmux, first update apt to ensure we have the latest packages. Then install tmux:

```bash
sudo apt-get update
sudo apt-get install tmux
```

## Shell Splitting

To begin a new tmux session:

```bash
tmux
```

As it is, you can treat this like a completely new terminal. In fact, it really is just a new terminal running from inside tmux. This new terminal is called a window in the context of tmux. Let's split our window into two panes.

tmux can be controlled using a prefix key (by default, Ctrl-b) and a command key. From within tmux, the command key to split into two panes is `%`. We can also split our second pane horizontally with `"`:

```
Ctrl-b %
Ctrl-b "
```

To switch to the next pane (panes are numbered left-to-right, top-down):

```
Ctrl-b o
```

Exiting out of a shell with `exit` will destroy the associated pane. Adjacent panes will expand to make use of the freed space. Closing the last pane will end tmux.

![TMUX](./TMUX_01.png)

## Working with Windows

One step up in context from panes are windows. Windows behave similarly to tabs in a browser. To create a new window:

```
Ctrl-b c
```

tmux will switch to the new window automatically. You can see the new window indicated in the status-line. Windows are numbered from 0, so our new window is number 1. Now you can create panes and treat this window like we did before. We can even create another window. Our three windows are numbered 0, 1, and 2.

To move to the next window in the index:

```
Ctrl-b n
```

To move backwards in the index:

```
Ctrl-b p
```

## Detachment

When you exit the last shell in the last pane in the last window of a session, the session will terminate. Instead of exiting from the session entirely, you can _detach_ from the session. A session will continue to run in the background, running whatever shells were on it when you detached. When you re-attach to the session later, you will return to the session in the same state it was when you detached. All windows and panes are retained by the session.

To detach from a session:

```bash
Ctrl-b d
```

To re-attach to a session:

```bash
tmux attach -t [session]
```

Sessions, like windows, are numbered beginning from 0. If you forgot which session you wanted to attach to, you can view active sessions:

```bash
tmux list-sessions
```

A list of all command keys is accessible with:

```bash
Ctrl-b ?
```<file_sep>---
date: "2017-08-21"
title: "create-react-app and Material-UI"
categories:
  - Javascript
  - React
  - Material-UI
---

![Harbin, China](./photo-33682907794_96afc3900c_o.png)

<!-- TOC -->

- [Table of Content](#table-of-content)
- [01 Installing Material-UI](#01-installing-material-ui)
- [02 Using Material-UI Components](#02-using-material-ui-components)
- [03 Final Design](#03-final-design)

<!-- /TOC -->

This is a practice run to add some [Material Design](https://material.io) to a react app, generated by the [create-react-app](https://github.com/facebookincubator/create-react-app) starter kit. So let's get started!

```
npm install -g create-react-app
create-react-app reactive-material
cd reactive-material
npm start
```

Then open http://localhost:3000/ to see your app. You don't need to install or configure tools like Webpack or Babel. They are preconfigured and hidden so that you can focus on the code. Just create a project, and you're good to go.

### Table of Content

- [01 Installing Material-UI](#01-installing-material-ui)
- [02 Using Material-UI Components](#02-using-material-ui-components)
- [03 Final Design](#03-final-design)

## 01 Installing Material-UI

Now that we made sure that our React App is up-and-running, let's *CTRL+C* the process and follow the [Installation Instructions](https://material-ui-1dab0.firebaseapp.com/getting-started/installation/) for Material-UI (v.1.0.0 beta):

To install and save in your package.json dependencies, run:

```
npm install material-ui@next --save
```

Next, [Typography](https://material-ui-1dab0.firebaseapp.com/style/typography/#general): The Roboto font, that Material-UI is optimized for, can be installed by:

```
npm install typeface-roboto --save
```

Remember, the Roboto font will not be automatically loaded by Material-UI - it needs to be imported in the entrypoint of your app: `import 'typeface-roboto'`!

Next, [Material Design Icons](http://google.github.io/material-design-icons/#icon-font-for-the-web). Install the icons using the npm package manager:

```
npm install material-design-icons --save
```

And last but not least - in order to use prebuilt SVG Material icons, such as those found in the [component demos](https://material-ui-1dab0.firebaseapp.com/component-demos), you have to install the material-ui-icons package:

```
npm install material-ui-icons --save
```

Now that you downloaded the more interesting part of the internet, let's start with React!

## 02 Using Material-UI Components

The basic usage is simple: e.g. adding a [ButtonAppBar](https://material-ui-1dab0.firebaseapp.com/demos/app-bar/#app-bar-with-buttons) - create a JavaScript file */src/ButtonAppBar.js* that is named after the component that you want to use. Copy the code from the Material-UI page and fine-tune the CSS and JSX, where needed:

```js
import React from 'react';
import PropTypes from 'prop-types';
import { withStyles } from 'material-ui/styles';
import AppBar from 'material-ui/AppBar';
import Toolbar from 'material-ui/Toolbar';
import Typography from 'material-ui/Typography';
import Button from 'material-ui/Button';
import IconButton from 'material-ui/IconButton';
import MenuIcon from 'material-ui-icons/Menu';

const styles = theme => ({
  root: {
    width: '100%',
    position: 'fixed',
    top: 0,
    left: 0,
    zIndex: 10,
  },
  flex: {
    flex: 1,
  },
  menuButton: {
    marginLeft: -12,
    marginRight: 20,
  },
});

function ButtonAppBar(props) {
  const classes = props.classes;
  return (
    <div className={classes.root}>
      <AppBar position="static">
        <Toolbar>
          <IconButton className={classes.menuButton} color="contrast" aria-label="Menu">
            <MenuIcon />
          </IconButton>
          <Typography type="title" color="inherit" className={classes.flex}>
            Reactive Material
          </Typography>
          <Button color="contrast">Material Button</Button>
        </Toolbar>
      </AppBar>
    </div>
  );
}

ButtonAppBar.propTypes = {
  classes: PropTypes.object.isRequired,
};

export default withStyles(styles)(ButtonAppBar);
```

Now go to */src/App.js*, import your component and use it inside the JSX code:

```js
import React, { Component } from 'react';
import styles from './app.css';
import 'typeface-roboto';
import ButtonAppBar from './ButtonAppBar'

class App extends Component {
  render() {
    return (
      <div>
        <header className={styles.header}>
          <ButtonAppBar />
        </header>
      </div>
    );
  }
}

export default App;
```

Restart your app with *npm start* and reload *localhost:3000* - voilà:

![](./01_mui.png)

As you can see from the examples, we added a few custom styles to our app. `/src/index.js` imports the complete `/src/index.css` file scoped globally, while our `/src/App.js` only imports app-specific styles (`import styles from './app.css'`) that are available through the className attribute (`<header className={styles.header}>`). You can use locally scoped [CSS Modules](https://github.com/css-modules/css-modules) for each component that you add to your app!

## 03 Final Design

This is not yet a production-ready design - but it will be a good starting point for future projects. I ended up removing the ButtonAppBar and replaced it with the [Responsive Drawer](https://material-ui-1dab0.firebaseapp.com/demos/drawers/#responsive-drawer) component. It gave me a bit of a headache that the Navbar included there was *not sticky*. But everything seems to work now - besides some necessary clean-up inside the JSX. All other components are now nested inside the Drawer component, which is in turn nested inside the main app component:

![](./02_mui.png)

Would you use create-react-app and Material-UI in a future project?
**Absolutely!**<file_sep># frozen_string_literal: true

require 'mysql2'
require File.expand_path(File.dirname(__FILE__) + "/base.rb")
require 'htmlentities'

class ImportScripts::VBulletin < ImportScripts::Base
  BATCH_SIZE = 1000
  DBPREFIX = ""
  ROOT_NODE = 2

  # CHANGE THESE BEFORE RUNNING THE IMPORTER
  DATABASE = "vb5"
  TIMEZONE = "Europe/Berlin"
  ATTACHMENT_DIR = '/vb5-attachments/'
  AVATAR_DIR = '/vb5-avatars/'

  def initialize
    super

    @old_username_to_new_usernames = {}

    @tz = TZInfo::Timezone.get(TIMEZONE)

    @htmlentities = HTMLEntities.new

    @client = Mysql2::Client.new(
      host: "localhost",
      username: "root",
      database: DATABASE,
      password: ""
    )
  end

  def execute
    import_groups
    import_users
    import_categories
    import_topics
    import_posts
    import_attachments

    close_topics
    post_process_posts
  end

  def import_groups
    puts "", "importing groups..."

    groups = mysql_query <<-SQL
        SELECT usergroupid, title
          FROM #{DBPREFIX}usergroup
      ORDER BY usergroupid
    SQL

    create_groups(groups) do |group|
      {
        id: group["usergroupid"],
        name: @htmlentities.decode(group["title"]).strip
      }
    end
  end

  def import_users
    puts "", "importing users"

    user_count = mysql_query("SELECT COUNT(userid) count FROM #{DBPREFIX}user").first["count"]

    batches(BATCH_SIZE) do |offset|
      users = mysql_query <<-SQL
          SELECT u.userid, u.username, u.homepage, u.usertitle, u.usergroupid, u.joindate, u.email,
                 CASE WHEN u.scheme='blowfish:10' THEN token
                      WHEN u.scheme='legacy' THEN REPLACE(token, ' ', ':')
                 END AS password,
                 IF(ug.title = 'Administrators', 1, 0) AS admin
            FROM #{DBPREFIX}user u
            LEFT JOIN #{DBPREFIX}usergroup ug ON ug.usergroupid = u.usergroupid
        ORDER BY userid
           LIMIT #{BATCH_SIZE}
          OFFSET #{offset}
      SQL

      break if users.size < 1

      # disabled line below, caused issues
      # next if all_records_exist? :users, users.map {|u| u["userid"].to_i}

      create_users(users, total: user_count, offset: offset) do |user|
        username = @htmlentities.decode(user["username"]).strip
        {
          id: user["userid"],
          name: username,
          username: username,
          email: user["email"].presence || fake_email,
          admin: user['admin'] == 1,
          password: user["<PASSWORD>"],
          website: user["homepage"].strip,
          title: @htmlentities.decode(user["usertitle"]).strip,
          primary_group_id: group_id_from_imported_group_id(user["usergroupid"]),
          created_at: parse_timestamp(user["joindate"])
          # post_create_action: proc do |u|
          #   @old_username_to_new_usernames[user["username"]] = u.username
          #   import_profile_picture(user, u)
          #   import_profile_background(user, u)
          # end
        }
      end
    end
  end

  def import_profile_picture(old_user, imported_user)
    query = mysql_query <<-SQL
        SELECT filedata, filename
          FROM #{DBPREFIX}customavatar
         WHERE userid = #{old_user["userid"]}
      ORDER BY dateline DESC
         LIMIT 1
    SQL

    picture = query.first

    return if picture.nil?

    if picture['filedata']
      file = Tempfile.new("profile-picture")
      file.write(picture["filedata"].encode("ASCII-8BIT").force_encoding("UTF-8"))
      file.rewind
      upload = UploadCreator.new(file, picture["filename"]).create_for(imported_user.id)
    else
      filename = File.join(AVATAR_DIR, picture['filename'])
      unless File.exists?(filename)
        puts "Avatar file doesn't exist: #{filename}"
        return nil
      end
      upload = create_upload(imported_user.id, filename, picture['filename'])
    end

    return if !upload.persisted?

    imported_user.create_user_avatar
    imported_user.user_avatar.update(custom_upload_id: upload.id)
    imported_user.update(uploaded_avatar_id: upload.id)
  ensure
    file.close rescue nil
    file.unlink rescue nil
  end

  def import_profile_background(old_user, imported_user)
    query = mysql_query <<-SQL
        SELECT filedata, filename
          FROM #{DBPREFIX}customprofilepic
         WHERE userid = #{old_user["userid"]}
      ORDER BY dateline DESC
         LIMIT 1
    SQL

    background = query.first

    return if background.nil?

    file = Tempfile.new("profile-background")
    file.write(background["filedata"].encode("ASCII-8BIT").force_encoding("UTF-8"))
    file.rewind

    upload = UploadCreator.new(file, background["filename"]).create_for(imported_user.id)

    return if !upload.persisted?

    imported_user.user_profile.upload_profile_background(upload)
  ensure
    file.close rescue nil
    file.unlink rescue nil
  end

  def import_categories
    puts "", "importing top level categories..."

    categories = mysql_query("SELECT nodeid AS forumid, title, description, displayorder, parentid FROM #{DBPREFIX}node WHERE parentid=#{ROOT_NODE} UNION SELECT nodeid, title, description, displayorder, parentid FROM #{DBPREFIX}node WHERE contenttypeid = 23 AND parentid IN (SELECT nodeid FROM #{DBPREFIX}node WHERE parentid=#{ROOT_NODE})").to_a

    top_level_categories = categories.select { |c| c["parentid"] == ROOT_NODE }

    create_categories(top_level_categories) do |category|
      {
        id: category["forumid"],
        name: @htmlentities.decode(category["title"]).strip,
        position: category["displayorder"],
        description: @htmlentities.decode(category["description"]).strip
      }
    end

    puts "", "importing child categories..."

    children_categories = categories.select { |c| c["parentid"] != ROOT_NODE }
    top_level_category_ids = Set.new(top_level_categories.map { |c| c["forumid"] })

    # cut down the tree to only 2 levels of categories
    children_categories.each do |cc|
      while !top_level_category_ids.include?(cc["parentid"])
        cc["parentid"] = categories.detect { |c| c["forumid"] == cc["parentid"] }["parentid"]
      end
    end

    create_categories(children_categories) do |category|
      {
        id: category["forumid"],
        name: @htmlentities.decode(category["title"]).strip,
        position: category["displayorder"],
        description: @htmlentities.decode(category["description"]).strip,
        parent_category_id: category_id_from_imported_category_id(category["parentid"])
      }
    end
  end

  def import_topics
    puts "", "importing topics..."

    # keep track of closed topics
    @closed_topic_ids = []

    topic_count = mysql_query("select count(nodeid) cnt from #{DBPREFIX}node where parentid in (
        select nodeid from #{DBPREFIX}node where contenttypeid=23 ) and contenttypeid=22;").first["cnt"]

    batches(BATCH_SIZE) do |offset|
      topics = mysql_query <<-SQL
        SELECT t.nodeid AS threadid, t.title, t.parentid AS forumid,t.open,t.userid AS postuserid,t.publishdate AS dateline,
            nv.count views, 1 AS visible, t.sticky,
            CONVERT(CAST(rawtext AS BINARY)USING utf8) AS raw
        FROM #{DBPREFIX}node t
        LEFT JOIN #{DBPREFIX}nodeview nv ON nv.nodeid=t.nodeid
        LEFT JOIN #{DBPREFIX}text txt ON txt.nodeid=t.nodeid
        WHERE t.parentid in ( select nodeid from #{DBPREFIX}node where contenttypeid=23 )
          AND t.contenttypeid = 22
        ORDER BY t.nodeid
           LIMIT #{BATCH_SIZE}
          OFFSET #{offset}
      SQL

      break if topics.size < 1

      # disabled line below, caused issues
      # next if all_records_exist? :posts, topics.map {|t| "thread-#{topic["threadid"]}" }

      create_posts(topics, total: topic_count, offset: offset) do |topic|
        raw = preprocess_post_raw(topic["raw"]) rescue nil
        next if raw.blank?
        topic_id = "thread-#{topic["threadid"]}"
        @closed_topic_ids << topic_id if topic["open"] == "0"
        t = {
          id: topic_id,
          user_id: user_id_from_imported_user_id(topic["postuserid"]) || Discourse::SYSTEM_USER_ID,
          title: @htmlentities.decode(topic["title"]).strip[0...255],
          category: category_id_from_imported_category_id(topic["forumid"]),
          raw: raw,
          created_at: parse_timestamp(topic["dateline"]),
          visible: topic["visible"].to_i == 1,
          views: topic["views"],
        }
        t[:pinned_at] = t[:created_at] if topic["sticky"].to_i == 1
        t
      end
    end
  end

  def import_posts
    puts "", "importing posts..."

    # make sure `firstpostid` is indexed
    begin
      mysql_query("CREATE INDEX firstpostid_index ON thread (firstpostid)")
    rescue
    end

    post_count = mysql_query("SELECT COUNT(nodeid) cnt FROM #{DBPREFIX}node WHERE parentid NOT IN (
        SELECT nodeid FROM #{DBPREFIX}node WHERE contenttypeid=23 ) AND contenttypeid=22;").first["cnt"]

    batches(BATCH_SIZE) do |offset|
      posts = mysql_query <<-SQL
        SELECT p.nodeid AS postid, p.userid AS userid, p.parentid AS threadid,
            CONVERT(CAST(rawtext AS BINARY)USING utf8) AS raw, p.publishdate AS dateline,
            1 AS visible, p.parentid AS parentid
        FROM #{DBPREFIX}node p
        LEFT JOIN #{DBPREFIX}nodeview nv ON nv.nodeid=p.nodeid
        LEFT JOIN #{DBPREFIX}text txt ON txt.nodeid=p.nodeid
        WHERE p.parentid NOT IN ( select nodeid from #{DBPREFIX}node where contenttypeid=23 )
          AND p.contenttypeid = 22
        ORDER BY postid
           LIMIT #{BATCH_SIZE}
          OFFSET #{offset}
      SQL

      break if posts.size < 1

      # disabled line below, caused issues
      # next if all_records_exist? :posts, posts.map {|p| p["postid"] }

      create_posts(posts, total: post_count, offset: offset) do |post|
        raw = preprocess_post_raw(post["raw"])
        next if raw.blank?
        next unless topic = topic_lookup_from_imported_post_id("thread-#{post["threadid"]}")
        p = {
          id: post["postid"],
          user_id: user_id_from_imported_user_id(post["userid"]) || Discourse::SYSTEM_USER_ID,
          topic_id: topic[:topic_id],
          raw: raw,
          created_at: parse_timestamp(post["dateline"]),
          hidden: post["visible"].to_i == 0,
        }
        if parent = topic_lookup_from_imported_post_id(post["parentid"])
          p[:reply_to_post_number] = parent[:post_number]
        end
        p
      end
    end
  end

  # find the uploaded file information from the db
  def find_upload(post, attachment_id)
    sql = "SELECT a.filedataid, a.filename, fd.userid, LENGTH(fd.filedata) AS dbsize, filedata
             FROM #{DBPREFIX}attach a
             LEFT JOIN #{DBPREFIX}filedata fd ON fd.filedataid = a.filedataid
            WHERE a.nodeid = #{attachment_id}"
    results = mysql_query(sql)

    unless (row = results.first)
      puts "Couldn't find attachment record for post.id = #{post.id}, import_id = #{post.custom_fields['import_id']}"
      return nil
    end

    filename = File.join(ATTACHMENT_DIR, row['userid'].to_s.split('').join('/'), "#{row['filedataid']}.attach")
    real_filename = row['filename']
    real_filename.prepend SecureRandom.hex if real_filename[0] == '.'

    unless File.exists?(filename)
      if row['dbsize'].to_i == 0
        puts "Attachment file #{row['filedataid']} doesn't exist"
        return nil
      end

      tmpfile = 'attach_' + row['filedataid'].to_s
      filename = File.join('/tmp/', tmpfile)
      File.open(filename, 'wb') { |f|
        #f.write(PG::Connection.unescape_bytea(row['filedata']))
        f.write(row['filedata'])
      }
    end

    upload = create_upload(post.user.id, filename, real_filename)

    if upload.nil? || !upload.valid?
      puts "Upload not valid :("
      puts upload.errors.inspect if upload
      return nil
    end

    return upload, real_filename
  rescue Mysql2::Error => e
    puts "SQL Error"
    puts e.message
    puts sql
    return nil
  end

  def import_attachments
    puts '', 'importing attachments...'

    current_count = 0
    total_count = mysql_query("SELECT COUNT(nodeid) cnt FROM #{DBPREFIX}node WHERE contenttypeid=22 ").first["cnt"]

    success_count = 0
    fail_count = 0

    attachment_regex = /\[attach[^\]]*\]n(\d+)\[\/attach\]/i

    Post.find_each do |post|
      current_count += 1
      print_status current_count, total_count

      new_raw = post.raw.dup
      new_raw.gsub!(attachment_regex) do |s|
        matches = attachment_regex.match(s)
        attachment_id = matches[1]

        upload, filename = find_upload(post, attachment_id)
        unless upload
          fail_count += 1
          next
        end

        html_for_upload(upload, filename)
      end

      if new_raw != post.raw
        PostRevisor.new(post).revise!(post.user, { raw: new_raw }, bypass_bump: true, edit_reason: 'Import attachments from vBulletin')
      end

      success_count += 1
    end
  end

  def close_topics
    puts "", "Closing topics..."

    sql = <<-SQL
      WITH closed_topic_ids AS (
        SELECT t.id AS topic_id
        FROM post_custom_fields pcf
        JOIN posts p ON p.id = pcf.post_id
        JOIN topics t ON t.id = p.topic_id
        WHERE pcf.name = 'import_id'
        AND pcf.value IN (?)
      )
      UPDATE topics
      SET closed = true
      WHERE id IN (SELECT topic_id FROM closed_topic_ids)
    SQL

    DB.exec(sql, @closed_topic_ids)
  end

  def post_process_posts
    puts "", "Postprocessing posts..."

    current = 0
    max = Post.count

    Post.find_each do |post|
      begin
        new_raw = postprocess_post_raw(post.raw)
        if new_raw != post.raw
          post.raw = new_raw
          post.save
        end
      rescue PrettyText::JavaScriptError
        nil
      ensure
        print_status(current += 1, max)
      end
    end
  end

  def preprocess_post_raw(raw)
    return "" if raw.blank?

    # decode HTML entities
    raw = @htmlentities.decode(raw)

    # fix whitespaces
    raw = raw.gsub(/(\\r)?\\n/, "\n")
             .gsub("\\t", "\t")

    # [HTML]...[/HTML]
    raw = raw.gsub(/\[html\]/i, "\n```html\n")
             .gsub(/\[\/html\]/i, "\n```\n")

    # [PHP]...[/PHP]
    raw = raw.gsub(/\[php\]/i, "\n```php\n")
             .gsub(/\[\/php\]/i, "\n```\n")

    # [HIGHLIGHT="..."]
    raw = raw.gsub(/\[highlight="?(\w+)"?\]/i) { "\n```#{$1.downcase}\n" }

    # [CODE]...[/CODE]
    # [HIGHLIGHT]...[/HIGHLIGHT]
    raw = raw.gsub(/\[\/?code\]/i, "\n```\n")
             .gsub(/\[\/?highlight\]/i, "\n```\n")

    # [SAMP]...[/SAMP]
    raw = raw.gsub(/\[\/?samp\]/i, "`")

    # replace all chevrons with HTML entities
    # NOTE: must be done
    #   - AFTER all the "code" processing
    #   - BEFORE the "quote" processing
    raw = raw.gsub(/`([^`]+)`/im) { "`" + $1.gsub("<", "\u2603") + "`" }
             .gsub("<", "&lt;")
             .gsub("\u2603", "<")

    raw = raw.gsub(/`([^`]+)`/im) { "`" + $1.gsub(">", "\u2603") + "`" }
             .gsub(">", "&gt;")
             .gsub("\u2603", ">")

    # [URL=...]...[/URL]
    raw.gsub!(/\[url="?(.+?)"?\](.+?)\[\/url\]/i) { "<a href=\"#{$1}\">#{$2}</a>" }

    # [URL]...[/URL]
    # [MP3]...[/MP3]
    raw = raw.gsub(/\[\/?url\]/i, "")
             .gsub(/\[\/?mp3\]/i, "")

    # [MENTION]<username>[/MENTION]
    raw = raw.gsub(/\[mention\](.+?)\[\/mention\]/i) do
      old_username = $1
      if @old_username_to_new_usernames.has_key?(old_username)
        old_username = @old_username_to_new_usernames[old_username]
      end
      "@#{old_username}"
    end

    # [USER=<user_id>]<username>[/USER]
    raw = raw.gsub(/\[user="?(\d+)"?\](.+?)\[\/user\]/i) do
      user_id, old_username = $1, $2
      if @old_username_to_new_usernames.has_key?(old_username)
        new_username = @old_username_to_new_usernames[old_username]
      else
        new_username = old_username
      end
      "@#{new_username}"
    end

    # [FONT=blah] and [COLOR=blah]
    # no idea why the /i is not matching case insensitive..
    raw.gsub! /\[color=.*?\](.*?)\[\/color\]/im, '\1'
    raw.gsub! /\[COLOR=.*?\](.*?)\[\/COLOR\]/im, '\1'
    raw.gsub! /\[font=.*?\](.*?)\[\/font\]/im, '\1'
    raw.gsub! /\[FONT=.*?\](.*?)\[\/FONT\]/im, '\1'

    # [CENTER]...[/CENTER]
    raw.gsub! /\[CENTER\](.*?)\[\/CENTER\]/im, '\1'

    # fix LIST
    raw.gsub! /\[LIST\](.*?)\[\/LIST\]/im, '<ul>\1</ul>'
    raw.gsub! /\[\*\]/im, '<li>'

    # [QUOTE]...[/QUOTE]
    raw = raw.gsub(/\[quote\](.+?)\[\/quote\]/im) { "\n> #{$1}\n" }

    # [QUOTE=<username>]...[/QUOTE]
    raw = raw.gsub(/\[quote=([^;\]]+)\](.+?)\[\/quote\]/im) do
      old_username, quote = $1, $2
      if @old_username_to_new_usernames.has_key?(old_username)
        old_username = @old_username_to_new_usernames[old_username]
      end
      "\n[quote=\"#{old_username}\"]\n#{quote}\n[/quote]\n"
    end

    # [YOUTUBE]<id>[/YOUTUBE]
    raw = raw.gsub(/\[youtube\](.+?)\[\/youtube\]/i) { "\n//youtu.be/#{$1}\n" }

    # [VIDEO=youtube;<id>]...[/VIDEO]
    raw = raw.gsub(/\[video=youtube;([^\]]+)\].*?\[\/video\]/i) { "\n//youtu.be/#{$1}\n" }

    raw
  end

  def postprocess_post_raw(raw)
    # [QUOTE=<username>;<post_id>]...[/QUOTE]
    raw = raw.gsub(/\[quote=([^;]+);n(\d+)\](.+?)\[\/quote\]/im) do
      old_username, post_id, quote = $1, $2, $3

      if @old_username_to_new_usernames.has_key?(old_username)
        old_username = @old_username_to_new_usernames[old_username]
      end

      if topic_lookup = topic_lookup_from_imported_post_id(post_id)
        post_number = topic_lookup[:post_number]
        topic_id = topic_lookup[:topic_id]
        "\n[quote=\"#{old_username},post:#{post_number},topic:#{topic_id}\"]\n#{quote}\n[/quote]\n"
      else
        "\n[quote=\"#{old_username}\"]\n#{quote}\n[/quote]\n"
      end
    end

    # remove attachments
    raw = raw.gsub(/\[attach[^\]]*\]\d+\[\/attach\]/i, "")

    # [THREAD]<thread_id>[/THREAD]
    # ==> http://my.discourse.org/t/slug/<topic_id>
    raw = raw.gsub(/\[thread\](\d+)\[\/thread\]/i) do
      thread_id = $1
      if topic_lookup = topic_lookup_from_imported_post_id("thread-#{thread_id}")
        topic_lookup[:url]
      else
        $&
      end
    end

    # [THREAD=<thread_id>]...[/THREAD]
    # ==> [...](http://my.discourse.org/t/slug/<topic_id>)
    raw = raw.gsub(/\[thread=(\d+)\](.+?)\[\/thread\]/i) do
      thread_id, link = $1, $2
      if topic_lookup = topic_lookup_from_imported_post_id("thread-#{thread_id}")
        url = topic_lookup[:url]
        "[#{link}](#{url})"
      else
        $&
      end
    end

    # [POST]<post_id>[/POST]
    # ==> http://my.discourse.org/t/slug/<topic_id>/<post_number>
    raw = raw.gsub(/\[post\](\d+)\[\/post\]/i) do
      post_id = $1
      if topic_lookup = topic_lookup_from_imported_post_id(post_id)
        topic_lookup[:url]
      else
        $&
      end
    end

    # [POST=<post_id>]...[/POST]
    # ==> [...](http://my.discourse.org/t/<topic_slug>/<topic_id>/<post_number>)
    raw = raw.gsub(/\[post=(\d+)\](.+?)\[\/post\]/i) do
      post_id, link = $1, $2
      if topic_lookup = topic_lookup_from_imported_post_id(post_id)
        url = topic_lookup[:url]
        "[#{link}](#{url})"
      else
        $&
      end
    end

    raw
  end

  def parse_timestamp(timestamp)
    Time.zone.at(@tz.utc_to_local(timestamp))
  end

  def mysql_query(sql)
    @client.query(sql, cache_rows: false)
  end
end

ImportScripts::VBulletin.new.perform<file_sep>---
date: "2019-01-05"
title: "Zigbee Sensors in FHEM"
categories:
  - LINUX
  - IoT
  - MQTT
  - Smarthome
---

import GifContainer from "../../src/components/ImageContainer";

![Hongkong](./photo-19194765263_69ad0f_o.png)

<!-- TOC -->

- [ZigBee Communication without a proprietary Hub](#zigbee-communication-without-a-proprietary-hub)
- [Setting up the bridge](#setting-up-the-bridge)
- [Running the bridge](#running-the-bridge)
- [Determine location of CC2531 USB sniffer and checking user permissions](#determine-location-of-cc2531-usb-sniffer-and-checking-user-permissions)
- [Installation](#installation)
- [Configuration](#configuration)
- [Running ZigBee2MQTT](#running-zigbee2mqtt)
- [Running ZigBee2MQTT as a Daemon](#running-zigbee2mqtt-as-a-daemon)
- [Update zigbee2mqtt to the latest version](#update-zigbee2mqtt-to-the-latest-version)
- [Pairing Devices](#pairing-devices)
- [Adding
our Sensors to FHEM](#adding-our-sensors-to-fhem)
- [Adding SQL Logging](#adding-sql-logging)
- [Installing MariaDB on our Raspberry Pi](#installing-mariadb-on-our-raspberry-pi)
- [Configuring MariaDB for FHEM](#configuring-mariadb-for-fhem)
- [Log Filter](#log-filter)
- [Average / Reduce](#average--reduce)
- [Compare with Data from Web-APIs](#compare-with-data-from-web-apis)
- [Visualize Logs](#visualize-logs)
- [Add the MQTT input to an existing Flow](#add-the-mqtt-input-to-an-existing-flow)

<!-- /TOC -->

## ZigBee Communication without a proprietary Hub

Zigbee is a specification for a suite of high-level communication protocols used to create personal area networks with small, low-power digital radios - for home automation, medical device data collection, and other low-power, low-bandwidth needs. It is designed for small-scale projects that need a wireless connection; hence, Zigbee is a low-power, low data rate, close proximity wireless ad hoc network.

The [Zigbee2MQTT](https://github.com/Koenkk/zigbee2mqtt) bridge enables you to use selected [supported ZigBee devices](https://www.zigbee2mqtt.io/information/supported_devices.html) __without the otherwise mandatory Hub and Vendor Cloud__ connection. Among the devices that you can control are smart (ZigBee) light bulbs from Belkin, IKEA, OSRAM and Philips as well as smart sensors from SmartThings and Xiaomi.

The ZigBee bridge consists of a __ZigBee Sniffer Module__ and controller software written in __Node.js__. It bridges events and allows you to control your Zigbee devices via __MQTT__. In this way you can integrate your Zigbee devices with whatever smart home infrastructure you are using.

[Needed Hardware](https://www.zigbee2mqtt.io/getting_started/what_do_i_need.html):

* CC debugger
* CC2531 USB sniffer
* Downloader cable CC2531
* PC or Raspberry Pi to run the bridge

The debugger, sniffer and connecting cable can be ordered directly through [online shops like AliExpress](https://community.home-assistant.io/t/zigbee2mqtt-getting-rid-of-your-proprietary-zigbee-bridges-xiaomi-hue-tradfri/52108/193). The debugger and cable are only used once, to flash the firmware onto the stick. There is an alternative way to do that [using an Arduino board instead](https://www.zigbee2mqtt.io/information/alternative_flashing_methods.html).

### Setting up the bridge

1. Install the [SmartRF Flash programmer](http://www.ti.com/tool/FLASH-PROGRAMMER) (__NOT V2__). This software is free but requires a Texas Instruments account in order to download.

---

![Zigbee2MQTT](./flash_00.png)

---

2. Install the [CC debugger driver](http://www.ti.com/general/docs/lit/getliterature.tsp?baseLiteratureNumber=swrc212&fileType=zip) on your PC (Windows only). Before continuing, verify that the CC Debugger driver has been installed correctly. In case the CC Debugger is not recognized correctly, [install the driver manually](https://www.youtube.com/watch?v=jyKrxxXOvQY).

---

![Zigbee2MQTT](./driver_00.png)

---

3. Connect `CC debugger` --> `Downloader cable CC2531` --> `CC2531 USB sniffer`.

---

![Zigbee2MQTT](./ccdebugger_00.png)

---

4. Connect __BOTH__ the `CC2531 USB sniffer` and the `CC debugger` to your PC using USB.

5. If the light on the CC debugger is RED, press the reset button on the CC debugger. The light on the CC debugger should now turn GREEN. If not, use the [CC debugger user guide](http://www.ti.com/lit/ug/swru197h/swru197h.pdf) to troubleshoot your problem.

6. Download and unzip the firmware [CC2531ZNP-Prod.hex](https://github.com/Koenkk/Z-Stack-firmware/tree/master/coordinator/default/CC2531).
7. Start the SmartRF Flash Programmer, set it up as shown below and press `Perform actions`.

---

![Zigbee2MQTT](./flash_01.png)

![Zigbee2MQTT](./flash_02.png)

---

### Running the bridge

After flashing your CC2531 USB sniffer with the CC2531ZNP-Prod.hex firmware, we can remove the CC debugger and connecting cable and plug the sniffer into our Linux computer. We use a __Raspberry Pi 3__ with __Raspbian Stretch Lite__, but any Linux machine will work.

#### Determine location of CC2531 USB sniffer and checking user permissions

We first need to determine the location of the CC2531 USB sniffer. Connect the CC2531 USB to your Raspberry Pi. Most of the time the location of the CC2531 is `/dev/ttyACM0`. This can be verified by:

```bash
ls -l /dev/ttyACM0

crw-rw---- 1 root dialout 166, 0 Apr 21 13:14 /dev/ttyACM0  # <-- CC2531 on /dev/ttyACM0
```

---

![Zigbee2MQTT](./zigbee2mqtt_01.png)

---

As an alternative, the device can also be mapped by an ID. This can be handy if you have multiple serial devices connected to your Pi. In the example below the device location is: `/dev/serial/by-id/usb-Texas_Instruments_TI_CC2531_USB_CDC___0X00124B0018ED113B-if00`

```bash
ls -l /dev/serial/by-id

total 0
lrwxrwxrwx 1 root root 13 Apr 21 13:14 usb-Texas_Instruments_TI_CC2531_USB_CDC___0X00124B0018ED113B-if00 -> ../../ttyACM0
```

---

![Zigbee2MQTT](./zigbee2mqtt_02.png)

---

#### Installation

To use ZigBee2MQTT we first need to install Node.js and Git:

```bash
sudo apt update
sudo apt install git nodejs npm
```

You can confirm that you have installed Git and Node correctly by running the following commands:

```bash
git --version
node -v
```

---

![Zigbee2MQTT](./zigbee2mqtt_03.png)

---

Then clone [ZigBee2MQTT from Github](https://github.com/Koenkk/zigbee2mqtt) and change the owner of the `/opt/zigbee2mqtt` directory to your default Linux user - in our case this is `pi`:

```bash
sudo git clone https://github.com/Koenkk/zigbee2mqtt.git /opt/zigbee2mqtt
sudo chown -R pi:pi /opt/zigbee2mqtt
```

Then change into the directory and use `npm` to install all dependencies:

```bash
cd /opt/zigbee2mqtt
npm install
```

---

![Zigbee2MQTT](./zigbee2mqtt_04.png)

---

#### Configuration

Before we can start zigbee2mqtt we need to edit the configuration.yaml file. This file contains the configuration which will be used by zigbee2mqtt. Open the configuration file:

```bash
nano /opt/zigbee2mqtt/data/configuration.yaml
```

---

![Zigbee2MQTT](./zigbee2mqtt_05.png)

---

I am going to use a local Mosquitto MQTT server running on an INSTAR Full HD Camera on _192.168.2.115_. Alternatively, you can install a [local Mosquitto Server](https://wiki.instar.com/Software/Linux/Node-RED/#installing-mosquitto) on your Raspberry Pi and add it via `server: 'mqtt://localhost'` in the config file.

```yaml
# Home Assistant integration (MQTT discovery)
homeassistant: false

# allow new devices to join
permit_join: true

# MQTT settings
mqtt:
  # MQTT base topic for zigbee2mqtt MQTT messages
  base_topic: zigbee2mqtt
  # MQTT server URL
  server: 'mqtt://192.168.2.115'
  # MQTT server authentication, uncomment if required:
  # user: my_user
  # password: <PASSWORD>

# Serial settings
serial:
  # Location of CC2531 USB sniffer
  port: /dev/ttyACM0
```

I am not sure if Zigbee2MQTT offers TLS support - I will have to look into that.
Also, I deactivated the user authentication on my MQTT server for now to have an easier time debugging - user name and password have to be added here before reactivating it. Make sure that `permit_join` is set to `true`, to allow new devices to be added. It can be set to `false` once all your Zigbee devices are integrated. For the serial port you can use the [location of CC2531 USB sniffer](#determine-location-of-cc2531-usb-sniffer-and-checking-user-permissions) that we determined to be either `/dev/ttyACM0` or `/dev/serial/by-id/usb-Texas_Instruments_TI_CC2531_USB_CDC___0X00124B0018ED113B-if00` - both can be used interchangeably (the location might differ in your case!). #### Running ZigBee2MQTT Now that we have setup everything correctly we can start zigbee2mqtt directly from our console to see if there are any error messages: ```bash cd /opt/zigbee2mqtt npm start ``` You should see the status LED on your CC2531 light up green and hopefully a friendly console log like this: --- ![Zigbee2MQTT](./zigbee2mqtt_06.png) --- You can see that the service is running, connected to the local MQTT server and published a message to `zigbee2mqtt/bridge/state` with the payload `online`. We can use the program [MQTT Explorer](https://github.com/thomasnordquist/MQTT-Explorer) to verify that the message was received by our Mosquitto server: --- ![Zigbee2MQTT](./zigbee2mqtt_07.png) --- Looks like we were successful! #### Running ZigBee2MQTT as a Daemon To run zigbee2mqtt as a service and start it automatically on boot we will run zigbee2mqtt with `systemctl`: ```bash sudo nano /etc/systemd/system/zigbee2mqtt.service ``` Add the following to this file to automatically run the `index.js` file from the `zigbee2mqtt` bridge inside the Node runtime: ``` [Unit] Description=zigbee2mqtt After=network.target [Service] ExecStart=/usr/bin/node /opt/zigbee2mqtt/index.js StandardOutput=syslog StandardError=syslog SyslogIdentifier=zigbee2mqtt Restart=always # Restart service after 10 seconds if node service crashes RestartSec=10 [Install] WantedBy=multi-user.target ``` --- ![Zigbee2MQTT](./zigbee2mqtt_08.png) --- Save the file, exit and verify that the configuration works: ```bash sudo systemctl daemon-reload sudo systemctl start zigbee2mqtt systemctl status zigbee2mqtt.service ``` --- ![Zigbee2MQTT](./zigbee2mqtt_09.png) --- Now that everything works, we want `systemctl` to start `zigbee2mqtt` automatically on boot, this can be done by executing: ``` sudo systemctl enable zigbee2mqtt.service ``` --- ![Zigbee2MQTT](./zigbee2mqtt_10.png) --- To stop, restart or check the log of our service run the following commands: ```bash # Stopping zigbee2mqtt sudo systemctl stop zigbee2mqtt # Starting zigbee2mqtt sudo systemctl start zigbee2mqtt # View the log of zigbee2mqtt sudo journalctl -u zigbee2mqtt.service -f ``` #### Update zigbee2mqtt to the latest version ```bash # Stop zigbee2mqtt and go to directory sudo systemctl stop zigbee2mqtt cd /opt/zigbee2mqtt # Backup configuration cp -R data data-backup # Update git checkout HEAD -- npm-shrinkwrap.json git pull rm -rf node_modules npm install # Restore configuration cp -R data-backup/* data rm -rf data-backup # Start zigbee2mqtt sudo systemctl start zigbee2mqtt ``` ### Pairing Devices The [number of devices that you can use with the zigbee2mqtt bridge](https://www.zigbee2mqtt.io/information/supported_devices.html) is steadily growing (there are alternative [pairing instructions available here](https://www.zigbee2mqtt.io/getting_started/pairing_devices.html)). 
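Before pairing, it can help to watch everything the bridge publishes - including the pairing events on `zigbee2mqtt/bridge/log` - from the command line. A minimal sketch, assuming the `mosquitto-clients` package and the MQTT server configured above:

```bash
# install the Mosquitto command line clients
sudo apt install mosquitto-clients

# print every message below the configured base topic (-v also shows the topic)
mosquitto_sub -h 192.168.2.115 -t "zigbee2mqtt/#" -v
```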
In this tutorial we are going to add 3 [XiaoMi Mi Home](https://xiaomi-mi.com/sockets-and-sensors/) sensors - a [Temperature & Humidity Sensor](https://xiaomi-mi.com/sockets-and-sensors/xiaomi-mi-temperature-humidity-sensor/), a [Wireless Switch](https://xiaomi-mi.com/sockets-and-sensors/xiaomi-mi-wireless-switch/) and a [Home Occupancy Sensor](https://xiaomi-mi.com/sockets-and-sensors/xiaomi-mi-occupancy-sensor/). __IMPORTANT__: Before you start, make sure that [permit_join: true](#configuration) is set in your `configuration.yaml`. Otherwise new devices cannot join the network! Also verify that the [ZigBee2MQTT Bridge is running](#running-zigbee2mqtt-as-a-daemon). We first start with the __Temperature & Humidity Sensor__ that has a pairing button on its side. Press the button for about 5 seconds - the __blue LED will start to blink__: in my case it started to blink slowly in 3-time sequences. After a few seconds the blinking sped up and stopped and the device was paired with the ZigBee Bridge. The pairing can be found inside a log file in `/opt/zigbee2mqtt/data/log/`: ```yaml 4/22/2019, 3:34:17 PM - info: New device 'undefined' with address 0x00158d00023a21c8 connected! 4/22/2019, 3:34:17 PM - info: MQTT publish: topic 'zigbee2mqtt/bridge/log', payload '{"type":"device_connected","message":"0x00158d00023a21c8","meta":{}}' 4/22/2019, 3:34:17 PM - warn: Device with modelID 'undefined' is not supported. 4/22/2019, 3:34:17 PM - warn: Please see: https://www.zigbee2mqtt.io/how_tos/how_to_support_new_devices.html 4/22/2019, 3:34:17 PM - info: MQTT publish: topic 'zigbee2mqtt/0x00158d00023a21c8', payload '{"temperature":29.63,"linkquality":134}' 4/22/2019, 3:34:17 PM - info: MQTT publish: topic 'zigbee2mqtt/0x00158d00023a21c8', payload '{"temperature":29.63,"linkquality":134,"humidity":60.12}' 4/22/2019, 3:35:05 PM - info: MQTT publish: topic 'zigbee2mqtt/0x00158d00023a21c8', payload '{"temperature":29.12,"linkquality":134,"humidity":60.12}' 4/22/2019, 3:35:05 PM - info: MQTT publish: topic 'zigbee2mqtt/0x00158d00023a21c8', payload '{"temperature":29.12,"linkquality":134,"humidity":57.13}' 4/22/2019, 3:35:08 PM - info: Device incoming... 4/22/2019, 3:35:08 PM - info: MQTT publish: topic 'zigbee2mqtt/bridge/log', payload '{"type":"pairing","message":"device incoming"}' ``` Shortly afterwards the sensor data started coming in: --- ![Zigbee2MQTT](./zigbee2mqtt_11.png) --- So far so good. But the __device ID__ was a bit hard to read and memorize: `0x00158d00023a21c8`. Luckily this can be adjusted in `/opt/zigbee2mqtt/data/configuration.yaml`: --- ![Zigbee2MQTT](./zigbee2mqtt_12.png) --- All paired devices will be listed at the bottom of the configuration file - just change the `friendly_name` to something human-readable. Remember that this will also change the MQTT topic the Zigbee bridge is using to publish the sensor readouts: ```yaml info: MQTT publish, topic: 'zigbee2mqtt/temp_hum_sensor', payload: '{"temperature":26.33,"linkquality":78,"humidity":54.32,"battery":99,"voltage":3005}' ``` The topic changed from `zigbee2mqtt/0x00158d00023a21c8` to `zigbee2mqtt/temp_hum_sensor`! __Note__: I thought I remembered that this worked without restarting the service - but this time it was necessary to do that ¯\\_(ツ)_\/¯ But pairing both the __Wireless Switch__ and __Home Occupancy Sensor__ turned out to be more difficult. Holding the pairing button - that here could only be reached with a metal pin - gave the same pattern. The __blue LED started flashing__ - first slowly, then fast. 
But I kept getting timeouts for both devices - that identified as `0x00158d00023d229c` and `0x00158d00020b3dff` - for the pairing process inside the `/opt/zigbee2mqtt/data/log/` ```yaml error: Cannot get the Node Descriptor of the Device: 0x00158d00023d229c (Error: Timed out after 10000 ms) error: Cannot get the Node Descriptor of the Device: 0x00158d00020b3dff (Error: Timed out after 10000 ms) ``` You can [find an instruction](https://www.zigbee2mqtt.io/getting_started/pairing_devices.html) that you need to press and hold the pairing button for 5 seconds (until the blue LED starts flashing), then release and keep pressing the button repeatedly (1s interval) afterwards - which feels a little bit counter-intuitive. But it actually works. Checking the log file shows that a __Occupancy Sensor__ with the ID `0x00158d00020b3dff` was connected: ```yaml 4/22/2019, 3:55:03 PM - warn: Device with modelID 'undefined' is not supported. 4/22/2019, 3:55:03 PM - warn: Please see: https://www.zigbee2mqtt.io/how_tos/how_to_support_new_devices.html 4/22/2019, 3:55:41 PM - info: Device incoming... 4/22/2019, 3:55:41 PM - info: MQTT publish: topic 'zigbee2mqtt/bridge/log', payload '{"type":"pairing","message":"device incoming"}' 4/22/2019, 3:55:49 PM - info: MQTT publish: topic 'zigbee2mqtt/0x00158d00020b3dff', payload '{"occupancy":true,"linkquality":110}' ``` Repeating the same for the __Wireless Switch__ and a new device with ID `0x00158d00023d229c` showed up - excellent! ```yaml 4/22/2019, 3:50:57 PM - info: New device 'undefined' with address 0x00158d00023d229c connected! 4/22/2019, 3:50:57 PM - info: MQTT publish: topic 'zigbee2mqtt/bridge/log', payload '{"type":"device_connected","message":"0x00158d00023d229c","meta":{}}' 4/22/2019, 3:50:57 PM - warn: Device with modelID 'undefined' is not supported. 4/22/2019, 3:50:57 PM - warn: Please see: https://www.zigbee2mqtt.io/how_tos/how_to_support_new_devices.html 4/22/2019, 3:50:58 PM - warn: Device with modelID 'undefined' is not supported. 
4/22/2019, 3:50:58 PM - warn: Please see: https://www.zigbee2mqtt.io/how_tos/how_to_support_new_devices.html 4/22/2019, 3:51:28 PM - info: MQTT publish: topic 'zigbee2mqtt/0x00158d00023d229c', payload '{"click":"single","linkquality":102}' ``` Again, we can adjust that a human-readable name is displayed instead of the ID in `/opt/zigbee2mqtt/data/configuration.yaml`: ```yaml devices: '0x00158d00023a21c8': friendly_name: 'temp_hum_sensor' retain: false '0x00158d00023d229c': friendly_name: 'smart_button' retain: false '0x00158d00020b3dff': friendly_name: 'motion_pir_sensor' retain: false ``` The new names will now be used for the corresponding MQTT topics: ```yaml 4/22/2019, 3:55:49 PM - info: MQTT publish: topic 'zigbee2mqtt/motion_pir_sensor', payload '{"occupancy":true,"linkquality":110}' 4/22/2019, 3:57:19 PM - info: MQTT publish: topic 'zigbee2mqtt/motion_pir_sensor', payload '{"occupancy":false,"linkquality":110}' 4/22/2019, 3:59:11 PM - info: MQTT publish: topic 'zigbee2mqtt/temp_hum_sensor', payload '{"temperature":26.55,"linkquality":141,"humidity":60.77}' 4/22/2019, 3:59:12 PM - info: MQTT publish: topic 'zigbee2mqtt/temp_hum_sensor', payload '{"temperature":26.55,"linkquality":141,"humidity":54.63}' 4/22/2019, 4:00:10 PM - info: MQTT publish: topic 'zigbee2mqtt/smart_button', payload '{"linkquality":110,"duration":835,"click":"single"}' 4/22/2019, 4:00:17 PM - info: MQTT publish: topic 'zigbee2mqtt/smart_button', payload '{"linkquality":102,"duration":835,"click":"double"}' 4/22/2019, 4:00:19 PM - info: MQTT publish: topic 'zigbee2mqtt/smart_button', payload '{"linkquality":99,"duration":835,"click":"triple"}' 4/22/2019, 4:00:20 PM - info: MQTT publish: topic 'zigbee2mqtt/smart_button', payload '{"linkquality":102,"duration":835,"click":"quadruple"}' 4/22/2019, 4:00:23 PM - info: MQTT publish: topic 'zigbee2mqtt/smart_button', payload '{"linkquality":99,"duration":835,"click":"long"}' 4/22/2019, 4:00:24 PM - info: MQTT publish: topic 'zigbee2mqtt/smart_button', payload '{"linkquality":99,"duration":1094,"click":"long_release"}' 4/22/2019, 4:00:30 PM - info: MQTT publish: topic 'zigbee2mqtt/motion_pir_sensor', payload '{"occupancy":true,"linkquality":97}' 4/22/2019, 4:00:30 PM - info: MQTT publish: topic 'zigbee2mqtt/temp_hum_sensor', payload '{"temperature":27.73,"linkquality":141,"humidity":54.63}' 4/22/2019, 4:00:30 PM - info: MQTT publish: topic 'zigbee2mqtt/temp_hum_sensor', payload '{"temperature":27.73,"linkquality":141,"humidity":69.32}' ``` Note that the __Occupancy Sensor__ sends a `true` or `false` payload depending on if motion was detected or not. The __Temperature & Humidity Sensor__ just sends number values for temperature and humidity. And the __Wireless Button__ sends click events and can discern between `single`, `double`, `triple`, `quadruple` and `long / long_release` clicks. 
---

![Zigbee2MQTT](./zigbee2mqtt_13.png)

---

## Adding our Sensors to FHEM

First we need to add a [new module](https://github.com/oskarn97/fhem-xiaomi-mqtt) to FHEM that allows us to communicate with our Zigbee2MQTT Bridge:

```bash
update add https://raw.githubusercontent.com/oskarn97/fhem-xiaomi-mqtt/master/controls_xiaomi-zb2mqtt.txt
update check xiaomi-zb2mqtt
update all xiaomi-zb2mqtt
shutdown restart
```

---

![Zigbee2MQTT](./zigbee2mqtt_14.png)

---

Once FHEM is back up we can define the bridge module that will handle the communication for us:

```
define ZigbeeBridge XiaomiMQTTDevice bridge
```

Just a few seconds later we already start seeing readings coming in:

---

![Zigbee2MQTT](./zigbee2mqtt_15.png)

---

```json
{
  "type": "devices",
  "message": [{
    "ieeeAddr": "0x00124b0018ed113b",
    "type": "Coordinator"
  }, {
    "ieeeAddr": "0x00158d00023a21c8",
    "type": "EndDevice",
    "model": "WSDCGQ01LM",
    "friendly_name": "temp_hum_sensor",
    "nwkAddr": 29097,
    "manufId": 4151,
    "manufName": "LUMI",
    "powerSource": "Battery",
    "modelId": "lumi.sens"
  }, {
    "ieeeAddr": "0x00158d00023d229c",
    "type": "EndDevice",
    "model": "WXKG01LM",
    "friendly_name": "smart_button",
    "nwkAddr": 40807,
    "manufId": 4151,
    "manufName": "LUMI",
    "powerSource": "Battery",
    "modelId": "lumi.sensor_switch"
  }, {
    "ieeeAddr": "0x00158d00020b3dff",
    "type": "EndDevice",
    "model": "RTCGQ01LM",
    "friendly_name": "motion_pir_sensor",
    "nwkAddr": 2091,
    "manufId": 4151,
    "manufName": "LUMI",
    "powerSource": "Battery",
    "modelId": "lumi.sensor_motion"
  }]
}
```

Our sensors have been recognized through the FHEM connector - nice! And we have a new _Room_ in our list called __XiaomiMQTTDevice__ holding all our sensors:

---

![Zigbee2MQTT](./zigbee2mqtt_16.png)

---

## Adding SQL Logging

### Installing MariaDB on our Raspberry Pi

We will start by installing the MariaDB (_MySQL_) client and server. Additionally, we need two Perl packages to be able to connect FHEM to the DB client:

```bash
sudo apt-get update
sudo apt-get install mysql-server mysql-client
sudo apt-get install libdbi-perl libclass-dbi-mysql-perl
```

This should install and start your MariaDB instance. We can now continue to configure our database. By default the database can only be accessed from localhost - this might be perfectly fine (and is the secure option), but if you want to be able to access data on your Raspberry Pi from different computers you have to comment out the line `bind-address = 127.0.0.1` inside:

```bash
sudo nano /etc/mysql/mariadb.conf.d/50-server.cnf
```

---

![Zigbee2MQTT](./zigbee2mqtt_17.png)

---

We can now enter our database with the following command (just press enter when asked for a password):

```
sudo mysql -u root -p
```

We will assign a new root password with the first line (see below - replace the 'instar' string with your real password):

```sql
GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' IDENTIFIED BY 'instar';
GRANT USAGE ON *.* TO 'root'@'%' WITH GRANT OPTION;
exit
```

For these changes to take effect we now need to restart our SQL server: `sudo service mysql restart`.

---

![Zigbee2MQTT](./zigbee2mqtt_18.png)

---

### Configuring MariaDB for FHEM

We can now use a SQL client, e.g. [SQLElectron-GUI](https://sqlectron.github.io/#gui), to further configure our database.
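If you prefer the terminal over a GUI client, the statements in the next section can also be run through the `mysql` command line client - a minimal sketch, assuming `192.168.2.100` is the IP of your Raspberry Pi (a placeholder - adjust it to your network) and that you enabled remote access above:

```bash
# open a SQL shell on the MariaDB server running on the Pi
mysql -h 192.168.2.100 -u root -p
```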
---

![Zigbee2MQTT](./zigbee2mqtt_19.png)

---

We are going to create a database called `fhem` as well as a user `fhemuser` with the password `instar` (change all of those at will) that will be used for our FHEM-generated data:

```sql
CREATE DATABASE `fhem` DEFAULT CHARACTER SET = `utf8`;
CREATE USER 'fhemuser'@'%' IDENTIFIED BY 'instar';
REVOKE CREATE ROUTINE, CREATE VIEW, CREATE USER, ALTER, SHOW VIEW, CREATE, ALTER ROUTINE, EVENT, SUPER, INSERT, RELOAD, SELECT, DELETE, FILE, SHOW DATABASES, TRIGGER, SHUTDOWN, REPLICATION CLIENT, GRANT OPTION, PROCESS, REFERENCES, UPDATE, DROP, REPLICATION SLAVE, EXECUTE, LOCK TABLES, CREATE TEMPORARY TABLES, INDEX ON *.* FROM 'fhemuser'@'%';
UPDATE mysql.user SET max_questions = 0, max_updates = 0, max_connections = 0 WHERE User = 'fhemuser' AND Host = '%';
GRANT CREATE ROUTINE, CREATE VIEW, ALTER, SHOW VIEW, CREATE, ALTER ROUTINE, EVENT, INSERT, SELECT, DELETE, TRIGGER, GRANT OPTION, REFERENCES, UPDATE, DROP, EXECUTE, LOCK TABLES, CREATE TEMPORARY TABLES, INDEX ON `fhem`.* TO 'fhemuser'@'%';
USE `fhem`;

CREATE TABLE `history` (
  `TIMESTAMP` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `DEVICE` varchar(64) DEFAULT NULL,
  `TYPE` varchar(64) DEFAULT NULL,
  `EVENT` varchar(512) DEFAULT NULL,
  `READING` varchar(64) DEFAULT NULL,
  `VALUE` varchar(255) DEFAULT NULL,
  `UNIT` varchar(32) DEFAULT NULL,
  KEY `IDX_HISTORY` (`DEVICE`,`READING`,`TIMESTAMP`,`VALUE`),
  KEY `DEVICE` (`DEVICE`,`READING`)
);

CREATE TABLE `current` (
  `TIMESTAMP` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `DEVICE` varchar(64) DEFAULT NULL,
  `TYPE` varchar(64) DEFAULT NULL,
  `EVENT` varchar(512) DEFAULT NULL,
  `READING` varchar(64) DEFAULT NULL,
  `VALUE` varchar(255) DEFAULT NULL,
  `UNIT` varchar(32) DEFAULT NULL
);
```

---

![Zigbee2MQTT](./zigbee2mqtt_20.png)

---

When successful, this will have created a database with the name `fhem` and two tables called `current` and `history` (for me the new table did not show up in SQLElectron right away after it was created. I disconnected, edited the database entry - see 2 screenshots up - and set the `Initial Database` to `fhem`. After reconnecting everything looked as it should):

---

![Zigbee2MQTT](./zigbee2mqtt_21.png)

---

Let's now add our database to FHEM. We do this by copying the `db.conf` template to the FHEM root directory and editing the MySQL section:

```bash
sudo cp /opt/fhem/contrib/dblog/db.conf /opt/fhem/db.conf
sudo chown fhem:dialout /opt/fhem/db.conf
sudo nano /opt/fhem/db.conf
```

---

![Zigbee2MQTT](./zigbee2mqtt_22.png)

---

```perl
%dbconfig= (
    connection => "mysql:database=fhem;host=localhost;port=3306",
    user => "fhemuser",
    password => "<PASSWORD>",
    # optional enable(1) / disable(0) UTF-8 support (at least V 4.042 is necessary)
    utf8 => 1
);
```

Now all we have to do is to tell FHEM to read the configuration file and start using our database setup to store all events:

```bash
define DBLogging DbLog /opt/fhem/db.conf .*:.*
```

---

![Zigbee2MQTT](./zigbee2mqtt_23.png)

---

And don't forget to set the __DBLogType__ to `Current/History` (see the bottom of the screenshot above). We can verify that the logging has started by querying the content of our __Current Table__:

```sql
SELECT * FROM current;
```

---

![Zigbee2MQTT](./zigbee2mqtt_24.png)

---

To make sure that the connection with your database is re-established if for whatever reason it fails, run the following command in FHEM: `define DBLogging_Reopen at +*00:15:00 set DBLogging reopen`.
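For a quick sanity check from the Pi's shell you can count the rows that have already arrived - a minimal sketch, assuming the `fhemuser` credentials created above:

```bash
# count logged events and show the newest entry in the history table
mysql -u fhemuser -p fhem -e "SELECT COUNT(*), MAX(TIMESTAMP) FROM history;"
```

The count grows quickly - which brings us to filtering.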
### Log Filter

We set up FHEM to log every event, which quickly leads to a huge pile of - mostly useless - data. You can check your current history with the following command:

```sql
SELECT DEVICE, COUNT(*) FROM history GROUP BY DEVICE ORDER BY 2 DESC;
```

---

![Zigbee2MQTT](./zigbee2mqtt_25.png)

---

In my case only the temperature/humidity sensor and the motion detector generate data that I want to use in later projects - the rest should be excluded. For this, we can go back to our DBLogging device in FHEM - `http://<RaspiIP>:8083/fhem?detail=DBLogging` - and change the default __DbLogSelectionMode__ to `Exclude/Include`:

---

![Zigbee2MQTT](./zigbee2mqtt_26.png)

---

* __Exclude__: DbLog behaves just as usual. This means everything specified in the regex in DEF will be logged by default and anything excluded via the DbLogExclude attribute will not be logged.
* __Include__: Nothing will be logged, except the readings specified via regex in the DbLogInclude attribute (in source devices). Neither the regex set in DEF nor the device name of the source device itself will be considered.
* __Exclude/Include__: Almost the same as Exclude, but if a reading matches the DbLogExclude attribute, it will further be checked against the regex in DbLogInclude, which may re-include the already excluded reading.

That means with `Exclude/Include` selected for our logging service we can now edit every device that is generating log entries and `set attr <name> DbLogExclude .*`:

---

![Zigbee2MQTT](./zigbee2mqtt_29.png)

---

The regular expression `.*` includes everything and so nothing is logged anymore.

__Note__: If you already have many devices included, simply use the following command to set this attribute for all your devices: `attr .* DbLogExclude .*`. To automate this for future devices use the following _notify_ function:

```bash
define n_DbLogExclude notify global:DEFINED.* attr $EVTPART1 DbLogExclude .*
```

In a second step we can go in and use `set attr <name> DbLogInclude <variable>` to include selected variables for our log:

---

![Zigbee2MQTT](./zigbee2mqtt_30.png)

---

### Average / Reduce

Additionally, FHEM offers a log reduction function that allows us to average every event that is older than n days down to only 1 event per hour:

```bash
set <name> reduceLogNbl <n> average
```

If we want to keep our data for 3 months this would be `set DBLogging reduceLogNbl 90 average`:

---

![Zigbee2MQTT](./zigbee2mqtt_27.png)

---

We can automate this in FHEM with:

```bash
define at_DbLoggingReduce at *04:30:00 set DBLogging reduceLogNbl 90 average
```

---

![Zigbee2MQTT](./zigbee2mqtt_28.png)

---

You can further reduce your data logs for entries older than 24 hrs:

```bash
define DBLogging_Reduce at +*24:00:00 set DBLogging reduceLog 1
```

## Compare with Data from Web-APIs

I now have a reading for the temperature inside my office - and might add a few more temperature sensors in the future. But it would be interesting to see how well the temperatures I measure fit the temperatures I can get from an online weather service like [DarkSky.net](https://darksky.net/forecast/22.5446,114.0545/si12/en). Click the link and select your city - you will find the coordinates that DarkSky assigns to your location inside the URL - in case of Shenzhen, China this is `22.5446,114.0545`:

---

![Zigbee2MQTT](./zigbee2mqtt_31.png)

---

Once we know our ID we can start using the integrated weather module in FHEM to query the corresponding information for our location.
Just make sure that your Raspberry Pi has the following Perl libraries installed so FHEM knows how to handle the API's JSON response (`shutdown restart` FHEM after the installation was successful):

```bash
sudo apt -y install libjson-perl libdatetime-format-strptime-perl
```

The command to add a weather device in FHEM then looks like this:

```bash
define <name> Weather [API=<API>[,<apioptions>]] [apikey=<apikey>] [location=<location>] [interval=<interval>] [lang=<lang>]
```

For me this results in `define ShenZhenWeather Weather API=DarkSkyAPI,cachemaxage:600 apikey=<KEY> location=22.5446,114.0545 interval=3600 lang=en`.

__Note__: the API key inside this command does not work - you have to get your own free key by [registering with DarkSky](https://darksky.net/dev).

You should see your first reading coming in a few seconds later:

---

![Zigbee2MQTT](./zigbee2mqtt_32.png)

---

We can also output the data for a 7-day forecast on our FHEM interface by defining an HTML output:

```bash
define ShenZhenWeatherHtml weblink htmlCode {WeatherAsHtml("ShenZhenWeatherHtml",7)}
```

<!-- define SZWeatherForecast weblink htmlCode {"ShenZhen 7-Day Forecast:<br>".WeatherAsHtml("ShenZhenWeatherHtml",7)} -->

---

![Zigbee2MQTT](./zigbee2mqtt_34.png)

---

We can now reduce the number of variables that we record by using `event-on-update-reading`, so by the next update I only receive (and log - see `DbLogInclude`) the following:

---

![Zigbee2MQTT](./zigbee2mqtt_35.png)

---

```
apparentTemperature,cloudCover,condition,humidity,ozone,pressure,temp_c,uvIndex,visibility,wind,windGust,wind_direction
```

---

![Zigbee2MQTT](./zigbee2mqtt_36.png)

---

### Visualize Logs

We can quickly write the API response to a log file:

```
define FileLog_ShenZhenWeather FileLog ./log/ShenZhenWeather-%Y.log ShenZhenWeather
```

You can check that the file is written with `cat /opt/fhem/log/ShenZhenWeather-2019.log`.

---

![Zigbee2MQTT](./zigbee2mqtt_33.png)

---

We can now use the integrated plot function to create a little SVG plot for our log:

---

![Zigbee2MQTT](./zigbee2mqtt_37.png)

![Zigbee2MQTT](./zigbee2mqtt_38.png)

---

Generating the plot will add the received data points to your plot. You can adjust the axis range and highlight colours to your needs. Going back to the __ShenZhenWeather__ log definition, we can now also add the temperature and humidity readings from our XiaoMi sensor to the same log file:

---

![Zigbee2MQTT](./zigbee2mqtt_39.png)

![Zigbee2MQTT](./zigbee2mqtt_40.png)

---

And then go and add it to our plot - voilà:

---

![Zigbee2MQTT](./zigbee2mqtt_41.png)

![Zigbee2MQTT](./zigbee2mqtt_42.png)

---

## Using Zigbee Devices to control INSTAR IP Cameras in Node-RED

We have now set up the Zigbee Bridge, connected Zigbee sensors and made sure that the messages are sent to our Mosquitto MQTT server. We also found out the topics that our bridge uses for each sensor to publish its messages - `zigbee2mqtt/motion_pir_sensor`, `zigbee2mqtt/temp_hum_sensor` and `zigbee2mqtt/smart_button` - and what kind of payload to expect.

Now we can use the program [MQTT.fx](/Advanced_User/OpenHAB_Home_Automation/Mosquitto_Installation_on_Windows/#testing-the-mqtt-server-with-mqttfx) to verify that the message was received by our Mosquitto server:

---

![Zigbee2MQTT](./zigbee2mqtt_x11.png)

---

Every time the state changes the sensor will send a Zigbee message to our Bridge, which then forwards its payload to our Mosquitto server - perfect!
We can now start our [Node-RED](https://nodered.org/) - that we either installed under [Windows 10](/Advanced_User/Node-RED_and_MQTT/), on a [Raspberry Pi](/Advanced_User/openHABian_on_Raspberry_Pi/Node-RED_Setup/) or a [Debian Linux Server](/Software/Linux/Node-RED/), add __MQTT Nodes__ and subscribe to all 3 MQTT topics: --- ![Zigbee2MQTT](./zigbee2mqtt_x12.png) --- Connect all 3 of them to a __Debug Node__ and deploy the flow. Click the button, warm up the temp sensor and trigger the motion detection - the MQTT messages will show up in the debug pane in Node-RED: --- ![Zigbee2MQTT](./zigbee2mqtt_13.png) --- ### Add the MQTT input to an existing Flow In the following sequence we are using the input for the wireless button to switch __Alarm Areas__ on our cameras - we already used the same sequence for the [Amazon Alexa Integration](/Advanced_User/Alexa_Voice_Control_without_Cloud/) (the complete flow can be found in the [Home Assistant Tutorial](/Software/Linux/Home_Assistant/)): --- ![Zigbee2MQTT](./zigbee2mqtt_x14.png) --- You can copy the following JSON code and import it into Node-RED ([How do I import flows to Node-RED?](/Software/Linux/Node-RED/#using-node-red-to-communicate-with-your-camera)): ```json [{"id":"ebde9a4c.90efe8","type":"link out","z":"17b43655.05bd3a","name":"","links":["57b22bf7.0e2874","5eb0d369.b3b15c","b5ce97db.b3f4f8","f335d7f4.4bbe18"],"x":1308,"y":100,"wires":[]},{"id":"accb31dd.a130c","type":"mqtt in","z":"17b43655.05bd3a","name":"Wireless Button","topic":"zigbee2mqtt/smart_button","qos":"1","broker":"40415d07.67dd94","x":780,"y":100,"wires":[["2537690e.6230d6"]]},{"id":"2537690e.6230d6","type":"json","z":"17b43655.05bd3a","name":"","property":"payload","action":"","pretty":false,"x":929,"y":100,"wires":[["17df313d.2e7aff"]]},{"id":"17df313d.2e7aff","type":"change","z":"17b43655.05bd3a","name":"single / double","rules":[{"t":"set","p":"payload","pt":"msg","to":"payload.click","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":1076,"y":100,"wires":[["1f97ee5b.dfc642"]]},{"id":"1f97ee5b.dfc642","type":"string","z":"17b43655.05bd3a","name":"","methods":[{"name":"replaceAll","params":[{"type":"str","value":"single"},{"type":"str","value":"{\"val\" : \"on\"}"}]},{"name":"replaceAll","params":[{"type":"str","value":"double"},{"type":"str","value":"{\"val\" : \"off\"}"}]}],"prop":"payload","propout":"payload","object":"msg","objectout":"msg","x":1226,"y":100,"wires":[["ebde9a4c.90efe8"]]},{"id":"40415d07.67dd94","type":"mqtt-broker","z":"","name":"Debian Mosquitto","broker":"localhost","port":"1883","clientid":"","usetls":false,"compatmode":true,"keepalive":"60","cleansession":true,"birthTopic":"","birthQos":"0","birthPayload":"","closeTopic":"","closePayload":"","willTopic":"","willQos":"0","willPayload":""}] ``` The __MQTT Node__ outputs the message from our button as a string. We first have to turn it into a JavaScript Object with a __JSON Node__. 
Next follows a __Change Node__ that changes the Message Payload to the value of __Click__ - as we learnt above, this value can either be `single`, `double`, `triple`, `quadruple`, `long` or `long_release`:

---

![Zigbee2MQTT](./zigbee2mqtt_x15.png)

---

After that follows a __String Node__ (this node type [has to be installed manually](/Software/Linux/Node-RED/#the-node-red-admin-panel) - it is called `node-red-contrib-string`) that searches for the payload `single` or `double` and replaces them with something that we need to trigger the Flow below - in this case we need a JSON expression that either sets a value to `on` or `off`: `{"val" : "on"}` / `{"val" : "off"}`:

---

![Zigbee2MQTT](./zigbee2mqtt_x16.png)

---

And last but not least we add a __Link Node__ to plug it in to all 4 sequences for the 4 alarm areas. Now every time we single-click the Wireless Button all areas are activated. A double-click deactivates them:

<GifContainer gifUrl="/assets/gif/nodered_x01.gif" alt="Zigbee2MQTT" />

We attached a __Debug Node__ to the MQTT input to visualize the input given by the Wireless Button - __Note__ that only the `single` and `double` click trigger the Alarm Area Sequences - the rest of the input options can be used for other functions.

<!-- attr WEB roomIcons Office:scene_office Logging:time_graph Unsorted:recycling XiaomiMQTTDevice:mqtt Everything:file_unknown ShenZhen:gitlab --><file_sep>---
date: "2019-09-15"
title: "Updating a Magento Project from PHP v7.0 to v7.3"
categories:
  - LINUX
  - Magento
---

![Katmandu, Nepal](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Check your Environment](#check-your-environment)
- [Install PHP v7.3](#install-php-v73)
- [Set the Timezone](#set-the-timezone)
- [Increase PHP memory limit](#increase-php-memory-limit)
- [Enable opcache.save_comments](#enable-opcachesave_comments)
- [Switch to your new PHP Version](#switch-to-your-new-php-version)
- [Checking your Code](#checking-your-code)
- [mcrypt](#mcrypt)
- [Eval option for mb_ereg_replace() and mb_eregi_replace()](#eval-option-for-mb_ereg_replace-and-mb_eregi_replace)
- [Unquoted strings](#unquoted-strings)
- [png2wbmp() and jpeg2wbmp()](#png2wbmp-and-jpeg2wbmp)
- [INTL_IDNA_VARIANT_2003 variant](#intl_idna_variant_2003-variant)
- [__autoload() method](#__autoload-method)
- [track_errors ini setting and $php_errormsg variable](#track_errors-ini-setting-and-php_errormsg-variable)
- [create_function() function](#create_function-function)
- [mbstring.func_overload ini setting](#mbstringfunc_overload-ini-setting)
- [(unset) cast](#unset-cast)
- [parse_str() without a second argument](#parse_str-without-a-second-argument)
- [gmp_random() function](#gmp_random-function)
- [each() function](#each-function)
- [assert() with a string argument](#assert-with-a-string-argument)
- [$errcontext argument of error handlers](#errcontext-argument-of-error-handlers)
- [read_exif_data() function](#read_exif_data-function)
- [Re-Deploy Magento](#re-deploy-magento)
- [Purge Old PHP Version](#purge-old-php-version)

<!-- /TOC -->

## Check your Environment

First check the version of PHP you have installed:

```bash
php -v

PHP 7.0.4 (cli) (built: Dec 18 2019 15:01:47) ( NTS )
Copyright (c) 1997-2018 The PHP Group
Zend Engine v3.2.0, Copyright (c) 1998-2018 Zend Technologies
    with Zend OPcache v7.0.4, Copyright (c) 1999-2018, by Zend Technologies
```

Then check what modules you are using:

```bash
php -m

[PHP Modules]
bcmath
calendar
Core
ctype
curl
date
dom
exif
fileinfo
filter
ftp
gd
gettext
hash
iconv
intl
json
libxml
mbstring
mysqli
mysqlnd
openssl
pcntl
pcre
PDO
pdo_mysql
Phar
posix
readline
Reflection
session
shmop
SimpleXML
soap
sockets
sodium
SPL
standard
sysvmsg
sysvsem
sysvshm
tokenizer
wddx
xml
xmlreader
xmlwriter
xsl
Zend OPcache
zip
zlib

[Zend Modules]
Zend OPcache
```

Note them down, as you will have to install all of them for the new version of PHP that you are going to install.

## Install PHP v7.3

Run the commands below to upgrade the current packages to the latest version:

```bash
sudo apt update
sudo apt upgrade
```

Now set up the PPA on Debian 10: import the package signing key, then configure the PPA for the PHP packages on your system:

```bash
sudo apt install ca-certificates apt-transport-https
wget -q https://packages.sury.org/php/apt.gpg -O- | sudo apt-key add -
echo "deb https://packages.sury.org/php/ buster main" | sudo tee /etc/apt/sources.list.d/php.list
```

Now run the following commands to install PHP 7.3:

```bash
sudo apt update
sudo apt install php7.3
```

Install the necessary PHP modules:

```bash
sudo apt install php7.3-cli php7.3-common php7.3-curl php7.3-gd php7.3-json php7.3-mbstring php7.3-mysql php7.3-xml php7.3-bcmath php7.3-intl php7.3-opcache php7.3-soap
```

Now we need to configure PHP v7.3 - to find out which `php.ini` file is the one that is loaded, run `php -i`:

```bash
php -i

phpinfo()
PHP Version => 7.3.26-1+0~20191218.33+debian10~1.gbpb5a34b

System => Linux Magento2 4.19.0-6-amd64 #1 SMP Debian 4.19.67-2+deb10u2 (2019-11-11) x86_64
Build Date => Dec 18 2019 15:01:47
Server API => Command Line Interface
Virtual Directory Support => disabled
Configuration File (php.ini) Path => /etc/php/7.3/cli
Loaded Configuration File => /etc/php/7.3/cli/php.ini
Scan this dir for additional .ini files => /etc/php/7.3/cli/conf.d
Additional .ini files parsed => /etc/php/7.3/cli/conf.d/10-mysqlnd.ini,
```

> Note: on my system there was a second configuration file in `/etc/php/7.3/apache2/php.ini`. In a later step I am going to install NGINX that is going to add more files in `/etc/php/7.3/fpm/php.ini` and `/etc/php/7.3/cli/php.ini`. The Magento documentation recommends applying all changes to every `php.ini` file on your system.

We can see that the loaded configuration file is `/etc/php/7.3/cli/php.ini`:

```bash
nano /etc/php/7.3/cli/php.ini
```

### Set the Timezone

Edit the `php.ini` configuration file and update the [timezone](https://www.php.net/manual/en/timezones.php) value in the `date.timezone` setting:

```yaml
date.timezone = "Europe/Berlin"
```

### Increase PHP memory limit

Simply increase the default value to the recommended value:

* Compiling code or deploying static assets: `756M`
* Installing and updating Magento components from Magento Marketplace: `2G`
* Testing: `~3-4G`

```yaml
memory_limit = 2G
```

### Enable opcache.save_comments

Enable [opcache.save_comments](https://www.php.net/manual/en/opcache.configuration.php#ini.opcache.save_comments); it is also recommended to enable the [PHP OpCache](http://php.net/manual/en/intro.opcache.php) for performance reasons.

```yaml
[opcache]
; Determines if Zend OPCache is enabled
opcache.save_comments=1
opcache.enable=1
```

## Switch to your new PHP Version

To set PHP 7.3 as your active PHP version for CLI and Apache2, disable the Apache2 modules of all other installed PHP versions and configure the CLI using the update-alternatives command. Run the following commands to make the changes.

1. Apache:

```bash
sudo a2dismod php5.6 php7.1 php7.0 ## Shows error for modules not installed
sudo a2enmod php7.3
sudo service apache2 restart
```

2. CLI:
```bash
sudo update-alternatives --set php /usr/bin/php7.3
sudo update-alternatives --set phar /usr/bin/phar7.3
sudo update-alternatives --set phar.phar /usr/bin/phar.phar7.3
sudo update-alternatives --set phpize /usr/bin/phpize7.3
sudo update-alternatives --set php-config /usr/bin/php-config7.3
```

Running a PHP version check again should show you that you are now using the updated version:

```bash
php -v

PHP 7.3.26-1+0~20191218.33+debian10~1.gbpb5a34b (cli) (built: Dec 18 2019 15:01:47) ( NTS )
Copyright (c) 1997-2018 The PHP Group
Zend Engine v3.2.0, Copyright (c) 1998-2018 Zend Technologies
    with Zend OPcache v7.3.26-1+0~20191218.33+debian10~1.gbpb5a34b, Copyright (c) 1999-2018, by Zend Technologies
```

3. NGINX:

Go to your NGINX server configuration, e.g.:

```bash
nano /etc/nginx/sites-available/default
```

Change the FastCGI backend to use the new PHP-FPM socket, save and exit the file:

```nginx
location ~ \.php$ {
    include snippets/fastcgi-php.conf;
    fastcgi_pass unix:/run/php/php7.3-fpm.sock;
}
```

Run the configuration test and restart the web server:

```bash
nginx -t
sudo service nginx restart
```

## Checking your Code

### mcrypt

The [mcrypt extension has been deprecated](https://www.php.net/manual/en/migration71.deprecated.php) in favour of OpenSSL; it was removed from the core and moved into PECL in PHP 7.2. Make sure that your code does not use something like:

```php
$initVector = mcrypt_create_iv(mcrypt_get_iv_size(SAFETY_CIPHER, SAFETY_MODE), MCRYPT_RAND);
$encrypted = mcrypt_encrypt(SAFETY_CIPHER, $key, $password, SAFETY_MODE, $initVector);
echo base64_encode($initVector) . ENCRYPTION_DIVIDER_TOKEN . base64_encode($encrypted) . '<br/>';
echo mcrypt_decrypt(SAFETY_CIPHER, $key, $encrypted, SAFETY_MODE, $initVector) . '<br/>';
```

### Eval option for mb_ereg_replace() and mb_eregi_replace()

The [e pattern modifier has been deprecated](https://www.php.net/manual/en/migration71.deprecated.php) for the [mb_ereg_replace()](https://www.php.net/manual/en/function.mb-ereg-replace.php) and [mb_eregi_replace()](https://www.php.net/manual/en/function.mb-eregi-replace.php) functions.

```php
mb_eregi_replace ( string $pattern , string $replace , string $string [, string $option = "msri" ] ) : string
```

```php
mb_ereg_replace ( string $pattern , string $replacement , string $string [, string $option = "msr" ] ) : string
```

### Unquoted strings

[Unquoted strings](https://www.php.net/manual/en/migration72.deprecated.php) that are non-existent global constants are taken to be strings of themselves. This behaviour will now emit an E_WARNING:

```php
<?php

var_dump(NONEXISTENT);

/* Output:
Warning: Use of undefined constant NONEXISTENT - assumed 'NONEXISTENT' (this will throw an Error in a future version of PHP) in %s on line %d
string(11) "NONEXISTENT"
*/
```

### png2wbmp() and jpeg2wbmp()

The [png2wbmp()](https://www.php.net/manual/en/function.png2wbmp.php) and [jpeg2wbmp()](https://www.php.net/manual/en/function.jpeg2wbmp.php) functions from the [GD extension have now been deprecated](https://www.php.net/manual/en/migration72.deprecated.php#migration72.deprecated.png2wbmp-jpeg2wbmp) and will be removed in the next major version of PHP.
```php png2wbmp ( string $pngname , string $wbmpname , int $dest_height , int $dest_width , int $threshold ) : bool ``` ```php jpeg2wbmp ( string $jpegname , string $wbmpname , int $dest_height , int $dest_width , int $threshold ) : bool ``` ### INTL_IDNA_VARIANT_2003 variant The Intl extension has deprecated the INTL_IDNA_VARIANT_2003 variant, which is currently being used as the default for [idn_to_ascii()](https://www.php.net/manual/en/function.idn-to-ascii.php) and [idn_to_utf8()](https://www.php.net/manual/en/function.idn-to-utf8.php). PHP 7.4 will see these defaults changed to INTL_IDNA_VARIANT_UTS46, and the [next major version of PHP will remove INTL_IDNA_VARIANT_2003 altogether](https://www.php.net/manual/en/migration72.deprecated.php#migration72.deprecated.png2wbmp-jpeg2wbmp). ```php idn_to_ascii ( string $domain [, int $options = IDNA_DEFAULT [, int $variant = INTL_IDNA_VARIANT_UTS46 [, array &$idna_info ]]] ) : string ``` ```php idn_to_utf8 ( string $domain [, int $options = IDNA_DEFAULT [, int $variant = INTL_IDNA_VARIANT_UTS46 [, array &$idna_info ]]] ) : string ``` ### __autoload() method The [__autoload() method has been deprecated](https://www.php.net/manual/en/function.autoload.php): ```php __autoload ( string $class ) : void ``` ### track_errors ini setting and $php_errormsg variable When the track_errors ini setting is enabled, a $php_errormsg variable is created in the local scope when a non-fatal error occurs. Given that the preferred way of retrieving such error information is by using [error_get_last()](https://www.php.net/manual/en/function.error-get-last.php), [this feature has been deprecated](https://www.php.net/manual/en/function.error-get-last.php). ### create_function() function Given the security issues of this function, this dated function [has now been deprecated](https://www.php.net/manual/en/migration72.deprecated.php#migration72.deprecated.create_function-function). The preferred alternative is to use anonymous functions. ```php create_function ( string $args , string $code ) : string ``` ### mbstring.func_overload ini setting Given the interoperability problems of string-based functions being used in environments with this setting enabled, it has now been deprecated. ### (unset) cast Casting any expression to this type will always result in NULL, and so this superfluous casting type has now been deprecated. ### parse_str() without a second argument Without the second argument to [parse_str()](https://www.php.net/manual/en/function.parse-str.php), the query string parameters would populate the local symbol table. Given the security implications of this, using parse_str() without a second argument [has now been deprecated](https://www.php.net/manual/en/migration72.deprecated.php#migration72.deprecated.parse_str-no-second-arg). The function should always be used with two arguments, as the second argument causes the query string to be parsed into an array: ```php parse_str ( string $encoded_string [, array &$result ] ) : void ``` ### gmp_random() function This [function generates a random number](https://www.php.net/manual/en/function.gmp-random.php) based upon a range that is calculated by an unexposed, platform-specific limb size. Because of this, [the function has now been deprecated](https://www.php.net/manual/en/migration72.deprecated.php#migration72.deprecated.gmp_random-function). 
The preferred way of generating a random number using the GMP extension is by [gmp_random_bits()](https://www.php.net/manual/en/function.gmp-random-bits.php) and [gmp_random_range()](https://www.php.net/manual/en/function.gmp-random-range.php): ```php gmp_random ([ int $limiter = 20 ] ) : GMP ``` ```php gmp_random_range ( GMP $min , GMP $max ) : GMP ``` ```php gmp_random_bits ( int $bits ) : GMP ``` ### each() function [This function](https://www.php.net/manual/en/function.each.php) is far slower at iteration than a normal [foreach()](https://www.php.net/manual/en/control-structures.foreach.php), and causes implementation issues for some language changes. It has therefore [been deprecated](https://www.php.net/manual/en/migration72.deprecated.php#migration72.deprecated.each-function). ```php each ( array &$array ) : array ``` <!-- Affected pages: \template_63588_PyBvvUTffPT11855oAQQ\copy2server\app\code\TemplateMonster\FilmSlider\Block\Adminhtml\Slider\Edit.php \template_63588_PyBvvUTffPT11855oAQQ\copy2server\app\code\TemplateMonster\FilmSlider\Block\Adminhtml\SliderItem\Edit.php \template_63588_PyBvvUTffPT11855oAQQ\copy2server\app\code\TemplateMonster\GoogleMap\view\adminhtml\templates\custom-validation.phtml \template_63588_PyBvvUTffPT11855oAQQ\copy2server\app\code\TemplateMonster\LayoutSwitcher\view\frontend\templates\switcher.phtml --> ### assert() with a string argument Using [assert()](https://www.php.net/manual/en/function.assert.php) with a string argument required the string to be [eval()](https://www.php.net/manual/en/function.eval.php)'ed. Given the potential for remote code execution, using assert() with a string argument has now been [deprecated in favour of using boolean expressions](https://www.php.net/manual/en/migration72.deprecated.php#migration72.deprecated.errcontext-arg-of-error-handlers). ```php assert ( mixed $assertion [, Throwable $exception ] ) : bool ``` ```php eval ( string $code ) : mixed ``` ### $errcontext argument of error handlers The $errcontext argument contains all local variables of the error site. Given its rare usage, and the problems it causes with internal optimisations, [it has now been deprecated](https://www.php.net/manual/en/migration72.deprecated.php#migration72.deprecated.errcontext-arg-of-error-handlers). ### read_exif_data() function The [read_exif_data()](https://www.php.net/manual/en/function.read-exif-data.php) alias [has been deprecated](https://www.php.net/manual/en/migration72.deprecated.php#migration72.deprecated.read_exif_data-function). The [exif_read_data()](https://www.php.net/manual/en/function.exif-read-data.php) function should be used instead. ## Re-Deploy Magento ```bash php7.3 bin/magento setup:upgrade php7.3 bin/magento setup:di:compile php7.3 bin/magento setup:static-content:deploy -f chown -R www-data:www-data . 
``` ## Purge Old PHP Version ```bash apt purge php7.2 php7.2-common php7.2-cli ```<file_sep>--- date: "2018-11-20" title: "Node-RED Interaction over HTTP" categories: - IoT - Node-RED --- ![Harbin, China](./photo-33795443263_fcb0014fd2_o.jpg) A collection of [HTTP recipes](https://cookbook.nodered.org/http/) for [Node-RED](https://nodered.org) - use the import feature to use: ![Node-RED](./node-red-http_01.png) ```json [{"id":"3c681253.aa4ade","type":"tab","label":"http magic","disabled":false,"info":""},{"id":"80872401.3206d8","type":"http in","z":"3c681253.aa4ade","name":"","url":"/hello","method":"get","swaggerDoc":"","x":80,"y":40,"wires":[["135db765.dafe79"]]},{"id":"135db765.dafe79","type":"template","z":"3c681253.aa4ade","name":"page","field":"payload","fieldType":"msg","format":"handlebars","syntax":"mustache","template":"<html>\n <head></head>\n <body>\n <h1>Hello World!</h1>\n </body>\n</html>","x":230,"y":40,"wires":[["c291ec46.38319"]]},{"id":"c291ec46.38319","type":"http response","z":"3c681253.aa4ade","name":"","x":370,"y":40,"wires":[]},{"id":"a90f0560.9d87e8","type":"template","z":"3c681253.aa4ade","name":"page","field":"payload","fieldType":"msg","format":"handlebars","syntax":"mustache","template":"<html>\n <head></head>\n <body>\n <h1>Hello {{req.query.name}}!</h1>\n </body>\n</html>","x":250,"y":100,"wires":[["b9be261c.23e078"]]},{"id":"918819e9.918768","type":"http in","z":"3c681253.aa4ade","name":"","url":"/hello-query","method":"get","swaggerDoc":"","x":80,"y":100,"wires":[["a90f0560.9d87e8"]]},{"id":"b9be261c.23e078","type":"http response","z":"3c681253.aa4ade","name":"","x":390,"y":100,"wires":[]},{"id":"557ae155.bdb3f","type":"http response","z":"3c681253.aa4ade","name":"","x":450,"y":160,"wires":[]},{"id":"1540b4e5.3c429b","type":"template","z":"3c681253.aa4ade","name":"page","field":"payload","fieldType":"msg","format":"handlebars","syntax":"mustache","template":"<html>\n <head></head>\n <body>\n <h1>Hello {{req.params.name}}!</h1>\n </body>\n</html>","x":310,"y":160,"wires":[["557ae155.bdb3f"]]},{"id":"b486b3df.b88cc","type":"http in","z":"3c681253.aa4ade","name":"","url":"/hello-param/:name","method":"get","swaggerDoc":"","x":110,"y":160,"wires":[["1540b4e5.3c429b"]]},{"id":"5d1573ae.cf787c","type":"http in","z":"3c681253.aa4ade","name":"","url":"/hello-headers","method":"get","swaggerDoc":"","x":100,"y":220,"wires":[["4c54b269.478b9c"]]},{"id":"4c54b269.478b9c","type":"template","z":"3c681253.aa4ade","name":"page","field":"payload","fieldType":"msg","format":"handlebars","syntax":"mustache","template":"<html>\n <head></head>\n <body>\n <h1>User agent: {{req.headers.user-agent}}</h1>\n </body>\n</html>","x":280,"y":220,"wires":[["e9ad6e2f.89bd"]]},{"id":"e9ad6e2f.89bd","type":"http response","z":"3c681253.aa4ade","name":"","x":420,"y":220,"wires":[]},{"id":"fcfb1c0f.37bae","type":"inject","z":"3c681253.aa4ade","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"x":100,"y":300,"wires":[["a4ca8924.9f9318"]]},{"id":"a4ca8924.9f9318","type":"change","z":"3c681253.aa4ade","name":"Store time","rules":[{"t":"set","p":"timestamp","pt":"flow","to":"payload","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":270,"y":300,"wires":[[]]},{"id":"3b885f56.b5121","type":"http in","z":"3c681253.aa4ade","name":"","url":"/hello-data","method":"get","swaggerDoc":"","x":120,"y":340,"wires":[["ade6d6a9.2b22b8"]]},{"id":"ade6d6a9.2b22b8","type":"change","z":"3c681253.aa4ade","name":"Copy 
time","rules":[{"t":"set","p":"timestamp","pt":"msg","to":"timestamp","tot":"flow"}],"action":"","property":"","from":"","to":"","reg":false,"x":200,"y":400,"wires":[["1f223db5.547852"]]},{"id":"1f223db5.547852","type":"template","z":"3c681253.aa4ade","name":"page","field":"payload","fieldType":"msg","format":"handlebars","syntax":"mustache","template":"<html>\n <head></head>\n <body>\n <h1>Time: {{ timestamp }}</h1>\n </body>\n</html>","x":310,"y":340,"wires":[["c8e904d1.7883c8"]]},{"id":"c8e904d1.7883c8","type":"http response","z":"3c681253.aa4ade","name":"","x":370,"y":400,"wires":[]},{"id":"905bb207.1e57b","type":"http in","z":"3c681253.aa4ade","name":"","url":"/hello-json","method":"get","swaggerDoc":"","x":90,"y":480,"wires":[["ebdaf230.23dc4"]]},{"id":"ebdaf230.23dc4","type":"template","z":"3c681253.aa4ade","name":"page","field":"payload","fieldType":"msg","format":"handlebars","syntax":"mustache","template":"{\n \"title\": \"IN-9008 FHD :: Product Overview\",\n \"series\": [\"1080p\", \"Outdoor\"],\n \"models\": [\"IN-9008 HD\"],\n \"description\": \"IN-9008 HD Product Overview. 1.) High quality weatherproof aluminium housing, 2.) High-power infrared LED´s for night vision, 3.) Standard 4.2mm F/2.1 infrared lense, 4.) Photodiode for twilight switch, 5.) Metall wallmount, 6.) Wifi antenna, 7.) RJ45 ethernet connector, 8.) Reset button, 9.) Alarm I/O connector for external devices, 10.) DC power supply connector, 11.) Audio output for external speakers, 12.) Audio input for external microphones. The IN-9008 HD is weather proof and wireless outdoor camera. Thanks to the built-in high-powered infrared diodes it utilizes night vision with high performance which allows you to see deep into the dark. The integrated IR cut filter on the other hand allows for the capturing of native colours, as seen by the human eye. Through the use of an efficient h.264 compression codec it is able of recording video files on a SD memory card with comparably small size but best picture results. For surveillance purposes, the IN-9008 HD is ready for implementation to your existing system. With the help of the handy Onvif Protocol, integration is just a few button presses away. But also as a stand-alone device it offers you a number of alarming features like Email Notification or compatibility with signalling devices. The WPS function allows you to connect the camera to your WiFi network with a press of a button. Accessing your camera from the internet is even easier. The Point-2-Point connection can be established by scanning a QR code with our smartphone app. The InstarVision mobile app is available for Android, iOS and Windows Phone 8. The sensitivity of the integrated Panasonic Pyroelectric Sensor has been significantly improved thanks to a unique slit design of the pyroelectric elements. The separated sensing areas prevent thermal crosstalk between the single sensing elements. Therefore, reliable detection is possible even if the temperature difference between the background (e.g. floor wall) and the target object (human) is small. 
(4°C) Hardware Nightvision with 5 Infrared LED´s @ 850nm (barely visible for the human eye) for distances up to 12 - 20m Very compact high quality IP65 weatherproof aluminium housing Integrated Panasonic Pyroelectric Sensor - PIR Range up to 12m Wide Dynamic Range (WDR) HD 1080p colour CMOS Sensor Network RJ-45 Ethernet port for an easy integration into your 100Mbit home network Compatible with Wifi IEEE 802.11 b/g/n up to 54Mbit networks with WEP, WPA and WPA2 security DHCP and UPnP support for an automatic network integration Integrated individual DDNS (Internet) address to access your camera through the Internet Software Free Android, iPhone and Windows Phone App Web user interface allows an easy configuration and canera control through your default web browser (Chrome, Safari, Firefox, Internet Explorer) Multi user management and simultaneous login for 3 users Software-based motion and audio detection and email notification with attached snapshots Record videos directly on your PC (only ActiveX). The 1/3 inch WDR sensor is a Panasonic CMOS chip, enabling 1080p HD video at unparalleled low-light sensitivity. Panasonic offers a wide variety of image sensors (νMaicovicon®) for security and network cameras, which are based on the industry most advanced technology and can be applied to security cameras requiring high picture quality with high sensitivity and low noise. The CMOS image sensor, νMaicovicon®, features high picture quality equal to high-speed CCD technologies, high S/N ratio, low noise, and high dynamic range. Even in a dark condition, you can retrieve snapshots with excellent character detection, color reproduction, as well as object detection. Even in scenes with fast-moving objects, the image sensor ensures clear images without motion blur or strain. The frame composite function (WDR) enables high-dynamic range - you can simultaneously resolve dark and bright areas in your video frame. Many built-in features allows you to use this camera not only for surveillance, but also for your personal projects. 
The FTP support allows you to use the IN-9008 HD as a multi functional outdoor webcam for integration on your website.\",\n \"sublink1\": \"/Outdoor_Cameras/IN-9008_HD/\",\n \"subtitle1\": \"IN-9008 HD • \",\n \"sublink2\": \"/Outdoor_Cameras/\",\n \"subtitle2\": \"Outdoor Cameras\",\n \"sublink3\": \"\",\n \"subtitle3\": \"\",\n \"sublink4\": \"\",\n \"subtitle4\": \"\",\n \"badge\": \"Manual\",\n \"title2\": \"IN-9008 FHD :: Produkt Übersicht\",\n \"chapter\": \"INSTAR Products\",\n \"tags\": [\"IN-9008 HD\", \"INSTAR\", \"products\", \"1080p series\", \"indoor\", \"IP camera\", \"web cam\", \"overview\"],\n \"image\": \"/images/Search/P_SearchThumb_IN-9008HD.png\",\n \"imagesquare\": \"/images/Search/TOC_Icons/Wiki_Tiles_P-IN-9008HD_white.png\",\n \"short\": \"IN-9008 HD - Product Overview\",\n \"abstract\": \"IN-9008 HD Product Overview\"\n}","x":260,"y":480,"wires":[["55a221ea.ddea8"]]},{"id":"55a221ea.ddea8","type":"change","z":"3c681253.aa4ade","name":"Set Headers","rules":[{"t":"set","p":"headers","pt":"msg","to":"{}","tot":"json"},{"t":"set","p":"headers.content-type","pt":"msg","to":"application/json","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":420,"y":480,"wires":[["5db537e5.f88a08"]]},{"id":"5db537e5.f88a08","type":"http response","z":"3c681253.aa4ade","name":"","x":580,"y":480,"wires":[]},{"id":"b5a8500d.f5b2c","type":"http in","z":"3c681253.aa4ade","name":"","url":"/hello-cookie","method":"get","swaggerDoc":"","x":100,"y":540,"wires":[["c1263851.637e08"]]},{"id":"c1263851.637e08","type":"function","z":"3c681253.aa4ade","name":"Format cookies","func":"msg.payload = JSON.stringify(msg.req.cookies,null,4);\nreturn msg;","outputs":1,"noerr":0,"x":310,"y":540,"wires":[["5c97f560.45567c"]]},{"id":"5c97f560.45567c","type":"template","z":"3c681253.aa4ade","name":"page","field":"payload","fieldType":"msg","format":"handlebars","syntax":"mustache","template":"<html>\n <head></head>\n <body>\n <h1>Cookies</h1>\n <p></p><a href=\"hello-cookie/add\">Add a cookie</a> &bull; <a href=\"hello-cookie/clear\">Clear cookies</a></p>\n <pre>{{ payload }}</pre>\n </body>\n</html>","x":500,"y":540,"wires":[["cf292169.1060d"]]},{"id":"cf292169.1060d","type":"http response","z":"3c681253.aa4ade","name":"","x":720,"y":540,"wires":[]},{"id":"d73d5ebe.6a9a2","type":"change","z":"3c681253.aa4ade","name":"Redirect to /hello-cookie","rules":[{"t":"set","p":"statusCode","pt":"msg","to":"302","tot":"num"},{"t":"set","p":"headers","pt":"msg","to":"{}","tot":"json"},{"t":"set","p":"headers.location","pt":"msg","to":"/hello-cookie","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":520,"y":600,"wires":[["cf292169.1060d"]]},{"id":"daab25e3.c2b7c8","type":"function","z":"3c681253.aa4ade","name":"Add a cookie","func":"msg.cookies = { };\nmsg.cookies[\"demo-\"+(Math.floor(Math.random()*1000))] = Date.now();\nreturn msg;","outputs":1,"noerr":0,"x":300,"y":580,"wires":[["d73d5ebe.6a9a2"]]},{"id":"f67f61ec.9dac5","type":"function","z":"3c681253.aa4ade","name":"Clear cookies","func":"// Find demo cookies and clear them\nvar cookieNames = Object.keys(msg.req.cookies).filter(function(cookieName) { return /^demo-/.test(cookieName);});\nmsg.cookies = {};\n\ncookieNames.forEach(function(cookieName) {\n msg.cookies[cookieName] = null;\n});\n\nreturn msg;","outputs":1,"noerr":0,"x":310,"y":620,"wires":[["d73d5ebe.6a9a2"]]},{"id":"8542590b.3b24c8","type":"http 
in","z":"3c681253.aa4ade","name":"","url":"/hello-cookie/add","method":"get","swaggerDoc":"","x":110,"y":580,"wires":[["daab25e3.c2b7c8"]]},{"id":"3d8b565f.d7239a","type":"http in","z":"3c681253.aa4ade","name":"","url":"/hello-cookie/clear","method":"get","swaggerDoc":"","x":110,"y":620,"wires":[["f67f61ec.9dac5"]]},{"id":"ba4c3a84.bc4b88","type":"inject","z":"3c681253.aa4ade","name":"make request","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"x":110,"y":860,"wires":[["270c6fcf.f9f1f"]]},{"id":"270c6fcf.f9f1f","type":"http request","z":"3c681253.aa4ade","name":"","method":"GET","ret":"txt","url":"https://www.npmjs.com/package/node-red","tls":"","x":274.5,"y":860,"wires":[["c93c33e1.70d51"]]},{"id":"dd74f8da.f09aa8","type":"debug","z":"3c681253.aa4ade","name":"","active":true,"tosidebar":true,"console":false,"complete":"false","x":470,"y":860,"wires":[]},{"id":"c93c33e1.70d51","type":"html","z":"3c681253.aa4ade","name":"","property":"","outproperty":"","tag":".package__counter___2s5Ci","ret":"text","as":"single","x":330,"y":920,"wires":[["dd74f8da.f09aa8"]]},{"id":"e0a80ccc.cd2cb","type":"http in","z":"3c681253.aa4ade","name":"","url":"/hello-file","method":"get","swaggerDoc":"","x":90,"y":700,"wires":[["a2a4c8cd.ab2538"]]},{"id":"a2a4c8cd.ab2538","type":"file in","z":"3c681253.aa4ade","name":"","filename":"C:\\Users\\INSTAR\\.node-red\\temp\\fort.png","format":"","sendError":true,"x":200,"y":760,"wires":[["f6621622.ef73e8"]]},{"id":"f6621622.ef73e8","type":"change","z":"3c681253.aa4ade","name":"Set Headers","rules":[{"t":"set","p":"headers","pt":"msg","to":"{}","tot":"json"},{"t":"set","p":"headers.content-type","pt":"msg","to":"image/png","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":300,"y":700,"wires":[["8691a477.40c9b8"]]},{"id":"8691a477.40c9b8","type":"http response","z":"3c681253.aa4ade","name":"","x":430,"y":700,"wires":[]},{"id":"996d5241.94f1f","type":"http request","z":"3c681253.aa4ade","name":"","method":"GET","ret":"txt","url":"","x":440,"y":1000,"wires":[["c6ff9c2d.c996"]]},{"id":"bf19aeec.97129","type":"inject","z":"3c681253.aa4ade","name":"rss feed","topic":"","payload":"http://vancouver.craigslist.org/search/sss?format=rss&query=cars","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":"","x":90,"y":1000,"wires":[["9373b92d.22fd28"]]},{"id":"9373b92d.22fd28","type":"change","z":"3c681253.aa4ade","name":"","rules":[{"t":"set","p":"url","pt":"msg","to":"payload","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":280,"y":1000,"wires":[["996d5241.94f1f"]]},{"id":"c6ff9c2d.c996","type":"debug","z":"3c681253.aa4ade","name":"","active":true,"tosidebar":true,"console":false,"complete":"false","x":600,"y":1000,"wires":[]},{"id":"5eb8fc92.744284","type":"http request","z":"3c681253.aa4ade","name":"","method":"GET","ret":"txt","url":"https://jsonplaceholder.typicode.com/posts/{{post}}","tls":"","x":400,"y":1080,"wires":[["fd2534ea.053458"]]},{"id":"fd2534ea.053458","type":"debug","z":"3c681253.aa4ade","name":"","active":true,"tosidebar":true,"console":false,"complete":"payload","x":560,"y":1080,"wires":[]},{"id":"3d52a8bf.6c53b8","type":"inject","z":"3c681253.aa4ade","name":"post 
id","topic":"","payload":"2","payloadType":"str","repeat":"","crontab":"","once":false,"x":100,"y":1080,"wires":[["8b6e96c.6cd0d68"]]},{"id":"8b6e96c.6cd0d68","type":"change","z":"3c681253.aa4ade","name":"","rules":[{"t":"set","p":"post","pt":"msg","to":"payload","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":240,"y":1080,"wires":[["5eb8fc92.744284"]]},{"id":"6f4bf643.a89ff8","type":"http request","z":"3c681253.aa4ade","name":"","method":"GET","ret":"txt","url":"https://wiki.instar.de/Search/Results?q={{{query}}}","tls":"","x":460,"y":1340,"wires":[["7ac7d192.cbd47"]]},{"id":"7ac7d192.cbd47","type":"debug","z":"3c681253.aa4ade","name":"","active":true,"console":"false","complete":"payload","x":620,"y":1340,"wires":[]},{"id":"76316725.0acdd8","type":"inject","z":"3c681253.aa4ade","name":"query parameter","topic":"","payload":"select astronomy.sunset from weather.forecast where woeid in (select woeid from geo.places(1) where text=\"maui, hi\")","payloadType":"str","repeat":"","crontab":"","once":false,"x":110,"y":1340,"wires":[["e3d5aa16.d8e528"]]},{"id":"e3d5aa16.d8e528","type":"change","z":"3c681253.aa4ade","name":"","rules":[{"t":"set","p":"query","pt":"msg","to":"payload","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":290,"y":1340,"wires":[["6f4bf643.a89ff8"]]},{"id":"93e492a4.1094f","type":"http request","z":"3c681253.aa4ade","name":"","method":"GET","ret":"obj","url":"https://jsonplaceholder.typicode.com/posts/{{post}}","tls":"","x":400,"y":1140,"wires":[["937a0b9e.cac178"]]},{"id":"937a0b9e.cac178","type":"debug","z":"3c681253.aa4ade","name":"","active":true,"tosidebar":true,"console":false,"complete":"payload.title","x":580,"y":1140,"wires":[]},{"id":"8fdebd1f.f2fcf","type":"inject","z":"3c681253.aa4ade","name":"post id","topic":"","payload":"2","payloadType":"str","repeat":"","crontab":"","once":false,"x":100,"y":1140,"wires":[["44e33b78.cef614"]]},{"id":"44e33b78.cef614","type":"change","z":"3c681253.aa4ade","name":"","rules":[{"t":"set","p":"post","pt":"msg","to":"payload","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":240,"y":1140,"wires":[["93e492a4.1094f"]]}] ```<file_sep>--- date: "2019-09-18" title: "Creating Magento 2 Cron Jobs" categories: - Magento --- ![Pokhara, Nepal](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> <!-- /TOC --> We can automate tasks using cron jobs in Magento. Let's start by adding a file `./Cron/AddItem.php`: ```php <?php namespace INSTAR\SampleModule\Cron; use INSTAR\SampleModule\Model\ItemFactory; use INSTAR\SampleModule\Model\Config; class AddItem { private $itemFactory; public function __construct(ItemFactory $itemFactory) { $this->itemFactory = $itemFactory; } public function execute() { $this->itemFactory->create() ->setName('Scheduled item') ->setDescription('Created at ' . 
time())
            ->save();
    }
}
```

To execute this job on a schedule we now have to add a file `./etc/crontab.xml`:

```xml
<?xml version="1.0"?>
<config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:module:Magento_Cron:etc/crontab.xsd">
    <group id="default">
        <job name="instarAddItem" instance="INSTAR\SampleModule\Cron\AddItem" method="execute">
            <!-- Every five minutes -->
            <schedule>*/5 * * * *</schedule>
            <!--<config_path>instar/general/cron_expression</config_path>-->
        </job>
    </group>
</config>
```

This configuration sets our job up to be executed every 5 minutes:

| Field | Description | Allowed Value |
| -- | -- | -- |
| MIN | Minute field | 0 to 59, * |
| HOUR | Hour field | 0 to 23, * |
| DOM | Day of Month | 1-31, * |
| MON | Month field | 1-12, * |
| DOW | Day Of Week | 0-6, * |
| CMD | Command | Any command to be executed. |

You can now execute all crontabs with the following Magento command:

```bash
bin/magento cron:run

Ran jobs by schedule.
```

You can check your Magento database to see if the crontab is now set to execute:

```sql
SELECT * FROM cron_schedule;
```

You should find entries for `instarAddItem` inside the table:

```sql
mysql> SELECT * FROM cron_schedule WHERE job_code = 'instarAddItem';
+-------------+---------------+---------+---------------------+-------------+-------------+
| schedule_id | job_code      | status  | scheduled_at        | executed_at | finished_at |
+-------------+---------------+---------+---------------------+-------------+-------------+
|         536 | instarAddItem | pending | 2020-01-24 10:20:00 | NULL        | NULL        |
|         537 | instarAddItem | pending | 2020-01-24 10:25:00 | NULL        | NULL        |
|         538 | instarAddItem | pending | 2020-01-24 10:30:00 | NULL        | NULL        |
|         539 | instarAddItem | pending | 2020-01-24 10:35:00 | NULL        | NULL        |
+-------------+---------------+---------+---------------------+-------------+-------------+
4 rows in set (0.00 sec)
```

In this case the job has not yet been executed - re-run the crontab and check again:

```bash
bin/magento cron:run
```

The first run was now scheduled to execute and ran successfully:

```sql
mysql> SELECT * FROM cron_schedule WHERE job_code = 'instarAddItem';
+-------------+---------------+---------+---------------------+---------------------+---------------------+
| schedule_id | job_code      | status  | scheduled_at        | executed_at         | finished_at         |
+-------------+---------------+---------+---------------------+---------------------+---------------------+
|         536 | instarAddItem | success | 2020-01-24 10:20:00 | 2020-01-24 10:26:16 | 2020-01-24 10:26:16 |
|         537 | instarAddItem | pending | 2020-01-24 10:25:00 | NULL                | NULL                |
|         538 | instarAddItem | pending | 2020-01-24 10:30:00 | NULL                | NULL                |
|         539 | instarAddItem | pending | 2020-01-24 10:35:00 | NULL                | NULL                |
+-------------+---------------+---------+---------------------+---------------------+---------------------+
4 rows in set (0.01 sec)
```

To automate this process create a cronjob or systemd timer that executes `bin/magento cron:run` in the desired time interval.
![Magento 2 Cronjobs](./Magento2_Module_Components_01.png)<file_sep>---
date: "2019-06-12"
title: "Windows Server 2019 - Active Directory Setup"
categories:
  - Windows
---

![Shanghai, China](./photo-f454_gf45g4e3ff.jpg)

<!-- TOC -->

- [Organization Units](#organization-units)
- [User Account Management](#user-account-management)

<!-- /TOC -->

## Organization Units

The main task for __Active Directory__ is to group __Objects__ (users, groups, contacts, computers, printers, shared folders, etc.) with __Attributes__ (name, email address, phone number, etc.) into __Organization Units__ (OU) and to manage access rights for each object.

* Organization Unit
  * Sub-OU
    * Group
      * Objects (e.g. Users)

![Windows Server 2019](./Windows_Server_2019_01.png)

![Windows Server 2019](./Windows_Server_2019_02.png)

## User Account Management

We can now start to map our company structure in units, sub-units and groups inside our Active Directory:

![Windows Server 2019](./Windows_Server_2019_03.png)

Every time you create an OU you have the option to _Protect it from accidental deletion_. If it so happens that you made a mistake and this option was checked, you have to use Powershell to remove that protection before deleting it. In the example below I added an OU instead of a group. To delete it I had to run the following commands:

![Windows Server 2019](./Windows_Server_2019_04.png)

```powershell
Get-ADOrganizationalUnit -Identity 'OU=Cloud,OU=INSTAR_Shenzhen,OU=INSTAR,DC=instar,DC=local' |
Set-ADObject -ProtectedFromAccidentalDeletion:$false -PassThru |
Remove-ADOrganizationalUnit -Confirm:$false
```

Finish the hierarchy by adding groups and users to map structures, e.g. based on offices, projects, etc.

![Windows Server 2019](./Windows_Server_2019_05.png)<file_sep>---
date: "2019-06-11"
title: "Windows Server 2019 - Active Directory Installation"
categories:
  - Windows
---

![Shanghai, China](./photo-f454_gf45g4e3ff.png)

<!-- TOC -->

- [Prerequisites](#prerequisites)
- [Installation](#installation)

<!-- /TOC -->

## Prerequisites

To be able to set up an __Active Directory__ on our domain controller we have to first set up:

* A DNS service
* A static IP address for our server
* A name for our domain controller that can be resolved by the DNS service
* A NetBIOS domain name that has to be prepended to every username on login

Let's start with adding a static IP. This setting can be found under __Network & Internet__. Click on __Change adapter options__ and in the next window on __Properties__. Open the __Internet Protocol Version 4__ settings and assign an IP address from your personal local network that is not currently in use:

![Windows Server 2019](./Windows_Server_2019_01.png)

## Installation

Now we are able to add __Active Directory__ to our domain controller by opening the __Server Manager__ and clicking on __Add roles and features__:

![Windows Server 2019](./Windows_Server_2019_02.png)

Checking __Active Directory Domain Services__ will show you a list of all the features that are required. Click on __Add Features__.
Secondly we need to add the __DNS Server__ in the same way: ![Windows Server 2019](./Windows_Server_2019_03.png) ![Windows Server 2019](./Windows_Server_2019_04.png) Now click __Next__ until you reached the __Confirmation__ tab then click __Install__. Once the Installation process is done head back to the __Server Manager__ and click on the notification button. You should see a notification that you now have to promote your server to __Domain Controller__. ![Windows Server 2019](./Windows_Server_2019_05.png) Now choose a Domain Name for your network: ![Windows Server 2019](./Windows_Server_2019_06.png) Type in a Directory Service Restore Mode (__DSRM__) password: ![Windows Server 2019](./Windows_Server_2019_07.png) Now click __Next__ a couple of times (only change the NetBIOS domain name, if desired): ![Windows Server 2019](./Windows_Server_2019_08.png) Hit __Install__ once your _Prerequisite Check_ gives you a green light: ![Windows Server 2019](./Windows_Server_2019_09.png) Your server should restart automatically once the installation finished and you have to reconnect to your server afterwards: ![Windows Server 2019](./Windows_Server_2019_10.png) Once you are back in verify that Active Directory was installed successfully - you should now be able to find the __Active Directory Administrative Center__ in your start menu: ![Windows Server 2019](./Windows_Server_2019_11.png) Searching for `DNS` inside the start menu should also bring up the __DNS Manager__. A quick ping should show you that the server name is resolved correctly: ![Windows Server 2019](./Windows_Server_2019_12.png)<file_sep>--- date: "2019-03-26" title: "Developing an App for the Homey Smarthome Gateway" categories: - Smarthome - IoT - Javascript - Node --- ![<NAME>, Cambodia](./photo-84ch678fgd_67gfh6df55_7.jpg) ## Setting up the Development Environment A [Homey App](https://apps.developer.athom.com/index.html) is software that runs on [Homey](https://www.athom.com/en/homey/), similar to apps on a smartphone. Homey connects all your devices at home, even different brands and technologies. Control your home from one app. Automate your home with Flow, play your favorite Music and save energy with Insights. Homey is the central hub for your smart home. #### Install Node.js Apps are written in JavaScript, and run on Node.js. To develop an app for Homey we first have to Download Node.js from the [Node.js website](https://nodejs.org/en/) and install it on your computer. #### Install athom-cli Open a command line, and install the [athom-cli program](https://www.npmjs.com/package/athom-cli) by running the following command: ```bash npm install -g athom-cli ``` #### Log-in In the command-line, log in with your [Athom account](https://accounts.athom.com/login): ``` athom login ``` #### Create an app Now create your first app by running: ``` athom app create ``` ![Homey Smarthome Gateway & INSTAR Cameras](./homey_instar_app_01.png) #### Run your project You are now ready to run your first app: ``` athom app run ``` ![Homey Smarthome Gateway & INSTAR Cameras](./homey_instar_app_02.png) Exit your app by pressing `CTRL + C`. When using `athom app run`, the App will be removed when Homey is restarted. Run `athom app run --clean` to run the app and erase all previous settings. 
Run `athom app install` to keep the app running for a longer time without the debugger attached.<file_sep>---
date: "2018-01-02"
title: "Machine Learning with SciKit Learn"
categories:
  - Machine Learning
  - Python
---

![Shenzhen, China](./photo-33796028333_a7fa30ab08_o.jpg)

<!-- TOC depthFrom:2 depthTo:4 -->

- [Prerequisite](#prerequisite)
- [SciKit Introduction](#scikit-introduction)
  - [Data Representation](#data-representation)
  - [Data Preprocessing](#data-preprocessing)
  - [SciKit-Learn API](#scikit-learn-api)
  - [Estimation](#estimation)
  - [Predictor](#predictor)
  - [Transformer](#transformer)
- [Unsupervised Learning Introduction](#unsupervised-learning-introduction)
  - [Clustering Tasks](#clustering-tasks)
- [Unsupervised Learning IRL](#unsupervised-learning-irl)
  - [Clustering](#clustering)
  - [Visualization](#visualization)
  - [k-Means Algorithm](#k-means-algorithm)
    - [Initialization Methods](#initialization-methods)
  - [Mean-Shift Algorithm](#mean-shift-algorithm)
  - [DBSCAN Algorithm](#dbscan-algorithm)
  - [Evaluating Performance](#evaluating-performance)
- [Supervised Learning Introduction](#supervised-learning-introduction)
  - [Classification Tasks](#classification-tasks)
  - [Regression Tasks](#regression-tasks)
- [Supervised Learning IRL](#supervised-learning-irl)
  - [Data Split](#data-split)
  - [Cross Validation](#cross-validation)
  - [Metric Evaluation](#metric-evaluation)
  - [Table of Confusion](#table-of-confusion)
    - [Evaluation of Tasks with binary output labels](#evaluation-of-tasks-with-binary-output-labels)
    - [Evaluation of Regression Tasks](#evaluation-of-regression-tasks)

<!-- /TOC -->

An introduction to building machine learning applications with the SciKit Python library. Learn data preprocessing, implement supervised and unsupervised algorithms and perform error analysis to evaluate their performance.

# Prerequisite

First we need to install the [Anaconda Environment](https://www.anaconda.com/download/) for Windows, macOS or LINUX. This package combines everything we need to get started with Python - from libraries like [SciKit-Learn](https://scikit-learn.org/stable/), Pandas and Matplotlib to Jupyter Notebook, which will help us to execute our Python scripts.

We will begin working with the [Seaborn Package](https://anaconda.org/anaconda/seaborn) dataset that is included in the Anaconda package to become familiar with Python based data analysis. In the later steps, we are going to use publicly available data from the [UCI Archive](https://archive.ics.uci.edu):

* [Wholesale Customer Dataset](http://archive.ics.uci.edu/ml/datasets/Wholesale+customers)
* [Adult Fertility Study](https://archive.ics.uci.edu/ml/datasets/Fertility)
* https://archive.ics.uci.edu/ml/machine-learning-databases/00244/
* https://archive.ics.uci.edu/ml/machine-learning-databases/adult/
* [Bank+Marketing Study](https://archive.ics.uci.edu/ml/datasets/Bank+Marketing)
* https://archive.ics.uci.edu/ml/machine-learning-databases/00222/

# SciKit Introduction

[SciKit-Learn](https://scikit-learn.org/) is an open-source library for building models based on built-in machine learning and statistical algorithms. The library offers both supervised and unsupervised models that we will use to analyze our data. The library is used to:

* Interpret data and train models
* Perform predictions from data sets
* Perform cross validation and performance metric analysis
* Create sample data sets and test algorithms

## Data Representation

To feed data into SciKit it needs to be represented as a table or matrix.
Most data used in machine learning is 2-dimensional - that means it can be represented by a classical Excel sheet with rows and columns:

* Rows represent observations (instances)
* Columns represent characteristics (features)

Datasets often have many features that will be represented in the __Feature Matrix__. In most cases it is only one or two features that will be separated out for the later analysis of the data set - this skimmed-down dataset is called the __Target Matrix__:

__Feature Matrix__

* Contains data from each instance for all features
* The dimensions of the matrix are `[n_i, n_f]`, denoting the number of instances and features.

__Target Matrix__

* Is usually 1-dimensional as it only contains 1 feature for all instances. If more than 1 feature is necessary to describe the model, the dimension increases accordingly.

The Feature Matrix is usually stored in the variable __X__, while the variable __Y__ is used to store the __Target Matrix__. Both matrices can be created by using a __NumPy Array__ or a __Pandas DataFrame__.

In the following example we are going to look at plant statistics from the [Seaborn Package](https://anaconda.org/anaconda/seaborn) included in the [Anaconda Environment](https://www.anaconda.com/download/). Each row in the set represents an iris plant and the columns represent the plant characteristics - the sepal as well as petal length and width:

![Python SciKit-Learn](./python-ml_01.png)

1. We import the __Seaborn__ package into the variable __sns__.
2. We can now extract the __Iris Dataset__ from it and store the data inside the variable __iris__.
3. We then drop the __Species Feature__ from the dataset and store it inside the variable __X__. Thus the __Feature Matrix__ consists of all the features __BUT__ the target for all instances - making it a 2-dimensional dataset.
4. Now we can have a look at the top 10 rows of our data to get an idea what it looks like.
5. The __Shape__ command shows us that we have a __Feature Matrix__ that consists of _150 rows_ (instances) and _4 columns_ (features).
6. We will now build the __Target Matrix__ based on the __Species Feature__ and store it in the variable __Y__.
7. And we see that the first 10 species all belong to the Setosa family. The __Target Matrix__ is now reduced from the earlier 2 to 1 dimension - only consisting of the target feature for all instances.
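The same steps in copy-and-paste form - a minimal sketch of what the screenshot above shows:

```python
import seaborn as sns

# load the Iris dataset into a Pandas dataframe
iris = sns.load_dataset('iris')

# feature matrix: all features BUT the target
X = iris.drop('species', axis=1)
print(X.head(10))
print(X.shape)  # (150, 4)

# target matrix: only the target feature for all instances
Y = iris['species']
print(Y.head(10))
print(Y.shape)  # (150,)
```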
## Data Preprocessing

IRL datasets are usually not analysis-friendly (__messy data__), as they contain _noisy data_, _missing entries_ and _outliers_ that need to be dealt with before feeding them to our algorithm.

__Dealing with Missing Values__:

* Eliminate the data
* Or replace it:
  * __Mean Imputation__ - filling out missing fields using the mean or median value (_may introduce bias to our model_)
  * __Regression Imputation__ - use predictions to fill out the values (_may end up overfitting the model_)
* String-based values should be replaced with a class (_like "uncategorized"_)

__Dealing with Outliers__:

Outliers represent values that are far from the mean (often set to __3-6 standard deviations__ when the data set follows a Gaussian distribution). If the values follow a Gaussian distribution, _global outliers_ are located at the tails of the bell curve, while _local outliers_ are inside the distribution but far off the group of data points they are associated with. E.g. a vehicle that can drive 500 MPH is a global outlier in a car statistic. A truck that only has 40 hp, on the other hand, is a local outlier in the group labeled as trucks, but is still well inside the curve of the vehicle dataset.

* Delete outliers.
* If all instances above a certain value of a feature behave the same way, you can __define a top__ for that feature and apply it to outliers.
* Replace the value (__Mean__ or __Regression__).
* String values (e.g. misspelled features) can be eliminated or corrected (when possible).

In the following example we will again use the [Seaborn Package](https://anaconda.org/anaconda/seaborn) included in the [Anaconda Environment](https://www.anaconda.com/download/) and take a look at the age of the passengers of the __Titanic__:

![Python SciKit-Learn](./python-ml_02.png)

1. We import the __Seaborn__ package and store the __Titanic Dataset__ inside the variable _titanic_.
2. We then load the __Age Feature__ from the dataset and store it in the variable _age_.
3. Displaying the first 10 rows shows us that we already have a missing entry (__NaN__, _not a number_). The _shape_ command shows us that there are 891 rows in total.
4. We can check how many of those 891 have a value of NaN with the _isnull_ function. Summing them up shows us that we have 177 passengers of the Titanic whose age we do not know.
5. We can now use the __Mean Method__ to replace all of those with the mean age. For this we call the mean method on the values in _age_, round them up and store them inside the variable _mean_. Printing out the value, we can see that the mean age was _30_.
6. We can now use the __fillna Method__ to fill out every value that is NaN with the mean value. Taking a look at the first 10 rows again shows that the missing value has now been filled with the mean value 30.
7. To display our distribution - to be able to spot __Outliers__ - we import _PyPlot_ from the _MatPlotLib_ library as _plt_. We use the plt method to build a histogram of the values stored inside the _age_ variable and display the output with the show function.
8. To spot outliers we will set the __Minimum Value__ that we will accept for our model as the mean value for age MINUS _3-times the standard deviation_ of the age dataset. This turns out to be a negative value - given that this does not make any sense in our particular dataset, we can ignore outliers on the minimum side of the distribution.
9. To spot outliers we will set the __Maximum Value__ that we will accept for our model as the mean value for age PLUS _3-times the standard deviation_ of the age dataset. Everyone who is listed as ~ 69 or above can be treated as an outlier.
10. We can thus define our _outlier_ variable as every value inside _age_ that is greater than _max\_val_. Counting the outliers shows us that we have _7_ inside our dataset.
11. We decide to remove all outliers from our dataset by only accepting values into our _age_ variable that are smaller or equal to _max\_val_. The shape command shows us that out of the initial 891 passengers we have now eliminated 7 from our analysis - _884_.
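Again the same preprocessing as a small code sketch that can be copied instead of the screenshot:

```python
import seaborn as sns
import matplotlib.pyplot as plt

titanic = sns.load_dataset('titanic')
age = titanic['age']

# 177 of the 891 entries are NaN
print(age.isnull().sum())

# mean imputation for the missing entries
mean = round(age.mean())
age = age.fillna(mean)

# inspect the distribution for outliers
plt.hist(age)
plt.show()

# everything above mean + 3 standard deviations is treated as an outlier
max_val = age.mean() + (3 * age.std())
age = age[age <= max_val]
print(age.shape)  # 884 remaining passengers
```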
## SciKit-Learn API

SciKit-Learn offers us a unified syntax to make machine learning more accessible. The SciKit-Learn API is divided into 3 interfaces:

* __Estimator Interface__: Used to create models and integrate your data.
* __Predictor Interface__: Used to make predictions based on the models created.
* __Transformer Interface__: Used to transform data files.

## Estimation

This is the interface that you use to initialize a model and apply a fit() method to your data. Your data is received as two variables - __X_train__ is the feature matrix and __Y_train__ the target matrix for your model. _Unsupervised Models_ only use the first of those two arguments, a _Supervised Model_ takes both.

```python
from sklearn.naive_bayes import GaussianNB
model = GaussianNB()
model.fit(X_train, Y_train)
```

In the example of a supervised model, we imported the model we want to use, stored it in the variable `model` and then applied it to our two arguments using the fit() method.

The Estimator can perform 3 more tasks for us:

* __Feature Extraction__: A transformation of the input data into numerical features.
* __Feature Selection__: Selecting the features in your data that contribute most to the prediction output.
* __Dimensionality Reduction__: Converting your data into a lower dimension.

## Predictor

The Predictor interface performs predictions based on the model you trained. In supervised models it takes a new dataset called __X_test__ and feeds it to your trained model. The implementation looks as follows:

```python
Y_pred = model.predict(X_test)
```

This allows us to quantify the __Confidence__ or __Performance__ of a model by comparing how far the predicted labels in _Y\_pred_ differ from the actual labels in _Y\_test_.

## Transformer

The Transformer interface gives us a transform() method to preprocess our input data. Using the same transformation for the data that we use to train our model as well as for the data we later use the model on to perform predictions ensures that both datasets are comparable in their distribution. An example is the __Normalization__ of a dataset:

```python
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
```

Here we imported the transformer and stored it inside the variable `scaler`. Our dataset is then fit to the imported method and the transformation performed.
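Putting the three interfaces together - a minimal sketch of a complete supervised workflow on the Iris data (the choice of GaussianNB is just an example):

```python
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.naive_bayes import GaussianNB

iris = sns.load_dataset('iris')
X = iris.drop('species', axis=1)
Y = iris['species']
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2)

# Transformer: scale training and test data with the same parameters
scaler = StandardScaler()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
X_test = scaler.transform(X_test)

# Estimator: fit the model to the training data
model = GaussianNB()
model.fit(X_train, Y_train)

# Predictor: predict labels for the unseen test data
Y_pred = model.predict(X_test)
```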
# Unsupervised Learning Introduction

In unsupervised learning the model is fitted to the data without any relationship to an output label. It can be used to reveal clusters of similarities inside unlabeled data.

## Clustering Tasks

Finding clusters in unlabeled data involves grouping instances that are similar to each other, while differing visibly from instances in other groups. The most popular clustering algorithms are:

1. __k-means__: Separating instances into _n_ clusters of equal variance by minimizing the sum of the squared distances between 2 points - _Centroid-based Model_.
2. __Mean-shift clustering__: Using centroids to create a cluster, where every instance is a candidate to become a centroid (mean of the points in that cluster) - _Centroid-based Model_.
3. __DBSCAN__: _Density-based spatial clustering of applications with noise_ separates areas with high densities of points as clusters from areas with a low density - _Density-based Model_.
4. __Gaussian__: The belonging to a cluster is expressed as the deviation from a distribution, as used in an expectation maximization model - _Distribution-based Model_.
5. __Hierarchical__: Similarity as proximity inside the data space - _Connectivity-based Model_.

# Unsupervised Learning IRL

We want to use unsupervised models to analyze data sets from real-world applications. The objectives are:

* Understanding different clustering techniques
* Using Panda Dataframes
* Data visualizations with MatPlotLib
* Working with algorithms like k-means, mean-shift and DBSCAN
* Using performance metrics to decide which one to use

## Clustering

Clustering is a type of unsupervised machine-learning technique to find patterns in unlabeled input data and divide data points into _n_ clusters based on similarity (and difference to data points in other clusters). The assignment to a cluster can either be __hard__ (absolute designation) or __soft__ (probability of belonging to a cluster).

Real-world applications are:

* Search Engine Results
* Recommendation Programs
* Image Recognition
* Market Segmentation for targeted Marketing

We are going to use the [Wholesale Customer Dataset](#prerequisite) from the UC Irvine Machine Learning Repository to explore those techniques.

## Visualization

Visual representations of datasets help us to understand relationships in the data as well as the results and performance of a model. To work with visualizations we can load our data into a __Dataframe__ using __Pandas__. Panda dataframes manage stored data in a 2-dimensional, size-mutable matrix with labelled axes; the underlying data is often stored in _*.csv_ files - like the example of the [Wholesale Customer Dataset](#prerequisite). Data presented this way can be easily loaded into a frame using the Pandas function __read_csv()__ (there are alternatives for data based in Excel sheets or SQL databases - _read_excel()_ and _read_sql()_):

```python
import pandas as pd

file_path = "./wholesale-customers-data.csv"
data = pd.read_csv(file_path)
```

To create a visual representation of such a dataframe we can use the Python library __Matplotlib__ and create:

* Histograms (_plt.hist()_)
* Scatter Plots (_plt.scatter()_)
* Bar Charts (_plt.bar()_)
* Pie Charts (_plt.pie()_)

![Python SciKit-Learn](./python-ml_03.png)

In this example we used the __Numpy__ random number generator and the _make\_circles()_ method to generate a dataset and created a scatter plot and a histogram from it using Matplotlib.

## k-Means Algorithm

The k-means algorithm is used on unlabelled data to divide it into _K_ clustered subgroups based on similarity. The __Centroid__ of each cluster represents a collection of features that can be used to define the members of the cluster.

* __Initialization__: Based on the number of clusters set by you, centroids are generated by initial estimates or at random.
* __Assignment__: All data points are assigned to the nearest cluster.
* __Update__: Centroids are recalculated by computing the mean of all data points inside the cluster.

The algorithm runs until:

* It reaches the number of preset iterations.
* Data points no longer change from one cluster to another.
* The Euclidean distance between all data points and their assigned centroid is minimized.

### Initialization Methods

The __k-means++__ (default) method chooses the initial centroids randomly, maximizing the distance to the centroids that were already placed. The number of centroids has to be chosen by you to minimize the average distance between the data points and their cluster's centroid. With a small number of centroids this average within-cluster distance is high. The distance reduces the more initial centroids are added to the calculation - until a point where it stagnates. Adding more centroids after that point will falsify the results by over-representation of features by data points inside the cluster. By plotting the distances within the clusters against the number of clusters, the ideal number of centroids is given by the breaking point, where the rate decreases suddenly.
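In code this "elbow" search comes down to a loop like the following sketch, where `X` is assumed to hold the (scaled) feature matrix of the wholesale customer data:

```python
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans

# X is assumed to be the scaled wholesale customer feature matrix
ideal_k = []
for i in range(1, 16):
    est_kmeans = KMeans(n_clusters=i)
    est_kmeans.fit(X)
    # inertia_: sum of squared distances of points to their cluster centre
    ideal_k.append([i, est_kmeans.inertia_])

ideal_k = np.array(ideal_k)
plt.plot(ideal_k[:, 0], ideal_k[:, 1])
plt.show()
```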
In the example below we can see that the distances between points inside a cluster are very high when we start with 1 centroid and very low with 15 centroids. k can be set to 5, as it is the point where the similarity between data points inside a cluster no longer increases significantly when we keep sub-dividing clusters:

![Python SciKit-Learn](./python-ml_04.png)

Here we imported KMeans from the _sklearn.cluster_ package and initialized _ideal\_k_, which is then filled by a for-loop calculating the inertia (the average distance between data points within a cluster) as a function of the number of clusters inside our dataset. With every loop the number of clusters _i_ is increased by 1 until we reach 15. We can then convert the array to a Numpy array and plot the data. We can see a sharp decrease in rate around 4-6, telling us that _k=5_ would be a good place to initialize our analysis.

We can now initialize our analysis by setting the number of centroids to 5, fit the dataset with k-means and predict what cluster a data point belongs to. To visualize the result we want to set the colour for each data point inside a cluster to `c=pred_kmeans`. The results below show the plot for a number of 5, 4 and 6 clusters inside the dataset:

![Python SciKit-Learn](./python-ml_05.png)

## Mean-Shift Algorithm

While the __k-means__ algorithm assigns a data point to a cluster as a function of the distance to a centroid, the __mean-shift__ algorithm evaluates the density of data points in the data space to define clusters. For this the mean-shift algorithm represents the data points as a __density distribution__ (KDE - _Kernel Density Estimation_).

![Python SciKit-Learn](./python-ml_06.png)

As we can see we are getting the same result here as with the k-means algorithm - we just did not have to define the number of centroids. __Note__ that you can influence the number of clusters found by assigning different __bandwidths__ - the example above varies the bandwidth between 0.4-0.6, as the data set was normalized between 0-1 (when you are working with data with values between 0-2000, you should adjust the bandwidth to ~ 100-500 to get sensible results).

The bandwidth represents the size of a window that is drawn around each data point. With every iteration the mean-shift algorithm calculates the mean of each window, based on the data points it contains, and shifts each window towards the mean. This process is repeated until every point is shifted to the nearest peak in the density distribution. The number of shifts a data point has to undergo depends on the distance to the peak and the window size (bandwidth) that is used. Every data point in a peak of the distribution belongs to that cluster.
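A sketch of how this might look in code - the bandwidth of 0.5 assumes, as above, that the data was normalized to the range 0-1:

```python
import matplotlib.pyplot as plt
from sklearn.cluster import MeanShift

# a bandwidth of ~0.5 assumes a data set that was normalized to 0-1
est_meanshift = MeanShift(bandwidth=0.5)
est_meanshift.fit(X)
pred_meanshift = est_meanshift.predict(X)

# colour each data point by the cluster it was assigned to
plt.scatter(X.iloc[:, 0], X.iloc[:, 1], c=pred_meanshift)
plt.show()
```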
## DBSCAN Algorithm

The _density-based spatial clustering of applications with noise_ (__DBSCAN__) algorithm groups points that are close to each other and marks points that have no close neighbors as __outliers__. The algorithm requires two main parameters:

* __Epsilon__ (_eps_): the maximum distance within which the algorithm searches for neighbors. Epsilon, just like the bandwidth in case of the mean-shift algorithm, has to be adapted to the value range of your data points.
* __Minimum Number of Observations__ (_min\_samples_): the number of data points required to form a high density area. Note that in SciKit-Learn this value is set to 5 by default and it is optional for you to change it if necessary.

Given these requirements each data point can be classified as:

1. __Core Point__: when it has at least the minimum number of data points within its eps radius.
2. __Border Point__: when it is within the eps radius of a core point but does not have the required minimum number of data points within its own radius.
3. __Noise Point__: when none of the above is true.

![Python SciKit-Learn](./python-ml_07.png)

## Evaluating Performance

Once we have applied an algorithm to form clusters inside our data set, we need a way to evaluate the performance of those clusters - was the algorithm with the parameters we chose the best option for the given task? In case of a __Supervised Algorithm__ this can be done by comparing the predictions we get with the true values we know. In case of an __Unsupervised Model__ this is not possible. For clustering algorithms we have the option to measure the similarity of data points within a cluster to estimate the performance of the chosen algorithm.

SciKit-Learn offers two methods to evaluate the performance of unsupervised clustering algorithms by measuring how well-defined the cluster edges are (instead of measuring the dispersion within the cluster). We have to keep in mind that those methods don't take the size of each cluster into account.

1. __Silhouette Coefficient Score__: Calculates for each point the mean distance to the other points inside its cluster (a) and the mean distance to the points of its nearest other cluster (b). The coefficient is calculated by `s = (b-a)/max(a,b)` and results in a score between -1 and 1 - the lower the value, the worse the performance of the cluster. A special case is a value of 0, where clusters start to overlap. This scoring system __does not work__ with density based algorithms like DBSCAN.
2. __Calinski-Harabasz Index__: Calculates the variance of each cluster as the mean square error of each point to the centroid of that cluster. This is then compared to the overall inter-cluster variance. A higher value describes a better definition/separation of each cluster. This scoring system __does not work__ with density based algorithms like DBSCAN.

SciKit-Learn does not offer a scoring system that works reliably for density-based algorithms.

![Python SciKit-Learn](./python-ml_08.png)

As we see above, we are getting comparable scores for the k-means and mean-shift algorithms with the __silhouette\_score__ - k-means (_0.360_) works slightly better than mean-shift (_0.344_). The DBSCAN algorithm performs poorly in comparison (_0.089_). But the scoring system might fail us here. The scores we get from the __calinski\_harabaz\_score__ are in line with this observation (kmeans_score=1377.88, meanshift_score=1304.07, dbscan_score=0.16).
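A sketch of how these scores are computed, assuming `pred_kmeans`, `pred_meanshift` and `pred_dbscan` hold the cluster labels predicted by the three algorithms (in newer SciKit-Learn versions the second function is spelled `calinski_harabasz_score`):

```python
from sklearn.metrics import silhouette_score, calinski_harabaz_score

# silhouette: between -1 and 1, higher is better
kmeans_score = silhouette_score(X, pred_kmeans, metric='euclidean')
meanshift_score = silhouette_score(X, pred_meanshift, metric='euclidean')
dbscan_score = silhouette_score(X, pred_dbscan, metric='euclidean')

# Calinski-Harabasz: higher is better
kmeans_ch = calinski_harabaz_score(X, pred_kmeans)
meanshift_ch = calinski_harabaz_score(X, pred_meanshift)
dbscan_ch = calinski_harabaz_score(X, pred_dbscan)
```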
# Supervised Learning Introduction

A supervised model explores the relation between a set of features and a target value (label / class) - e.g. a person's demographics and their ability to repay loans.

## Classification Tasks

Classifications are used to build models with discrete categories as labels. Such tasks output a prediction as a probability of an instance belonging to a label. Common classification algorithms are:

1. __Decision Trees__: A tree-like structure that simulates the decision process based on previous decisions.
2. __Naive Bayes Classifier__: Relies on probabilistic equations which assume independence among features, with the ability to consider several attributes.
3. __Artificial Neural Networks__: Replicate the structure of a biological neural network to perform pattern recognition tasks.

## Regression Tasks

Used for data with continuous quantities as labels, where the value is represented by a quantity and not a set of possible outcomes - e.g. a linear regression.

# Supervised Learning IRL

Finding an algorithm for a task is usually a process of trial & error, using testing to validate the resulting model and comparing the result with results from other algorithms.

## Data Split

To avoid introducing bias into a supervised model, the data set is partitioned into 3 sets:

1. __Training Set__: This set is used to train the models with different algorithms. It consists of input data paired with an outcome / label. It is _not used_ for the performance evaluation of each model later on.
2. __Validation Set__: This set is used to perform unbiased evaluations of each model and fine-tune parameters to achieve the best performance. The validation set therefore influences the training indirectly. In the end the model that performs best is chosen to be used on the next set of data.
3. __Testing Set__: This set consists of data that had no influence over the trained model and is used for a final, unbiased performance evaluation for future predictions by the model.

The __Split Ratio__ for those three sets depends on the size of our data set. For sets that contain _100 - 100,000 instances_ a split ratio for training, validating and testing of _60/20/20%_ is used. For large data sets with _more than a million instances_ a split ratio of _98/1/1%_ can be sufficient to determine the performance of a model.

The __Algorithm__ also has an effect on the split ratio you have to use. E.g. if you are working with a model that has a lot of parameters, you might want to work with a larger validation set, while the testing set can remain small.

In the following we are using the Iris dataset from sklearn and load it into a Panda data frame. Printing the shape of the data frame, we can see that it consists of 150 rows and 4 columns of data and 1 column for the target value with 150 rows.

![Python SciKit-Learn](./python-ml_09.png)

To split up our data into a training (_\_train_), validation (_\_dev_) and testing (_\_test_) set, we can use the __train\_test\_split__ method from sklearn. We are going to use a `test_size` of `0.2` - meaning that we are training our model with 80% of the data - which corresponds to 120 rows, while 30 rows are allocated for testing our model later on. In a second step we need to subtract another portion of the data to form our validation group - which removes another 30 rows from our training data.

## Cross Validation

To further reduce bias in our models we can partition our data into __K__ groups and re-sample data from those groups as we train and validate our model - this is called __K-fold Cross Validation__, where K stands for the number of groups (folds) the data is split into. This process can replace the testing step discussed above or can be done in addition:

* __three-split approach__: A testing set is subtracted from the data. The rest of the data is used in the cross-validation process.
* __two-split approach__: The complete data set is used for cross-validation.

In practice this process contains 4 steps that are repeated __K times__:

1. First the data is shuffled.
2. The data is split according to the process described above.
3. The model is trained and the selected validation group is used to fine-tune parameters.
4. The results from the training are stored and the process begins again with step 1.

You end up with K data sets and a model that was trained K times - the results refined with every iteration.
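In code the three-split plus K-fold combination looks roughly like this - a sketch using the Iris data again (the screenshots below show the same steps):

```python
from sklearn.model_selection import train_test_split, KFold

# three-split: first set aside 20% of the data for final testing
X_rest, X_test, Y_rest, Y_test = train_test_split(X, Y, test_size=0.2)

# split the remaining data into 10 folds for cross-validation
kf = KFold(n_splits=10, shuffle=True)
for train_index, dev_index in kf.split(X_rest):
    X_train, X_dev = X_rest.iloc[train_index], X_rest.iloc[dev_index]
    Y_train, Y_dev = Y_rest.iloc[train_index], Y_rest.iloc[dev_index]
    # train on X_train / Y_train, fine-tune on X_dev / Y_dev
```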
In the following we are going to use the same data set as before and use the __three-split-approach__ by first removing 20% of our data to form a testing set. We then import the __KFold Method__ from Sklearn to split our data into 10 subgroups:

![Python SciKit-Learn](./python-ml_10.png)

We now have a data set with 10 subgroups that can be referenced by an index number. We can use a _for-loop_ to iterate over the data.

## Metric Evaluation

The accuracy of a model can be calculated as a percentage by comparing its predicted values with real (unseen, as not used in the training of the model) measurements. This can be visualized in the so-called __Confusion Matrix__ - a 2-dimensional matrix that contains the predictions as columns and the occurrence or non-occurrence of events as rows.

In the field of machine learning, and specifically the problem of statistical classification, a confusion matrix, also known as an error matrix, is a specific table layout that allows visualization of the performance of an algorithm, typically a supervised learning one. Each row of the matrix represents the instances in a predicted class while each column represents the instances in an actual class (or vice versa).

If a classification system has been trained to distinguish between apples, oranges and pineapples, a confusion matrix will summarize the results of testing the algorithm for further inspection. Assuming a sample of 27 fruits (8 apples, 6 oranges and 13 pineapples), the resulting confusion matrix could look like the table below:

| Prediction / Actual | Apple (A) | Orange (A) | Pineapple (A) |
| ------------------- |:---------:|:----------:|:-------------:|
| Apple (P)           | **5**     | 2          | 0             |
| Orange (P)          | 3         | **3**      | 2             |
| Pineapple (P)       | 0         | 1          | **11**        |

In this confusion matrix, of the 8 actual apples, the system predicted that three were oranges, and of the six oranges, it predicted that one was a pineapple and two were apples. We can see from the matrix that the system in question has trouble distinguishing between apples and oranges, but can make the distinction between pineapples and the other types of fruit pretty well. All correct predictions are located on the diagonal of the table (highlighted in bold), so it is easy to visually inspect the table for prediction errors, as they will be represented by values outside the diagonal.

## Table of Confusion

In predictive analytics, a table of confusion (sometimes also called a confusion matrix) is a table with two rows and two columns that reports the number of false positives, false negatives, true positives, and true negatives. This allows more detailed analysis than the mere proportion of correct classifications (accuracy). Accuracy is not a reliable metric for the real performance of a classifier, because it will yield misleading results if the data set is unbalanced (that is, when the numbers of observations in different classes vary greatly). For example, if there were 95 apples and only 5 oranges in the data, a particular classifier might classify all the observations as apples. The overall accuracy would be 95%, but in more detail the classifier would have a 100% recognition rate (sensitivity) for the apple class but a 0% recognition rate for the orange class.

Assuming the confusion matrix above, its corresponding table of confusion, for the apple class, would be:

| Prediction / Actual | Apple (A)         | Not-Apple (A)     |
| ------------------- | ----------------- | ----------------- |
| Apple (P)           | 5 True Positives  | 2 False Positives |
| Not-Apple (P)       | 3 False Negatives | 17 True Negatives |
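SciKit-Learn can compute such a matrix directly - a sketch, assuming `Y_test` holds the actual and `Y_pred` the predicted labels (the fruit names are, of course, only an illustration):

```python
from sklearn.metrics import confusion_matrix

# rows/columns are ordered by the labels argument
print(confusion_matrix(Y_test, Y_pred, labels=['apple', 'orange', 'pineapple']))
```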
Assuming the confusion matrix above, its corresponding table of confusion, for the apple class, would be:

| Prediction / Actual | Apple (A)         | Not-Apple (A)     |
| ------------------- | ----------------- | ----------------- |
| Apple (P)           | 5 True Positives  | 2 False Positives |
| Not-Apple (P)       | 3 False Negatives | 17 True Negatives |

### Evaluation of Tasks with binary output labels

The performance of the model can be calculated based on the number of predictions that turned out to be true. The performance is given per class (e.g. our model has a high accuracy predicting pineapples, but is a bit confused when it comes to distinguishing between apples and oranges). The performance table for a pineapple gives us the accuracy of the model's prediction capabilities to recognize pineapples as follows:

| Actual / Prediction | Pineapple (P) | Any other fruit (P) | Sum | Accuracy |
| -- | -- | -- | -- | -- |
| Pineapple (A) | 976 | 34 | 1000 | 97.6 % |
| Any other fruit (A) | 157 | 843 | 1000 | 84.3 % |

When the model saw a pineapple, out of 1000 instances it correctly predicted the type of fruit 976 times - giving us an accuracy of 97.6 %. But when seeing the image of an orange or apple it still concluded that it was a pineapple 157 times out of 1000 - resulting in an accuracy of 84.3 %.

* __Accuracy Metric__ : To calculate the accuracy of the model over all instances, the sum of all __True Positives__ and __True Negatives__ is divided by the total number of instances: _Accuracy = (TP + TN) / m_
* __Precision Metric__ : The precision is the share of positive predictions that are actually positive: _Precision = TP / (TP + FP)_ e.g. 976 / (976 + 157) = 86.1%
* __Recall Metric__ : The recall is the share of actual positives that were correctly predicted as positive: _Recall = TP / (TP + FN)_ e.g. 976 / (976 + 34) = 96.6%

![Python SciKit-Learn](./python-ml_11.png)

### Evaluation of Regression Tasks

Regression tasks have continuous outputs without a fixed number of output labels - here we cannot use any of the metrics above to evaluate the accuracy or precision of the predictions of our model.

* __Mean Absolute Error__ : The MAE is the average distance between the predicted result and the actual value, without taking into account the direction of the error: _MAE = 1/m * ∑ (over all m instances) |y(actual) - y(predicted)|_
* __Root Mean Square Error__ : _RMSE = √( 1/m * ∑ (over all m instances) (y(actual) - y(predicted))² )_

In both cases the ideal model (prediction = actual value) would result in an error of `0`. In real applications you would try different models on your data and compare their prediction errors - the one with the lowest error value wins.

![Python SciKit-Learn](./python-ml_12.png)<file_sep>---
date: "2019-01-12"
title: "Centos Network Configuration"
categories:
  - LINUX
---

![Battambang, Cambodia](./photo-19196703263_69f9f0df5f_o.jpg)

<!-- TOC -->

- [Change DNS Server Manually](#change-dns-server-manually)
- [DNS Settings using the NetworkManager](#dns-settings-using-the-networkmanager)
- [Default gateway on CentOS](#default-gateway-on-centos)

<!-- /TOC -->

## Change DNS Server Manually

Check the current DNS server:

```bash
cat /etc/resolv.conf
```

You can change the nameserver IP address by editing this file.
__Note__ that your edit is going to be overwritten by the __NetworkManager__ again once you restart your server:

```bash
nano /etc/resolv.conf
```

Edit/update the nameserver entries as follows (you can set a maximum of 3 nameserver IP addresses):

```yaml
# Generated by NetworkManager
search fritz.box
nameserver 192.168.2.1
nameserver 192.168.2.5
```

## DNS Settings using the NetworkManager

Run the following nmcli command to view your network interfaces:

```bash
[root@CentOS8 ~]# nmcli connection
NAME     UUID                                  TYPE      DEVICE
docker0  09eef622-55b4-4d6e-9ee1-a05c1e8d5e69  bridge    docker0
enp2s0   280ed14d-7c8b-4586-853d-420df9f65412  ethernet  enp2s0
```

To set the DNS IP addresses to `192.168.2.1` and `192.168.2.5`, run:

```bash
nmcli con mod {interfaceNameHere} ipv4.dns "192.168.2.1 192.168.2.5"
```

For the case above this would be `nmcli con mod enp2s0 ipv4.dns "192.168.2.1 192.168.2.5"`

Reload the new DNS settings by restarting the NetworkManager:

```bash
systemctl restart NetworkManager.service
```

## Default gateway on CentOS

On CentOS you can check the routing table with:

```bash
[root@CentOS8 ~]# route -n
Kernel IP routing table
Destination     Gateway         Genmask         Flags Metric Ref    Use Iface
0.0.0.0         192.168.2.5     0.0.0.0         UG    100    0        0 enp2s0
172.17.0.0      0.0.0.0         255.255.0.0     U     0      0        0 docker0
192.168.2.0     0.0.0.0         255.255.255.0   U     100    0        0 enp2s0
```

The first line of the table (destination `0.0.0.0`, flags `UG`) indicates the default gateway of the machine. In this case the default gateway for the __enp2s0__ interface is `192.168.2.5`. Or alternatively use:

```bash
[root@CentOS8 ~]# ip route show
default via 192.168.2.5 dev enp2s0 proto static metric 100
172.17.0.0/16 dev docker0 proto kernel scope link src 172.17.0.1
192.168.2.0/24 dev enp2s0 proto kernel scope link src 192.168.2.111 metric 100
```

You can change the default gateway temporarily using the route command (the change will revert on your next reboot):

```bash
route del default gw <default_gateway_ip>
route add default gw <default_gateway_ip>
```

Or you can use the following command:

```bash
ip route replace default via <default_gateway_ip> dev enp2s0
```

To change the default gateway permanently, update `/etc/sysconfig/network-scripts/ifcfg-{interfaceNameHere}` accordingly:

```bash
[root@CentOS8 ~]# nano /etc/sysconfig/network-scripts/ifcfg-enp2s0

GATEWAY=192.168.2.5
```<file_sep>---
date: "2018-01-15"
title: "react-transition-group"
categories:
  - Javascript
  - React
---

import GifContainer from "../../src/components/ImageContainer";

![<NAME>, Cambodia](./photo-11628005984_73bc506457_o.png)

## Introduction to animated transitions with React Router

This guide follows the official [React Router documentation](https://reacttraining.com/react-router/web/example/animated-transitions) and employs the [react-transition-group](https://reactcommunity.org/react-transition-group/)

<!-- TOC -->

- [Introduction to animated transitions with React Router](#introduction-to-animated-transitions-with-react-router)
- [create-react-app](#create-react-app)
- [React Router](#react-router)
- [Page Components & URL parameters](#page-components--url-parameters)
- [Using Switch to catch 404's](#using-switch-to-catch-404s)
- [Adding Transitions to Routes](#adding-transitions-to-routes)

<!-- /TOC -->

### create-react-app

Let's get started with a React skeleton app.
Navigate to a sensible directory and type the following into your Terminal (you need to have [Node.js](https://nodejs.org) version 6+ with npm version 5.2+ installed globally):

```bash
npx create-react-app react-transition-group-demo
cd react-transition-group-demo
```

As we already stated, we are going to use React Router and the React Transition Group for our app. Let's install those as well:

```bash
npm install react-router-dom react-transition-group
```

Now open the app root inside your code editor, navigate to ./src/app.js and delete everything.

### React Router

First we need to import React and React Router:

```jsx
import React, { Component } from 'react'
import {
  BrowserRouter as Router,
  Link,
  Route, // for later
  Redirect, // for later
  Switch, // for later
} from 'react-router-dom'
```

Build a Nav component using the __router__:

```jsx
class App extends Component {
  render() {
    return (
      <Router>
        <div style={styles.fill}>
          <ul style={styles.nav}>
            <NavLink to="/hsl/355/87/53">Crimson</NavLink>
            <NavLink to="/hsl/210/5/15">Darkslategray</NavLink>
            <NavLink to="/rgb/28/221/105">Springgreen</NavLink>
            <NavLink to="/rgb/43/29/201">Mediumblue</NavLink>
          </ul>
        </div>
      </Router>
    )
  }
}

const NavLink = (props) => (
  <li style={styles.navItem}>
    <Link {...props} style={{ color: 'inherit' }} />
  </li>
)

export default App
```

And make it look pretty with some __CSS-in-JS__:

```jsx
let styles = {}

styles.fill = {
  position: 'absolute',
  left: 0,
  right: 0,
  top: 0,
  bottom: 0
}

styles.nav = {
  padding: 0,
  margin: 0,
  position: 'absolute',
  top: 0,
  height: '40px',
  width: '100%',
  display: 'flex'
}

styles.navItem = {
  textAlign: 'center',
  flex: 1,
  listStyleType: 'none',
  padding: '10px'
}
```

You can now start the app and open it in your browser on _http://localhost:3000_

```bash
npm start
```

![react-transition-group demo app](./rtg_01.png)

We now have 4 links on top of the page that lead to an HSL and an RGB component (that we still have to build). The routes carry __URL parameters__ in the form of HSL or RGB values that we will use inside those components to render the background in the specific colour defined by those values - `/rgb/:r/:g/:b` or `/hsl/:h/:s/:l`.

### Page Components & URL parameters

Let's build those 2 components by adding them to the ./src/app.js file:

__RGB__

```jsx
...

const RGB = ({ match }) => {
  const { params } = match

  return (
    <div style={{
      ...styles.rgb,
      background: `rgb(${params.r}, ${params.g}, ${params.b})`
    }}>rgb({params.r}, {params.g}, {params.b})</div>
  )
}

...

styles.rgb = {
  ...styles.fill,
  color: 'white',
  paddingTop: '20px',
  fontSize: '30px'
}
```

__HSL__

```jsx
...

const HSL = ({ match }) => {
  const { params } = match

  return (
    <div style={{
      ...styles.hsl,
      background: `hsl(${params.h}, ${params.s}%, ${params.l}%)`
    }}>hsl({params.h}, {params.s}%, {params.l}%)</div>
  )
}

...

styles.hsl = {
  ...styles.fill,
  color: 'white',
  paddingTop: '20px',
  fontSize: '30px'
}
```

Both components receive a __match__ object from which we can grab our URL parameters. We want them to use those parameters to change the background of the component and to show us the `hsl({params.h}, {params.s}%, {params.l}%)` values that were used.
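Worth noting: the parsed URL segments arrive in `match.params` - always as strings, even when they look like numbers. A tiny sketch (the component name is just illustrative):

```jsx
// For the route /rgb/:r/:g/:b, visiting /rgb/28/221/105 yields
// match.params = { r: '28', g: '221', b: '105' }
const Debug = ({ match }) => <pre>{JSON.stringify(match.params)}</pre>
```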
### Using Switch to catch 404's

Now we need a way to tell React Router that we only want to render the first Route that matches, even if there's more than one match. All we need to do is wrap our Routes inside of a Switch - then, just as we wanted, only the first match will ever be rendered. To prevent the 404 error from popping up on our home route, we can add a simple Index component to the mix. An alternative would be to use the __Redirect__ function in React Router to redirect unknown URLs to a specific site - we use a Home route - that doesn't have a component assigned to it - in our example below:

```jsx
...

<div style={styles.content}>
  <Switch>
    <Route exact path="/hsl/:h/:s/:l" component={HSL} />
    <Route exact path="/rgb/:r/:g/:b" component={RGB} />
    <Route exact path="/home" render={() => (
      <Redirect to="/" />
    )} />
    <Route exact path="/" component={Index} />
    <Route render={() => <div>Not Found</div>} />
  </Switch>
</div>

...

const Index = () => (
  <div style={styles.index}>
    <h1>Hello World</h1>
    <p>Click a link to generate a coloured background.</p>
  </div>
)

...

styles.index = {
  ...styles.fill,
  marginTop: '7%',
  textAlign: 'center'
}

styles.content = {
  ...styles.fill,
  top: '40px',
  textAlign: 'center'
}
```

Only if a route matches the exact path defined for the HSL or RGB component will that component be rendered. Otherwise the request falls through to the __catch-all__ route - our simple 404 component. The complete router component now looks like this:

```jsx
<Router>
  <div style={styles.fill}>
    <ul style={styles.nav}>
      <NavLink to="/home">Home</NavLink>
      <NavLink to="/hsl/355/87/53">Crimson</NavLink>
      <NavLink to="/hsl/210/5/15">Darkslategray</NavLink>
      <NavLink to="/rgb/28/221/105">Springgreen</NavLink>
      <NavLink to="/rgb/43/29/201">Mediumblue</NavLink>
    </ul>
    <div style={styles.content}>
      <Switch>
        <Route exact path="/hsl/:h/:s/:l" component={HSL} />
        <Route exact path="/rgb/:r/:g/:b" component={RGB} />
        <Route exact path="/home" render={() => (
          <Redirect to="/" />
        )} />
        <Route exact path="/" component={Index} />
        <Route render={() => <div>Not Found</div>} />
      </Switch>
    </div>
  </div>
</Router>
```

### Adding Transitions to Routes

To use the react-transition-group, we first have to import transitions into our project:

```js
import { TransitionGroup, CSSTransition } from 'react-transition-group'
```

The TransitionGroup is a wrapper component that keeps track of all its children, new and old, and applies state to them. The CSSTransition then applies CSS classes to them - e.g. to fade in new components / fade out old components. To animate the transition of going from one URL to another, we have to wrap our __Switch__ component inside a __TransitionGroup__ + __CSSTransition__:

```jsx
<div style={styles.content}>
  <TransitionGroup>
    <CSSTransition timeout={300} classNames='fade'>
      <Switch>
        <Route exact path="/" component={Index} />
        <Route exact path="/home" render={() => (
          <Redirect to="/" />
        )} />
        <Route exact path="/hsl/:h/:s/:l" component={HSL} />
        <Route exact path="/rgb/:r/:g/:b" component={RGB} />
        <Route render={() => <div>Not Found</div>} />
      </Switch>
    </CSSTransition>
  </TransitionGroup>
</div>
```

We are applying a 300ms time frame to the CSS transition and use the classname prefix _fade_ for the fade-in effect.
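For completeness: CSSTransition derives the class names it applies from the `classNames` prop - entering children get `fade-enter` / `fade-enter-active`, leaving children get `fade-exit` / `fade-exit-active`. A fade-out could therefore be styled like this (an optional addition, not part of the original demo):

```css
.fade-exit {
  opacity: 1;
}

.fade-exit.fade-exit-active {
  opacity: 0;
  transition: opacity 300ms ease-out;
}
```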
We import the styles from ./styles/index.css - note that the transition duration should match the `timeout` of 300ms set above:

```css
.fade-enter {
  opacity: 0;
  z-index: 1;
}

.fade-enter.fade-enter-active {
  opacity: 1;
  transition: opacity 300ms ease-in;
}
```

To get a unique key from every component being rendered, we can wrap everything inside a __Route__ and get the _location_ prop (that comes with a location.key) from it:

```jsx
<Router>
  <Route render={({ location }) => (
    <div style={styles.fill}>
      <ul style={styles.nav}>
        <NavLink to="/home">Home</NavLink>
        <NavLink to="/hsl/355/87/53">Crimson</NavLink>
        <NavLink to="/hsl/210/5/15">Darkslategray</NavLink>
        <NavLink to="/rgb/28/221/105">Springgreen</NavLink>
        <NavLink to="/rgb/43/29/201">Mediumblue</NavLink>
      </ul>
      <div style={styles.content}>
        <TransitionGroup>
          <CSSTransition
            key={location.key}
            timeout={300}
            classNames='fade'
          >
            <Switch location={location}>
              <Route exact path="/hsl/:h/:s/:l" component={HSL} />
              <Route exact path="/rgb/:r/:g/:b" component={RGB} />
              <Route exact path="/home" render={() => (
                <Redirect to="/" />
              )} />
              <Route exact path="/" component={Index} />
              <Route render={() => <div>Not Found</div>} />
            </Switch>
          </CSSTransition>
        </TransitionGroup>
      </div>
    </div>
  )} />
</Router>
```

__Remember__ to also add the _location_ prop to the Switch component!

<GifContainer gifUrl="/assets/gif/rtg_03.gif" alt="React Password Generator" />

https://tylermcginnis.com/react-router-url-parameters/

https://hackernoon.com/animated-page-transitions-with-react-router-4-reacttransitiongroup-and-animated-1ca17bd97a1a<file_sep>const config = require('./config')

const pathPrefix = config.pathPrefix === '/' ? '' : config.pathPrefix

module.exports = {
  pathPrefix: config.pathPrefix,
  siteMetadata: {
    siteUrl: config.siteUrl + pathPrefix,
  },
  plugins: [
    'gatsby-plugin-react-helmet',
    'gatsby-plugin-styled-components',
    'gatsby-plugin-sharp',
    {
      resolve: 'gatsby-source-filesystem',
      options: {
        name: 'post',
        path: `${__dirname}/blog`,
      },
    },
    {
      resolve: 'gatsby-plugin-google-analytics',
      options: {
        trackingId: config.googleAnalyticsID,
      },
    },
    {
      resolve: 'gatsby-plugin-mdx',
      options: {
        gatsbyRemarkPlugins: [
          {
            resolve: 'gatsby-remark-external-links',
            options: {
              target: '_blank',
              rel: 'nofollow noopener noreferrer',
            },
          },
          {
            resolve: `gatsby-remark-images-without-bg`,
            options: {
              maxWidth: 1024,
              quality: 90,
              withWebp: true,
              tracedSVG: true,
              linkImagesToOriginal: false,
            },
          },
          // {
          //   resolve: 'gatsby-remark-images',
          //   options: {
          //     maxWidth: 800,
          //     backgroundColor: "transparent",
          //     wrapperStyle: 'display: none',
          //     quality: 90,
          //     withWebp: true,
          //     linkImagesToOriginal: false,
          //     tracedSVG: true,
          //   },
          // },
          // TODO: Replace with "mdx-component-autolink-headers"
          {
            resolve: 'gatsby-remark-autolink-headers',
            options: {
              maintainCase: false,
            },
          },
        ],
      },
    },
    'gatsby-plugin-catch-links',
    'gatsby-plugin-sitemap',
    'gatsby-plugin-lodash',
    {
      resolve: 'gatsby-plugin-manifest',
      options: {
        name: config.siteTitleAlt,
        short_name: config.siteTitleManifest,
        description: config.siteDescription,
        start_url: config.pathPrefix,
        background_color: config.backgroundColor,
        theme_color: config.themeColor,
        display: 'standalone',
        icon: config.favicon,
      },
    },
    {
      resolve: `gatsby-plugin-offline`,
      options: {
        precachePages: [``, `/categories`, `/curriculum-vitae`],
        navigateFallback: `/offline-plugin-app-shell-fallback/index.html`,
        // Only match URLs without extensions or the query `no-cache=1`.
        // So example.com/about/ will pass but
        // example.com/about/?no-cache=1 and
        // example.com/cheeseburger.jpg will not.
        // We only want the service worker to handle our "clean"
        // URLs and not any files hosted on the site.
        //
        // Regex based on http://stackoverflow.com/a/18017805
        navigateFallbackWhitelist: [/^[^?]*([^.?]{5}|\.html)(\?.*)?$/],
        navigateFallbackBlacklist: [/\?(.+&)?no-cache=1$/],
        cacheId: `gatsby-plugin-offline`,
        // Don't cache-bust JS or CSS files, and anything in the static directory,
        // since these files have unique URLs and their contents will never change
        dontCacheBustURLsMatching: /(\.js$|\.css$|static\/)/,
        runtimeCaching: [
          {
            // Use CacheFirst since these don't need to be revalidated (same RegExp
            // and same reason as above)
            urlPattern: /(\.js$|\.css$|static\/)/,
            handler: `CacheFirst`,
          },
          {
            // page-data.json files are not content hashed
            urlPattern: /^https?:.*\/page-data\/.*\/page-data\.json/,
            handler: `NetworkFirst`,
          },
          {
            // Add runtime caching of various page resources.
            urlPattern: /\.(?:png|jpg|jpeg|webp|svg|gif|tiff|js|woff|woff2|json|css)$/,
            handler: `StaleWhileRevalidate`,
          },
        ],
        skipWaiting: true,
        clientsClaim: true,
      }
    }
  ],
}
<file_sep>---
date: "2019-06-07"
title: "Setting up Docker on CentOS 8"
categories:
  - LINUX
  - Docker
---

![Harbin, China](./photo-456t66d_64567fh6dgjkhg4_d.jpg)

## Docker ?

The latest release of RHEL 8 / CentOS 8 introduces buildah and podman, which aim to be compatible with existing docker images and work without relying on a daemon, allowing the creation of containers as normal users, without the need of special permissions. Some specific tools, however, are still missing: an equivalent of docker-compose, for example, does not exist yet. In this tutorial we will see how to install and run the original Docker CE on CentOS 8.

The `dnf config-manager` utility lets us enable or disable a repository in our distribution. We can use it to add and enable the docker-ce repo:

```bash
sudo dnf config-manager --add-repo=https://download.docker.com/linux/centos/docker-ce.repo
```

We can verify that the repository has been enabled by looking at the output of the following command:

```bash
sudo dnf repolist -v
```

The docker-ce-stable repository is now enabled on our system. To display all available versions, we can run:

```bash
dnf list docker-ce --showduplicates | sort -r
```

Now we need to install `containerd.io` manually. As there isn't a version for CentOS 8 (yet?)
we need to install the following: ```bash sudo dnf install https://download.docker.com/linux/centos/7/x86_64/stable/Packages/containerd.io-1.2.6-3.3.el7.x86_64.rpm ``` ## Install Docker-CE After the package is installed, we can simply install the latest docker-ce: ```bash sudo dnf install docker-ce ``` In order to make DNS resolution work inside Docker containers, we must disable firewalld (really?): ```bash sudo systemctl disable firewalld ``` ## Start and enable the Docker Daemon Once docker-ce is installed, we must start and enable the docker daemon, so that it will be also launched automatically at boot: ```bash sudo systemctl enable --now docker ``` At this point, we can confirm that the daemon is active by running: ``` systemctl is-active docker systemctl is-enabled docker ``` ## Installing Docker-Compose Download the binary from the [github page of the project](https://github.com/docker/compose/releases/latest): ```bash curl -L "https://github.com/docker/compose/releases/download/1.24.1/docker-compose-$(uname -s)-$(uname -m)" -o docker-compose ``` Once the binary is downloaded, we move it into `/usr/local/bin` and we make it executable: ```bash sudo mv docker-compose /usr/local/bin && sudo chmod +x /usr/local/bin/docker-compose ``` ## Testing Docker ```bash docker pull hello-world docker run hello-world ``` ![Docker on CentOS 8](./Docker_on_CentOS8_01.png)<file_sep>import matplotlib.pyplot as pyp import matplotlib.animation as animation #Creating a new figure figure = pyp.figure() #Creating a subplot with 1 row, 1 column and index 1 - this means a single subplot in our figure subplot = figure.add_subplot(1, 1, 1) #Creating the function that reads the data from cpu.txt and feeds it to our subplot def animation_function(i): #Opening the file and reading each row of CPU utilization data in the file; creating a list of values cpu_data = open("E:\\python-ssh-logger\\cpu-load.txt").readlines() #Creating an empty list in which to append each value in the file converted from string to float; x = [] #Iterating over the list of CPU values and appending each value (converted to float) to the previously created list - x; adding an if statement to exclude any blank lines in the file for each_value in cpu_data: if len(each_value) > 1: x.append(float(each_value)) #Clearing/refreshing the figure to avoid unnecessary overwriting for each new poll (every 10 seconds) subplot.clear() #Plotting the values in the list subplot.plot(x) #Using the figure, the function and a polling interval of 10000 ms (10 seconds) to build the graph graph_animation = animation.FuncAnimation(figure, animation_function, interval = 10000) #Displaying the graph to the screen pyp.show()<file_sep>--- date: "2020-06-14" title: "Install Salt on Ubuntu Server 20.04" categories: - LINUX --- ![Battambang, Cambodia](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Install Salt](#install-salt) - [Python 3.7 on Ubuntu 20.04](#python-37-on-ubuntu-2004) - [Salt Bootstrap](#salt-bootstrap) - [Network Addressing](#network-addressing) - [Authenticate Minions](#authenticate-minions) - [Test Connection](#test-connection) - [Grains](#grains) <!-- /TOC --> ## Install Salt ### Python 3.7 on Ubuntu 20.04 If this is still an issue (Salt not supporting Python 3.8) - install the older version from [deadsnakes PPA](https://launchpad.net/~deadsnakes/+archive/ubuntu/ppa): ```bash sudo add-apt-repository ppa:deadsnakes/ppa sudo apt-get update sudo apt-get install python3.7 ``` ### Salt Bootstrap [Salt 
Bootstrap](https://repo.saltstack.com/#bootstrap) is a configuration script which automatically detects the operating system it’s running on, sets the correct repositories, and installs Salt. The install script is intended to be run on the Salt master and all minion machines.

__Salt Master__

```bash
curl -L https://bootstrap.saltstack.com -o install_salt.sh
sudo sh install_salt.sh -P -M -N
```

__Salt Minion__

```bash
curl -L https://bootstrap.saltstack.com -o install_salt.sh
sudo sh install_salt.sh -P
```

### Network Addressing

__Salt Master__

You can find the master configuration file here `/etc/salt/master`. Here we now want to define the interface our server is working on. You can uncomment the `#interface:` line near the top of the `/etc/salt/master` file and replace the address placeholder with the address of your Salt master’s IP. If all your nodes are located in the same data center, you can use the private IP address.

```bash
# The address of the interface to bind to:
interface: 192.168.2.110
```

The "cleaner" way to handle this is to leave the default config unchanged and add your own configuration files to `/etc/salt/master.d`. All `*.conf` files inside this directory will be used by Salt automatically - you can name them according to your needs.

```bash
sudo nano /etc/salt/master.d/local.conf
```

And add the lines that you need to configure Salt:

```conf
# The address of the interface to bind to:
interface: 192.168.2.110
```

Then restart Salt:

```bash
sudo systemctl restart salt-master
```

__Salt Minions__

To assign a master to your minions you can again uncomment `#master: salt` near the top of `/etc/salt/minion`, and replace `salt` with your Salt master’s IP address. Or add your own config file to `/etc/salt/minion.d` to override the default settings:

```bash
sudo nano /etc/salt/minion.d/local.conf
```

```conf
# Set the location of the salt master server. If the master server cannot be
# resolved, then the minion will fail to start.
master: 192.168.2.110
```

### Authenticate Minions

From the Salt master, list its key fingerprint, and all Minions linked to it with their key fingerprints:

```bash
sudo salt-key --finger-all

Local Keys:
master.pem:  f1:d3:52:eb:f4:52...
master.pub:  64:12:61:93:3e:09...
```

Add the Salt Master’s master.pub fingerprint to `/etc/salt/minion`, between the single quotes:

```conf
# Fingerprint of the master public key to validate the identity of your Salt master
# before the initial key exchange. The master fingerprint can be found by running
# "salt-key -f master.pub" on the Salt master.
master_finger: '64:12:61:93:3e:09...'
```

And while we are at it - let's give our minion a name:

```conf
# Explicitly declare the id for this minion to use, if left commented the id
# will be the hostname as returned by the python call: socket.getfqdn()
# Since salt uses detached ids it is possible to run multiple minions on the
# same machine but with different ids, this can be useful for salt compute
# clusters.
id: ubuntuAsus
```

Then restart Salt:

```bash
sudo systemctl restart salt-minion
```

## Test Connection

List the Minion’s fingerprint hash:

```bash
sudo salt-call key.finger --local

local:
    6a:1f:19:8e:78:0f:2d...
```

and verify it with what’s reported by the Salt Master:

```bash
sudo salt-key --finger-all

Local Keys:
master.pem:  f1:d3:52:eb:f4:52...
master.pub:  64:12:61:93:3e:09...
Unaccepted Keys:
centos8.localdomain:  6a:1f:19:8e:78:0f:2d...
``` Once each Minion ID has been verified, accept them all from the Salt Master: ```bash sudo salt-key -a centos8.localdomain // or accept all: sudo salt-key -A The following keys are going to be accepted: Unaccepted Keys: centos8.localdomain Proceed? [n/Y] y Key for minion centos8.localdomain accepted. ``` > Unaccepted keys can be deleted with `salt-key -d 'minionname'`. To get rid of all Keys from currently disconnected Minions run `salt-run manage.down removekeys=True` Verify the status of accepted minions. The command below should return the hostname or IP address of each Minion which has been verified and is running: ```bash sudo salt-run manage.up - ubuntuAsus ``` Ping all Minions: ```bash sudo salt '*' test.ping ubuntuAsus: True ``` By using `*` you target all accepted Minions on your Salt network. You can also target a specific Minion `sudo salt -L ubuntuAsus,ubuntuBrix test.ping`. Target selection options: | | | | -- | -- | | -H, --hosts | List all known hosts to currently visible or other specified rosters | | -E, --pcre | Instead of using shell globs to evaluate the target servers, use pcre regular expressions. | | -L, --list | Instead of using shell globs to evaluate the target servers, take a comma or whitespace delimited list of servers. | | -G, --grain | Instead of using shell globs to evaluate the target use a grain value to identify targets, the syntax for the target is the grain key followed by a globexpression: "os:Arch*". | | -P, --grain-pcre | Instead of using shell globs to evaluate the target use a grain value to identify targets, the syntax for the target is the grain key followed by a pcre regular expression: "os:Arch.*". | | -N, --nodegroup | Instead of using shell globs to evaluate the target use one of the predefined nodegroups to identify a list of targets. | | -R, --range | Instead of using shell globs to evaluate the target use a range expression to identify targets. Range expressions look like %cluster. | | -C, --compound | The compound target option allows for multiple target types to be evaluated, allowing for greater granularity in target matching. The compound target is space delimited, targets other than globs are preceded with an identifier matching the specific targets argument type: salt 'G@os:RedHat and webser* or E@database.*'. | | -I, --pillar | Instead of using shell globs to evaluate the target use a pillar value to identify targets, the syntax for the target is the pillar key followed by a glob expression: "role:production*". | | -J, --pillar-pcre | Instead of using shell globs to evaluate the target use a pillar value to identify targets, the syntax for the target is the pillar key followed by a pcre regular expression: "role:prod.*". | | -S, --ipcidr | Match based on Subnet (CIDR notation) or IP address. | ## Grains Salt comes with an interface to derive information about the underlying system. This is called the [Grains Interface](https://docs.saltstack.com/en/latest/topics/grains/index.html), because it presents salt with grains of information. Grains are collected for the operating system, domain name, IP address, kernel, OS type, memory, and many other system properties. You can list all available grains by running `sudo salt '*' grains.ls`. 
Or get all available grains for your connected Minions with `sudo salt '*' grains.items`: ```yaml ubuntuAsus: ---------- biosreleasedate: 07/31/2018 biosversion: 0407 cpu_model: Intel(R) Core(TM) i3-8130U CPU @ 2.20GHz cpuarch: x86_64 kernelrelease: 5.4.0-42-generic kernelversion: #46-Ubuntu SMP Fri Jul 10 00:24:02 UTC 2020 locale_info: ---------- defaultencoding: UTF-8 defaultlanguage: en_US detectedencoding: utf-8 timezone: UTC localhost: ubuntubrix lsb_distrib_codename: focal lsb_distrib_description: Ubuntu 20.04.1 LTS lsb_distrib_id: Ubuntu lsb_distrib_release: 20.04 machine_id: b783c83a285f4d4bac05fda96aa96a90 manufacturer: ASUSTeK COMPUTER INC. master: 192.168.2.110 mdadm: mem_total: 7834 nodename: ubuntubrix num_cpus: 4 num_gpus: 1 os: Ubuntu os_family: Debian osarch: amd64 oscodename: focal osfinger: Ubuntu-20.04 osfullname: Ubuntu osmajorrelease: 20 osrelease: 20.04 osrelease_info: - 20 - 4 username: root virtual: physical zfs_feature_flags: False zfs_support: False zmqversion: 4.3.2 ``` To target Minions by grains use the `-G` flag: ```bash sudo salt -G os:Ubuntu test.ping ubuntuAsus: True ``` We can also build this request as a compound command with the `-C` flag - ping all Minions that run Ubuntu and have an ID that starts with `ubuntu`: ```bash sudo salt -C 'G@os:Ubuntu and ubuntu*' test.ping ubuntuAsus: True ```<file_sep>--- date: "2017-09-05" title: "Proxima Centauri" categories: - LINUX --- ![<NAME>](./photo-76834w36_4vkhjk89776s_78.png) <!-- TOC --> - [Setting up a Squid Proxy Server](#setting-up-a-squid-proxy-server) - [Setup Access Restrictions](#setup-access-restrictions) - [Squid Proxy Client Authentication](#squid-proxy-client-authentication) - [Configuring FirewallD](#configuring-firewalld) - [Adding the Proxy Server to your Raspberry Pi](#adding-the-proxy-server-to-your-raspberry-pi) - [Changing the Nameserver](#changing-the-nameserver) <!-- /TOC --> ## Setting up a Squid Proxy Server How To Install [Squid Proxy](http://www.squid-cache.org/Intro/) on CentOS 7: ```bash yum -y install squid chkconfig squid on ``` ### Setup Access Restrictions Since this Squid proxy would allow anyone using it to make connections from your servers's IP address, you would want to restrict access to it. We will use the free service from [NoIP.com](https://www.noip.com/sign-up) to create a DDNS address for our proxy service. We can then later configure squid to only accept traffic coming from this address. ![Squid Proxy](./Squid_Proxy_01.png) This hostname should be added to /etc/squid/squid.conf. Edit the file and add your hostname, e.g. `acl localnet src myproxy.no-ip.org`: ![Squid Proxy](./Squid_Proxy_02.png) We can now start squid and enable it as a system service: ```bash systemctl start squid systemctl enable squid systemctl status squid ``` ![Squid Proxy](./Squid_Proxy_03.png) You now can test your Proxy by adding it to your network configuration, e.g. in Chrome (s. below) and navigate to a website like [whatismyip.com](http://www.whatismyip.com/). It should now show you that you are actually accessing the web site from your proxies IP address. ![Squid Proxy](./Squid_Proxy_04.png) ### Squid Proxy Client Authentication You will most probably want your users to authenticate before using the proxy. For that purpose, you can enable basic http authentication. It is easy and fast to configure. First you will need __httpd-tools__ installed: ```bash yum -y install httpd-tools ``` Now lets create a file that will later store the username for the authentication. 
Squid runs with user `squid` so the file should be owned by that user: ```bash touch /etc/squid/passwd chown squid: /etc/squid/passwd ``` Now we will create a new user called `proxyclient` and setup its password: ```bash htpasswd /etc/squid/passwd proxyclient ``` Now to configure the authentication open the configuration file with `nano /etc/squid/squid.conf` and after the ports ACLs add the following lines: ```bash auth_param basic program /usr/lib64/squid/basic_ncsa_auth /etc/squid/passwd auth_param basic children 5 auth_param basic realm Squid Basic Authentication auth_param basic credentialsttl 2 hours acl auth_users proxy_auth REQUIRED http_access allow auth_users ``` ![Squid Proxy](./Squid_Proxy_05.png) Save the file and restart squid so that the new changes can take effect `systemctl restart squid`. ### Configuring FirewallD And make sure that your firewall allows traffic on your proxy port (default `3128`): ```bash firewall-cmd --zone=public --add-port=3128/tcp firewall-cmd --permanent --zone=public --add-port=3128/tcp ``` ## Adding the Proxy Server to your Raspberry Pi You will need to set up three environment variables __http_proxy__, __https_proxy__, and __no_proxy__ so your Raspberry Pi knows how to access the proxy server. ```bash sudo nano /etc/environment ``` Add the following to the `/etc/environment` file to create the __http_proxy__ variable: ```bash export http_proxy="http://proxyclient:password@proxyipaddress:proxyport" ``` Replace _proxyipaddress_ and _proxyport_ with the IP address and port (Squid default `3128`) of your proxy. And _password_ with the password you used for the `proxyclient`. Enter the same information for the environment variable __https_proxy__: ```bash export https_proxy="http://username:password@proxyipaddress:proxyport" ``` Create the no_proxy environment variable, which is a comma-separated list of addresses your Pi should not use the proxy for: ```bash export no_proxy="localhost, 127.0.0.1" ``` Your `/etc/environment` file should now look like this: ![Squid Proxy](./Squid_Proxy_06.png) In order for operations that run as sudo (e.g. downloading and installing software) to use the new environment variables, you'll need to update __sudoers__: ```bash sudo visudo ``` Add the following line to the file so sudo will use the environment variables you just created: ```bash Defaults env_keep+="http_proxy https_proxy no_proxy" ``` ![Squid Proxy](./Squid_Proxy_07.png) Now add the following line to `sudo nano /etc/apt/apt.conf.d/01proxy`: ```bash Acquire::http::Proxy "http://username:password@proxyipaddress:proxyport"; ``` ### Changing the Nameserver Unfortunately this isn't enough when you want to access servers by their domain name - e.g. Google CDNs for your libraries. Already the name resolution will be blocked or redirected to places you do not want to go. In some cases it is possible to get around this by the IP address of those servers instead of the domain (Note: you have to flag your `curls` with `--insecure` when accessing __HTTPS__ sites, as the certificate will match the domain but not the IP!) But it makes much more sense to drop the DNS server that your ISP provides and go with an open one instead. At the moment of writing both the [secondary Google DNS](https://developers.google.com/speed/public-dns/docs/using) (8.8.4.4) as well as the [OpenDNS](https://use.opendns.com) server is reachable without a VPN. So I am going to set up those two as primary and secondary DNS for my Dev RaspberryPi. 
This can usually be done inside `/etc/resolv.conf` but Raspbian installs a program called __resolvconf__ that will auto-generate this file every time you restart your Pi. To fix this problem we need to edit the DHCP config instead: ```bash sudo nano /etc/dhcpcd.conf ``` Add the following line to the bottom of the file: ```bash static domain_name_servers=8.8.4.4 208.67.222.222 ``` And restart your Pi with: ```bash sudo systemctl daemon-reload sudo service dhcpcd restart ``` You can afterwards verify that the resolve configuration was generated successfully `sudo nano /etc/resolv.conf` ![Squid Proxy](./Squid_Proxy_08.png) Reboot your Raspberry Pi for the changes to take effect. You should now be able to access the internet via your proxy server. You can the `curl https://www.wieistmeineip.de` - search for `wimip.ip` to verify that it shows your Proxy IP address: ```js <script> var _SITE_NAME="WIMIP"; var wimip = wimip || []; wimip.ip = "My Proxy IP"; </script> ``` <file_sep>--- date: "2019-01-15" title: "Creating a Kubernetes Cluster" categories: - LINUX - Docker - Kubernetes --- ![Shanghai, China](./photo-34219553960_350f18edbb_o.png) In this hands-on lab from [Linux Academy](https://linuxacademy.com/cp), we will install and configure a Kubernetes cluster consisting of 1 master and 2 nodes for the pods. Once the installation and configuration are complete, we will have a 3-node Kubernetes cluster that uses Flannel as the network overlay. <!-- TOC --> - [Instructions](#instructions) - [Objectives](#objectives) - [Install Docker and Kubernetes](#install-docker-and-kubernetes) - [Prerequisites](#prerequisites) - [Install Docker CE](#install-docker-ce) - [Install Kubernetes](#install-kubernetes) - [Start Docker and Kublet](#start-docker-and-kublet) - [Cluster Initialization](#cluster-initialization) - [Installing a Pod Network Add-on](#installing-a-pod-network-add-on) - [Check the cluster state.](#check-the-cluster-state) - [Create and scale a deployment using kubectl](#create-and-scale-a-deployment-using-kubectl) - [Kubernetes stuck on ContainerCreating](#kubernetes-stuck-on-containercreating) - [Scaling](#scaling) - [Pod Deployment](#pod-deployment) - [Kubernetes Services](#kubernetes-services) - [Updating the Pod](#updating-the-pod) - [Adding a Queue Server](#adding-a-queue-server) - [Replication (not recommended)](#replication-not-recommended) - [ReplicaSets](#replicasets) - [Deployment (recommended)](#deployment-recommended) - [Rolling Update and Rollback](#rolling-update-and-rollback) - [Networking and Service Discovery](#networking-and-service-discovery) - [Creating a Database Pod](#creating-a-database-pod) - [Kubernetes + Compose = Kompose](#kubernetes--compose--kompose) - [Use Kompose](#use-kompose) <!-- /TOC --> ## Instructions In this learning activity, we will create a Kubernetes cluster. The commands we will use for this process can be found in the task list by clicking on the orange question mark buttons. Once you have completed the lab, leave your cluster in its final state. Do not delete the deployment: ### Objectives * Install Docker and Kubernetes on all servers. * Create and scale a deployment using kubectl. ## Install Docker and Kubernetes ### Prerequisites 1. You first need to login as your root user or elevate your users privileges via sudo on your master and two minion servers: ```bash sudo su ``` 1. 
This next step is not recommended in production ([How do you create a SE Linux policy?](https://docs.docker.com/engine/security/seccomp/)) - but we are going to disable SE Linux on all 3 servers: ``` setenforce 0 sed -i --follow-symlinks 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/sysconfig/selinux ``` 3. Now we need to enable the `br_netfilter` module on all servers to allow our cluster to communicate: ```bash modprobe br_netfilter echo '1' > /proc/sys/net/bridge/bridge-nf-call-iptables ``` --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_01.png) --- 4. Make sure that swap has been turned off: --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_02.png) --- > To turn off swap on centos type `swapoff -a` and check that the swap was removed with `free -h`. Edit the `/etc/fstab` file, search for the swap line and comment the entire line by adding a `#` in front of the line: --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_02a.png) ![Creating a Kubernetes Cluster](./kubernetes_cluster_02b.png) --- > Now delete the remaining swap file - check the location `blkid` and remove it `rm /dev/mapper/centos-swap`: --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_02c.png) --- 1. Add a FirewallD Service Configure FirewallD for Kubernetes to work correctly. First download the k8s-master.xml and k8s-worker.xml files to `cd /etc/firewalld/services` on your master and minion server: ```bash wget https://raw.githubusercontent.com/mpolinowski/k8s-firewalld/master/k8s-master.xml wget https://raw.githubusercontent.com/mpolinowski/k8s-firewalld/master/k8s-worker.xml firewall-cmd --reload ``` __On Master Node__ ```bash firewall-cmd --add-service=k8s-master --zone=public --permanent ``` __On Minion Nodes__ ```bash firewall-cmd --add-service=k8s-worker --zone=public --permanent ``` __TODO__: I have to add `firewall-cmd --add-service=dns --zone=public --permanent` ? <!-- systemctl stop firewalld && systemctl disable firewalld systemctl start firewalld && systemctl enable firewalld --> ### Install Docker CE 1. Ensure that all Docker dependencies are installed: ```bash yum install -y yum-utils device-mapper-persistent-data lvm2 ``` --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_03.png) --- 2. 
Now we can add the Docker repository and install it: ```bash yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo yum install -y docker-ce ``` --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_04.png) --- ### Install Kubernetes First we need to add the Kubernetes repository all servers: ```bash cat << EOF > /etc/yum.repos.d/kubernetes.repo [kubernetes] name=Kubernetes baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64 enabled=1 gpgcheck=0 repo_gpgcheck=0 gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg EOF ``` --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_05.png) --- Now we can install Kubernetes on all servers via yum: ```bash yum install -y kubelet kubeadm kubectl ``` --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_06.png) --- ### Start Docker and Kublet ```bash systemctl enable docker systemctl enable kubelet systemctl start docker systemctl start kubelet ``` --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_07.png) --- To check in what Group Docker is running, run `docker info | grep -i cgroup`: --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_08.png) --- In this case it is `cgroupfs` - we can now make sure that Kubernetes is added to this group: ```bash sed -i 's/cgroup-driver=systemd/cgroup-driver=cgroupfs/g' /etc/systemd/system/kubelet.service.d/10-kubeadm.conf systemctl daemon-reload systemctl restart kubelet ``` ## Cluster Initialization So far we did the basic setup for all our servers - now we will initialize our cluster from the __MASTER SERVER__ using the IP range for Flannel. Kubeadm is a tool built to provide [kubeadm init](https://kubernetes.io/docs/reference/setup-tools/kubeadm/kubeadm-init/) and [kubeadm join](https://kubernetes.io/docs/reference/setup-tools/kubeadm/kubeadm-join/) as best-practice _fast paths_ for creating Kubernetes clusters. The __init command__ executes the following phases: <!-- Flannel CNI backup ```bash kubeadm init --pod-network-cidr=10.244.0.0/16 ``` kubeadm init --pod-network-cidr=10.244.0.0/16 --ignore-preflight-errors=NumCPU --> <!-- Calico CNI backup ```bash kubeadm init --pod-network-cidr=192.168.0.0/16 ``` > The flag `--pod-network-cidr=192.168.0.0/16` has to be passed in for our [Container Network Interface (CNI)](https://kubernetes.io/docs/setup/independent/create-cluster-kubeadm/#pod-network) to work - see below. __Note__ that we are going to use the [Calico CNI](https://docs.projectcalico.org/latest/getting-started/kubernetes/) - other CNI's need different parameter. --> ```bash kubeadm init ``` Your Kubernetes master has initialized successfully! 
You can now join any number of machines by running the following on each node as root: --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_09.png) --- ```bash kubeadm join X.X.X.X:6443 --token <PASSWORD> --discovery-token-ca-cert-hash sha256:XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX ``` Copy the `kubeadmin join` command that is in the output and past it into your minion server terminals: --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_10.png) --- > To start over and reset the __init__ and __join__ process by running the [kubeadm reset](https://kubernetes.io/docs/reference/setup-tools/kubeadm/kubeadm-reset/) command on your master and all minion servers + clean up iptables `iptables -F && iptables -t nat -F && iptables -t mangle -F && iptables -X` To start using your cluster, you need to run the following as a regular user: ```bash mkdir -p $HOME/.kube sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config sudo chown $(id -u):$(id -g) $HOME/.kube/config ``` ### Installing a Pod Network Add-on You must install a pod network add-on so that your pods can communicate with each other. The network must be deployed before any applications. Also, CoreDNS will not start up before a network is installed. kubeadm only supports [Container Network Interface (CNI)](https://kubernetes.io/docs/setup/independent/create-cluster-kubeadm/#pod-network) based networks (and does not support kubenet). <!-- Flannel CNI backup We will use [Flannel](https://github.com/coreos/flannel) as a simple and easy way to configure a layer 3 network fabric designed for Kubernetes. Flannel runs a small, single binary agent called flanneld on each host, and is responsible for allocating a subnet lease to each host out of a larger, preconfigured address space. Flannel uses either the Kubernetes API or etcd directly to store the network configuration, the allocated subnets, and any auxiliary data (such as the host's public IP). Packets are forwarded using one of several backend mechanisms including VXLAN and various cloud integrations. For flannel to work correctly, you must pass `--pod-network-cidr=10.244.0.0/16` to `kubeadm init`. Set _/proc/sys/net/bridge/bridge-nf-call-iptables_ to 1 by running `sysctl net.bridge.bridge-nf-call-iptables=1` to pass bridged IPv4 traffic to iptables chains. This is a requirement for some CNI plugins to work, for more information please see here. ```bash kubectl apply -f https://raw.githubusercontent.com/coreos/flannel/master/Documentation/kube-flannel.yml ``` --> <!-- Calico CNI backup There are several CNI's listed for Kubernetes - but we will be using the solution from [Calico](https://docs.projectcalico.org/latest/getting-started/kubernetes/). For Calico to work correctly, you needed to pass `--pod-network-cidr=192.168.0.0/16` to `kubeadm init` or update the calico.yml file to match your Pod network. Note that Calico works on amd64, arm64 and ppc64le only. 
We can install the pod network add-on with the following commands on your Master server: ```bash kubectl apply -f \ https://docs.projectcalico.org/v3.5/getting-started/kubernetes/installation/hosted/etcd.yaml kubectl apply -f https://docs.projectcalico.org/v3.3/getting-started/kubernetes/installation/hosted/rbac-kdd.yaml kubectl apply -f https://docs.projectcalico.org/v3.3/getting-started/kubernetes/installation/hosted/kubernetes-datastore/calico-networking/1.7/calico.yaml ``` --> There are several CNI's listed for Kubernetes - but we will be using the solution from [Weave Net](https://www.weave.works/docs/net/latest/kube-addon/). First Set __/proc/sys/net/bridge/bridge-nf-call-iptables__ to __1__ by running `sysctl net.bridge.bridge-nf-call-iptables=1` to pass bridged IPv4 traffic to iptables’ chains. This is a requirement for some CNI plugins to work, for more information please see here. ``` sysctl -w net.bridge.bridge-nf-call-iptables=1 sysctl -w net.bridge.bridge-nf-call-ip6tables=1 reboot ``` Weave Net works on amd64, arm, arm64 and ppc64le without any extra action required. Weave Net sets hairpin mode by default. This allows Pods to access themselves via their Service IP address if they don’t know their PodIP: ```bash kubectl apply -f "https://cloud.weave.works/k8s/net?k8s-version=$(kubectl version | base64 | tr -d '\n')" ``` --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_11.png) --- ### Check the cluster state. A quick `kubectl get nodes` should confirm that all our nodes are now connected to the container network (see screenshot above)! Once a pod network has been installed, you can confirm that it is working by checking that the CoreDNS pod is Running in the output of `kubectl get pods --all-namespaces`. And once the CoreDNS pod is up and running, you can continue by joining your nodes. Platforms like Kubernetes assume that each container (pod) has a unique, routable IP inside the cluster. The advantage of this model is that it removes the port mapping complexities that come from sharing a single host IP. --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_12.png) --- ## Create and scale a deployment using kubectl [Kubectl](https://kubernetes.io/docs/reference/kubectl/overview/) is a command line interface for running commands against Kubernetes clusters. We can run a NGINX container with the following command: ```bash kubectl create deployment nginx --image=nginx ``` You can verify that the __Pod__ was created by Kubernetes: ```bash kubectl get pods ``` --- ![Creating a Kubernetes Cluster](./kubernetes_cluster_14.png) --- <!-- Flannel backup ### Kubernetes stuck on ContainerCreating Use `kubectl describe pods` to list all the events associated with the pod, including pulling of images, starting of containers: ```bash Normal - Scheduled - 13m - default-scheduler - Successfully assigned default/nginx-5c7588df-76mgw to in-centos-minion2 Warning - FailedCreatePodSandBox - 13m - kubelet, in-centos-minion2 Failed create pod sandbox: rpc error: code = Unknown desc = failed to set up sandbox container "8ab7c07633f6170dabd8df9a28680a7b9af79f10374e81df8c85dac609abc209" network for pod "nginx-5c7588df-76mgw": NetworkPlugin cni failed to set up pod "nginx-5c7588df-76mgw_default" network: open /run/flannel/subnet.env: no such file or directory ``` I noticed that - to use Flannel - you need to initialize `kubeadm` with the ` --pod-network-cidr=10.244.0.0/16` flag. 
You can reset your deployment with:

```bash
kubeadm reset
iptables -F && iptables -t nat -F && iptables -t mangle -F && iptables -X
```

Now back to `kubeadm init --pod-network-cidr=10.244.0.0/16` and rebuild the deployment - success:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_15.png)

---

-->

### Scaling

To scale our web server deployment up to __4__ replicas, we can use the following `kubectl` command:

```bash
kubectl scale deployment nginx --replicas=4
kubectl get pods
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_16.png)

---

To get rid of those pods you can scale back down to 0 replicas; to remove Nginx completely, delete the deployment (not the pods - those would be respawned automatically):

```bash
kubectl scale deployment nginx --replicas=0
kubectl delete deployment nginx
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_17.png)

---

If you try to delete the pods by name `kubectl delete pod nginx-5c7588df-lf2pb`, or try to delete all: `kubectl delete pods --all`, Kubernetes will respawn new pods to meet your deployment scale requirement.

## Pod Deployment

Let's deploy a simple Angular app inside a [Kubernetes Pod](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.13/#pod-v1-core) - we will use the __release0__ tag of [this Docker image](https://hub.docker.com/r/richardchesterwood/k8s-fleetman-webapp-angular/tags) to get started. To do this we will have to create a Pod Config file for it called `webapp-angular.yaml`:

```yaml
apiVersion: v1
kind: Pod
metadata:
  name: webapp
spec:
  containers:
  - name: webapp
    image: richardchesterwood/k8s-fleetman-webapp-angular:release0
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_18.png)

---

We can now use __kubectl__ to read our configuration file and generate the webapp Pod:

```bash
kubectl apply -f webapp-angular.yaml
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_19.png)

---

You can inspect the pod with:

```bash
kubectl describe pod webapp
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_20.png)

---

Just like in Docker, we can use __kubectl__ to run commands against our containers - e.g. to access the shell inside your container, contact the webserver (that should be serving our Angular app) and print out its index page:

```bash
kubectl exec -it webapp sh

/ # wget http://localhost:80
/ # cat index.html
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_21.png)

---

## Kubernetes Services

We now have a Pod that is serving our Angular frontend. To be able to access this Pod from _the outside_ we need to add a [Kubernetes service](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.13/#service-v1-core). Let's create a file `webapp-service.yaml`:

```yaml
apiVersion: v1
kind: Service
metadata:
  # Unique key of the Service instance
  name: fleetman-webapp
spec:
  ports:
    # Accept traffic sent to port 80
    - name: http
      port: 80
      targetPort: 80
      # The nodePort is available from outside of the
      # cluster when the type is set to NodePort.
      # Its value has to be > 30000
      nodePort: 30080
  selector:
    # Define which pods are going to
    # be represented by this service
    # The service makes a network
    # endpoint for our app
    app: webapp
  # Setting the Service type to ClusterIP makes the
  # service only available from inside the cluster
  # To expose a port use NodePort instead
  type: NodePort
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_22.png)

---

We now have to add the __Selector__ `webapp` in the form of a label in `webapp-angular.yaml` to our frontend pod to connect it to our service:

```yaml
apiVersion: v1
kind: Pod
metadata:
  name: webapp
  labels:
    app: webapp
spec:
  containers:
  - name: webapp
    image: richardchesterwood/k8s-fleetman-webapp-angular:release0
```

The service can be added to our cluster by the following commands:

```bash
kubectl apply -f webapp-angular.yaml
kubectl apply -f webapp-service.yaml
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_23.png)

---

By applying our changes, we have updated our pod and created our service.

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_24.png)

---

The webapp is now accessible on your Cluster IP with the Port __30080__!

## Updating the Pod

To update a Pod in production with 0 downtime we can use labels. We can define a second pod inside the same file `webapp-angular.yaml`. Right now we are going to use release versions to define our pods. Later we might change this to a __production__ and a __development__ version - both of which can then be run inside the same cluster and be served by different services:

```yaml
apiVersion: v1
kind: Pod
metadata:
  name: webapp
  labels:
    app: webapp
    release: "0"
spec:
  containers:
  - name: webapp
    image: richardchesterwood/k8s-fleetman-webapp-angular:release0
---
apiVersion: v1
kind: Pod
metadata:
  name: webapp-release-0-5
  labels:
    app: webapp
    release: "0-5"
spec:
  containers:
  - name: webapp
    image: richardchesterwood/k8s-fleetman-webapp-angular:release0-5
```

Make sure to surround the release version with quotation marks to turn it into a string - __"0"__. Otherwise you end up with the error message `for: "webapp-angular.yaml": cannot convert int64 to string`.

Now we have to modify our service `webapp-service.yaml` to not only check for the app name label, but also for the release version - we want to only connect to the current version 0. Once the version 0-5 is deployed we can then update the service to connect us to the updated Pod instead - allowing us to deploy the update with 0 downtime:

```yaml
apiVersion: v1
kind: Service
metadata:
  # Unique key of the Service instance
  name: fleetman-webapp
spec:
  ports:
    # Accept traffic sent to port 80
    - name: http
      port: 80
      targetPort: 80
      # The nodePort is available from outside of the
      # cluster when the type is set to NodePort.
      # Its value has to be > 30000
      nodePort: 30080
  selector:
    # Define which pods are going to
    # be represented by this service
    # The service makes a network
    # endpoint for our app
    app: webapp
    release: "0"
  # Setting the Service type to ClusterIP makes the
  # service only available from inside the cluster
  # To expose a port use NodePort instead
  type: NodePort
```

Now update both our Pod and Service - as well as add the new Pod with our updated Angular app:

```bash
kubectl apply -f webapp-angular.yaml
kubectl apply -f webapp-service.yaml
kubectl get pods --show-labels
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_25.png)

---

Now that both pods are running we can edit our service `webapp-service.yaml` and change the `selector` to `release: "0-5"`.
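Only the selector block of the service definition needs to be touched for this - the rest of the file stays unchanged:

```yaml
  selector:
    app: webapp
    # switch the service over to the updated Pod
    release: "0-5"
```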
Apply the change with `kubectl apply -f webapp-service.yaml` and verify that the service is now switched to the new release with `kubectl describe service fleetman-webapp`:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_26.png)

---

Type in the IP of your __Kubernetes Cluster__ (_WAN IP of your master server_) and add the port __30080__ to access the updated web interface with your browser:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_26a.png)

---

## Adding a Queue Server

Our finished WebApp is going to need a message broker to work. We are going to use the [given Docker Image](https://hub.docker.com/r/richardchesterwood/k8s-fleetman-queue/tags) in the __release1__ version to add [Apache ActiveMQ](http://activemq.apache.org) as a queue service for our app. ActiveMQ - or in general all Message Oriented Middleware (MOM) implementations - is designed for the purpose of sending messages between two applications, or two components inside one application.

To add this service in a Pod, we need to create a configuration file for it - this can either be a new file called `webapp-queue.yaml`, or we can simply add it to the end of our existing `webapp-angular.yaml` configuration file to keep everything neat and tidy (this also means that we can simplify the filename to `pods.yaml` -> `mv webapp-angular.yaml pods.yaml`):

```yaml
---
apiVersion: v1
kind: Pod
metadata:
  name: queue
  labels:
    app: queue
spec:
  containers:
  - name: queue
    image: richardchesterwood/k8s-fleetman-queue:release1
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_27.png)

---

Use __kubectl__ to read our configuration file and generate the Queue Pod:

```bash
kubectl apply -f pods.yaml
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_28.png)

---

To be able to access the Pod, we need to add it to our service configuration `webapp-service.yaml` which we can rename to `services.yaml` -> `mv webapp-service.yaml services.yaml`:

```yaml
---
apiVersion: v1
kind: Service
metadata:
  # Unique key of the Service instance
  name: fleetman-queue

spec:
  ports:
    # Accept traffic sent to port 8161 (default login is admin/admin)
    - name: http
      port: 8161
      targetPort: 8161
      # The nodePort is available from outside of the
      # cluster when the type is set to NodePort. Its
      # value has to be > 30000
      nodePort: 30010

  selector:
    # Define which pods are going to
    # be represented by this service
    # The service makes a network
    # endpoint for our app
    app: queue

  # Setting the Service type to ClusterIP makes the
  # service only available from inside the cluster
  # To expose a port use NodePort instead
  type: NodePort
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_29.png)

---

We can now apply all changes with:

```bash
kubectl apply -f .
kubectl get all
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_30.png)

---

You can inspect the pod, in case that something doesn't look right. All events will be logged at the end of the console output:

```bash
kubectl describe pod queue
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_31.png)

---

Type in the IP of your __Kubernetes Cluster__ (_WAN IP of your master server_) and add the port __30010__ to access the updated web interface with your browser:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_31a.png)

---

## Replication (not recommended)

So far we spawned all our Pods manually using the `kubectl apply` command. This way it is us who is responsible for the complete life-cycle of the pod.
That means that you can delete (gracefully shut down) or force-shut-down those Pods with the `kubectl delete` command:

```bash
kubectl delete pod webapp-release-0-5
kubectl delete pod webapp-release-0-5 --force
```

The Pod will just disappear - check with `kubectl get all` - and not be restarted. The same would happen if your app crashes, and that is a situation where we want Kubernetes to recover Pods to keep our App running.

### ReplicaSets

The [ReplicaSet](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.13/#replicaset-v1-apps) is basically just a wrapper around our Pod configuration that defines the number of instances of our Pod we want to be running at all times - if one crashes, it will automatically be replaced by a new version of the same pod. Let's re-write our Pod definition to a __ReplicaSet__ - below is the original `pods.yaml` file (the webapp with release0 was removed):

```yaml
apiVersion: v1
kind: Pod
metadata:
  name: webapp
  labels:
    app: webapp
    release: "0-5"
spec:
  containers:
  - name: webapp
    image: richardchesterwood/k8s-fleetman-webapp-angular:release0-5

---
apiVersion: v1
kind: Pod
metadata:
  name: queue
  labels:
    app: queue
spec:
  containers:
  - name: queue
    image: richardchesterwood/k8s-fleetman-queue:release1
```

The __ReplicaSet__ just adds a few lines to the top of our Pod configuration:

```yaml
apiVersion: apps/v1
kind: ReplicaSet
metadata:
  # Unique key of the ReplicaSet instance
  name: webapp
spec:
  selector:
    matchLabels:
      # the ReplicaSet manages all Pods
      # where the label = app: webapp
      app: webapp
  # only 1 Pod should exist at the moment - if it
  # crashes, a new pod will be spawned.
  replicas: 1
  # Here starts the Pod Definition from before
  template:
    metadata:
      # name: webapp / now we use the ReplicaSet name
      labels:
        app: webapp
    spec:
      containers:
      - name: webapp
        image: richardchesterwood/k8s-fleetman-webapp-angular:release0-5

---
apiVersion: v1
kind: Pod
metadata:
  name: queue
  labels:
    app: queue
spec:
  containers:
  - name: queue
    image: richardchesterwood/k8s-fleetman-queue:release1
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_32.png)

---

Before applying our new __ReplicaSet__ let's first delete all pods we created earlier with `kubectl delete pods --all` and then start the new Pods with `kubectl apply -f pods.yaml`. You can verify that everything worked with `kubectl get all` and `kubectl describe replicaset webapp`:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_33.png)

---

We earlier added a release selector to the `services.yaml` that now needs to be removed, as we are no longer using it as a label for our Pod - afterwards run `kubectl apply -f services.yaml`:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_34.png)

---

If you now delete the Pod manually - simulating a crash - a new pod will be automatically spawned by Kubernetes to replace the Pod you just lost:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_35.png)

---

In the case above, your web service would have experienced a few seconds of downtime while waiting for the new pod to spin up. To prevent this issue we can go back to `pods.yaml` and set the number of __replicas__ to __2__.
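If you just want to experiment, the same scale-up can also be done imperatively - a sketch (keep in mind that the next `kubectl apply -f pods.yaml` resets the count to the value in the file):

```bash
# temporarily scale the live ReplicaSet to 2 pods
kubectl scale replicaset webapp --replicas=2
kubectl get pods
```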
Now if the first pod crashes, the second one will replace it right away:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_36.png)

---

## Deployment (recommended)

Unlike [ReplicaSets](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.13/#replicaset-v1-apps), [Deployments](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.13/#deployment-v1-apps) enable __rolling updates__ with 0 downtime as well as __roll-backs__ to older versions of your app! Let's start with deleting our ReplicaSet `kubectl delete replicaset webapp` - this is going to remove both the set and all pods that have been spawned from it:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_37.png)

---

To now change our __ReplicaSet__ into a __Deployment__ we only have to change the `kind` to `Deployment` inside our `pods.yaml`:

```yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  # Unique key of the Deployment
  name: webapp
spec:
  selector:
    matchLabels:
      # the Deployment manages all Pods
      # where the label = app: webapp
      app: webapp
  # 2 Pods should exist at all times - if one
  # crashes, a new pod will be spawned.
  replicas: 2
  # Here starts the Pod Definition from before
  template:
    metadata:
      # name: webapp / now we use the Deployment name
      labels:
        app: webapp
    spec:
      containers:
      - name: webapp
        image: richardchesterwood/k8s-fleetman-webapp-angular:release0

---
apiVersion: v1
kind: Pod
metadata:
  name: queue
  labels:
    app: queue
spec:
  containers:
  - name: queue
    image: richardchesterwood/k8s-fleetman-queue:release1
```

We also changed the version of our app back to `release0` so we can later test out the update procedure. Then apply the changes you made with `kubectl apply -f pods.yaml`:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_38.png)

---

As you can see, the __Deployment__ was created and set up a __ReplicaSet__ for us that spawned two __Pods__:

* deployment.apps/webapp 2/2
* replicaset.apps/webapp-5fb4b78949 2
* pod/webapp-5fb4b78949-77ddd 1/1
* pod/webapp-5fb4b78949-kf7rr 1/1

### Rolling Update and Rollback

Now to update our app back to `release0-5` edit `pods.yaml` and set the image back to the 0.5 release. Afterwards run `kubectl apply -f pods.yaml`. This will start up a new __ReplicaSet__ with the updated pods, but keep the old set alive until the update is ready - resulting in a 0-downtime, rolling update of our app:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_39.png)

---

As seen above, the new ReplicaSet was successfully deployed - but the old one was __not deleted__. In case that something goes wrong during the update and the new pods never spin up, the old ones will not go down - keeping your app alive. This also allows you to always come back to the old version, if something goes wrong after the update.

You can follow the rollout process with the following command:

```bash
kubectl rollout status deployment webapp
```

To roll back to a working version of your app - after a disastrous update - check the __Rollout History__ and run the rollback command to go back to a previous version or specify the revision you need:

```bash
kubectl rollout history deployment webapp
kubectl rollout undo deployment webapp --to-revision=1
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_40.png)

---
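One detail worth adding here: the rollout history becomes much more readable when each revision records the command that created it. A sketch using the `--record` flag, which stores the command in the change-cause annotation of the deployment:

```bash
# record the command that triggered this revision
kubectl apply -f pods.yaml --record
# the CHANGE-CAUSE column is now filled in
kubectl rollout history deployment webapp
```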
## Networking and Service Discovery

We will now create a simple Pod to use as a test environment. Create a file named `busybox.yaml` with the following contents:

```yaml
apiVersion: v1
kind: Pod
metadata:
  name: busybox
  namespace: default
spec:
  containers:
  - name: busybox
    image: busybox:1.28
    command:
      - sleep
      - "3600"
    imagePullPolicy: IfNotPresent
  restartPolicy: Always
```

Then create a pod using this file and verify its status:

```bash
kubectl create -f busybox.yaml
pod/busybox created

kubectl get pods busybox
NAME      READY   STATUS    RESTARTS   AGE
busybox   1/1     Running   0          12s
```

Once that pod is running, you can `exec nslookup` in that environment. If you see something like the following, DNS is working correctly:

```bash
kubectl exec -ti busybox -- nslookup kubernetes.default
Server:    10.96.0.10
Address 1: 10.96.0.10 kube-dns.kube-system.svc.cluster.local

Name:      kubernetes.default
Address 1: 10.96.0.1 kubernetes.default.svc.cluster.local
```

`nslookup` successfully contacted the internal __CoreDNS server__ on `10.96.0.10` that Kubernetes provides and resolved the corresponding IP address for __kubernetes.default__. If the address resolution fails, try to bring down __FirewallD__ and test which port is being blocked - don't forget to bring it back up afterwards:

```bash
systemctl stop firewalld && systemctl disable firewalld
systemctl start firewalld && systemctl enable firewalld
```

__What to do if shutting down FirewallD "solves" your problem?__

There are several threads on Github talking about this issue - [this one](https://github.com/kubernetes/kubeadm/issues/504) solved the issue for me:

* __Problem__: `nslookup` does not resolve any domains when FirewallD is active:

```bash
[root@in-centos-master ~]# kubectl exec -ti webapp-cf477f847-9wqft sh
/ # nslookup database
nslookup: can't resolve '(null)': Name does not resolve
nslookup: can't resolve 'database': Try again
/ # nslookup google.com
nslookup: can't resolve '(null)': Name does not resolve
nslookup: can't resolve 'google.com': Try again
/ # exit
command terminated with exit code 1
```

* __Solution__: Configure `iptables` as follows by [copying this](https://github.com/kubernetes/kubeadm/issues/504#issuecomment-404737675) to all nodes in the cluster:

```bash
systemctl stop kubelet
systemctl stop docker
rm -rf /var/lib/cni/
rm -rf /etc/cni/net.d
rm -rf /run/flannel
iptables -t nat -F
iptables -t mangle -F
iptables -F
iptables -X
iptables -P INPUT ACCEPT
iptables -P FORWARD ACCEPT
iptables -P OUTPUT ACCEPT
ip6tables -t nat -F
ip6tables -t mangle -F
ip6tables -F
ip6tables -X
ip6tables -P INPUT ACCEPT
ip6tables -P FORWARD ACCEPT
ip6tables -P OUTPUT ACCEPT
iptables -L -v -n
iptables -L -v -n -t nat
conntrack -L
ipset list
lsmod|grep br_netfilter
modprobe br_netfilter
systemctl start docker
systemctl start kubelet
```

`nslookup` should now work:

```bash
/ # nslookup database
nslookup: can't resolve '(null)': Name does not resolve

Name:      database
Address 1: 10.105.73.87 database.default.svc.cluster.local
/ #
```

Or check the [DNS Debugging](https://kubernetes.io/docs/tasks/administer-cluster/dns-debugging-resolution/) section in the Kubernetes documentation.

To be able to connect applications in different pods - e.g. having a backend container accessing a database in a different pod - Kubernetes has its own __DNS Service__ and we can assign pods to __Namespaces__ to group them together. When we create a pod without specifying a namespace, it will be assigned the `namespace: default`. The command `kubectl get all` shows us all pods that are inside this specific space.
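Namespaces are themselves just Kubernetes resources - as a quick sketch (the namespace name `testing` is only an example), you could create your own and list the pods inside it:

```bash
# create a new, empty namespace and query it
kubectl create namespace testing
kubectl get pods --namespace=testing
```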
We can run `kubectl get namespaces` to see all available spaces that have been assigned by the DNS service:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_41.png)

---

If we want to see Pods running in the __kube-system__ namespace we can use the `kubectl get pods -n kube-system` command.

### Creating a Database Pod

We now have a Web Application and a message broker in place. Next we want to add a MariaDB database, and we do so with the following configuration file `networking-tests.yaml`:

```yaml
apiVersion: v1
kind: Pod
metadata:
  name: mysql
  labels:
    app: mysql
spec:
  containers:
    - name: mysql
      image: mysql:5
      env:
        # Use secret in real life
        - name: MYSQL_ROOT_PASSWORD
          value: <PASSWORD>
        - name: MYSQL_DATABASE
          value: fleetman

---
apiVersion: v1
kind: Service
metadata:
  name: database
spec:
  selector:
    app: mysql
  ports:
    - port: 3306
  type: ClusterIP
```

We can apply this configuration to our cluster with `kubectl apply -f networking-tests.yaml` and verify that it is running with `kubectl get all`:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_42.png)

---

We now want to see if we can connect to the database from our webapp container. To do so we need to execute a shell inside the container and check the DNS configuration file:

```bash
kubectl exec -it webapp-cf477f847-9wqft sh

/ # cat /etc/resolv.conf
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_43.png)

---

In this case the CoreDNS server is registered under `nameserver 10.96.0.10` - that means that every time we need to resolve a domain name, like we did earlier with `kubernetes.default`, this DNS server is contacted to resolve the underlying IP address for us. We can verify this by typing:

```bash
kubectl get services -n kube-system
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_44.png)

---

That means that if we want to connect to our database server from another pod in our cluster (__Service Discovery__), all we need to do is to connect to the domain name `database` that is provided by the database service. This is very important since the assigned IP address can change all the time!

Going back into the webapp container we can run an `nslookup` to see if the DNS service is working correctly:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_45.png)

---

As you see, we can not only resolve internal cluster IPs but also internet domains - the CoreDNS service is using `8.8.8.8` and `8.8.4.4` as __Upstream DNS Servers__ [by default](https://kubernetes.io/docs/tasks/administer-cluster/dns-custom-nameservers/#configure-stub-domain-and-upstream-dns-servers).

You can also see that the FQDN (__Fully Qualified Domain Name__) of our database is actually `database.default.svc.cluster.local`. The reason why we can simply use `database` to get a result is that the `/etc/resolv.conf` file specifies that, if a domain name is not found, one of the standard search domains is appended to it and the lookup is tried again:

```bash
search default.svc.cluster.local svc.cluster.local cluster.local
```

Just using `database` as the domain works fine because the pod is inside the __default namespace__. As soon as you start using different namespaces for different types of services, you __have to use the FQDN__ instead - or at least append the used namespace to it, e.g. `database.mynondefaultnamespace` (the DNS service will then add `svc.cluster.local` to find the FQDN for you).
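For example, from a pod in a different namespace the lookup for our database service would have to use the FQDN - a sketch based on the service created above:

```bash
/ # nslookup database.default.svc.cluster.local
```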
To check if the webapp container can access the MariaDB database, we can install the __MySQL client__ on the _Alpine image_ that serves our webapp:

```bash
apk update
apk add mysql-client
```

To connect to our database we just have to type `mysql` followed by the host address `-h database` and the login credentials we set inside the configuration file earlier:

```bash
mysql -h database -uroot -ppassword fleetman

MySQL [fleetman]> CREATE TABLE testtable (test varchar (255));
MySQL [fleetman]> SHOW TABLES;
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_46.png)

---

## Kubernetes + Compose = Kompose

[Kompose](http://kompose.io) is a conversion tool for Docker Compose to container orchestrators such as Kubernetes (or OpenShift).

* Simplify your development process with Docker Compose and then deploy your containers to a production cluster
* [Convert](https://kubernetes.io/docs/tasks/configure-pod-container/translate-compose-kubernetes/) your _docker-compose.yaml_ with one simple command `kompose convert`
* Immediately bring up your cluster with `kompose up`
* Bring it back down with `kompose down`

Kompose is in the EPEL CentOS repository. If you don't have the EPEL repository already installed and enabled you can do it by running `yum install epel-release`:

```bash
yum -y install kompose
```

### Use Kompose

In just a few steps, we'll take you from Docker Compose to Kubernetes. All you need is an existing `docker-compose.yml` file:

1. Go to the directory containing your _docker-compose.yml_ file. Run the `kompose up` command to deploy to Kubernetes directly.
2. Or convert the _docker-compose.yml_ file to files that you can use with `kubectl` by running `kompose convert`:

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_47.png)

---

And then `kubectl create -f <output files>` - e.g.:

```bash
kubectl create -f api-service.yaml,elasticsearch-service.yaml,frontend-service.yaml,wiki-service.yaml,api-deployment.yaml,api-claim0-persistentvolumeclaim.yaml,elasticsearch-deployment.yaml,esdata-en-persistentvolumeclaim.yaml,frontend-deployment.yaml,frontend-claim0-persistentvolumeclaim.yaml,wiki-deployment.yaml
```

---

![Creating a Kubernetes Cluster](./kubernetes_cluster_48.png)

---
<file_sep>---
date: "2019-01-27"
title: "Kubernetes the Chinese Way"
categories:
- LINUX
- Docker
- Kubernetes
---

![Shenzhen, China](./photo-76834w36_4356dfgdsds_o.png)

<!-- TOC -->

- [Prerequisites](#prerequisites)
- [Installing Docker](#installing-docker)
- [Installing Kubernetes](#installing-kubernetes)

<!-- /TOC -->

> DRAFT - this article is still undergoing some "research"...

When trying to run a Kubernetes cluster in China you quickly realize that 404s will take away the fun from it. This tutorial is based on a [Github repository](https://github.com/mpolinowski/kubernetes-for-china) but re-written to cater for CentOS servers.

## Prerequisites

Install the required packages. yum-utils provides the yum-config-manager utility, and device-mapper-persistent-data and lvm2 are required by the device mapper storage driver:

```bash
sudo yum install -y yum-utils device-mapper-persistent-data lvm2

swapoff -a
setenforce 0
```

Edit the following file `nano /etc/sysconfig/selinux` to disable SELinux - `SELINUX=disabled`. And for __iptables__ create the following file `nano /etc/sysctl.d/k8s.conf` with the content below:

```bash
net.bridge.bridge-nf-call-ip6tables = 1
net.bridge.bridge-nf-call-iptables = 1
```

And apply those changes with the following command `sysctl --system`.
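To verify that both bridge settings are actually active after the reload, you can query them directly - a quick check, assuming the `br_netfilter` module is loaded:

```bash
# both keys should report = 1
sysctl net.bridge.bridge-nf-call-iptables net.bridge.bridge-nf-call-ip6tables
```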
## Installing Docker

Now we can [install DockerCE](https://docs.docker.com/install/linux/docker-ce/centos/) using the Aliyun cloud mirror:

```bash
curl -fsSL https://get.docker.com | bash -s docker --mirror Aliyun
```

![Kubernetes Aliyun](./Kubernetes-Aliyun-01.png)

Now you can start and enable the Docker service, to have it automatically run in the background when you start your server:

```bash
systemctl start docker
systemctl enable docker
```

![Kubernetes Aliyun](./Kubernetes-Aliyun-02.png)

## Installing Kubernetes

Kubernetes can be installed from the [Alibaba Mirror](https://opsx.alibaba.com/mirror) which holds both a [Debian/Ubuntu (apt)](https://mirrors.aliyun.com/kubernetes/apt/) as well as a [RHEL/CentOS (yum)](https://mirrors.aliyun.com/kubernetes/yum/) version. For CentOS there are currently [two versions hosted](https://mirrors.aliyun.com/kubernetes/yum/repos/), el6 and el7, for RHEL6/CentOS6 and RHEL7/CentOS7, respectively:

![Kubernetes Aliyun](./Kubernetes-Aliyun-03.png)

On CentOS 7 we have to install the el7 version by adding the repository `nano /etc/yum.repos.d/kubernetes.repo`:

```yaml
[kubernetes]
name=Kubernetes
baseurl=https://mirrors.aliyun.com/kubernetes/yum/repos/kubernetes-el7-x86_64
enabled=1
gpgcheck=0
```

> __NOTE__: The following installation fails when you activate the _gpgcheck_. Re-running the installation after setting _gpgcheck=0_ "solved" this. I find this slightly troublesome, but will proceed for now - since this is just a local test server environment.

![Kubernetes Aliyun](./Kubernetes-Aliyun-04.png)

```bash
rpm --import https://mirrors.aliyun.com/kubernetes/yum/doc/yum-key.gpg
yum update
yum install -y kubelet kubeadm kubectl
```

![Kubernetes Aliyun](./Kubernetes-Aliyun-05.png)

As described above - this install failed due to the active key check. Going back to `nano /etc/yum.repos.d/kubernetes.repo` and setting `gpgcheck=0` solved this for now:

![Kubernetes Aliyun](./Kubernetes-Aliyun-06.png)

<!--
Originally I planned to set up an istio environment and had installed a Kubernetes 1.5.2 cluster by default - it turned out that the RBAC mode is not supported there, so I switched to Kubernetes 1.7.5. Good luck!

1. Basic environment configuration - the following environment is based on Docker

The Kubernetes yum source must be changed on both master and slave:
vi /etc/yum.repos.d/kubernetes.repo
[kubernetes]
name=Kubernetes
baseurl=https://mirrors.aliyun.com/kubernetes/yum/repos/kubernetes-el7-x86_64
enabled=1
gpgcheck=0

(1) Disable swap
swapoff -a

(2) Disable SELinux and change the SELINUX attribute
setenforce 0
vi /etc/sysconfig/selinux
SELINUX=disabled

(3) Configure iptables
vi /etc/sysctl.d/k8s.conf
net.bridge.bridge-nf-call-ip6tables = 1
net.bridge.bridge-nf-call-iptables = 1
Same as for iptables above, apply with: sysctl --system

[1] This prevents the following error during kubeadm init:
[kubeadm] WARNING: kubeadm is in beta, please do not use it for production clusters.
[init] Using Kubernetes version: v1.7.5
[init] Using Authorization modes: [Node RBAC]
[preflight] Running pre-flight checks
[preflight] WARNING: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
[preflight] WARNING: docker service is not enabled, please run 'systemctl enable docker.service'
[preflight] Some fatal errors occurred:
/proc/sys/net/bridge/bridge-nf-call-iptables contents are not set to 1

(4) Install socat
yum install -y socat
rpm -qa|grep socat
systemctl enable socat

2. Installing the Kubernetes cluster

Master: installing the master

1. Check Docker - if Docker is not installed, see the earlier Docker series:
docker version

2. Install kubectl, kubelet and kubeadm [because of the yum source problem, fetch them from the Aliyun mirror]:
yum install -y kubelet kubeadm kubectl

(2) Start kubelet
systemctl start kubelet
systemctl status kubelet # check whether it started successfully
Note: [some people say you first have to change the parameters in /etc/systemd/system/kubelet.service.d/10-kubeadm.conf - after changing them I found that kubelet would no longer start]

(3) Initialize the master
kubeadm init --apiserver-advertise-address=192.168.117.132 --pod-network-cidr=192.168.0.0/16 --kubernetes-version=v1.7.5 --skip-preflight-checks

Parameter notes:
[1] --kubernetes-version=v1.7.5 - leaving out the version number leads to this error:
[kubeadm] WARNING: kubeadm is in beta, please do not use it for production clusters.unable to get URL "https://storage.googleapis.com/kubernetes-release/release/stable-1.7.txt": Get https://storage.googleapis.com/kubernetes-release/release/stable-1.7.txt: net/http: TLS handshake timeout
or
[kubeadm] WARNING: kubeadm is in beta, please do not use it for production clusters. unable to get URL "https://storage.googleapis.com/kubernetes-release/release/stable-1.7.txt": Get https://storage.googleapis.com/kubernetes-release/release/stable-1.7.txt: dial tcp 172.16.17.32:443: getsockopt: connection refused

[2] --skip-preflight-checks prevents the config files from being checked on every initialization, otherwise you may get errors after several init runs [etcd in use, kubelet in use - because on every init kubeadm actually starts etcd/kubelet]

[3] Another possible error is an image error - the initialization hangs forever at
[apiclient] Created API client, waiting for the control plane to become ready
or
Unfortunately, an error has occurred:
timed out waiting for the condition
This error is likely caused by that:
- The kubelet is not running
- The kubelet is unhealthy due to a misconfiguration of the node in some way (required cgroups disabled)
- There is no internet connection; so the kubelet can't pull the following control plane images:
- gcr.io/google_containers/kube-apiserver-amd64:v1.8.4
- gcr.io/google_containers/kube-controller-manager-amd64:v1.8.4
- gcr.io/google_containers/kube-scheduler-amd64:v1.8.4
You can troubleshoot this for example with the following commands if you're on a systemd-powered system:
- 'systemctl status kubelet'
- 'journalctl -xeu kubelet'
couldn't initialize a Kubernetes cluster

{Someone online suggested a good workaround: download the required images up front and re-tag them, although this may lead to image mismatch problems}
docker pull docker.io/sylzd/etcd-amd64-3.0.17
docker tag docker.io/sylzd/etcd-amd64-3.0.17:latest gcr.io/google_containers/etcd-amd64:3.0.17
docker pull registry.cn-hangzhou.aliyuncs.com/google-containers/kube-apiserver-amd64:v1.7.5
docker tag registry.cn-hangzhou.aliyuncs.com/google-containers/kube-apiserver-amd64:v1.7.5 gcr.io/google_containers/kube-apiserver-amd64:v1.7.5
docker pull registry.cn-hangzhou.aliyuncs.com/google-containers/kube-controller-manager-amd64:v1.7.5
docker tag registry.cn-hangzhou.aliyuncs.com/google-containers/kube-controller-manager-amd64:v1.7.5 gcr.io/google_containers/kube-controller-manager-amd64:v1.7.5
docker pull registry.cn-hangzhou.aliyuncs.com/google-containers/kube-scheduler-amd64:v1.7.5
docker tag registry.cn-hangzhou.aliyuncs.com/google-containers/kube-scheduler-amd64:v1.7.5 gcr.io/google_containers/kube-scheduler-amd64:v1.7.5
docker pull visenzek8s/pause-amd64:3.0
docker tag visenzek8s/pause-amd64:3.0 gcr.io/google_containers/pause-amd64:3.0
docker pull mirrorgooglecontainers/kube-proxy-amd64:v1.7.5
docker tag mirrorgooglecontainers/kube-proxy-amd64:v1.7.5 gcr.io/google_containers/kube-proxy-amd64:v1.7.5
docker pull registry.cn-hangzhou.aliyuncs.com/google-containers/k8s-dns-kube-dns-amd64:1.14.4
docker tag registry.cn-hangzhou.aliyuncs.com/google-containers/k8s-dns-kube-dns-amd64:1.14.4 gcr.io/google_containers/k8s-dns-kube-dns-amd64:1.14.4
docker pull registry.cn-hangzhou.aliyuncs.com/google-containers/k8s-dns-sidecar-amd64:1.14.4
docker tag registry.cn-hangzhou.aliyuncs.com/google-containers/k8s-dns-sidecar-amd64:1.14.4 gcr.io/google_containers/k8s-dns-sidecar-amd64:1.14.4
docker pull mirrorgooglecontainers/k8s-dns-dnsmasq-nanny-amd64:1.14.4
docker tag mirrorgooglecontainers/k8s-dns-dnsmasq-nanny-amd64:1.14.4 gcr.io/google_containers/k8s-dns-dnsmasq-nanny-amd64:1.14.4
docker pull quay.io/coreos/flannel # fails
docker pull quay.io/coreos/flannel:v0.9.1-amd64

Note: only when the following message appears was the setup successful (remember the kubeadm join --token xxxxxxx command - it will be used on the slave servers). The kubeadm token list command can also show the token, but there may be several entries:

Your Kubernetes master has initialized successfully!To start using your cluster, you need to run (as a regular user):
mkdir -p $HOME/.kube
sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
sudo chown $(id -u):$(id -g) $HOME/.kube/configYou should now deploy a pod network to the cluster.Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at: http://kubernetes.io/docs/admin/addons/You can now join any number of machines by running the following on each nodeas root:
kubeadm join --token xxxxxxxxx

(4) Configure administering the cluster as the root user / as a normal user

Root mode:
vi /etc/profile
export KUBECONFIG=/etc/kubernetes/admin.conf
source /etc/profile

Normal user mode:
mkdir -p $HOME/.kube
cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
chown $(id -u):$(id -g) $HOME/.kube/config
This step is crucial, otherwise kubectl monitoring will throw errors

(5) Network configuration - you can choose between the Calico, Canal, Flannel and Weave network modes (some say Flannel works best, but I feel it has a lot of pitfalls). The two ways to configure it:
a. Download kube-flannel.yml locally first (or create a kube-flannel.yml file identical to the official one) and run in the current directory:
kubectl apply -f kube-flannel.yml
b. Apply it over the network:
kubectl apply -f https://raw.githubusercontent.com/coreos/flannel/v0.9.1/Documentation/kube-flannel.yml

Check whether flannel installed and started correctly:
kubectl get pods --all-namespaces
Note: pods showing Error, Pending, ImagePullBackOff or CrashLoopBackOff all failed to start - the cause needs to be carefully tracked down:
a. Check the /var/log/messages system log
b. kubectl describe pod kube-flannel-ds-2wk55 --namespace=kube-system
c. kubectl logs -f kube-dns-2425271678-37lf7 -n kube-system kubedns

(6) By default pods are not scheduled onto the master node:
kubectl taint nodes --all node-role.kubernetes.io/master-
Messages:
node "master" untainted
or
error: taint "node-role.kubernetes.io/master:" not found

Error [1]: because my first 1.5.2 installation was not removed cleanly:
Transaction check error: file /usr/bin/kubectl from install of kubectl-1.7.5-0.x86_64 conflicts with file from package kubernetes-client-1.5.2-0.7.git269f928.el7.x86_64

Slave: installing the slave servers

1. Same as the basic steps (1), (2), (3) above

2. Export some images from the master and import them into the slave:
docker save -o /opt/kube-pause.tar gcr.io/google_containers/pause-amd64:3.0
docker save -o /opt/kube-proxy.tar gcr.io/google_containers/kube-proxy-amd64:v1.7.5
docker save -o /opt/kube-flannel.tar quay.io/coreos/flannel:v0.9.1-amd64
docker load -i /opt/kube-flannel.tar
docker load -i /opt/kube-proxy.tar
docker load -i /opt/kube-pause.tar

3. Install kubeadm on the slave, then run the kubeadm join --token command printed after the master init:
kubeadm join --token <PASSWORD> 192.168.117.132:6443

4. Switch to the master and check whether the child node is connected properly:
kubectl get nodes

Errors:
[1] [discovery] Failed to request cluster info, will try again: [Get https://192.168.117.132:6443/api/v1/namespaces/kube-public/configmaps/cluster-info: dial tcp 192.168.117.132:6443: getsockopt: no route to host]
The reason is that the firewall is running on the master - just turn the firewall off.

[2] failed to check server version: Get https://192.168.117.132:6443/version: x509: certificate has expired or is not yet valid
The reason is that the time on master and slave is not synchronized. Just like with Elasticsearch/Kibana the time must be in sync, so installing ntp fixes it.

{*** A bumpy week - from all kinds of VPNs to GitHub, coming home every evening and building until midnight, this can finally come to a close. I referenced a lot of articles and will list them in detail when I find the time - corrections and feedback are always welcome ***}
-->
<file_sep>---
date: "2019-09-27"
title: "Home Assistant, OpenHAB, Node-RED, ioBroker, MotionEye Containerized"
categories:
- LINUX
- Smarthome
- IoT
- Docker
---

![<NAME>, Cambodia](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Installation through Podman on CentOS8](#installation-through-podman-on-centos8)
- [Home Assistant](#home-assistant)
- [Node-RED](#node-red)
- [Theming](#theming)
- [OpenHAB](#openhab)
- [ioBroker](#iobroker)
- [MotionEye](#motioneye)

<!-- /TOC -->

## Installation through Podman on CentOS8

### Home Assistant

```bash
podman pull homeassistant/home-assistant:stable
```

```bash
mkdir /opt/homeassistant
mkdir /opt/homeassistant/config
chmod -R 775 /opt/homeassistant
```

```bash
podman run -d --rm --privileged --net=host --name="home-assistant" -e "TZ=Europe/Berlin" -v /opt/homeassistant/config:/config homeassistant/home-assistant:stable
```

There seems to be an issue with Podman - or the way I am using it. I have to run the container `privileged` to be able to write to the volume. Google did not help me solve this "issue", yet.

```bash
podman start home-assistant
podman stop home-assistant
```

### Node-RED

```bash
podman pull nodered/node-red
```

```bash
mkdir /opt/nodered
mkdir /opt/nodered/data
chmod -R 775 /opt/nodered
```

```bash
podman run -d --rm --privileged --net=host -v /opt/nodered/data:/data --name nodered nodered/node-red
```

```bash
podman start nodered
podman stop nodered
```

#### Theming

```bash
cd /opt/nodered/data
npm install @node-red-contrib-themes/midnight-red
```

Change the editorTheme section of your `settings.js` from:

```js
editorTheme: {
    projects: {
        // To enable the Projects feature, set this value to true
        enabled: false
    }
}
```

to:

```js
// Customising the editor
editorTheme: {
    page: {
        css: "/opt/nodered/data/node_modules/@node-red-contrib-themes/midnight-red/theme.css"
    }
}
```

### OpenHAB

```bash
groupadd -g 9001 openhab
useradd -g 9001 openhab
usermod -a -G openhab myownuser
```

```bash
mkdir /opt/openhab
mkdir /opt/openhab/openhab_addons
mkdir /opt/openhab/openhab_conf
mkdir /opt/openhab/openhab_userdata
chmod -R 775 /opt/openhab
```

```bash
podman run \
        --name openhab \
        --net=host \
        --privileged \
        --rm \
        -v /etc/localtime:/etc/localtime:ro \
        -v /opt/openhab/openhab_addons:/openhab/addons \
        -v /opt/openhab/openhab_conf:/openhab/conf \
        -v /opt/openhab/openhab_userdata:/openhab/userdata \
        -d \
        openhab/openhab:latest
```

#### OpenHAB CLI

```bash
podman exec -it openhab /openhab/runtime/bin/client -p habopen
```

#### Updating the Image

Use the following steps to update the container image and all installed add-ons.

Stop the container:

```bash
podman stop openhab
```

Delete the container:

```bash
podman rm openhab
```

Delete the contents of `/opt/openhab/openhab_userdata/cache` and `/opt/openhab/openhab_userdata/tmp`:

```bash
rm -rf /opt/openhab/openhab_userdata/cache
rm -rf /opt/openhab/openhab_userdata/tmp
```

Pull down the latest image:

```bash
podman pull openhab/openhab:latest
```

Restart the container using the full command above.
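Putting those update steps together, a minimal update script could look like this - a sketch based on the container name and paths used above:

```bash
#!/bin/bash
# stop and remove the running container
podman stop openhab
podman rm openhab   # only needed if the container was not started with --rm
# clear the cache so add-ons get rebuilt against the new version
rm -rf /opt/openhab/openhab_userdata/cache
rm -rf /opt/openhab/openhab_userdata/tmp
# fetch the new image - then restart with the full run command from above
podman pull openhab/openhab:latest
```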
### ioBroker

```bash
podman pull buanet/iobroker:latest
```

And run it:

```bash
podman run \
        --net=host \
        --privileged \
        --rm \
        --name iobroker \
        -v /opt/iobroker:/opt/iobroker \
        -v /opt/iobroker/home:/home/iobroker \
        --detach=true \
        buanet/iobroker:latest
```

### MotionEye

```bash
podman pull ccrisan/motioneye:master-amd64
```

And run it:

```bash
podman run --name="motioneye" \
        --net=host \
        --privileged \
        --rm \
        --hostname="motioneye" \
        -v /etc/localtime:/etc/localtime:ro \
        -v /opt/motioneye:/etc/motioneye \
        -v /opt/motioneye/lib:/var/lib/motioneye \
        --security-opt label=disable \
        --detach=true \
        ccrisan/motioneye:master-amd64
```
<file_sep>---
date: "2019-09-16"
title: "Vanilla Forums Installation on Debian"
categories:
- LINUX
---

![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Prerequisites](#prerequisites)
- [NGINX](#nginx)
- [Sample Configurations](#sample-configurations)

<!-- /TOC -->

## Prerequisites

It is strongly recommended to set up the following environment on your server:

* PHP 7.3 or higher.
* MySQL 5.7 or higher (or the MariaDB equivalent).
* SSL encryption (check out LetsEncrypt).

## NGINX

The most important consideration to getting Vanilla running on nginx is to make sure the rewrite rules are correct. Below is one suggested configuration which locks down the server to only respond to requests via `index.php`, which we strongly recommend if Vanilla is the only application running.

Make sure that you set the `fastcgi_param` named `X_REWRITE` to `1`.

Make sure that `fastcgi_pass` is set to the name of your actual upstream (in our example below, it's named `php-fpm`), or call your PHP install directly by socket, for example: `unix:/run/php/php7.3-fpm.sock`.

Make sure that you define `PATH_INFO` in your `fastcgi_param` file. You may find this example set of [FastCGI params helpful](https://www.nginx.com/resources/wiki/start/topics/examples/phpfcgi/).

When configuring FastCGI, using `$realpath_root` instead of `$document_root` may be necessary in some setups (e.g. when using symlinks).

We define `SCRIPT_NAME` and `SCRIPT_FILENAME` explicitly because some configurations may redundantly re-add them during the rewrite, resulting in a name of `/index.php/index.php`. The end result of this is that all your JavaScript and CSS asset paths in the page start with `/index.php`, thus breaking them. Feel free to omit those two lines if you're confident your configuration is immune.

### Sample Configurations

This would go within the appropriate `server { }` block. It assumes you've already assigned a `root` and `index`, among other things:

```conf
# Block some folders as an extra hardening measure.
location ~* /\.git { deny all; return 403; }
location /build/ { deny all; return 403; }
location /cache/ { deny all; return 403; }
location /cgi-bin/ { deny all; return 403; }
location /uploads/import/ { deny all; return 403; }
location /conf/ { deny all; return 403; }
location /tests/ { deny all; return 403; }
location /vendor/ { deny all; return 403; }

# This handles all the main requests through index.php.
location ~* ^/index\.php(/|$) {
    # send to fastcgi
    include fastcgi.conf;

    fastcgi_param SCRIPT_NAME /index.php;
    fastcgi_param SCRIPT_FILENAME $realpath_root/index.php;
    fastcgi_param X_REWRITE 1;
    fastcgi_pass php-fpm; # where 'php-fpm' is the upstream, probably defined in nginx.conf
}

# If this is some other PHP script, disallow it by redirecting to /index.php
location ~* \.php(/|$) {
    rewrite ^ /index.php$uri last;
}

# Default path handling
location / {
    try_files $uri @vanilla;
}

location @vanilla {
    rewrite ^ /index.php$uri last;
}
```
<file_sep>---
date: "2019-01-21"
title: "Kubernetes Bare Metal Ingress"
categories:
- LINUX
- Docker
- Kubernetes
---

![<NAME>, Hongkong](./photo-34607876585_6e59cd762e_o.png)

<!-- TOC -->

- [Bare-metal considerations](#bare-metal-considerations)
- [Network Load-balancer MetalLB](#network-load-balancer-metallb)
- [Installation With Kubernetes Manifests](#installation-with-kubernetes-manifests)
- [MetalLB Configuration](#metallb-configuration)
- [Layer 2 Configuration](#layer-2-configuration)

<!-- /TOC -->

> DRAFT - this article is still undergoing some "research"...

## Bare-metal considerations

In traditional cloud environments, where network load balancers are available on-demand, a single Kubernetes manifest suffices to provide a single point of contact to the NGINX Ingress controller to external clients and, indirectly, to any application running inside the cluster. [Bare-metal environments](https://kubernetes.github.io/ingress-nginx/deploy/baremetal/) lack this commodity, requiring a slightly different setup to offer the same kind of access to external consumers.

The rest of this article describes a few recommended approaches to deploying the [NGINX Ingress controller](https://github.com/kubernetes/ingress-nginx) inside a Kubernetes cluster running on bare-metal.

## Network Load-balancer MetalLB

[MetalLB](https://metallb.universe.tf/concepts/) hooks into your Kubernetes cluster, and provides a network load-balancer implementation. In short, it allows you to create Kubernetes services of type "LoadBalancer" in clusters that don't run on a cloud provider, and thus cannot simply hook into paid products to provide load-balancers.

It has two features that work together to provide this service: address allocation, and external announcement:

* __Address Allocation__: In a cloud-enabled Kubernetes cluster, you request a load-balancer, and your cloud platform assigns an IP address to you. In a bare metal cluster, MetalLB is responsible for that allocation.
* __External Announcement__: Once MetalLB has assigned an external IP address to a service, it needs to make the network beyond the cluster aware that the IP "lives" in the cluster. MetalLB uses standard routing protocols to achieve this: ARP, NDP, or BGP.

Before starting with the installation, make sure you meet all the [requirements](https://metallb.universe.tf/#requirements).

### Installation With Kubernetes Manifests

To install MetalLB, simply apply the manifest:

```bash
kubectl apply -f https://raw.githubusercontent.com/google/metallb/v0.7.3/manifests/metallb.yaml
```

This will deploy MetalLB to your cluster, under the metallb-system namespace. The components in the manifest are:

* The `metallb-system/controller` deployment. This is the cluster-wide controller that handles IP address assignments.
* The `metallb-system/speaker` daemonset. This is the component that speaks the protocol(s) of your choice to make the services reachable.

The installation manifest does not include a configuration file.
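You can check that the controller and the speakers came up before continuing - a quick sanity check:

```bash
# all pods in the metallb-system namespace should be Running
kubectl get pods -n metallb-system
```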
MetalLB's components will still start, but will remain idle until you define and deploy a configmap.

### MetalLB Configuration

To configure MetalLB, write a config map to `metallb-system/config` like [this example](https://raw.githubusercontent.com/google/metallb/v0.7.3/manifests/example-config.yaml):

```yaml
apiVersion: v1
kind: ConfigMap
metadata:
  namespace: metallb-system
  name: config
data:
  config: |
    # The peers section tells MetalLB what BGP routers to connect to. There
    # is one entry for each router you want to peer with.
    peers:
    - # The target IP address for the BGP session.
      peer-address: 10.0.0.1
      # The BGP AS number that MetalLB expects to see advertised by
      # the router.
      peer-asn: 64512
      # The BGP AS number that MetalLB should speak as.
      my-asn: 64512
      # (optional) the TCP port to talk to. Defaults to 179, you shouldn't
      # need to set this in production.
      peer-port: 179
      # (optional) The proposed value of the BGP Hold Time timer. Refer to
      # BGP reference material to understand what setting this implies.
      hold-time: 120
      # (optional) The router ID to use when connecting to this peer. Defaults
      # to the node IP address. Generally only useful when you need to peer with
      # another BGP router running on the same machine as MetalLB.
      router-id: 1.2.3.4
      # (optional) Password for TCPMD5 authenticated BGP sessions
      # offered by some peers.
      password: "<PASSWORD>"
      # (optional) The nodes that should connect to this peer. A node
      # matches if at least one of the node selectors matches. Within
      # one selector, a node matches if all the matchers are
      # satisfied. The semantics of each selector are the same as the
      # label- and set-based selectors in Kubernetes, documented at
      # https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/.
      # By default, all nodes are selected.
      node-selectors:
      - # Match by label=value
        match-labels:
          kubernetes.io/hostname: prod-01
        # Match by 'key OP values' expressions
        match-expressions:
        - key: beta.kubernetes.io/arch
          operator: In
          values: [amd64, arm]

    # The address-pools section lists the IP addresses that MetalLB is
    # allowed to allocate, along with settings for how to advertise
    # those addresses over BGP once assigned. You can have as many
    # address pools as you want.
    address-pools:
    - # A name for the address pool. Services can request allocation
      # from a specific address pool using this name, by listing this
      # name under the 'metallb.universe.tf/address-pool' annotation.
      name: my-ip-space
      # Protocol can be used to select how the announcement is done.
      # Supported values are bgp and layer2.
      protocol: bgp
      # A list of IP address ranges over which MetalLB has
      # authority. You can list multiple ranges in a single pool, they
      # will all share the same settings. Each range can be either a
      # CIDR prefix, or an explicit start-end range of IPs.
      addresses:
      - 198.51.100.0/24
      - 192.168.0.150-192.168.0.200
      # (optional) If true, MetalLB will not allocate any address that
      # ends in .0 or .255. Some old, buggy consumer devices
      # mistakenly block traffic to such addresses under the guise of
      # smurf protection. Such devices have become fairly rare, but
      # the option is here if you encounter serving issues.
      avoid-buggy-ips: true
      # (optional, default true) If false, MetalLB will not automatically
      # allocate any address in this pool. Addresses can still explicitly
      # be requested via loadBalancerIP or the address-pool annotation.
      auto-assign: false
      # (optional) A list of BGP advertisements to make, when
      # protocol=bgp.
      # Each address that gets assigned out of this pool
      # will turn into this many advertisements. For most simple
      # setups, you'll probably just want one.
      #
      # The default value for this field is a single advertisement with
      # all parameters set to their respective defaults.
      bgp-advertisements:
      - # (optional) How much you want to aggregate up the IP address
        # before advertising. For example, advertising 1.2.3.4 with
        # aggregation-length=24 would end up advertising 1.2.3.0/24.
        # For the majority of setups, you'll want to keep this at the
        # default of 32, which advertises the entire IP address
        # unmodified.
        aggregation-length: 32
        # (optional) The value of the BGP "local preference" attribute
        # for this advertisement. Only used with IBGP peers,
        # i.e. peers where peer-asn is the same as my-asn.
        localpref: 100
        # (optional) BGP communities to attach to this
        # advertisement. Communities are given in the standard
        # two-part form <asn>:<community number>. You can also use
        # alias names (see below).
        communities:
        - 64512:1
        - no-export

    # (optional) BGP community aliases. Instead of using hard to
    # read BGP community numbers in address pool advertisement
    # configurations, you can define alias names here and use those
    # elsewhere in the configuration. The "no-export" community used
    # above is defined below.
    bgp-communities:
      # no-export is a well-known BGP community that prevents
      # re-advertisement outside of the immediate autonomous system,
      # but people don't usually recognize its numerical value. :)
      no-export: 65535:65281
```

### Layer 2 Configuration

[Layer 2 mode](https://metallb.universe.tf/tutorial/layer2/) is the simplest to configure. The nice thing about layer 2 mode is that you don't need any fancy network hardware at all, it should just work on any ethernet network.

For example, the following configuration gives MetalLB control over IPs from __192.168.1.240__ to __192.168.1.250__, and configures [Layer 2 mode](https://metallb.universe.tf/configuration/#layer-2-configuration):

```yaml
apiVersion: v1
kind: ConfigMap
metadata:
  namespace: metallb-system
  name: config
data:
  config: |
    address-pools:
    - name: default
      protocol: layer2
      addresses:
      - 192.168.1.240-192.168.1.250
```

---

![A Kubernetes Cluster & Microservices](./kubernetes_logging_04.png)

---
<file_sep>---
date: "2019-02-01"
title: "Kubernetes Traefik Ingress"
categories:
- LINUX
- Docker
- Kubernetes
---

![<NAME>](./photo-34445481222_d3b67160da_o.jpg)

<!-- TOC -->

- [Getting Started with the Traefik Reverse Proxy](#getting-started-with-the-traefik-reverse-proxy)
- [Prerequisites](#prerequisites)
- [Deploy Traefik using a Deployment or DaemonSet](#deploy-traefik-using-a-deployment-or-daemonset)
- [Submitting an Ingress to the Cluster](#submitting-an-ingress-to-the-cluster)
- [Path-based Routing](#path-based-routing)
- [User Authentication](#user-authentication)
- [Creating the Secret](#creating-the-secret)
- [Add a TLS Certificate to the Ingress](#add-a-tls-certificate-to-the-ingress)
- [Name-based Routing](#name-based-routing)

<!-- /TOC -->

__This article is still WIP - better check out the [NGINX Ingress](/kubernetes-nginx-ingress/) instead...__

Previously we explored the [NGINX Ingress](/kubernetes-nginx-ingress/) to route traffic onto Services inside our Kubernetes Cluster.
But there are a couple of options that we can choose from here:

* [Nginx](https://github.com/kubernetes/ingress-nginx/blob/master/README.md)
* [Contour](https://github.com/heptio/contour)
* [HAProxy](https://www.haproxy.com/blog/haproxy_ingress_controller_for_kubernetes/)
* [Voyager](https://appscode.com/products/voyager/)
* [Traefik](https://docs.traefik.io)

## Getting Started with the Traefik Reverse Proxy

[Traefik](https://github.com/containous/traefik) is a modern HTTP reverse proxy and load balancer that makes deploying microservices easy. Traefik integrates with your existing infrastructure components and configures itself automatically and dynamically. Pointing Traefik at your orchestrator - e.g. [Kubernetes](/creating-a-kubernetes-cluster/) - should be the only configuration step you need.

Traditional reverse-proxies require that you configure each route that will connect paths and subdomains to each microservice. Traefik listens to your service registry/orchestrator API ([etcd](https://docs.traefik.io/configuration/backends/etcd/) / [Kubernetes](https://docs.traefik.io/configuration/backends/kubernetes/)) and instantly generates the routes, so your microservices are connected to the outside world - without further intervention from your part.

The example configuration files that we are working with are available in the [Traefik Github repository](https://github.com/containous/traefik/tree/v1.7/examples/k8s).

## Prerequisites

Kubernetes introduced [Role Based Access Control (RBAC)](https://kubernetes.io/docs/reference/access-authn-authz/rbac/) in 1.6+ to allow fine-grained control of Kubernetes resources and API. If your cluster is configured with RBAC, you will need to authorize Traefik to use the Kubernetes API. There are two ways to set up the proper permissions: via __namespace-specific RoleBindings__ or a single, global __ClusterRoleBinding__.

RoleBindings per namespace make it possible to restrict granted permissions to only those namespaces that Traefik is watching over, thereby following the least-privileges principle. This is the preferred approach if Traefik is not supposed to watch all namespaces, and the set of namespaces does not change dynamically. Otherwise, a single __ClusterRoleBinding__ must be employed.
But for the sake of simplicity, this guide will use a __ClusterRoleBinding__ with the following [YAML file](https://github.com/containous/traefik/blob/v1.7/examples/k8s/traefik-rbac.yaml):

__traefik-rbac.yaml__

```yaml
---
kind: ClusterRole
apiVersion: rbac.authorization.k8s.io/v1beta1
metadata:
  name: traefik-ingress-controller
rules:
  - apiGroups:
      - ""
    resources:
      - services
      - endpoints
      - secrets
    verbs:
      - get
      - list
      - watch
  - apiGroups:
      - extensions
    resources:
      - ingresses
    verbs:
      - get
      - list
      - watch
  - apiGroups:
      - extensions
    resources:
      - ingresses/status
    verbs:
      - update
---
kind: ClusterRoleBinding
apiVersion: rbac.authorization.k8s.io/v1beta1
metadata:
  name: traefik-ingress-controller
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: traefik-ingress-controller
subjects:
  - kind: ServiceAccount
    name: traefik-ingress-controller
    namespace: monitoring
```

![Traefik Ingress for your Kubernetes Cluster](./kubernetes-traefik-ingress_01.png)

Apply the configuration with the following Kubernetes command (change the URL to the local path, if you decided to store the file above locally):

```bash
kubectl apply -f https://raw.githubusercontent.com/containous/traefik/v1.7/examples/k8s/traefik-rbac.yaml
```

## Deploy Traefik using a Deployment or DaemonSet

It is possible to use Traefik with a [Deployment](https://kubernetes.io/docs/concepts/workloads/controllers/deployment/) or a [DaemonSet](https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/) object, whereas both options have their own pros and cons:

* The scalability can be much better when using a Deployment, because you will have a Single-Pod-per-Node model when using a DaemonSet, whereas you may need less replicas based on your environment when using a Deployment.
* DaemonSets automatically scale to new nodes, when the nodes join the cluster, whereas Deployment pods are only scheduled on new nodes if required.
* DaemonSets ensure that only one replica of pods run on any single node. Deployments require affinity settings if you want to ensure that two pods don't end up on the same node.
* DaemonSets can be run with the _NET\_BIND\_SERVICE_ capability, which will allow it to bind to port 80/443/etc on each host. This will allow bypassing the kube-proxy, and reduce traffic hops. Note that this is against the Kubernetes [Best Practices Guidelines](https://kubernetes.io/docs/concepts/configuration/overview/#services), and raises the potential for scheduling/scaling issues. Despite potential issues, this remains the choice for most ingress controllers.

I am going to use the Deployment here - but the configuration file for the DaemonSet does not look much different and can be [downloaded here](https://github.com/containous/traefik/tree/v1.7/examples/k8s/traefik-ds.yaml).
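Once the RBAC manifest is applied, you can verify the granted permissions through impersonation - a sketch, assuming you kept the `monitoring` namespace from the file above:

```bash
# should answer "yes" if the ClusterRoleBinding is in place
kubectl auth can-i list ingresses \
  --as=system:serviceaccount:monitoring:traefik-ingress-controller
```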
The Deployment object looks like this (see below) and can be [downloaded here](https://github.com/containous/traefik/tree/v1.7/examples/k8s/traefik-deployment.yaml):

__traefik-deployment.yaml__

```yaml
---
apiVersion: v1
kind: ServiceAccount
metadata:
  name: traefik-ingress-controller
  namespace: monitoring
---
kind: Deployment
apiVersion: extensions/v1beta1
metadata:
  name: traefik-ingress-controller
  namespace: monitoring
  labels:
    k8s-app: traefik-ingress-lb
spec:
  replicas: 1
  selector:
    matchLabels:
      k8s-app: traefik-ingress-lb
  template:
    metadata:
      labels:
        k8s-app: traefik-ingress-lb
        name: traefik-ingress-lb
    spec:
      serviceAccountName: traefik-ingress-controller
      terminationGracePeriodSeconds: 60
      containers:
        - image: traefik
          name: traefik-ingress-lb
          ports:
            - name: http
              containerPort: 80
              hostPort: 8080
            - name: admin
              containerPort: 8080
          args:
            - --api
            - --kubernetes
            - --logLevel=INFO
            - --defaultentrypoints=http
            - --entrypoints=Name:http Address::80
---
kind: Service
apiVersion: v1
metadata:
  name: traefik-ingress-service
  namespace: monitoring
spec:
  selector:
    k8s-app: traefik-ingress-lb
  ports:
    - protocol: TCP
      port: 80
      name: http
    - protocol: TCP
      port: 8080
      name: admin
  externalIPs:
    - 172.16.17.32
```

To deploy Traefik to your cluster start by applying the YAML file with kubectl:

```bash
kubectl apply -f https://raw.githubusercontent.com/containous/traefik/v1.7/examples/k8s/traefik-deployment.yaml
```

You can verify that the Ingress pod was started with:

```bash
kubectl --namespace=monitoring get pods
```

![Traefik Ingress for your Kubernetes Cluster](./kubernetes-traefik-ingress_02.png)

## Submitting an Ingress to the Cluster

Let's start by creating a Service and an Ingress that will expose the [Traefik Web UI](https://github.com/containous/traefik#web-ui) (the configuration file can be [downloaded here](https://github.com/containous/traefik/tree/v1.7/examples/k8s/ui.yaml)):

__ui.yaml__

```yaml
---
apiVersion: v1
kind: Service
metadata:
  name: traefik-web-ui
  namespace: monitoring
spec:
  selector:
    k8s-app: traefik-ingress-lb
  ports:
    - name: http
      port: 80
      targetPort: 8080
---
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
  name: traefik-web-ui
  namespace: monitoring
spec:
  rules:
    - http:
        paths:
          - path: /
            backend:
              serviceName: traefik-web-ui
              servicePort: http
```

Apply the service to your cluster with:

```bash
kubectl apply -f https://raw.githubusercontent.com/containous/traefik/v1.7/examples/k8s/ui.yaml
```

> This setup assigns a host domain `traefik-ui.minikube` to your cluster ingress. You can add an entry to your `/etc/hosts` file to route `traefik-ui.minikube` to the cluster. In production you would want to add your real DNS entries here!

> [As seen before](/kubernetes-nginx-ingress/#creating-the-load-balancing-service), we can add the Kubernetes Master WAN IP address, e.g. `externalIPs: 172.16.17.32`, to the service configuration to be able to access the service over the internet
Adding these two modifications, we end up with a YAML file looking like this:

```yaml
---
apiVersion: v1
kind: Service
metadata:
  name: traefik-web-ui
  namespace: monitoring
spec:
  selector:
    k8s-app: traefik-ingress-lb
  ports:
    - name: http
      port: 80
      targetPort: 8080
  externalIPs:
    - 172.16.17.32
---
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
  name: traefik-web-ui
  namespace: monitoring
spec:
  rules:
    - host: traefik-ui.minikube
      http:
        paths:
          - path: /
            backend:
              serviceName: traefik-web-ui
              servicePort: http
```

When you access the URL with your browser, you should now be greeted by the [Traefik Dashboard](https://github.com/containous/traefik#web-ui):

![Traefik Ingress for your Kubernetes Cluster](./kubernetes-traefik-ingress_03.png)

## Path-based Routing

Let's start by launching the pods for three websites.

__Note__: I am using 3 Node.js apps, that are more or less built the way [described here](/express-generator-dockerrized/) - that means they are Node.js/Express.js servers that host web content on a specific port. In my case those ports are `7777`, `7778` and `7779`. They were uploaded to Dockerhub and can be accessed by referencing `- image: your-docker-hub-account/your-docker-image-name-on-docker-hub`:

__app1.yaml__

```yaml
---
kind: Deployment
apiVersion: extensions/v1beta1
metadata:
  name: wiki-de
  labels:
    app: wiki-frontend
spec:
  replicas: 1
  selector:
    matchLabels:
      app: wiki-frontend
  template:
    metadata:
      labels:
        app: wiki-frontend
        version: v2.1.0
    spec:
      containers:
        - image: mpolinowski/my-docker-image:latest
          imagePullPolicy: Always
          name: wiki-de
          ports:
            - containerPort: 7779
      restartPolicy: Always
---
apiVersion: v1
kind: Service
metadata:
  name: wiki-de
spec:
  ports:
    - name: http
      targetPort: 7779
      port: 7779
  selector:
    app: wiki-frontend
```

__app2.yaml__

```yaml
---
kind: Deployment
apiVersion: extensions/v1beta1
metadata:
  name: wiki-en
  labels:
    app: wiki-frontend
spec:
  replicas: 1
  selector:
    matchLabels:
      app: wiki-frontend
  template:
    metadata:
      labels:
        app: wiki-frontend
        version: v2.1.0
    spec:
      containers:
        - image: mpolinowski/my-docker-image-en:latest
          imagePullPolicy: Always
          name: wiki-en
          ports:
            - containerPort: 7777
      restartPolicy: Always
---
apiVersion: v1
kind: Service
metadata:
  name: wiki-en
spec:
  ports:
    - name: http
      targetPort: 7777
      port: 7777
  selector:
    app: wiki-frontend
```

__app3.yaml__

```yaml
---
kind: Deployment
apiVersion: extensions/v1beta1
metadata:
  name: wiki-fr
  labels:
    app: wiki-frontend
spec:
  replicas: 1
  selector:
    matchLabels:
      app: wiki-frontend
  template:
    metadata:
      labels:
        app: wiki-frontend
        version: v2.1.0
    spec:
      containers:
        - image: mpolinowski/my-docker-image-fr:latest
          imagePullPolicy: Always
          name: wiki-frontend
          ports:
            - containerPort: 7778
      restartPolicy: Always
---
apiVersion: v1
kind: Service
metadata:
  name: wiki-fr
spec:
  ports:
    - name: http
      targetPort: 7778
      port: 7778
  selector:
    app: wiki-frontend
```

Now we can submit an ingress for the 3 web apps:

```yaml
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
  name: wiki-frontend
  annotations:
    kubernetes.io/ingress.class: traefik
    traefik.frontend.rule.type: PathPrefixStrip
spec:
  rules:
    - host: my.domain.com
      http:
        paths:
          - path: /de
            backend:
              serviceName: wiki-de
              servicePort: http
          - path: /en
            backend:
              serviceName: wiki-en
              servicePort: http
          - path: /fr
            backend:
              serviceName: wiki-fr
              servicePort: http
```

## User Authentication

It's possible to protect access to Traefik through basic authentication. See the [Kubernetes Ingress](https://docs.traefik.io/configuration/backends/kubernetes) configuration page for syntactical details and restrictions.
## User Authentication

It's possible to protect access to Traefik through basic authentication. See the [Kubernetes Ingress](https://docs.traefik.io/configuration/backends/kubernetes) configuration page for syntactical details and restrictions.

### Creating the Secret

1. Use `htpasswd` to create a file containing the username and the MD5-encoded password (on CentOS you might have to install it first with `yum install -y httpd-tools`). You will be prompted for a password which you will have to enter twice:

```bash
htpasswd -c ./auth myusername
```

2. Now use `kubectl` to create a secret in the `monitoring` namespace using the file created by `htpasswd`:

```bash
kubectl create namespace monitoring
kubectl create secret generic mysecret --from-file auth --namespace=monitoring
```

You have to swap the __kube-system__ with the __monitoring__ namespace in the config files (see below).

3. Attach the following annotations to the Ingress object:

* `traefik.ingress.kubernetes.io/auth-type: "basic"`
* `traefik.ingress.kubernetes.io/auth-secret: "mysecret"`

They specify basic authentication and reference the Secret `mysecret` containing the credentials.

__HOW DO YOU CONFIGURE THE BASIC AUTHENTICATION? WIP__

## Add a TLS Certificate to the Ingress

To setup an HTTPS-protected ingress, you can leverage the TLS feature of the ingress resource:

```yaml
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
  name: traefik-web-ui
  namespace: monitoring
  annotations:
    traefik.ingress.kubernetes.io/auth-type: "basic"
    traefik.ingress.kubernetes.io/auth-secret: "mysecret"
spec:
  rules:
    - host: my.domain.com
      http:
        paths:
          - path: /
            backend:
              serviceName: traefik-web-ui
              servicePort: https
  tls:
    - secretName: traefik-ui-tls-cert
```

We now need to provide the TLS certificate via a Kubernetes secret in the same namespace as the ingress. The following two commands will generate a new certificate and create a secret containing the key and cert files:

```bash
openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout tls.key -out tls.crt -subj "/CN=traefik-ui.minikube"
kubectl -n monitoring create secret tls traefik-ui-tls-cert --key=tls.key --cert=tls.crt
```

Since I am already using a fully qualified domain - and I have used [Certbot](https://certbot.eff.org/lets-encrypt/centosrhel7-other) to generate a TLS certificate - I can use those files for my domain inside the ingress.

__Note__: the certificate that you generated with Certbot can be found in `/etc/letsencrypt/live/my.domain.com` - where _my.domain.com_ is the domain you created the certificate for. Furthermore, Certbot generates a couple of _*.pem_ files instead of a _*.key_ and _*.crt_. In the kubectl command above use `privkey.pem` as the __key__ file and `fullchain.pem` as your __crt__ file (see [Stackoverflow](https://stackoverflow.com/questions/50389883/generate-crt-key-ssl-files-from-lets-encrypt-from-scratch) for details):

![Traefik Ingress for your Kubernetes Cluster](./kubernetes-traefik-ingress_04.png)

The resulting command will look something like this:

```bash
kubectl -n monitoring create secret tls traefik-ui-tls-cert --key=/etc/letsencrypt/live/my.domain.com/privkey.pem --cert=/etc/letsencrypt/live/my.domain.com/fullchain.pem
```

You receive the reply `secret/traefik-ui-tls-cert created`!

__HOW DO YOU CONFIGURE THE SERVICE FOR HTTPS?
WIP__ You can add a TLS entrypoint by adding the following args to the container spec: ```yaml --defaultentrypoints=http,https --entrypoints=Name:https Address::443 TLS --entrypoints=Name:http Address::80 ``` __traefik-deployment.yaml__ ```yaml --- apiVersion: v1 kind: ServiceAccount metadata: name: traefik-ingress-controller namespace: monitoring annotations: traefik.ingress.kubernetes.io/auth-type: "basic" traefik.ingress.kubernetes.io/auth-secret: "mysecret" --- kind: Deployment apiVersion: extensions/v1beta1 metadata: name: traefik-ingress-controller namespace: monitoring annotations: traefik.ingress.kubernetes.io/auth-type: "basic" traefik.ingress.kubernetes.io/auth-secret: "mysecret" labels: k8s-app: traefik-ingress-lb spec: replicas: 1 selector: matchLabels: k8s-app: traefik-ingress-lb template: metadata: labels: k8s-app: traefik-ingress-lb name: traefik-ingress-lb spec: serviceAccountName: traefik-ingress-controller terminationGracePeriodSeconds: 60 containers: - image: traefik name: traefik-ingress-lb ports: - name: http containerPort: 80 hostPort: 8080 - name: https containerPort: 443 hostPort: 443 - name: admin containerPort: 8080 args: - --api - --kubernetes - --logLevel=INFO - --defaultentrypoints=https,http - --entrypoints=Name:https Address::443 TLS - --entrypoints=Name:http Address::80 --- kind: Service apiVersion: v1 metadata: name: traefik-ingress-service namespace: monitoring annotations: traefik.ingress.kubernetes.io/auth-type: "basic" traefik.ingress.kubernetes.io/auth-secret: "mysecret" spec: selector: k8s-app: traefik-ingress-lb ports: - protocol: TCP port: 80 name: http - protocol: TCP port: 443 name: https - protocol: TCP port: 8080 name: admin type: NodePort ``` __ui.yaml__ ```yaml --- apiVersion: v1 kind: Service metadata: name: traefik-web-ui namespace: monitoring annotations: traefik.ingress.kubernetes.io/auth-type: "basic" traefik.ingress.kubernetes.io/auth-secret: "mysecret" spec: selector: k8s-app: traefik-ingress-lb ports: - name: http port: 80 targetPort: 8080 - name: https protocol: TCP port: 443 targetPort: 443 externalIPs: - 172.16.17.32 --- apiVersion: extensions/v1beta1 kind: Ingress metadata: name: traefik-web-ui namespace: monitoring annotations: traefik.ingress.kubernetes.io/auth-type: "basic" traefik.ingress.kubernetes.io/auth-secret: "mysecret" spec: rules: - host: my.domain.com http: paths: - path: / backend: serviceName: traefik-web-ui servicePort: https tls: - secretName: traefik-ui-tls-cert ``` ## Name-based Routing <file_sep>--- date: "2019-09-20" title: "Magento 2 UI Components" categories: - Magento --- ![<NAME>, Hongkong](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Rendering Grids](#rendering-grids) <!-- /TOC --> > Use Magento UI components only for admin routes (see end of the article) ## Rendering Grids We already [created a custom view](/creating-magento-2-modules#view-layer) that listed all items that were stored in the database by our [custom component](/creating-magento-2-modules#module-install-scripts). Let's now add some design to it by displaying it in a grid. 
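Since the following steps touch `di.xml` and layout XML, Magento's generated code and caches have to be refreshed after each change - otherwise you will keep staring at the old output. A minimal round-trip, assuming a standard Magento 2 installation with the `bin/magento` CLI available:

```bash
# re-read setup scripts and DI configuration
bin/magento setup:upgrade

# recompile the dependency injection code (not needed in developer mode)
bin/magento setup:di:compile

# drop cached layouts and configuration
bin/magento cache:flush
```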
This view was routed by `./Controller/Index/Index.php`: ```php <?php namespace INSTAR\SampleModule\Controller\Index; use Magento\Framework\Controller\ResultFactory; class Index extends \Magento\Framework\App\Action\Action { public function execute() { return $this->resultFactory->create(ResultFactory::TYPE_PAGE); } } ``` We now need to create a new collection model to populate the grid with data from our database in `./Model/ResourceModel/Item/Grid/Collection.php`: ```php <?php namespace INSTAR\SampleModule\Model\ResourceModel\Item\Grid; use Magento\Framework\Data\Collection\Db\FetchStrategyInterface as FetchStrategy; use Magento\Framework\Data\Collection\EntityFactoryInterface as EntityFactory; use Magento\Framework\Event\ManagerInterface as EventManager; use Psr\Log\LoggerInterface as Logger; class Collection extends \Magento\Framework\View\Element\UiComponent\DataProvider\SearchResult { public function __construct( EntityFactory $entityFactory, Logger $logger, FetchStrategy $fetchStrategy, EventManager $eventManager, $mainTable = 'instar_sample_item', $resourceModel = 'INSTAR\SampleModule\Model\ResourceModel\Item' ) { parent::__construct( $entityFactory, $logger, $fetchStrategy, $eventManager, $mainTable, $resourceModel ); } } ``` We now have to add some additional configuration to dependency injection `./etc/di.xml`: ```xml <?xml version="1.0"?> <config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:ObjectManager/etc/config.xsd"> <type name="Magento\Framework\Console\CommandList"> <arguments> <argument name="commands" xsi:type="array"> <item name="instarAddItem" xsi:type="object">INSTAR\SampleModule\Console\Command\AddItem</item> </argument> </arguments> </type> </config> ``` Add the following lines of code to configure the UI grid: ```xml <?xml version="1.0"?> <config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:ObjectManager/etc/config.xsd"> <type name="Magento\Framework\Console\CommandList"> <arguments> <argument name="commands" xsi:type="array"> <item name="instarAddItem" xsi:type="object">INSTAR\SampleModule\Console\Command\AddItem</item> </argument> </arguments> </type> <type name="Magento\Framework\View\Element\UiComponent\DataProvider\CollectionFactory"> <arguments> <argument name="collections" xsi:type="array"> <item name="instar_items_grid_data_source" xsi:type="string">INSTAR\SampleModule\Model\ResourceModel\Item\Grid\Collection</item> </argument> </arguments> </type> </config> ``` Now we can modify the layout file that we already created `view/frontend/layout/instar_index_index.xml`: ```xml <?xml version="1.0"?> <page xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" layout="1column" xsi:noNamespaceSchemaLocation="urn:magento:framework:View/Layout/etc/page_configuration.xsd"> <body> <referenceContainer name="content"> <block name="instar_hello" class="INSTAR\SampleModule\Block\Hello" template="hello.phtml"/> </referenceContainer> </body> </page> ``` Instead of using a custom __Block__ we now can use the __UI Component__: ```xml <?xml version="1.0"?> <page xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" layout="1column" xsi:noNamespaceSchemaLocation="urn:magento:framework:View/Layout/etc/page_configuration.xsd"> <body> <referenceContainer name="content"> <uiComponent name="instar_items_grid" /> </referenceContainer> </body> </page> ``` The configuration for the here referenced `instar_items_grid` can be done in 
`./view/frontend/ui_component/instar_items_grid.xml`: ```xml <?xml version="1.0" encoding="UTF-8"?> <listing xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:module:Magento_Ui:etc/ui_configuration.xsd"> <argument name="data" xsi:type="array"> <item name="js_config" xsi:type="array"> <item name="provider" xsi:type="string">instar_items_grid.instar_items_grid_data_source</item> <item name="deps" xsi:type="string">instar_items_grid.instar_items_grid_data_source</item> </item> <item name="spinner" xsi:type="string">instar_items_columns</item> <item name="buttons" xsi:type="array"> <item name="add" xsi:type="array"> <item name="name" xsi:type="string">add</item> <item name="label" xsi:type="string" translate="true">Add Item</item> <item name="class" xsi:type="string">primary</item> <item name="url" xsi:type="string">*/item/new</item> </item> </item> </argument> <dataSource name="instar_items_grid_data_source"> <argument name="dataProvider" xsi:type="configurableObject"> <argument name="class" xsi:type="string">Magento\Framework\View\Element\UiComponent\DataProvider\DataProvider</argument> <argument name="name" xsi:type="string">instar_items_grid_data_source</argument> <argument name="primaryFieldName" xsi:type="string">id</argument> <argument name="requestFieldName" xsi:type="string">id</argument> <argument name="data" xsi:type="array"> <item name="config" xsi:type="array"> <item name="update_url" xsi:type="url" path="mui/index/render"/> <item name="component" xsi:type="string">Magento_Ui/js/grid/provider</item> </item> </argument> </argument> </dataSource> <listingToolbar name="listing_top"> <bookmark name="bookmarks"/> <columnsControls name="columns_controls"/> <exportButton name="export_button"/> <filterSearch name="fulltext"/> <filters name="listing_filters"/> <paging name="listing_paging"/> </listingToolbar> <columns name="instar_items_columns"> <argument name="data" xsi:type="array"> <item name="config" xsi:type="array"> <item name="childDefaults" xsi:type="array"> <item name="fieldAction" xsi:type="array"> <item name="provider" xsi:type="string">instar_items_grid.instar_items_grid.instar_items_columns.actions</item> <item name="target" xsi:type="string">applyAction</item> <item name="params" xsi:type="array"> <item name="0" xsi:type="string">view</item> <item name="1" xsi:type="string">${ $.$data.rowIndex }</item> </item> </item> </item> </item> </argument> <selectionsColumn name="ids"> <argument name="data" xsi:type="array"> <item name="config" xsi:type="array"> <item name="indexField" xsi:type="string">id</item> </item> </argument> </selectionsColumn> <column name="name"> <argument name="data" xsi:type="array"> <item name="config" xsi:type="array"> <item name="filter" xsi:type="string">text</item> <item name="label" xsi:type="string" translate="true">Name</item> </item> </argument> </column> <column name="description"> <argument name="data" xsi:type="array"> <item name="config" xsi:type="array"> <item name="filter" xsi:type="string">text</item> <item name="label" xsi:type="string" translate="true">Description</item> </item> </argument> </column> </columns> </listing> ``` And we end up with a mess: ![Magento UI Component](./Magento2_Module_Components_01.png) Ok, so if we had chosen to use this on our Admin route this would have worked. But there are a couple of things not available for use on the frontend side of Magento - [see this blog for details](https://belvg.com/blog/ui-grid-component-on-the-front-end-in-magento-2.html). 
So this can be done - but it is obviously not the best way to get started.<file_sep>---
date: "2020-02-01"
title: "Running Gitlab in Podman on CentOS8"
categories:
  - LINUX
  - Docker
---

![<NAME>, Cambodia](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Installation through Podman on CentOS8](#installation-through-podman-on-centos8)
- [Setting Up Gitlab](#setting-up-gitlab)
  - [Adding your SSH Key](#adding-your-ssh-key)
  - [Testing that everything is set up correctly](#testing-that-everything-is-set-up-correctly)
  - [Uploading your Project](#uploading-your-project)

<!-- /TOC -->

## Installation through Podman on CentOS8

Before setting up everything else, configure a new environment variable $GITLAB_HOME pointing to the folder where the configuration, logs, and data files will reside. Ensure that the folder exists and appropriate permissions have been granted:

```bash
export GITLAB_HOME=/srv
```

The GitLab container uses host mounted volumes to store persistent data:

| Local location | Container location | Usage |
| -- | -- | -- |
| `$GITLAB_HOME/gitlab/data` | `/var/opt/gitlab` | For storing application data |
| `$GITLAB_HOME/gitlab/logs` | `/var/log/gitlab` | For storing logs |
| `$GITLAB_HOME/gitlab/config` | `/etc/gitlab` | For storing the GitLab configuration files |

```bash
mkdir -p /srv/gitlab/{data,logs,config}
```

You can fine-tune these directories to meet your requirements. Once you've set up the GITLAB_HOME variable, you can run the image:

```bash
podman run --detach \
  --privileged \
  --hostname centos8.fritz.box \
  --publish 443:443 --publish 80:80 --publish 55466:22 \
  --name gitlab \
  --restart always \
  --volume $GITLAB_HOME/gitlab/config:/etc/gitlab \
  --volume $GITLAB_HOME/gitlab/logs:/var/log/gitlab \
  --volume $GITLAB_HOME/gitlab/data:/var/opt/gitlab \
  gitlab/gitlab-ce:latest
```

If you are on SELinux, then run this instead:

```bash
podman run --detach \
  --hostname centos8.fritz.box \
  --publish 443:443 --publish 80:80 --publish 55466:22 \
  --name gitlab \
  --restart always \
  --volume $GITLAB_HOME/gitlab/config:/etc/gitlab:Z \
  --volume $GITLAB_HOME/gitlab/logs:/var/log/gitlab:Z \
  --volume $GITLAB_HOME/gitlab/data:/var/opt/gitlab:Z \
  gitlab/gitlab-ce:latest
```

This will ensure that the Docker process has enough permissions to create the config files in the mounted volumes.

For my test environment `centos8.fritz.box` is a local domain that I use for this server - replace it with your own domain. The __SSH Port__ for the Gitlab container is - in the original documentation - exposed on port `22`. I replaced it with a random port `55466` since I am already using `22` to get into my server.

The initialization process may take a long time. You can track this process with:

```bash
podman logs -f gitlab
```

After starting the container you can visit `centos8.fritz.box`. It might take a while before the Docker container starts to respond to queries. The very first time you visit GitLab, you will be asked to set up the admin password. After you change it, you can login with username root and the password you set up.

## Setting Up Gitlab

On first access you will be asked to add a password for the `root` user:

![Running Gitlab in Podman on CentOS8](./Gitlab_Podman_CentOS_01.png)

### Adding your SSH Key

You can create and configure an __ED25519__ key with the following command:

```bash
ssh-keygen -t ed25519 -C "<comment>"
```

The -C flag, with a quoted comment such as an email address, is an optional way to label your SSH keys.
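If your OpenSSH build is too old for ED25519 keys, an RSA key pair is a safe fallback (a sketch - Gitlab accepts both key types; only the file names change to `id_rsa`/`id_rsa.pub`):

```bash
# fall back to a 4096 bit RSA key
ssh-keygen -t rsa -b 4096 -C "<comment>"
```

The response below assumes you ran the ED25519 command.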
You'll see a response similar to:

```bash
ssh-keygen -t ed25519 -C "<EMAIL>"
Generating public/private ed25519 key pair.
Enter file in which to save the key (C:\Users\INSTAR/.ssh/id_ed25519):
Enter passphrase (empty for no passphrase):
Enter same passphrase again:
Your identification has been saved in C:\Users\INSTAR/.ssh/id_ed25519.
Your public key has been saved in C:\Users\INSTAR/.ssh/id_ed25519.pub.
The key fingerprint is:
SHA256:shdfhgfhjghjghjdfgj467wudjh <EMAIL>
The key's randomart image is:
+--[ED25519 256]--+
|9-+o+o           |
|+-=.= =          |
|+_.._+ u d *     |
| .Y .            |
+----[SHA256]-----+
```

Copy your public SSH key to a location that saves information in text format. On Windows you can use the following command:

```bash
cat ~/.ssh/id_ed25519.pub | clip
```

Select your avatar in the upper right corner, and click Settings. Click SSH Keys. Paste the public key that you copied into the Key text box:

![Running Gitlab in Podman on CentOS8](./Gitlab_Podman_CentOS_02.png)

> If you manually copied your public SSH key make sure you copied the entire key starting with ssh-ed25519 (or ssh-rsa) and ending with your email address.

### Testing that everything is set up correctly

To test whether your SSH key was added correctly, run the following command in your terminal (replace centos8.fritz.box with your GitLab instance's domain and port `55466` with the port you chose when starting the Gitlab container earlier):

```bash
ssh -p 55466 -T git@centos8.fritz.box

The authenticity of host '[centos8.fritz.box]:55466 ([192.168.2.111]:55466)' can't be established.
ECDSA key fingerprint is <KEY>
Are you sure you want to continue connecting (yes/no/[fingerprint])? yes
Warning: Permanently added '[centos8.fritz.box]:55466,[192.168.2.111]:55466' (ECDSA) to the list of known hosts.
```

### Uploading your Project

Start by creating a project (or cloning one from e.g. Github or Gitlab.com). I created a blank repo with the name `mui-gatsby-markdown-fr`:

![Running Gitlab in Podman on CentOS8](./Gitlab_Podman_CentOS_03.png)

You can now clone the empty repository, enter the directory and add some project code. To keep it simple, I am going to use Github Desktop and the HTTP URL (if you want to use SSH, remember to use the SSH port that you assigned to your Gitlab container):

![Running Gitlab in Podman on CentOS8](./Gitlab_Podman_CentOS_04.png)

Copy your code into the created directory, commit and publish it with Github Desktop (you have to login with your Gitlab user `root` + your password). It works!<file_sep>---
date: "2020-06-01"
title: "Elasticsearch in Docker"
categories:
  - Databases
  - Docker
---

![Shanghai, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Setup](#setup)
- [MAPPING](#mapping)
- [INDEX](#index)
- [UPDATE](#update)
- [DELETE](#delete)
- [BULK](#bulk)
- [SEARCH](#search)
- [Cheat Sheet](#cheat-sheet)

<!-- /TOC -->

## Setup

1. Download this [repository](https://github.com/mpolinowski/docker-elk):

```bash
git clone https://github.com/mpolinowski/docker-elk
```

2. Disable the paid security features by changing your XPack license from `trial` to `basic` in `elasticsearch/config/elasticsearch.yml`:

```yaml
# xpack.license.self_generated.type: trial
xpack.license.self_generated.type: basic
```

3. ELK Stack Version: To use a different version of the core Elastic components, simply change the version number inside the `./.env` file.

4. User Login: Open the `./docker-compose.yml` file and take note of the `changeme` password. The default user name is `elastic`.
([Changing your Login](https://github.com/deviantony/docker-elk#initial-setup))

5. Build your stack:

```bash
cd ./docker-elk
docker-compose build
```

6. Start Up:

```bash
docker-compose up -d
```

You can now access Elasticsearch on `http://localhost:9200`:

![ELK Stack in Docker](./docker-elk_01.png)

and Kibana on `http://localhost:5601`:

![ELK Stack in Docker](./docker-elk_02.png)

7. Shutdown & CleanUp: In order to entirely shutdown the stack and remove all persisted data, use the following Docker Compose command:

```bash
docker-compose down -v
```

## MAPPING

Continue by creating a mapping for your Elasticsearch index:

```json
PUT /wiki_ssr_en_2020_07_06
{
  "settings": {
    "analysis": {
      "analyzer": {
        "custom_analyzer": {
          "type": "custom",
          "char_filter": ["symbol", "html_strip"],
          "tokenizer": "punctuation",
          "filter": ["lowercase", "word_delimiter", "english_stop", "english_stemmer"]
        }
      },
      "filter": {
        "english_stop": {
          "type": "stop",
          "stopwords": "_english_"
        },
        "english_stemmer": {
          "type": "stemmer",
          "language": "english"
        }
      },
      "tokenizer": {
        "punctuation": {
          "type": "pattern",
          "pattern": "[.,!?&=_:;']"
        }
      },
      "char_filter": {
        "symbol": {
          "type": "mapping",
          "mappings": [
            "& => and",
            ":) => happy",
            ":( => unhappy",
            "+ => plus"
          ]
        }
      }
    }
  },
  "mappings": {
    "properties": {
      "title": { "type": "text", "analyzer": "custom_analyzer", "index": "true" },
      "series": { "type": "text", "index": "false" },
      "models": { "type": "text", "index": "false" },
      "description": { "type": "text", "analyzer": "custom_analyzer", "index": "true" },
      "link": { "type": "text", "index": "false" },
      "title2": { "type": "text", "analyzer": "german", "index": "true" },
      "chapters": {
        "type": "text",
        "analyzer": "english",
        "fields": { "raw": { "type": "keyword" } }
      },
      "tags": {
        "type": "text",
        "analyzer": "english",
        "fields": { "raw": { "type": "keyword" } }
      },
      "image": { "type": "text", "index": "false" },
      "imagesquare": { "type": "text", "index": "false" },
      "abstract": { "type": "text", "analyzer": "custom_analyzer", "index": "true" },
      "sublink1": { "type": "text", "index": "false" },
      "sublink2": { "type": "text", "index": "false" },
      "sublink3": { "type": "text", "index": "false" },
      "sublink4": { "type": "text", "index": "false" },
      "subtitle1": { "type": "text", "index": "false" },
      "subtitle2": { "type": "text", "index": "false" },
      "subtitle3": { "type": "text", "index": "false" },
      "subtitle4": { "type": "text", "index": "false" },
      "badge": { "type": "text", "index": "false" }
    }
  }
}
```

Test your Custom Analyzer:

```json
POST /wiki_ssr_en_2020_07_06/_analyze
// Test your custom analyzer - strips HTML + english stopwords + custom characters
{
  "analyzer": "custom_analyzer",
  "text": "<p>This + This is an HTML posting going well, hopefully ? :)</p>. And this is a CGI command: http://admin:[email protected]/param.cgi?cmd=setsmtpattr&-ma_ssl=3&-ma_from=cam%40instar.email&[email protected]%3B&-ma_subject=Alarm%20Email&-ma_text=ALARM&-ma_server=mx.instar.email&-ma_port=587&-ma_logintype=1&-ma_username=cam%40instar.email&-ma_password=<PASSWORD>"
}
```

## INDEX

Add a single post:

```json
PUT /wiki_ssr_en_2020_07_06/_doc/documentid
{
  "title": "How Does An IP Camera Work?",
  "series": ["HD", "VGA", "Indoor", "Outdoor"],
  "models": ["IN-2905", "IN-2908", "IN-3011", "IN-4010", "IN-4011", "IN-5905 HD", "IN-5907 HD", "IN-6001 HD", "IN-6012 HD", "IN-6014 HD", "IN-7011 HD"],
  "description": "How does an IP-Camera-Network work? LAN or Wifi connectivity. Remote access to your camera via DDNS (Dynamic Domain Name Service). 
Internal IP address vs internet address (DDNS). What is the difference between the internal IP (LAN) and the external IP address (WAN). Internal port / external port - How to open a door to the internet (Port Forwarding)", "link": "/Quick_Installation/How_Does_An_IP_Camera_Work", "title2": "Wie arbeitet eine IP Kamera?", "chapters": "Quick Installation", "tags": ["Introduction", "Quickinstallation"], "image": "/images/Search/QI_SearchThumb_HowDoesAnIPCameraWork.png", "abstract": "How do IP Cameras work in my Network" } ``` ### UPDATE Update only one key pair in document with ID yt-intro: ```json POST /wiki_ssr_en_2020_07_06/_update/yt-intro { "doc": { "image": "/images/Search/InstarWiki_SearchThumb_HowDoesAnIPCameraWork.jpg" } } ``` Update complete document: ```json PUT /wiki_ssr_en_2020_07_06/_doc/yt-intro { "title": "IP Cameras - An Introduction Video", "...": "..." } ``` ### DELETE Delete only document with ID yt-intro ```bash DELETE /wiki_ssr_en_2020_07_06/_doc/yt-intro ``` Delete complete Index ```bash DELETE /wiki_ssr_en_2020_07_06 ``` ### BULK Bulk actions INDEX, UPDATE and DELETE: ```json POST _bulk {"index": {"_index": "wiki_ssr_en_2020_07_06", "_id": "yt-intro"}} {"title": "IP Cameras - An Introduction Video", "description": "How does an IP-Camera work? An IP camera is a complex product, however it is not complicated to operate a INSTAR product. In the following we want to give you an intorduction to the basic functions of an IP camera. For more information about our cameras you can continue reading the FIRST STEPS, where we will dive a little deeper. What is an IP camera and how does it work? The IP in IP-camera stands for Internet Protocol. This implies that the camera is being connected with a network, from which it can be accessed by other devices. The access is not only possible within the same network, but even through the internet. Using our IP cameras works like this: You connect your IP camera via LAN cable or wireless with your router. When your computer or smartphone is connected to the same router, you just type the camera´s IP address into your browsers address bar to access the web user interface (1080P MODELS / 720P MODELS). You can also remotely access your camera through the internet. This is possible via DDNS ADDRESS or via a POINT-TO-POINT connection. When you access your camera, you will enter its Web User Interface 1080P MODELS / 720P MODELS. There you can see the current live video and adjust settings such as alarms, schedules or video configuration. Those settings will be saved on the camera. The camera is opersting 24/7 and will notify you if something moves within the camera´s field of view. How sensitive the camera´s MOTION DETECTION is, and what happens after the alarm was triggered, can be set individually for each camera. Manual or planned recordings following a SCHEDULE are possible as well. This is the basic concept of our cameras. For further information you can check out the FIRST STEPS or you browse our Wiki. 
Of course you can ask us your unanswered questions PERSONALLY as well.", "sublink1": "/Quick_Installation/How_Does_An_IP_Camera_Work/Video/", "subtitle1": "Video • ", "sublink2": "/Quick_Installation/How_Does_An_IP_Camera_Work/", "subtitle2": "How does an IP Camera Work • ", "sublink3": "/Quick_Installation/", "subtitle3": "Quick Installation", "sublink4": "", "subtitle4": "", "badge": "Video", "title2": "Wie arbeitet eine IP Kamera?", "chapter": "Quick Installation", "tags": ["Introduction", "Quickinstallation"], "image": "/images/Search/QI_SearchThumb_HowDoesAnIPCameraWork.png", "imagesquare": "/images/Search/TOC_Icons/Wiki_Tiles_Youtube_white.png", "short": "How do IP Cameras work in my Network", "abstract": "These videos contain an overview over the basic IP camera features like: LAN or WiFi connectivity and remote access via DDNS and P2P."} {"index": {"_index": "wiki_ssr_en_2020_07_06", "_id": "yt-powerline"}} {"title": "Powerline - Introduction Video", "description": "Powerline INSTALLATION Network over your Power Grid IN-LAN is an intelligent and secure technology that lets you set up a home network easily via your household power grid - without the need of complex and expensive dedicated cabling. IN-LAN communication now attains speeds you would expect from other LAN technologies. IN-LAN uses the household power grid to transfer data between computers equipped with suitable adapters and other network components. As a result, any power outlet can be used as a network access point. State-of-the-art technology ensures that the power and data networks do not interfere with one another. Powerline vs Power-over-Ethernet Powerline allows you to connect your camera to your local network over the power grid. The camera still needs to be powered by the included power supply. Power-over-Ethernet combines both the network as well as the power supply in a regular Ethernet cable. You only need a POE INJECTOR or POE SWITCH to add the voltage to the Ethernet cable. What is the difference between IN-LAN 500 & IN-LAN 500p? The P in IN-LAN 500p stands for pass-through. Unlike the base model the 500p will block your power outlet but pass through the existing one. Both models offer the same advantages otherwise: Use existing power lines to implement a network with IN-LAN. Very simple plug&play technology. Just plug into the wall socket and you're done. Ultra-fast data transfer up to 500Mbps. Expand your network with for e.g. IP cameras without laying network cables. A very detailed instruction will make the installation very easy. Installation Warnings Powerline communication will fail. if both adaptors (one at your router / the other for your camera) are not connected to the same phase on your powergrid. The Powerline network can suffer quality issues, if the cables, used in your power grid are old. Always directly plug in your IN-LAN adaptors into a wall socket. 
Don't use extension cords.", "sublink1": "/Quick_Installation/Powerline/Video/", "subtitle1": "Video • ", "sublink2": "/Quick_Installation/Powerline/", "subtitle2": "Powerline • ", "sublink3": "/Quick_Installation/", "subtitle3": "Quick Installation", "sublink4": "", "subtitle4": "", "badge": "Video", "title2": "Powerline", "chapter": "Quick Installation", "tags": ["Introduction", "Quickinstallation", "Network", "D-LAN", "IN-LAN", "Homeplug AV", "Devolo"], "image": "/images/Search/QI_SearchThumb_Powerline.png", "imagesquare": "/images/Search/TOC_Icons/Wiki_Tiles_Youtube_white.png", "short": "Network Connection over your Power Grid", "abstract": "IN-LAN is an intelligent and secure technology that lets you set up a home network easily via your household power grid - without the need of complex and expensive dedicated cabling."}
{"update": {"_id": "yt-intro", "_index": "wiki_ssr_en_2020_07_06"}}
{"doc": {"image": "/images/Search/updatedimage.png"}}
{"delete": {"_index": "wiki_ssr_en_2020_07_06", "_id": "yt-powerline"}}
```

## SEARCH

### Cheat Sheet

Return all documents:

```bash
GET /wiki_ssr_en_2020_07_06/_search
```

Return all documents from all indices starting with wiki_ssr_en:

```bash
GET /wiki_ssr_en*/_search
```

Return all documents from all indices:

```bash
GET /_search
```

Return all documents from index 1 & 2:

```bash
GET /index1,index2/_search
```

This request (searching all fields for `fritzbox`) returned 5 documents, and the article with the highest match has a score of >5:

```json
GET /wiki_ssr_en_2020_07_06/_search?q=fritzbox

{
  "took" : 16,
  "timed_out" : false,
  "_shards" : {
    "total" : 1,
    "successful" : 1,
    "skipped" : 0,
    "failed" : 0
  },
  "hits" : {
    "total" : {
      "value" : 5,
      "relation" : "eq"
    },
    "max_score" : 5.1948776
  }
}
```

Only return documents with the search query in their title:

```bash
GET /wiki_ssr_en_2020_07_06/_search?q=title:9008
```

Search query in the request body:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "match": { "title": "fritzbox" }
  }
}
```

Multiple terms with OR operator:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "match": { "title": "avm fritzbox" }
  }
}
```

Multiple terms with AND operator:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "match_phrase": { "title": "avm fritzbox" }
  }
}
```
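All of these console snippets can also be sent with plain curl (a sketch - it assumes the dockerized stack from above with the default `elastic`/`changeme` credentials on `localhost:9200`):

```bash
# the match_phrase query from above, sent straight to the REST API
curl -u elastic:changeme -H 'Content-Type: application/json' \
  'http://localhost:9200/wiki_ssr_en_2020_07_06/_search' \
  -d '{"query": {"match_phrase": {"title": "avm fritzbox"}}}'
```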
Search as you type - when you want to display suggestions to the user you can use `match_phrase_prefix`; in this case the last word the user typed will be understood as a prefix instead of a whole search query:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "bool": {
      "must": [
        {
          "match_phrase_prefix": {
            "title": {
              "query": "Heater Install"
            }
          }
        }
      ]
    }
  }
}
```

You can also use `prefix` to match terms that start with the search query:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "prefix": {
      "title": {
        "value": "adjust"
      }
    }
  }
}
```

Multi match more than one field:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "multi_match": {
      "query": "fritzbox",
      "fields": ["title", "tags"]
    }
  }
}
```

Highlight the search query in search results:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "match": {
      "title": "Forward"
    }
  },
  "highlight": {
    "fields": {
      "title": {}
    }
  }
}
```

The search result will now contain an extra field below `_source`, allowing you to style the em-tags in your search results:

```json
"highlight" : {
  "title" : [
    "Port <em>Forwarding</em> Digitalisierungsbox Video"
  ]
}
```

Check if a specific field is present and display all documents that fit:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "exists": {
      "field": "published"
    }
  }
}
```

Bool queries:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "bool": {
      "must": [{ "match": { "title": "fritzbox" } }],
      "must_not": [{ "match": { "title": "forwarding" } }],
      "should": [{ "match": { "tags": "ftp" } }]
    }
  }
}
```

Range filter, greater-than-or-equal or less-than-or-equal:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "bool": {
      "must": [{ "match": { "title": "fritzbox" } }],
      "filter": {
        "range": {
          "likes": {
            "gte": 10,
            "lte": 100
          }
        }
      }
    }
  }
}
```

Limit the amount of returned documents (__Note__: the default value in Elasticsearch is `10`!):

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": { "match_all": {} },
  "size": 2
}
```

To paginate through your search results use `from` to set the start point:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": { "match_all": {} },
  "size": 2,
  "from": 2
}
```

Limit the source output to values you are interested in:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": { "match_all": {} },
  "size": 2,
  "_source": ["title*", "abstract", "*link*"]
}
```

Or the other way around - use excludes:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": { "match_all": {} },
  "size": 2,
  "_source": { "excludes": "*link*" }
}
```

Sort your search results (default is by relevancy) - this example fails, see the reason below:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": { "match_all": {} },
  "size": 2,
  "sort": [
    { "title": { "order": "desc" } }
  ]
}
```

> Text fields are not optimised for operations that require per-document field data like aggregations and sorting, so these operations are disabled by default. Please use a keyword field instead. Alternatively, set `fielddata=true` on [title] in order to load field data by uninverting the inverted index. Note that this can use significant memory.

Our mapping sets `chapters` and `tags` to be keyword fields that are not analyzed - we can use them to sort our results:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": { "match_all": {} },
  "size": 5,
  "_source": [ "chapter", "tags" ],
  "sort": [
    { "chapters.raw": { "order": "desc" } },
    { "tags.raw": { "order": "desc" } }
  ]
}
```

Use the AND operator to get exact results (matched to all keywords you provide - the default behaviour is OR):

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "match": { "title": "avm fritzbox wireless protected setup" }
  }
}
```

This query returns 10 results for documents that have either of those search keywords in their title.
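That OR default can also be spelled out explicitly - the following request (same curl assumptions as before) returns the identical 10 documents:

```bash
# explicit "operator": "or" - the default for a match query
curl -u elastic:changeme -H 'Content-Type: application/json' \
  'http://localhost:9200/wiki_ssr_en_2020_07_06/_search' \
  -d '{"query": {"match": {"title": {"query": "avm fritzbox wireless protected setup", "operator": "or"}}}}'
```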
Let's change this query to the AND operator to get only the documents that has all of those keywords in it's title: ```json GET /wiki_ssr_en_2020_07_06/_search { "query": { "match": { "title": { "query": "avm fritzbox wireless protected setup", "operator": "and" } } } } ``` This only returns 1 document with the exact title. __Note__: that Elasticsearch uses the same analyzer for your search query that were used for this field in your document. Since `title` uses our custom analyzer all english stop words will be scrubbed - this might lead to different search results. You can also make this query a little bit more fuzzy by defining a number of terms that have to match: ```json GET /wiki_ssr_en_2020_07_06/_search { "query": { "match": { "title": { "query": "avm fritzbox wireless protected setup", "minimum_should_match": 3 } } } } ``` Or use a relative match: ```json GET /wiki_ssr_en_2020_07_06/_search { "query": { "match": { "title": { "query": "avm fritzbox wireless protected setup", "minimum_should_match": "75%" } } } } ``` Weighting your search queries to prefer hits in certain fields: ```json GET /wiki_ssr_en_2020_07_06/_search { "query": { "multi_match": { "query": "ASUS", "fields": [ "tag^10", "title^9", "abstract^7", "description^5" ] } } } ``` Or boost a match clause in your query: ```json GET /wiki_ssr_en_2020_07_06/_search { "query": { "bool": { "should": [ { "match": { "title": { "query": "installation", "boost": 3 } } }, { "match": { "abstract": { "query": "installation" } } } ] } } } ``` If you want to provide a boost factor during index time, you can modify your mapping: ```json PUT /wiki_ssr_en_2020_07_06 { "settings": { "analysis": { "analyzer": { "custom_analyzer": { "type": "custom", "char_filter": [ "symbol", "html_strip" ], "tokenizer": "punctuation", "filter": [ "lowercase", "word_delimiter", "english_stop", "english_stemmer" ] } }, "filter": { "english_stop": { "type": "stop", "stopwords": "_english_ " }, "english_stemmer": { "type": "stemmer", "language": "english" } }, "tokenizer": { "punctuation": { "type": "pattern", "pattern": "[.,!?&=_:;']" } }, "char_filter": { "symbol": { "type": "mapping", "mappings": [ "& => and", ":) => happy", ":( => unhappy", "+ => plus" ] } } } }, "mappings": { "properties": { "title": { "type": "text", "analyzer": "custom_analyzer", "index": "true", "boost": 9 }, "series": { "type": "text", "index": "false" }, "models": { "type": "text", "index": "false" }, "description": { "type": "text", "analyzer": "custom_analyzer", "index": "true", "boost": 3 }, "link": { "type": "text", "index": "false" }, "title2": { "type": "text", "analyzer": "german", "index": "true" }, "chapters": { "type": "text", "analyzer": "english", "fields": { "raw": { "type": "keyword" } } }, "tags": { "type": "text", "analyzer": "english", "boost": 10, "fields": { "raw": { "type": "keyword" } } }, "image": { "type": "text", "index": "false" }, "imagesquare": { "type": "text", "index": "false" }, "abstract": { "type": "text", "analyzer": "custom_analyzer", "index": "true", "boost": 7 }, "sublink1": { "type": "text", "index": "false" }, "sublink2": { "type": "text", "index": "false" }, "sublink3": { "type": "text", "index": "false" }, "sublink4": { "type": "text", "index": "false" }, "subtitle1": { "type": "text", "index": "false" }, "subtitle2": { "type": "text", "index": "false" }, "subtitle3": { "type": "text", "index": "false" }, "subtitle4": { "type": "text", "index": "false" }, "badge": { "type": "text", "index": "false" } } } } ``` Term queries, unlike match 
queries, are not analyzed - the following search will give you all the articles that are tagged with `indoor`:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "term": {
      "tags": "indoor"
    }
  },
  "_source": ["tags"]
}
```

If you type `Indoor` with a capital letter you won't get a match, since all our documents have been analyzed and run through a to-lower-case transformation. When you use a match query instead, your search will be run through the same analyzers and you will get a match:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "match": {
      "tags": "Indoor"
    }
  },
  "_source": ["tags"]
}
```

Term queries can also be used to filter searches:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "bool": {
      "must": [
        {
          "match": {
            "title": {
              "query": "heater installation"
            }
          }
        }
      ],
      "filter": {
        "term": {
          "tags": "instar"
        }
      }
    }
  }
}
```

You can add multiple terms with the `terms` filter:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "bool": {
      "must": [
        {
          "match": {
            "title": {
              "query": "heater installation"
            }
          }
        }
      ],
      "filter": {
        "terms": {
          "tags": ["instar", "products"]
        }
      }
    }
  }
}
```

With multiple filters, use the one that eliminates the most documents first to improve the performance of the following filters:

```json
GET /wiki_ssr_en_2020_07_06/_search
{
  "query": {
    "bool": {
      "must": [
        {
          "match": {
            "title": {
              "query": "camera unboxing"
            }
          }
        }
      ],
      "must_not": [
        {
          "term": {
            "value": "draft"
          }
        }
      ],
      "filter": [
        {
          "range": {
            "likes": {
              "gte": 100
            }
          }
        },
        {
          "range": {
            "published": {
              "gte": "2020-03-17"
            }
          }
        }
      ],
      "should": [
        {
          "match": {
            "tags": "1080p"
          }
        }
      ],
      "minimum_should_match": 1
    }
  }
}
```
<file_sep>---
date: "2019-12-01"
title: "Podman Homebridge to MQTT"
categories:
  - LINUX
  - Smarthome
  - IoT
  - Docker
---

![Sapporo, Japan](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Building the Docker Image](#building-the-docker-image)
  - [Preparing Ubuntu](#preparing-ubuntu)
  - [Installing FFMPEG](#installing-ffmpeg)
  - [Installing NodeJS 12 LTS](#installing-nodejs-12-lts)
  - [Installing Homebridge](#installing-homebridge)
  - [Adding your Camera](#adding-your-camera)
  - [Adding MQTT Button](#adding-mqtt-button)

<!-- /TOC -->

## Building the Docker Image

### Preparing Ubuntu

Start by pulling the latest [Ubuntu Image from DockerHub](https://hub.docker.com/_/ubuntu):

```bash
podman pull ubuntu:latest
podman run --net=host --privileged -ti ubuntu:latest /bin/bash
apt update && apt upgrade -y
```

```bash
cat /etc/os-release

NAME="Ubuntu"
VERSION="18.04.4 LTS (Bionic Beaver)"
ID=ubuntu
ID_LIKE=debian
PRETTY_NAME="Ubuntu 18.04.4 LTS"
VERSION_ID="18.04"
```

And recommit it with a descriptive name (in a new terminal from your host system):

```bash
podman ps -a

CONTAINER ID  IMAGE                    COMMAND
48fa0c8f78b2  localhost/ubuntu:latest  /bin/bash
```

```bash
podman commit --author "<NAME>" 48fa0c8f78b2 mqtt-homebridge
podman rm 48fa0c8f78b2
```

```bash
podman run \
    --name homebridge \
    --net=host \
    --privileged \
    --rm \
    -ti -u root localhost/mqtt-homebridge /bin/bash
```

### Installing FFMPEG

Sources: FFmpeg with fdk-aac codecs:

* https://github.com/mstorsjo/fdk-aac
* https://github.com/FFmpeg/FFmpeg

Get the __dependencies__:

```bash
apt -y install \
  autoconf \
  automake \
  build-essential \
  cmake \
  git \
  git-core \
  libass-dev \
  libfreetype6-dev \
  libsdl2-dev \
  libtool \
  libva-dev \
  libvdpau-dev \
  libvorbis-dev \
  libx264-dev \
  libxcb1-dev \
  libxcb-shm0-dev \
  libxcb-xfixes0-dev \
  pkg-config \
  texinfo \
  wget \
  zlib1g-dev
```

[Installing Assembler](https://trac.ffmpeg.org/wiki/CompilationGuide/Ubuntu):
```bash
apt install nasm yasm
```

[Installing Video Encoder](https://trac.ffmpeg.org/wiki/CompilationGuide/Ubuntu):

```bash
apt install -y libx264-dev libvpx-dev libfdk-aac-dev libmp3lame-dev
```

Download and build __fdk-aac__:

```bash
mkdir -p ~/ffmpeg_sources ~/bin
cd ~/ffmpeg_sources
git clone https://github.com/mstorsjo/fdk-aac.git
cd fdk-aac
./autogen.sh
./configure --prefix=/usr/local --enable-shared --enable-static
make -j4
make install
ldconfig
cd ..
```

Download and build __FFmpeg__:

```bash
cd ~/ffmpeg_sources && \
wget -O ffmpeg-snapshot.tar.bz2 https://ffmpeg.org/releases/ffmpeg-snapshot.tar.bz2 && \
tar xjvf ffmpeg-snapshot.tar.bz2 && \
cd ffmpeg && \
PATH="$HOME/bin:$PATH" PKG_CONFIG_PATH="$HOME/ffmpeg_build/lib/pkgconfig" ./configure \
  --prefix="$HOME/ffmpeg_build" \
  --pkg-config-flags="--static" \
  --extra-cflags="-I$HOME/ffmpeg_build/include" \
  --extra-ldflags="-L$HOME/ffmpeg_build/lib" \
  --extra-libs="-lpthread -lm" \
  --bindir="$HOME/bin" \
  --enable-gpl \
  --enable-libass \
  --enable-libfdk-aac \
  --enable-libfreetype \
  --enable-libmp3lame \
  --enable-libvpx \
  --enable-libx264 \
  --enable-nonfree \
  --target-os=linux \
  --enable-decoder=h264 \
  --enable-network \
  --enable-protocol=tcp \
  --enable-demuxer=rtsp && \
PATH="$HOME/bin:$PATH" make && \
make install && \
hash -r
cd ..
```

Now re-login or run the following command for your current shell session to recognize the new ffmpeg location:

```bash
export PATH=$PATH:$HOME/bin
source ~/.profile
```

or

```bash
cp ~/bin/* /usr/local/bin
```

You can now test the command:

```bash
ffmpeg

ffmpeg version N-97038-g7239254 Copyright (c) 2000-2020 the FFmpeg developers
  built with gcc 7 (Ubuntu 7.5.0-3ubuntu1~18.04)
  configuration: --prefix=/root/ffmpeg_build --pkg-config-flags=--static --extra-cflags=-I/root/ffmpeg_build/include --extra-ldflags=-L/root/ffmpeg_build/lib --extra-libs='-lpthread -lm' --bindir=/root/bin --enable-gpl --enable-libass --enable-libfdk-aac --enable-libfreetype --enable-libmp3lame --enable-libvpx --enable-libx264 --enable-nonfree --target-os=linux --enable-decoder=h264 --enable-network --enable-protocol=tcp --enable-demuxer=rtsp
```

### Installing NodeJS 12 LTS

[Node.js v12.x for Ubuntu](https://github.com/nodesource/distributions/blob/master/README.md#debinstall)

```bash
curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash -
apt install -y nodejs
apt-mark hold nodejs
```

Test and update your installation:

```bash
node -v
v12.16.1

npm -v
6.13.4

npm update -g
```

### Installing Homebridge

Install [homebridge](https://www.npmjs.com/package/homebridge) and the [homebridge ui](https://www.npmjs.com/package/homebridge-config-ui-x):

```bash
npm install -g --unsafe-perm homebridge homebridge-config-ui-x
```

Copy the sample configuration:

```bash
cp /usr/lib/node_modules/homebridge/config-sample.json ~/.homebridge/config.json
nano ~/.homebridge/config.json
```

Add this to your homebridge config.json file:

```json
"platforms": [
  {
    "platform": "config",
    "name": "Config",
    "port": 8080,
    "sudo": false
  }
]
```

And start Homebridge with:

```bash
homebridge
```

![Homebridge INSTAR MQTT](./Homebridge_01.png)

You can install the following plugins through the Homebridge UI:

* homebridge-camera-ffmpeg
* homebridge-mqttthing

![Homebridge INSTAR MQTT](./Homebridge_02.png)

![Homebridge INSTAR MQTT](./Homebridge_03.png)

![Homebridge INSTAR MQTT](./Homebridge_04.png)

### Adding your Camera

You can now switch to the __Config__ tab in the Homebridge UI and will see the sample configuration that was added by the camera plugin:
"Camera ffmpeg", "cameras": [ { "name": "Default Camera", "videoConfig": { "source": "-re -i rtsp://myfancy_rtsp_stream", "stillImageSource": "-i http://faster_still_image_grab_url/this_is_optional.jpg", "maxStreams": 2, "maxWidth": 1920, "maxHeight": 1080, "maxFPS": 10, "maxBitrate": 300, "vcodec": "libx264", "packetSize": 1316, "audio": false, "debug": false } } ], "platform": "Camera-ffmpeg" } ``` You can [edit this configuration](https://github.com/KhaosT/homebridge-camera-ffmpeg#readme) to add your personal IP camera: ```json { "platform": "Camera-ffmpeg", "videoProcessor": "/root/bin/ffmpeg", "cameras": [ { "name": "Name of your Camera", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567890", "firmwareRevision": "1.0", "videoConfig": { "source": "-rtsp_transport tcp -vcodec -re -i rtsp://admin:[email protected]:554/12", "stillImageSource": "-re -i http://192.168.2.117/tmpfs/auto.jpg?usr=admin&pwd=<PASSWORD>", "vcodec": "libx264", "maxStreams": 3, "maxWidth": 1280, "maxHeight": 720, "videoFilter": "scale=1280:720", "maxFPS": 10, "maxBitrate": 4096, "packetSize": 1316, "audio": false, "additionalCommandline": "", "debug": true } } ] } ``` ![Homebridge INSTAR MQTT](./Homebridge_05.png) ### Adding MQTT Button to the Hombridge UI ```json "accessories": [ { "accessory": "mqttthing", "type": "switch", "name": "Alarm Area 1", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567890", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "Alarm Area 1", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/area1/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/area1/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 } ], ``` ![Homebridge INSTAR MQTT](./Homebridge_06.png) Recommit the image and restart the container. 
```bash podman commit --author "<NAME>" <container ID> mqtt-homebridge ``` ```bash podman run \ --name homebridge \ --net=host \ --privileged \ --rm \ -ti -u root localhost/mqtt-homebridge /bin/bash ``` This time start Homebridge with the `-I` flag to get access to the __Accessories__ tab: ```bash homebridge -I ``` ![Homebridge INSTAR MQTT](./Homebridge_07.png) ![Homebridge INSTAR MQTT](./Homebridge_08.png) ![Homebridge INSTAR MQTT](./Homebridge_09.png) ```json "accessories": [ { "accessory": "mqttthing", "type": "switch", "name": "Alarm Area 1", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567889", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "Alarm Area 1", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/area1/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/area1/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Alarm Area 2", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567891", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "Alarm Area 2", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/area2/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/area2/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Alarm Area 3", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567892", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "Alarm Area 3", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/area3/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/area3/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Alarm Area 4", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567893", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "Alarm Area 4", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/area4/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/area4/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "PIR Sensor", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567894", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "PIR Sensor", 
"mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/actions/pir/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/actions/pir/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Link Areas", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567895", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "Link Areas", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/actions/linkareas", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/actions/linkareas", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "on", "offValue": "off", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Alarm Email", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567896", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "Alarm Email", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/actions/email", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/actions/email", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "on", "offValue": "off", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Alarm Push", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567897", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "Alarm Push", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/push/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/push/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "on", "offValue": "off", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Audio", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567898", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "Audio", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/multimedia/audio/enable/high", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/multimedia/audio/enable/high", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Preset Position", "manufacturer": "INSTAR", "model": "IN-9020 FullHD", "serialNumber": "1234567899", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "<PASSWORD>", "caption": "Preset Position", "mqttOptions": { "keepalive": 30 }, 
"mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/features/ptz/preset", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/features/ptz/preset", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 } ], ``` * [Wiki MQTT](https://wiki.instar.com/Advanced_User/Homebridge_INSTAR_MQTT_to_Homekit/) * [Wiki HTTP](https://wiki.instar.com/Frequently_Asked_Question/Homebridge_to_Homekit_without_MQTT/) * [Wiki Node-RED](https://wiki.instar.com/Frequently_Asked_Question/Homebridge_in_Node-RED/) ```bash podman run \ --net=host \ --name=instar-mqtt-homebridge \ --privileged \ --rm \ -e PUID=0 -e PGID=0 \ -e TZ=Europe/Berlin \ -e HOMEBRIDGE_CONFIG_UI=1 \ -e HOMEBRIDGE_CONFIG_UI_PORT=8080 \ -e HOMEBRIDGE_INSECURE=1 \ -v /opt/homebridge:/homebridge \ -ti -u root localhost/instar-mqtt-homebridge /bin/bash ``` ```bash podman run \ --net=host \ --name=instar-mqtt-homebridge \ --privileged \ --rm \ -e PUID=0 -e PGID=0 \ -e TZ=Europe/Berlin \ -e HOMEBRIDGE_CONFIG_UI=1 \ -e HOMEBRIDGE_CONFIG_UI_PORT=8080 \ -e HOMEBRIDGE_INSECURE=1 \ -v /opt/homebridge:/homebridge \ -ti -u root docker.io/oznu/homebridge /bin/ash ``` ```json { "bridge": { "name": "Homebridge", "username": "AA:CC:AA:AA:CC:AA", "manufacturer": "homebridge.io", "model": "homebridge", "port": 51826, "pin": "866-88-668" }, "description": "This is an example configuration file with one fake accessory and one fake platform. You can use this as a template for creating your own configuration file containing devices you actually own.", "ports": { "start": 52100, "end": 52150, "comment": "This section is used to control the range of ports that separate accessory (like camera or television) should be bind to." 
}, "accessories": [ { "accessory": "mqttthing", "type": "switch", "name": "Alarm Area 1", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567889", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": <PASSWORD>{|}", "caption": "Alarm Area 1", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/area1/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/area1/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Alarm Area 2", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567891", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": <PASSWORD>{|}", "caption": "Alarm Area 2", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/area2/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/area2/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Alarm Area 3", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567892", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "!#()*-./<?@[]^_{|}", "caption": "Alarm Area 3", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/area3/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/area3/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Alarm Area 4", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567893", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "!#()*-./<?@[]^_{|}", "caption": "Alarm Area 4", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/area4/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/area4/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "PIR Sensor", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567894", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "!#()*-./<?@[]^_{|}", "caption": "PIR Sensor", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/actions/pir/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/actions/pir/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", 
"type": "switch", "name": "Link Areas", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567895", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": "!#()*-./<?@[]^_{|}", "caption": "Link Areas", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/actions/linkareas", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/actions/linkareas", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "on", "offValue": "off", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Alarm Email", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567896", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": <PASSWORD>{|}", "caption": "Alarm Email", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/actions/email", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/actions/email", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "on", "offValue": "off", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Alarm Push", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567897", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": <PASSWORD>{|}", "caption": "Alarm Push", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/alarm/push/enable", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/alarm/push/enable", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "on", "offValue": "off", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Audio", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567898", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": <PASSWORD>{|}", "caption": "Audio", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/multimedia/audio/enable/high", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/multimedia/audio/enable/high", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "mqttthing", "type": "switch", "name": "Preset Position", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234<PASSWORD>", "firmwareRevision": "0.1", "url": "http://192.168.2.117:1883", "username": "admin", "password": <PASSWORD>{|}", "caption": "Preset Position", "mqttOptions": { "keepalive": 30 }, "mqttPubOptions": { "retain": true }, "logMqtt": true, "topics": { "getOn": { "topic": "instar/local/status/features/ptz/preset", "apply": "return JSON.parse(message).val;" }, "setOn": { "topic": "instar/local/features/ptz/preset", "apply": "return JSON.stringify({val: (message)})" } }, "onValue": "1", "offValue": "0", "confirmationPeriodms": 1000, "retryLimit": 3 }, { "accessory": "HTTP-SWITCH", "type": "switch", "name": "Areas On", 
"manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567900", "firmwareRevision": "0.1", "switchType": "stateless", "caption": "All Areas On", "onUrl": [ "http://admin:[email protected]/param.cgi?cmd=setmdattr&-name=1&-enable=1", "http://admin:[email protected]/param.cgi?cmd=setmdattr&-name=2&-enable=1", "http://admin:[email protected]/param.cgi?cmd=setmdattr&-name=3&-enable=1", "http://admin:[email protected]/param.cgi?cmd=setmdattr&-name=4&-enable=1" ] }, { "accessory": "HTTP-SWITCH", "name": "Areas Off", "manufacturer": "INSTAR", "model": "IN-8015 FullHD", "serialNumber": "1234567901", "firmwareRevision": "0.1", "switchType": "stateless", "caption": "All Areas Off", "onUrl": [ "http://admin:[email protected]/param.cgi?cmd=setmdattr&-name=1&-enable=0", "http://admin:[email protected]/param.cgi?cmd=setmdattr&-name=2&-enable=0", "http://admin:[email protected]/param.cgi?cmd=setmdattr&-name=3&-enable=0", "http://admin:[email protected]/param.cgi?cmd=setmdattr&-name=4&-enable=0" ] } ], "platforms": [ { "name": "Config", "port": 8080, "auth": "form", "theme": "dark-mode", "tempUnits": "c", "lang": "auto", "sudo": false, "accessoryControl": { "debug": true }, "platform": "config" }, { "cameras": [ { "name": "IN-8015 Full HD", "manufacturer": "INSTAR", "model": "IN-8015 Full HD", "serialNumber": "000-0000-001", "firmwareRevision": "v666", "videoConfig": { "source": "-re -i rtsp://admin:[email protected]:554/12", "stillImageSource": "-re -i http://192.168.2.117:80/tmpfs/auto.jpg?usr=admin&pwd=<PASSWORD>", "maxStreams": 3, "maxWidth": 640, "maxHeight": 352, "maxFPS": 15, "maxBitrate": 512, "preserveRatio": "W", "vcodec": "libx264", "additionalCommandline": "-rtsp_transport tcp" } } ], "platform": "Camera-ffmpeg" } ] } ```<file_sep>--- date: "2018-11-12" title: "Using SQLite with Node-RED on Windows" categories: - Windows - Node-RED - IoT - Databases - SQL --- ![Pokhara, Nepal](./photo-15491736416_6abd8de751_o.png) <!-- TOC --> - [Installing SQLite](#installing-sqlite) - [Creating your first Database](#creating-your-first-database) - [Installing SQLite in Node-RED](#installing-sqlite-in-node-red) - [INSERT inject node](#insert-inject-node) - [SELECT inject node with](#select-inject-node-with) - [DELETE inject node as follows](#delete-inject-node-as-follows) - [DROP inject node](#drop-inject-node) - [Visualizing Data from SQLite in Node-RED Dashboard](#visualizing-data-from-sqlite-in-node-red-dashboard) - [Database Administration](#database-administration) - [Flow Export](#flow-export) <!-- /TOC --> ## Installing SQLite First scroll down to the newest pre-compiled version for your Operating System [here](https://www.sqlite.org/download.html) and choose the bundle of command-line tools for managing SQLite database files, including the command-line shell program, the sqldiff.exe program, and the sqlite3_analyzer.exe program. Unzip the folder, rename it something short - e.g. sqlite3 - and copy it to your C:\ partition. Now add the installation path to your system variables and run the __sqlite3__ command to see that it is working: ![SQLite](./nodered_01.png) To exit the SQLite Shell again, type `.quit` ### Creating your first Database Navigate to the directory you want to store your data and type `sqlite3 mydb.db` and `.databases`: ![SQLite](./nodered_02.png) To add a table, type in the following `create table cameras (id INT primary key);`. This will create a table named __cameras__ with a single (primary) column with the name of __id__ that expects an integer value. 
## Installing SQLite in Node-RED

Open the __Manage Palette__ menu inside Node-RED, switch to the __Install__ tab and search for __node-red-node-sqlite__. More information can be found [here](https://flows.nodered.org/node/node-red-node-sqlite). A new node called sqlite should appear on the left under the storage tab. In this flow, you're going to send 5 SQL queries (CREATE, INSERT, SELECT, DELETE and DROP) to your SQLite database:

![SQLite](./nodered_03.png)

Double-click the __SQLite Node__, press the __Add new sqlitedb__ button and type in the absolute path to the SQLite database we created earlier:

![SQLite](./nodered_04.png)

Configure your CREATE __Inject Node__ as follows:

```sql
CREATE TABLE dhtreadings(id INTEGER PRIMARY KEY AUTOINCREMENT, temperature NUMERIC, humidity NUMERIC, currentdate DATE, currenttime TIME, device TEXT)
```

![SQLite](./nodered_05.png)

Repeat this step to create the following:

### INSERT inject node

```sql
INSERT INTO dhtreadings(temperature, humidity, currentdate, currenttime, device) values(22.4, 48, date('now'), time('now'), "manual")
```

### SELECT inject node with

```sql
SELECT * FROM dhtreadings
```

### DELETE inject node as follows

```sql
DELETE from dhtreadings
```

### DROP inject node

```sql
DROP TABLE dhtreadings
```

To save your application, you need to click the __Deploy__ button in the top right corner (`You might have to restart Node-RED`) and your application is saved and ready. Open the debug window and press the first inject node to trigger the CREATE SQL query:

![SQLite](./nodered_06.png)

## Visualizing Data from SQLite in Node-RED Dashboard

We are able to visualize the data by using the [dashboard nodes](https://github.com/mpolinowski/nodered-dashboard-getting-started). Injecting `SELECT * FROM dhtreadings` into our SQLite node gives us an array of records in the response, so we can simply use the UI template node to extract the information we want.

![SQLite](./nodered_07.png)

For example, if we would like to have a table showing the first 2 records, we simply connect the template UI node to the SQLite node:

![SQLite](./nodered_08.png)

In the template node, enter the following code:

```html
<table style="width:100%">
  <tr>
    <th>Time</th>
    <th>Temp</th>
    <th>Hum</th>
  </tr>
  <tr ng-repeat="x in msg.payload | limitTo:2">
    <td>{{msg.payload[$index].currenttime}}</td>
    <td>{{msg.payload[$index].temperature}}</td>
    <td>{{msg.payload[$index].humidity}}</td>
  </tr>
</table>
```

![SQLite](./nodered_09.png)

This code simply builds a table in HTML and places the currenttime, temperature and humidity fields from msg.payload into the appropriate cells of the table using __ng-repeat__ & __limitTo__. Once you deploy the nodes, the UI template node will update the table whenever SELECT * is sent to the SQLite node and an array of records is returned from the SQLite node to the UI template node.
The resulting table would look like this when you hit the URL http://192.168.1.112:1880/ui/: ![SQLite](./nodered_10.png) ## Database Administration For the basic database administrative activities you can use web (php) based tools like __phpLiteAdmin__ or go with an installable tool like [SQLiteBrowser](https://github.com/sqlitebrowser/sqlitebrowser): ![SQLite](./nodered_11.png) ![SQLite](./nodered_12.png) ## Flow Export ```json [{"id":"af8b3131.1b188","type":"tab","label":"sqlite","disabled":false,"info":""},{"id":"d7f0c96f.0d0588","type":"inject","z":"af8b3131.1b188","name":"CREATE","topic":"CREATE TABLE dhtreadings(id INTEGER PRIMARY KEY AUTOINCREMENT, temperature NUMERIC, humidity NUMERIC, currentdate DATE, currenttime TIME, device TEXT)","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":90,"y":40,"wires":[["7f9d303d.adb45"]]},{"id":"522e8f98.ac3de","type":"inject","z":"af8b3131.1b188","name":"INSERT","topic":"INSERT INTO dhtreadings(temperature, humidity, currentdate, currenttime, device) values(22.4, 48, date('now'), time('now'), \"manual\")","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":90,"y":100,"wires":[["7f9d303d.adb45"]]},{"id":"40fe756f.d7cd5c","type":"inject","z":"af8b3131.1b188","name":"SELECT","topic":"SELECT * FROM dhtreadings","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":90,"y":160,"wires":[["7f9d303d.adb45"]]},{"id":"3b1c34a.1639acc","type":"inject","z":"af8b3131.1b188","name":"DELETE","topic":"DELETE from dhtreadings","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":90,"y":220,"wires":[["7f9d303d.adb45"]]},{"id":"57370135.7daa1","type":"inject","z":"af8b3131.1b188","name":"DROP TABLE","topic":"DROP TABLE dhtreadings","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":100,"y":280,"wires":[["7f9d303d.adb45"]]},{"id":"7f9d303d.adb45","type":"sqlite","z":"af8b3131.1b188","mydb":"4e71ffcc.32ba8","sqlquery":"msg.topic","sql":"","name":"SQLite","x":270,"y":160,"wires":[["593ed62b.495198"]]},{"id":"593ed62b.495198","type":"debug","z":"af8b3131.1b188","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":430,"y":160,"wires":[]},{"id":"3735b0c8.22ff","type":"sqlite","z":"af8b3131.1b188","mydb":"4e71ffcc.32ba8","sqlquery":"msg.topic","sql":"","name":"SQLite","x":370,"y":300,"wires":[["2f6afcf0.58e874"]]},{"id":"c8e55090.582db","type":"inject","z":"af8b3131.1b188","name":"View Records","topic":"SELECT * FROM dhtreadings","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":300,"y":240,"wires":[["3735b0c8.22ff"]]},{"id":"2f6afcf0.58e874","type":"ui_template","z":"af8b3131.1b188","group":"5be80bfc.629394","name":"UI Table","order":2,"width":"6","height":"3","format":"<table style=\"width:100%\">\n <tr>\n <th>Time</th> \n <th>Temp</th> \n <th>Hum</th>\n </tr>\n <tr ng-repeat=\"x in msg.payload | limitTo:2\">\n <td>{{msg.payload[$index].currenttime}}</td>\n <td>{{msg.payload[$index].temperature}}</td> \n <td>{{msg.payload[$index].humidity}}</td>\n 
</tr>\n</table>","storeOutMessages":true,"fwdInMessages":true,"templateScope":"local","x":430,"y":360,"wires":[[]]},{"id":"4e71ffcc.32ba8","type":"sqlitedb","z":"","db":"C:\\Users\\INSTAR\\.node-red\\db\\mydb.db"},{"id":"5be80bfc.629394","type":"ui_group","z":"","name":"Data","tab":"da0180c5.64fa9","order":3,"disp":true,"width":"6","collapse":false},{"id":"da0180c5.64fa9","type":"ui_tab","z":"","name":"IP Camera","icon":"photo_camera"}] ```<file_sep>--- date: "2018-01-17" title: "Server Rendering with React and React Router" categories: - Javascript - React --- import GifContainer from "../../src/components/ImageContainer"; ![<NAME>](./photo-11627973773_7295195845_o.png) > Server side rendering a React app can provide a few different benefits including performance and SEO. The problem is with those benefits comes a cost of additional complexity to your application. In this project, we'll start from scratch and slowly build a server side rendered React 16.3 with React Router 4. <!-- TOC --> - [01 Setup Webpack 3](#01-setup-webpack-3) - [02 Hello World](#02-hello-world) - [03 Rehydration](#03-rehydration) - [04 Data Fetch Api](#04-data-fetch-api) - [05 React Router](#05-react-router) - [Server Side](#server-side) - [Client Side](#client-side) <!-- /TOC --> ## 01 Setup Webpack 3 When accessing our app, we first want to be presented with a static HTML scaffold of our app, that loads immediately. This static layout will be re-hydrated by the actual react app, in form of an embedded _bundle.js_ file. This file can become quite big and might take a while to download - the static HTML is going to help us bridging the load time, before react kicks into action. This way there are two render cases that we have to handle inside webpack - on our server and inside the clients browser. To accomplish this we need to add two configs - browserConfig and serverConfig to our ./webpack.config.js file. The following dependencies need to be _npm installed_ and required in our Webpack Config: ```js var path = require('path') var webpack = require('webpack') var nodeExternals = require('webpack-node-externals') ``` Client Side Rendering Case: ```js var browserConfig = { entry: './src/browser/index.js', output: { path: path.resolve(__dirname, 'public'), filename: 'bundle.js', publicPath: '/' }, module: { rules: [{ test: /\.(js)$/, use: 'babel-loader' }, ] }, plugins: [ new webpack.DefinePlugin({ __isBrowser__: "true" }) ] } ``` Webpack is going to take a `./src/browser/index.js` file, and - with the help of Babel - bundle everything into a big / browser-readable javascript file in `./public`. Make sure to install the following Babel components for this to work: ```bash npm install babel-core babel-loader babel-plugin-transform-object-rest-spread babel-preset-env babel-preset-react ``` And configure babel to use the _react_ and _env_ preset inside your ./package.json file: ```json "babel": { "presets": [ "env", "react" ], "plugins": [ "transform-object-rest-spread" ] }, ``` Server Side Rendering Case: ```js var serverConfig = { entry: './src/server/index.js', target: 'node', externals: [nodeExternals()], output: { path: __dirname, filename: 'server.js', publicPath: '/' }, module: { rules: [{ test: /\.(js)$/, use: 'babel-loader' }] }, plugins: [ new webpack.DefinePlugin({ __isBrowser__: "false" }) ] } ``` When __isBrowser__ is set to false, Webpack will use the serverConfig, grabs the `./src/server/index.js`, targets _node_ and babel-transpiles / bundles everything up inside _server.js_ in the root directory. 
The _externals_ line simply states that our _node modules_ are not bundled up as well.

## 02 Hello World

Now we need to create the files that we defined in our Webpack Config. Let's create a `./src` folder and add `./src/browser`, `./src/server` and `./src/shared`. The last one will contain code that is shared between the server and client side rendered file.

First, let's:

```bash
npm install react react-dom
```

Then get started with a Hello World - add a `./src/shared/app.js` file with our primary React component:

```jsx
import React, {Component} from 'react'

class App extends Component {
  render() {
    return (
      <div>
        Hello World
      </div>
    )
  }
}

export default App
```

Inside `./src/server/index.js` we want to create an [Express.js](https://expressjs.com) webserver for our app. First `npm install express cors`, import them and create an Express app that is listening on port 3000, serving the content of our `./public`:

```js
import express from 'express'
import cors from 'cors'

const app = express()
const port = 3000

app.use(cors())
app.use(express.static('public'))

app.listen(port, () => {
  console.log('Express Server listening on port: '+ port)
})
```

To make Express serve our Hello World component, we can use the `renderToString` method from `react-dom/server`:

```js
...
import React from 'react'
import { renderToString } from 'react-dom/server'
import App from '../shared/App'
...

app.get('*', (req, res, next) => {
  const markup = renderToString(
    <App />
  ) //Create HTML Markup from App React Component

  res.send(`
    <!DOCTYPE html>
    <html>
      <head>
        <title>Server Rendering with React and React Router</title>
        <script src='/bundle.js' defer></script>
      </head>
      <body>
        <div id="app">${markup}</div>
      </body>
    </html>
  `) //Wrap Markup inside HTML site and send to client and link to generated bundle.js
})
```

To test our app, let's install [Nodemon](https://nodemon.io) (it will catch changes we make to the code and automatically restart the server when necessary) and add an `npm start` script inside the package.json that allows us to run both npm scripts - the `develop` script that starts Webpack and the `watch` script - in parallel:

```json
"scripts": {
  "start": "npm run develop | npm run watch",
  "develop": "webpack --watch",
  "watch": "nodemon server.js",
  "test": "echo \"Error: no test specified\" && exit 1"
},
```

A proposed alternative to piping both commands together is using [npm-run-all](https://www.npmjs.com/package/npm-run-all) (another option would have been [concurrently](https://www.npmjs.com/package/concurrently)), which has to be installed as a dev dependency:

```json
"scripts": {
  "start": "npm-run-all --parallel develop watch",
  "develop": "webpack -w",
  "watch": "nodemon server.js",
  "test": "echo \"Error: no test specified\" && exit 1"
}
```

We can now `npm start` both components of the app server and see our Hello World by accessing _http://localhost:3000_ with a web browser.

## 03 Rehydration

We now have the server-side rendered part of our app. The next step is to build the client-side part - and rehydrate the static HTML with React once the Javascript bundle is loaded. To visualize the process, let's add some data to our App React Component:

_./src/shared/app.js_

```jsx
class App extends Component {
  render() {
    return (
      <div>
        Hello {this.props.data}
      </div>
    )
  }
}
```

Now we can call our app with data in `./src/server/index.js`:

```js
app.get('*', (req, res, next) => {
  const markup = renderToString(
    <App data='World' />
  )
```

This is going to display our old __Hello World__.
By calling the component with a different data string in the client side version of our app, we will be able to see the rehydration in process:

`./src/browser/index.js`

```js
import React from 'react'
import { hydrate } from 'react-dom'
import App from '../shared/App'

hydrate(
  <App data='You' />,
  document.getElementById('app')
)
```

This process will search for the element ID 'app' inside our server-side rendered HTML and replace (rehydrate) the element with the corresponding, fully functional React component. Since we are passing in different data (World / You), we will be able to see when the rehydration process succeeds - which on localhost is of course very fast:

<GifContainer gifUrl="/assets/gif/ssrrr_01.gif" alt="React Rehydration" />

But taking a look at your browser console tells you that React is not happy about the mismatch - it expects the rendered content to be identical between the server and the client!

![React Rehydration](./ssrrr_02.png)

To make sure that the data is always the same on both sides, we can hardcode it on the server side only (you need to `npm install serialize-javascript`) and pass it to the window object from there:

`./src/server/index.js`

```jsx
...
import serialize from 'serialize-javascript'
...

app.get('*', (req, res, next) => {
  const world = 'World'

  const markup = renderToString(
    <App data={world} />
  )

  res.send(`
    <!DOCTYPE html>
    <html>
      <head>
        <title>Server Rendering with React and React Router</title>
        <script src='/bundle.js' defer></script>
        <script>window.__INITIAL_DATA__ = ${serialize(world)}</script>
      </head>
      <body>
        <div id="app">${markup}</div>
      </body>
    </html>
  `)
})
```

_./src/browser/index.js_

```jsx
hydrate(
  <App data={window.__INITIAL_DATA__} />,
  document.getElementById('app')
)
```

<GifContainer gifUrl="/assets/gif/ssrrr_03.gif" alt="React Rehydration" />

Now the server rendered version will have the hardcoded data, and the client side can pick it up from __\_\_INITIAL\_DATA\_\___

## 04 Data Fetch Api

To make our app a little bit more useful, let's - instead of using hardcoded strings - fetch some data from the [Github API](https://developer.github.com/v3/). We create a `./src/shared/api.js` file and add the necessary Fetch API AJAX request (you need to `npm install isomorphic-fetch`) to retrieve a JSON response with the most beloved repositories from different programming languages:

```js
import fetch from 'isomorphic-fetch'

export function fetchPopularRepos(language = 'all') {
  const encodedURI = encodeURI(`https://api.github.com/search/repositories?q=stars:>1+language:${language}&sort=stars&order=desc&type=Repositories`)

  return fetch(encodedURI)
    .then((data) => data.json())
    .then((repos) => repos.items)
    .catch((error) => {
      console.warn(error)
      return null
    });
}
```

To add this data to our server rendered HTML, we first need to import it:

`./src/server/index.js`

```js
import { fetchPopularRepos } from '../shared/api'
```

And then pass the data response to the render function:

```jsx
app.get('*', (req, res, next) => {
  fetchPopularRepos()
    .then((data) => {
      const markup = renderToString(
        <App data={data} />
      )

      res.send(`
        <!DOCTYPE html>
        <html>
          <head>
            <title>Server Rendering with React and React Router</title>
            <script src='/bundle.js' defer></script>
            <script>window.__INITIAL_DATA__ = ${serialize(data)}</script>
          </head>
          <body>
            <div id="app">${markup}</div>
          </body>
        </html>
      `)
    })
})
```

To make the API response a little bit more readable, let's create a Grid component that will be imported to our \<App/\> component.
_./shared/grid.js_

```jsx
import React, { Component } from 'react'

class Grid extends Component {
  render() {
    const repos = this.props.data

    return (
      <ul style={{ display: 'flex', flexWrap: 'wrap' }}>
        {repos.map(({ name, owner, stargazers_count, html_url }) => (
          <li key={name} style={{ margin: 30 }}>
            <ul>
              <li><a href={html_url}>{name}</a></li>
              <li>@{owner.login}</li>
              <li>{stargazers_count} stars</li>
            </ul>
          </li>
        ))}
      </ul>
    )
  }
}

export default Grid
```

Since we already passed down the data from our API call to the \<App/\> component, we can now pass it further down to our new \<Grid/\> component:

_./src/shared/app.js_

```jsx
import Grid from './grid'

class App extends Component {
  render() {
    return (
      <div>
        <Grid data={this.props.data} />
      </div>
    )
  }
}
```

The Github API call is now populating the Grid inside our App component. Opening _http://localhost:3000_ inside your browser will show you a list of the most popular Github repositories for all programming languages:

![Popular Repositories on Github](./ssrrr_04.png)

## 05 React Router

### Server Side

Now we want to add some routes to our app using [React Router](https://github.com/ReactTraining/react-router). We will have to use a shared route config for server and client side navigation. We will add this central route configuration file in `./src/shared/routes.js`:

```js
import Home from './home'
import Grid from './grid'

const routes = [
  {
    path: '/',
    exact: true,
    component: Home,
  },
  {
    path: '/popular/:id',
    component: Grid,
  }
]

export default routes
```

The App will have 2 available routes - the root page and the page that will render the Github API response. Let's quickly add a scaffold for the \<Home \/\> component that will later let you choose the language you want to see popular repositories for:

`./src/shared/home.js`

```jsx
import React from 'react'

export default function Home () {
  return (
    <div>
      Select a language
    </div>
  )
}
```

To enable our server to pre-render each page, we have to add every data request that a route needs into the central route object `./src/shared/routes.js`:

```js
import { fetchPopularRepos } from './api'

...

  {
    path: '/popular/:id',
    component: Grid,
    fetchInitialData: (path = '') => fetchPopularRepos(path.split('/').pop())
  }
```

The user will be able to choose the programming language he is interested in inside the \<Home \/\> component and will navigate to the specific route `/popular/${language}`. The initial data for the server rendered page will be taken from the Github API call with the chosen language from the URL path.

Now we need to make `./src/server/index.js` aware of what route the user chose, so that the correct content will be pre-rendered. We can do this with the __matchPath__ method provided by react-router-dom:

```jsx
import { matchPath } from 'react-router-dom'
import routes from '../shared/routes'

...

app.get('*', (req, res, next) => { //when app receives a GET request...
  const activeRoute = routes.find((route) => matchPath(req.url, route)) || {} //find out what route matches the requested URL...

  fetchPopularRepos()
    .then((data) => {
      const markup = renderToString(
        <App data={data} />
      )
...
```

To find the active route, we compare the requested URL with the route object (imported from `./src/shared/routes/`); if no match is found, the active route is set to an empty object. We can now handle the fetching of initial data with a promise and remove the _fetchPopularRepos_ API call (as it is now handled inside the central route object):

```jsx
// import { fetchPopularRepos } from '../shared/api' //REMOVE

...

app.get('*', (req, res, next) => { //when app receives a GET request...
  const activeRoute = routes.find((route) => matchPath(req.url, route)) || {} //find out what route matches the requested URL...

  const promise = activeRoute.fetchInitialData //find out if the route needs to fetch data
    ? activeRoute.fetchInitialData(req.path) //if so, fetch the data for the active route
    : Promise.resolve() //if no data is needed (home route), just resolve

  promise.then((data) => { //when everything is fetched, render the app markup with {data}
    const markup = renderToString(
      <App data={data} />
    )

    res.send(`
      <html>
      ...
      </html>
    `) //and send the static HTML page to client
  }).catch(next) //if there are any errors, move on to the next request
})

...
```

You can test the routing by opening a valid URL that follows our requirement `path:'/popular/:id'`, e.g. _http://localhost:3000/popular/javascript_

![Popular Javascript Repositories on Github](./ssrrr_05.png)

The Router matches the URL, fetches the corresponding data from the Github API and provides it to the Grid component that is rendered and sent to the client.

### Client Side

Now we have to add the client side routing. First, we wrap the \<App \/\> component in `./src/browser/index.js` inside a \<BrowserRouter \/\>:

```jsx
import React from 'react'
import { hydrate } from 'react-dom'
import { BrowserRouter } from 'react-router-dom'
import App from '../shared/App'

hydrate(
  <BrowserRouter>
    <App data={window.__INITIAL_DATA__} />
  </BrowserRouter>,
  document.getElementById('app')
)
```

And use the \<StaticRouter \/\> for the server side render:

```jsx
import { matchPath, StaticRouter } from 'react-router-dom'

...

promise.then((data) => {
  const markup = renderToString(
    <StaticRouter location={req.url} context={{}}>
      <App data={data} />
    </StaticRouter>
  )
```

The \<App \/\> component in `./src/shared/app.js` now no longer needs to render the \<Grid \/\> directly, but will instead render a \<Route \/\> for every entry in our central route configuration:

```jsx
import React, { Component } from 'react'
import routes from './routes'
import { Route } from 'react-router-dom'

class App extends Component {
  render() {
    return (
      <div>
        {routes.map(({ path, exact, component: C, ...rest }) => (
          <Route
            key={path}
            path={path}
            exact={exact}
            render={(props) => (
              <C {...props} {...rest} />
            )}
          />
        ))}
      </div>
    )
  }
}

export default App
```
<file_sep>const btn = document.querySelector("button")
const output = document.querySelector("#output")

btn.addEventListener("click", getJoke)

function getJoke() {
    const xhr = new XMLHttpRequest()
    const url = 'https://api.chucknorris.io/jokes/random'

    xhr.onreadystatechange = function () {
        if (xhr.readyState === 4) {
            if (xhr.status === 200) {
                const obj = JSON.parse(xhr.responseText)
                output.innerHTML = obj.value + '<br/><br/><img src="' + obj.icon_url + '"><br/><br/>'
            } else {
                output.innerHTML = "ERROR"
            }
        }
    }

    xhr.open('GET', url)
    xhr.send()

    xhr.addEventListener("progress", callBackfn)
    xhr.addEventListener("load", callBackfn)
    xhr.addEventListener("error", callBackfn)
}

function callBackfn(e) {
    console.log(e)
}<file_sep>---
date: "2019-08-09"
title: "MQTT Networks with homee"
categories:
  - MQTT
  - Smarthome
  - IoT
---

import GifContainer from "../../src/components/ImageContainer";

![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Node-RED Configuration](#node-red-configuration)
- [Connecting the homee Gateway](#connecting-the-homee-gateway)
  - [Using homee Environment Variables](#using-homee-environment-variables)
    - [Flow Download](#flow-download)
  - [Connecting a Sensor to your Camera](#connecting-a-sensor-to-your-camera)
    - [Flow Download](#flow-download-1)

<!-- /TOC -->

## Node-RED Configuration

If you are using a Raspberry Pi to run Node-RED, you can use the [official installation script](https://nodered.org/docs/getting-started/raspberrypi) to have the program set up automatically for you.

## Connecting the homee Gateway

### Using homee Environment Variables

First we are going to use the homee __Webhook__ service to trigger a Node-RED flow for us. As a trigger I want to use the homee system variables like `away`, `home` and `vacation`. Let's start by creating a webhook in Node-RED that we can contact from the homee gateway:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_06.png)

---

You can import this flow to your Node-RED panel with the following JSON code:

```json
[{"id":"181fefd6.961e6","type":"http in","z":"4ea30e4c.29d4","name":"homee env","url":"/homee/environment/","method":"post","upload":false,"swaggerDoc":"","x":80,"y":340,"wires":[["405534e7.66348c","bec44bcd.5eb548"]]},{"id":"405534e7.66348c","type":"http response","z":"4ea30e4c.29d4","name":"Webhook","statusCode":"","headers":{"content-type":"application/json"},"x":228,"y":340,"wires":[]},{"id":"bec44bcd.5eb548","type":"debug","z":"4ea30e4c.29d4","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":240,"y":299,"wires":[]}]
```

The webhook we created will wait to receive a __POST__ request on the URL `/homee/environment/`:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_07.png)

---

Great! Now we can switch to the homee dashboard and create an automation that is able to utilize this webhook. Click to create a new __Homeegram__:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_08.png)

---

Click on __When__ to set the condition that should be met for the action to be triggered and select __Mode__:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_09.png)

---

Select __Mode is set to Home__ and click on __Done__ to save the condition:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_10.png)

---

Now we need to add a __Then__ action we want to be triggered when the condition is met:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_11.png)

---

Here we will choose the __Webhook__ and type in the URL of the hook we just created in Node-RED. We set this to be `/homee/environment/`. So if your Node-RED installation is running on a computer with the IP _192.168.2.48_ and is using the default port _1880_, the complete URL is:

```bash
http://192.168.2.48:1880/homee/environment/
```

Select the POST method and add a JSON body payload `{"val":"home"}`. This way we can reuse the webhook for all environment variables and add different actions in Node-RED depending on what value was posted - `home`, `away`, `sleeping`, `vacation`.

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_12.png)

---

After saving our Homeegram we can now give it a test run. The webhook we created in Node-RED had a debug node connected.
So if you switch on the debug panel in Node-RED (small bug icon in the top right) we should be able to see our POST request come in when we change our homee variable to `home`:

<GifContainer gifUrl="/assets/gif/homee_node-red-mqtt_13.gif" alt="homee Node-RED MQTT connection to your INSTAR IP Camera" />

Now we can repeat those steps to create Homeegrams for every environment variable we want to use in Node-RED. To differentiate between those cases we will use a switch node in Node-RED and attach it to our webhook node (the complete [flow can be downloaded below](#flow-download) and imported to Node-RED):

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_14.png)

---

The switch node will check the message that was sent and look for the value `val`. If it is `home` the message will be forwarded through exit 1, and every message that carries an `away` will leave the node through exit 2. We need to set the message to the value that we need and connect it to an MQTT Out node for the command topic we want to update:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_14.png)

---

In this example we want to disable the motion detection area 1 when the `home` environment variable is activated. This is done by sending `{"val":"0"}` to the `alarm/area1/enable` topic.

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_15.png)

---

#### Flow Download

```json
[{"id":"181fefd6.961e6","type":"http in","z":"4ea30e4c.29d4","name":"homee env","url":"/homee/environment/","method":"post","upload":false,"swaggerDoc":"","x":80,"y":80,"wires":[["405534e7.66348c","97d88190.082db"]]},{"id":"405534e7.66348c","type":"http response","z":"4ea30e4c.29d4","name":"Webhook","statusCode":"","headers":{"content-type":"application/json"},"x":238,"y":40,"wires":[]},{"id":"97d88190.082db","type":"switch","z":"4ea30e4c.29d4","name":"","property":"payload.val","propertyType":"msg","rules":[{"t":"eq","v":"home","vt":"str"},{"t":"eq","v":"away","vt":"str"}],"checkall":"true","repair":false,"outputs":2,"x":229,"y":80,"wires":[["8a63a528.e6e3f8"],["80d7c418.063f58"]]},{"id":"8a63a528.e6e3f8","type":"change","z":"4ea30e4c.29d4","name":"home","rules":[{"t":"set","p":"payload","pt":"msg","to":"{\"val\":\"0\"}","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":372,"y":54,"wires":[["e86399a7.4e76e8"]]},{"id":"80d7c418.063f58","type":"change","z":"4ea30e4c.29d4","name":"away","rules":[{"t":"set","p":"payload","pt":"msg","to":"{\"val\":\"1\"}","tot":"json"}],"action":"","property":"","from":"","to":"","reg":false,"x":372,"y":100,"wires":[["f96ffbe7.2eca78"]]},{"id":"e86399a7.4e76e8","type":"mqtt out","z":"4ea30e4c.29d4","name":"alarm/area1/enable","topic":"instar/000389888811/alarm/area1/enable","qos":"1","retain":"false","broker":"9e4e460d.640f18","x":529,"y":54,"wires":[]},{"id":"f96ffbe7.2eca78","type":"mqtt out","z":"4ea30e4c.29d4","name":"alarm/area1/enable","topic":"instar/000389888811/alarm/area1/enable","qos":"1","retain":"false","broker":"9e4e460d.640f18","x":527,"y":101,"wires":[]},{"id":"9e4e460d.640f18","type":"mqtt-broker","z":"","name":"IN-9010FHD","broker":"192.168.2.165","port":"8883","tls":"b55f9f2e.fe529","clientid":"","usetls":true,"compatmode":true,"keepalive":"60","cleansession":true,"birthTopic":"","birthQos":"0","birthPayload":"","closeTopic":"","closeQos":"0","closePayload":"","willTopic":"","willQos":"0","willPayload":""},{"id":"b55f9f2e.fe529","type":"tls-config","z":"","name":"","cert":"","key":"","ca":"","certname":"instar-cert.cert","keyname":"","caname":"","servername":"","verifyservercert":false}]
```

* __Note__ that this flow uses the MQTT topic `instar/000389888811/alarm/area1/enable` - you will have to swap _000389888811_ with the MAC address of your camera to make this work. See the MQTT Introduction for more details.

### Connecting a Sensor to your Camera

Start by creating a new Homeegram and defining the __When__ condition:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_16.png)

---

This time we have to choose a __Device__ as the trigger for our action:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_17.png)

---

We choose a ZigBee motion detector that has been connected to our homee gateway:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_18.png)

---

The action of this automation should trigger when the ZigBee sensor reports a motion:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_19.png)

---

As action we choose a webhook `http://192.168.2.48:1880/homee/motionsensor/` and POST a `{"val":"1"}` to it:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_20.png)

---

The corresponding webhook in Node-RED is even simpler this time:

---

![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_20.png)

---

homee sends us a POST request with the payload `{"val":"1"}` through the webhook. All we need to do is to connect an MQTT Out node that updates the topic `/alarm/pushalarm` on our camera. Now every time the ZigBee sensor reports a motion, an alarm will be triggered on our camera.
--- ![homee Node-RED MQTT connection to your INSTAR IP Camera](./homee_node-red-mqtt_21.png) --- #### Flow Download ```json [{"id":"4080d8b0.359ba8","type":"http in","z":"4ea30e4c.29d4","name":"homee motion","url":"/homee/motionsensor/","method":"post","upload":false,"swaggerDoc":"","x":90,"y":220,"wires":[["a073e475.1c5da8","66f1a6a1.b59f78"]]},{"id":"a073e475.1c5da8","type":"http response","z":"4ea30e4c.29d4","name":"Webhook","statusCode":"","headers":{"content-type":"application/json"},"x":238,"y":180,"wires":[]},{"id":"66f1a6a1.b59f78","type":"mqtt out","z":"4ea30e4c.29d4","name":"alarm/pushalarm","topic":"instar/000389888811/alarm/pushalarm","qos":"1","retain":"false","broker":"9e4e460d.640f18","x":270,"y":220,"wires":[]},{"id":"9e4e460d.640f18","type":"mqtt-broker","z":"","name":"IN-9010FHD","broker":"192.168.2.165","port":"8883","tls":"b55f9f2e.fe529","clientid":"","usetls":true,"compatmode":true,"keepalive":"60","cleansession":true,"birthTopic":"","birthQos":"0","birthPayload":"","closeTopic":"","closeQos":"0","closePayload":"","willTopic":"","willQos":"0","willPayload":""},{"id":"b55f9f2e.fe529","type":"tls-config","z":"","name":"","cert":"","key":"","ca":"","certname":"instar-cert.cert","keyname":"","caname":"","servername":"","verifyservercert":false}] ``` * __Note__ that this flow uses the MQTT topic `instar/000389888811/alarm/pushalarm` - you will have to swap _000389888811_ with the MAC address of your camera to make this work.<file_sep>--- date: "2019-09-23" title: "MQTT Camera AI Enhanced Security" categories: - MQTT - Smarthome - IoT - Node-RED --- ![Central, Hong Kong](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Installation](#installation) - [Installing Python 3 on CentOS 8](#installing-python-3-on-centos-8) - [Installing Node.js 13 on CentOS 8](#installing-nodejs-13-on-centos-8) - [Installing Node-RED on CentOS 8](#installing-node-red-on-centos-8) <!-- /TOC --> > THIS IS WIP ## Installation Start by downloading [AI_enhanced_video_security](https://github.com/wb666greene/AI_enhanced_video_security) by @wb666greene from Github: ```bash cd /opt git clone https://github.com/wb666greene/AI_enhanced_video_security.git ``` ### Installing Python 3 on CentOS 8 ```bash sudo dnf install python3 python3 --version Python 3.6.8 pip3 --version pip 9.0.3 from /usr/lib/python3.6/site-packages (python 3.6) ``` ```bash pip3 install paho-mqtt numpy requests imutils pillow opencv-contrib-python ``` ### Installing Node.js 13 on CentOS 8 ```bash # As root curl -sL https://rpm.nodesource.com/setup_13.x | bash - # No root privileges # curl -sL https://rpm.nodesource.com/setup_13.x | sudo bash - yum install gcc-c++ make # or: yum groupinstall 'Development Tools' yum -y install nodejs npm ``` ### Installing Node-RED on CentOS 8 ```bash npm install -g --unsafe-perm node-red ``` Once installed as a global module you can use the node-red command to start Node-RED in your terminal. You can use Ctrl-C or close the terminal window to stop Node-RED. You can then access the Node-RED editor by pointing your browser at `http://localhost:1880`. 
Install the following Node-RED modules (e.g. through the __Manage Palette__ menu):

```bash
node-red-contrib-ftp-server
node-red-contrib-simple-gate
```

Import the following [Node-RED Flow](https://raw.githubusercontent.com/wb666greene/AI_enhanced_video_security/master/FTP_image_to_AI_via_MQTT.json):
<file_sep>---
date: "2016-05-27"
title: "JavaScript and Getting Started with APIs and AJAX"
categories:
  - Javascript
  - APIs
---

![Shenzhen, China](./photo-34475542491_9069464269_o.jpg)

<!-- TOC -->

- [Prerequisite](#prerequisite)
- [XHR Requests](#xhr-requests)
- [XHR Requests Parameters](#xhr-requests-parameters)
- [Javascript Fetch and Promises](#javascript-fetch-and-promises)
- [Fetch Response Methods](#fetch-response-methods)
- [Fetch Error Handling](#fetch-error-handling)
- [Fetch Working with Headers](#fetch-working-with-headers)
- [Fetch Working with JSON Endpoints](#fetch-working-with-json-endpoints)
- [Fetch Working with JSON Endpoints Part II](#fetch-working-with-json-endpoints-part-ii)
- [Fetch Working with JSON Endpoints Part III](#fetch-working-with-json-endpoints-part-iii)
- [Building a Mini Application to interact with REST APIs](#building-a-mini-application-to-interact-with-rest-apis)

<!-- /TOC -->

## Prerequisite

We are going to write Javascript files in this course that we cannot simply execute inside our web browser. For this I am going to use the Node.js mini webserver [httpster](https://www.npmjs.com/package/httpster) - this way I am able to serve my files with a simple command from the directory where I stored my files:

```bash
httpster -p 3000
```

We only need a simple HTML boilerplate with the name __index.html__ to wrap our Javascript files in:

```html
<html>
  <title>Javascript API Course</title>
  <body>
    <div id="output"></div>
    <button>Click</button>
    <script src="ajax-request.js"></script>
  </body>
</html>
```

Now I can open my web browser on `localhost:3000` to see my website:

![Javascript APIs](./Javascript_APIs_01.png)

## XHR Requests

XMLHttpRequest (XHR) is an API available to web browser scripting languages such as JavaScript. It is used to send HTTP or HTTPS requests to a web server and load the server response data back into the script. An Ajax call is an asynchronous request initiated by the browser that does not directly result in a page transition. An Ajax ("Asynchronous Javascript and XML") request is sometimes called an XHR request ("XmlHttpRequest"), which is the name most browsers give the object used to send an Ajax request, because at least initially Ajax calls involved the sending and receiving of XML but now it's just as common to send/receive JSON, plain text or HTML.

Ok then, let's start with creating an __XMLHttpRequest Object__ inside the file `ajax-request.js` that we linked into our HTML page above:

```js
const xhr = new XMLHttpRequest();
console.log(xhr);
```

The [XMLHttpRequest.readyState](https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/readyState) property returns the state an XMLHttpRequest client is in. Since we created it but have not yet called `open()`, it is currently `UNSENT` or `0`:

![Javascript APIs](./Javascript_APIs_02.png)

The `open()` call for the request takes 2 arguments. First the kind of request we want to make - we need an __HTTP GET__ request. And secondly we need to say what URL we want to connect to.
For the latter we can use the [Chuck Norris API](https://api.chucknorris.io/) that is free to use:

```js
const xhr = new XMLHttpRequest();
const url = 'https://api.chucknorris.io/jokes/random';

xhr.open('GET', url);
console.log(xhr);
```

![Javascript APIs](./Javascript_APIs_03.png)

The `open()` method has been invoked and the __readyState__ is now `OPENED` or `1` - meaning that the request has been initialized. During this state, the `send()` method can be called, which will initiate the fetch. We can record the steps with the __onreadystatechange__ event handler:

```js
const xhr = new XMLHttpRequest();
const url = 'https://api.chucknorris.io/jokes/random';

xhr.onreadystatechange = function() {
  console.log( xhr.readyState );
}

xhr.open('GET', url);
xhr.send();
console.log(xhr);
```

![Javascript APIs](./Javascript_APIs_05.png)

We can see that the request was received by the API and we are getting an [HTTP Status](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status) of `200` - meaning that the resource of the GET request has been fetched and is transmitted in the message body.

![Javascript APIs](./Javascript_APIs_04.png)

The state changes from 1-4 representing the [XMLHttpRequest readyState](https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/readyState):

1. OPENED
2. HEADERS_RECEIVED
3. LOADING
4. DONE

The data that we receive from our API is only available after we reached the __readyState 4__ on our API call and the request gave us an __HTTP Status 200__! We can use an __if-statement__ to make sure that we do not request the xhr response before this is assured:

```js
const xhr = new XMLHttpRequest();
const url = 'https://api.chucknorris.io/jokes/random';

xhr.onreadystatechange = function() {
  if(xhr.readyState == 4 && xhr.status == 200) {
    console.log(xhr.response);
  }
}

xhr.open('GET', url);
xhr.send();
```

![Javascript APIs](./Javascript_APIs_06.png)

Right now we won't get anything back if the request fails - we can change this by adding an `else` statement that throws us an error. We can format the JSON response of our API using the Javascript `JSON.parse()` method. The [JSON.parse() method](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) parses a JSON string, constructing the JavaScript value or object described by the string.

```js
const xhr = new XMLHttpRequest()
const url = 'https://api.chucknorris.io/jokes/random'

xhr.onreadystatechange = function() {
  if(xhr.readyState === 4){
    if(xhr.status === 200) {
      const str = xhr.responseText
      const obj = JSON.parse(str)
      console.log(obj)
    } else {
      output.innerHTML = "ERROR"
    }
  }
}

xhr.open('GET', url)
xhr.send()
```

![Javascript APIs](./Javascript_APIs_07.png)

We are now able to work with the Javascript object we created and display the Chuck Norris quote on our website when we request the `value` of the API response and also add the image URL that corresponds with `icon_url`. For this we will use the DIV container we created with the __id: output__:

```js
const output = document.querySelector("#output")

const xhr = new XMLHttpRequest()
const url = 'https://api.chucknorris.io/jokes/random'

xhr.onreadystatechange = function() {
  if(xhr.readyState === 4){
    if(xhr.status === 200) {
      const str = xhr.responseText
      const obj = JSON.parse(str)
      output.innerHTML = obj.value + '<br/><br/><img src="'+obj.icon_url+'"><br/><br/>'
    } else {
      output.innerHTML = "ERROR"
    }
  }
}

xhr.open('GET', url)
xhr.send()
```

![Javascript APIs](./Javascript_APIs_08.png)

Excellent!
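A quick aside before we wire up the button: assigning API text straight to `innerHTML` trusts the response completely. A slightly more defensive sketch (same `obj` from the parsed response; `textContent` treats the joke as plain text instead of markup):

```js
// build the output with DOM methods instead of raw HTML strings
output.textContent = obj.value // joke text is inserted as text, not markup

const img = document.createElement('img')
img.src = obj.icon_url // the icon URL from the API response
output.appendChild(document.createElement('br'))
output.appendChild(img)
```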
Now we can map our button to refresh the request every time we click it:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")

btn.addEventListener("click", getJoke)

function getJoke() {
    const xhr = new XMLHttpRequest()
    const url = 'https://api.chucknorris.io/jokes/random'

    xhr.onreadystatechange = function () {
        if (xhr.readyState === 4) {
            if (xhr.status === 200) {
                const str = xhr.responseText
                const obj = JSON.parse(str)
                output.innerHTML = obj.value + '<br/><br/><img src="' + obj.icon_url + '"><br/><br/>'
            } else {
                output.innerHTML = "ERROR"
            }
        }
    }

    xhr.open('GET', url)
    xhr.send()
}
```

![Javascript APIs](./Javascript_APIs_09.png)

We can now add a few __EventListeners__ to our Ajax request to get some more insights into the mechanics of it:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")

btn.addEventListener("click", getJoke)

function getJoke() {
    const xhr = new XMLHttpRequest()
    const url = 'https://api.chucknorris.io/jokes/random'

    xhr.onreadystatechange = function () {
        if (xhr.readyState === 4) {
            if (xhr.status === 200) {
                const obj = JSON.parse(xhr.responseText)
                output.innerHTML = obj.value + '<br/><br/><img src="' + obj.icon_url + '"><br/><br/>'
            } else {
                output.innerHTML = "ERROR"
            }
        }
    }

    xhr.open('GET', url)
    xhr.send()

    xhr.addEventListener("progress", callBackfn)
    xhr.addEventListener("load", callBackfn)
    xhr.addEventListener("error", callBackfn)
}

function callBackfn(e) {
    console.log(e)
}
```

![Javascript APIs](./Javascript_APIs_10.png)

## XHR Requests Parameters

We will start this with a new HTML template that gives us both an input field and a button:

```html
<html>
  <title>Javascript API Course</title>
  <body>
    <input type="number">
    <button>Click</button>
    <div id="output"></div>
    <script src="xhr-parameter.js"></script>
  </body>
</html>
```

The Javascript file that is linked in here adds an __EventListener__ to the button and console logs the numeric value of the input field when the button is clicked:

```js
const btn = document.querySelector("button");
const output = document.querySelector("#output");
const intake = document.querySelector("input");

btn.addEventListener("click", getInput);

function getInput() {
  console.log(intake.value);
}
```

![Javascript APIs](./Javascript_APIs_11.png)

Great! We can now start with our work of connecting this to an API. In this part we are going to use the [open Random User API](https://randomuser.me/) that can be reached over the following URL (that allows us to specify the amount of random user profiles we want to see returned to us):

```
https://randomuser.me/api/?results=1
```

The result for such a request is a JSON response from the API looking like this:

![Javascript APIs](./Javascript_APIs_12.png)

Let's add the xhr request to our JS file:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const url = "https://randomuser.me/api/"

btn.addEventListener("click", getInput)

function getInput() {
  const xhr = new XMLHttpRequest()
  xhr.open("GET", url)
  xhr.onload = function (data) {
    console.log(data)
  }
  xhr.send()
}
```

This code opens a connection to the API and retrieves a response onload:

![Javascript APIs](./Javascript_APIs_13.png)

The xhr request is working.
We can see that the information that we need can be found under __responseText__ - let's modify our request accordingly and add the same error checks we used before:

```js
function getInput() {
  const xhr = new XMLHttpRequest()
  xhr.open("GET", url)
  xhr.onload = function () {
    if(xhr.readyState === 4 && xhr.status == "200") {
      let data = JSON.parse(xhr.responseText)
      console.log(data)
    } else {
      console.log("error")
    }
  }
  xhr.send()
}
```

The console log now contains the JSON parsed data that we will need further on:

![Javascript APIs](./Javascript_APIs_14.png)

But we can further refine this so that we only keep the __results__ object of the response by modifying the line:

```js
let data = JSON.parse(xhr.responseText).results
```

As we have seen before, we are able to specify the amount of users that we want to be returned from the API by adding a parameter to the URL. If we want to be able to set the number of users from the input field, this will look like this:

```js
function getInput() {
  const xhr = new XMLHttpRequest()
  let tempVal = intake.value
  let tempURL = url + "?results=" +tempVal
  xhr.onload = function () {
    if(xhr.readyState === 4 && xhr.status == "200") {
      let data = JSON.parse(xhr.responseText).results
      console.log(data)
    } else {
      console.log("error")
    }
  }
  xhr.open("GET", tempURL)
  xhr.send()
}
```

We create a temporary value from the intake of the input field and add it to the base URL of the API - when we have the number 3 inside the input field and hit the Click button, we will now receive 3 users from the API response:

![Javascript APIs](./Javascript_APIs_15.png)

To print out this information, we can add another function `outputHTML(data)`, loop over the array that we receive from the response and call that function from the `xhr.onload` function above:

```js
function getInput() {
  const xhr = new XMLHttpRequest()
  let tempVal = intake.value
  let tempURL = url + "?results=" +tempVal
  xhr.onload = function () {
    if(xhr.readyState === 4 && xhr.status == "200") {
      let data = JSON.parse(xhr.responseText).results
      outputHTML(data)
    } else {
      console.log("error")
    }
  }
  xhr.open("GET", tempURL)
  xhr.send()
}

function outputHTML(data) {
  console.log(data)
  for(let i=0; i<data.length; i++) {
    output.innerHTML += "<br>" + data[i].email + "<br>"
  }
}
```

![Javascript APIs](./Javascript_APIs_16.png)

## Javascript Fetch and Promises

The [Fetch API](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API/Using_Fetch) supersedes the previously used XMLHttpRequest in modern (__ES6__-era) JavaScript. It provides a JavaScript interface for accessing and manipulating parts of the HTTP pipeline, such as requests and responses. It also provides a global `fetch()` method that provides an easy, logical way to fetch resources asynchronously across the network.

The Promise returned from `fetch()` won't reject on HTTP error status even if the response is an HTTP 404 or 500. Instead, it will resolve normally (with ok status set to false), and it will only reject on network failure or if anything prevented the request from completing. By default, fetch won't send or receive any cookies from the server, resulting in unauthenticated requests if the site relies on maintaining a user session.

A basic fetch request is really simple to set up. Have a look at the following code:

```js
fetch('http://example.com/data.json')
  .then(function(response) {
    return response.json();
  })
  .then(function(myJson) {
    console.log(JSON.stringify(myJson));
  });
```

The `fetch()` requests a response and then works with promises for the following steps.
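The same request can also be written with `async`/`await`, which is just syntactic sugar over the promise chain shown above - a short sketch (identical URL, wrapped in a function since `await` needs an `async` context):

```js
async function getData() {
  try {
    const response = await fetch('http://example.com/data.json')
    const myJson = await response.json() // same two steps as the then() chain
    console.log(JSON.stringify(myJson))
  } catch (error) {
    console.log(error) // network failures end up here
  }
}

getData()
```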
We can now start to re-write our previous example using the Fetch API:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const baseUrl = "https://randomuser.me/api/"

btn.addEventListener("click", getInput)

function getInput() {
    fetch(baseUrl)
        .then(function (response) {
            console.log(response);
        })
}
```

We can now add another promise and simply return the response from the first to be further worked on by the second:

```js
function getInput() {
    fetch(baseUrl)
        .then(function (response) {
            return response.json();
        })
        .then(function(data) {
            console.log(data.results);
        })
}
```

The first promise callback receives the response from the API and transforms it into a Javascript object. The second receives the result as `data` and logs the results object to our console. We can now add the API parameter again to be able to specify the number of results we want to get:

```js
function getInput() {
    let userNumber = intake.value;
    let url = baseUrl + "?results=" + userNumber;
    fetch(url)
        .then(function (response) {
            return response.json();
        })
        .then(function(data) {
            console.log(data.results);
        })
}
```

Now we need to add the loop back in to display our results:

```js
function getInput() {
    let userNumber = intake.value;
    let url = baseUrl + "?results=" + userNumber;
    fetch(url)
        .then(function (response) {
            return response.json();
        })
        .then(function (data) {
            outputHTML(data);
        })
}

function outputHTML(data) {
    console.log(data.results)
    for (let i = 0; i < data.results.length; i++) {
        output.innerHTML += "<br>" + data.results[i].name.last + ", " + data.results[i].name.first + "<br>"
    }
}
```

![Javascript APIs](./Javascript_APIs_17.png)

### Fetch Response Methods

__Using the Fetch API for Text Responses__

If all we need is a text output, we can use the Fetch API in the following way:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const baseUrl = "https://randomuser.me/api/"

btn.addEventListener("click", getInput)

function getInput() {
    fetch(baseUrl).then(function (response) {
        return response.text();
    }).then(function (data) {
        console.log(data);
    })
}
```

![Javascript APIs](./Javascript_APIs_18.png)

__Using the Fetch API for Images__

We can use the Javascript Fetch API to add images to our webpage. For this you need to add an image tag with an empty source to your html `<img src="">` and rewrite the code from the previous step as follows:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")

btn.addEventListener("click", getInput)

function getInput() {
    let url = "photo-34475542491_9069464269_o-cover.jpg";
    fetch(url).then(function (response) {
        return response.blob();
    }).then(function (data) {
        console.log(data);
        let pathImage = URL.createObjectURL(data);
        document.querySelector("img").src = pathImage;
    })
}
```

This code is fetching an image from the same directory our JS file is in with the name _photo-34475542491\_9069464269\_o-cover.jpg_. Here we have to change the response type from __json__ to __blob__ and use the `createObjectURL()` method to create a file URL, that is then added to the source of our empty image tag.
![Javascript APIs](./Javascript_APIs_19.png)

Note that the image URL is generated by the browser and does not represent a location on our filesystem - in my case it is:

```
blob:http://localhost:3000/a01e8cb8-60c0-4770-bea2-2c369520a92e
```

### Fetch Error Handling

The Fetch API offers the `catch()` method to help us with error handling - that is much neater than the __if-else__ syntax we had before. To test it, we can create the following script:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const baseUrl = "https://randomuser.me/apis/"

btn.addEventListener("click", getInput)

function getInput() {
    fetch(baseUrl).then(function (response) {
        return response.json();
    }).then(function (data) {
        console.log(data);
    })
    .catch(function(error) {
        console.log(error);
    })
}
```

Note that we introduced an error to the `baseUrl` - it should be __api__ not __apis__:

![Javascript APIs](./Javascript_APIs_20.png)

The error is caught and the error message printed to the console.

### Fetch Working with Headers

If our API requires us to set specific header information for the request, we can create it in the form of a __Request Object__ that is then used by the `fetch()` method. We will start with the previous code and add the following:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const baseUrl = "https://randomuser.me/api/"

btn.addEventListener("click", getInput)

function getInput() {
    let params = new Request(baseUrl, {
        method: "GET",
        mode: "cors",
        headers: new Headers(),
        cache: "default"
    })

    fetch(params).then(function (response) {
        return response.json();
    }).then(function (data) {
        console.log(data);
    })
    .catch(function(error) {
        console.log(error);
    })
}
```

### Fetch Working with JSON Endpoints

The website [myjson.com](http://myjson.com/) allows you to store JSON data and access it through a URL. Just type a valid JSON object into the input field and press __Save__ to be given a URL you can retrieve this information from:

![Javascript APIs](./Javascript_APIs_21.png)

After clicking on save, the following URL was generated for me: `https://api.myjson.com/bins/m1l12`. Accessing this URL returns the JSON data as a text output.
We can now use this URL for our fetch request:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const url = "https://api.myjson.com/bins/m1l12"

btn.addEventListener("click", getInput)

function getInput() {
    fetch(url).then(function (response) {
        return response.json();
    }).then(function (data) {
        console.log(data);
    }).catch(function (error) {
        console.log(error);
    })
}
```

Running the script inside our browser will return the information in the form of a Javascript object:

![Javascript APIs](./Javascript_APIs_22.png)

We can also use a newer addition to Javascript to clean up our function - the __Arrow Function__:

```js
function getInput() {
    fetch(url).then(res => res.json())
        .then(json => console.log(json))
        .catch(error => console.log(error))
}
```

We can write a JSON object that contains an array that allows us to loop through the API response:

```json
{
    "results": [{
        "gender": "male",
        "name": {
            "title": "mr",
            "first": "hellmut",
            "last": "merz"
        }
    }, {
        "gender": "female",
        "name": {
            "title": "ms",
            "first": "یسنا",
            "last": "رضاییان"
        }
    }, {
        "gender": "female",
        "name": {
            "title": "mrs",
            "first": "elsa",
            "last": "simon"
        }
    }]
}
```

Adding the following for loop to the fetch request will give us the list of three names:

```js
function getInput() {
    fetch(url).then(res => res.json())
        .then(function (data) {
            for (let i = 0; i < data.results.length; i++) {
                console.log(data.results[i].name.last + ", " + data.results[i].name.first)
            }
        })
        .catch(error => console.log(error))
}
```

![Javascript APIs](./Javascript_APIs_23.png)

### Fetch Working with JSON Endpoints Part II

Another place to test your Javascript against a fake REST API is [JSONPlaceholder](http://jsonplaceholder.typicode.com/). For example the URL `http://jsonplaceholder.typicode.com/photos` will give you a result of 5000 photos in a JSON array:

![Javascript APIs](./Javascript_APIs_24.png)

The following script will log the URL of the first thumbnail image to your browser console and will set the source for the empty image tag `<img src="">` inside your html to this URL:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const url = "http://jsonplaceholder.typicode.com/photos"

btn.addEventListener("click", getInput)

function getInput() {
    fetch(url).then(res => res.json())
        .then(function (data) {
            console.log(data[0].thumbnailUrl)
            document.querySelector("img").src=data[0].thumbnailUrl;
        })
        .catch(error => console.log(error))
}
```

![Javascript APIs](./Javascript_APIs_25.png)

### Fetch Working with JSON Endpoints Part III

Yet another API to use is [Swapi](https://swapi.co/), the Star Wars API. We can connect to it using the following script:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const url = "https://swapi.co/api/"

btn.addEventListener("click", getInput)

function getInput() {
    fetch(url).then(res => res.json())
        .then(function (data) {
            console.log(data)
        })
        .catch(error => console.log(error))
}
```

![Javascript APIs](./Javascript_APIs_26.png)

Testing the script shows us that there are subcategories that we can query - for example, we can just fetch information about planets in the Star Wars Universe by changing the Fetch URL to `https://swapi.co/api/planets`.
![Javascript APIs](./Javascript_APIs_27.png)

We can see that the database found 61 planets but only returned 10 of them + a URL to get the next 10 `https://swapi.co/api/planets/?page=2` - making it easy to paginate through large amounts of data.

To get the name of a specific planet - let's say the ninth in the array - we just need to change the console.log() to `console.log(data.results[8].name)`:

![Javascript APIs](./Javascript_APIs_28.png)

### Building a Mini Application to interact with REST APIs

And another API - this time one called [REST Countries](https://restcountries.eu/) that gives us access to a large quantity of information about countries - capital cities, languages spoken, currencies, etc. The complete set of data can be queried with the following URL `https://restcountries.eu/rest/v2/all`:

![Javascript APIs](./Javascript_APIs_29.png)

We can start by checking the API response with the following script:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const url = "https://restcountries.eu/rest/v2/all"

btn.addEventListener("click", getInput)

function getInput() {
    fetch(url).then(res => res.json())
        .then(function (data) {
            console.log(data)
        })
        .catch(error => console.log(error))
}
```

This returns the result we have seen earlier in the form of a Javascript object that contains 250 countries:

![Javascript APIs](./Javascript_APIs_30.png)

We can now make a change to our script to no longer `console.log()` the result but instead write it into an object we call __responseObject__. We will create it first as an empty object and then add the `data` from the API response to it:

```js
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const url = "https://restcountries.eu/rest/v2/all"

let responseObject = {}

fetch(url).then(res => res.json())
    .then(function (data) {
        responseObject = data;
    })
    .catch(error => console.log(error));
```

![Javascript APIs](./Javascript_APIs_31.png)

You will be able to query the __responseObject__ after you refresh the page - it does contain the API data.
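Keep in mind that `fetch()` is asynchronous - __responseObject__ stays an empty object until the promise has resolved. A minimal sketch illustrating the timing (the log statements are only added for demonstration):

```js
// A sketch: the second log runs first, because the fetch has not resolved yet
let responseObject = {}

fetch("https://restcountries.eu/rest/v2/all")
    .then(res => res.json())
    .then(function (data) {
        responseObject = data
        console.log("after resolve:", responseObject.length) // e.g. 250
    })

console.log("before resolve:", responseObject.length) // undefined - still an empty object
```

Any code that depends on the response therefore belongs inside the second `.then()` callback - which is exactly where we will call our helper function next.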
We can now use a helper function to split up every country into its own object and prepare it to be inserted into a __select element__ on our page:

```js
fetch(url).then(res => res.json())
    .then(function (data) {
        responseObject = data;
        buildSelect(data);
    })
    .catch(error => console.log(error));

function buildSelect(data) {
    let select = document.createElement('select');
    data.forEach(function(item) {
        let option = document.createElement('option');
        console.log(item);
    })
}
```

![Javascript APIs](./Javascript_APIs_32.png)

We can additionally output the index number of each country to be able to reference them:

```js
function buildSelect(data) {
    let select = document.createElement('select');
    data.forEach(function(item, index) {
        let option = document.createElement('option');
        console.log(item, index);
    })
}
```

![Javascript APIs](./Javascript_APIs_33.png)

Now we need to assign each country to an option of the select drop-down menu using the index number and use the country name as textContent:

```js
function buildSelect(data) {
    let select = document.createElement('select');
    data.forEach(function(item, index) {
        let option = document.createElement('option');
        console.log(item, index);
        option.value = index;
        option.textContent = item.name;
        select.appendChild(option);
    })
    document.querySelector('body').appendChild(select);
}
```

![Javascript APIs](./Javascript_APIs_34.png)

We can now get rid of the input field and click button and instead add an event listener that detects when the user selects an option and displays the corresponding country information below the drop-down menu. We will start by adding the eventListener that reacts to _Change_ and `console.log()` the event with another helper function:

```js
function buildSelect(data) {
    let select = document.createElement('select');
    data.forEach(function(item, index) {
        let option = document.createElement('option');
        console.log(item, index);
        option.value = index;
        option.textContent = item.name;
        select.appendChild(option);
    })
    select.addEventListener("change", outputData)
    document.querySelector('body').appendChild(select);
}

function outputData(e) {
    console.log(e);
}
```

![Javascript APIs](./Javascript_APIs_35.png)

From this event log we now need to record the target value - which corresponds to the index number of the selected country. This number can then be used to select the country out of the __responseObject__:

```js
function buildSelect(data) {
    let select = document.createElement('select');
    data.forEach(function(item, index) {
        let option = document.createElement('option');
        console.log(item, index);
        option.value = index;
        option.textContent = item.name;
        select.appendChild(option);
    })
    select.addEventListener("change", outputData)
    document.querySelector('body').appendChild(select);
}

function outputData(e) {
    console.log(e.target.value);
    console.log(responseObject[e.target.value]);
}
```

![Javascript APIs](./Javascript_APIs_36.png)

In this example I selected entry __188__ from the drop-down menu. Comparing this number to the index number of our __responseObject__ gives us all the country information for `Saint Helena, Ascension and Tristan da Cunha`.
We can now clean this function up a little and add an HTML output to it, to be able to display the information on our web site:

```html
<html>
<title>Javascript API Course</title>

<body>
    <div id="output"></div>
    <img src="" style="max-width: 100px;">
    <script src="fetch-json.js"></script>
</body>

</html>
```

```js
const output = document.querySelector("#output");
const url = "https://restcountries.eu/rest/v2/all";
let responseObject = {};

fetch(url).then(res => res.json())
    .then(function (data) {
        responseObject = data;
        buildSelect(data);
    })
    .catch(error => console.log(error));

function buildSelect(data) {
    let select = document.createElement('select');
    data.forEach(function (item, index) {
        let option = document.createElement('option');
        option.value = index;
        option.textContent = item.name;
        select.appendChild(option);
    })
    select.addEventListener("change",outputData);
    document.querySelector('body').appendChild(select);
}

function outputData(e){
    let country = responseObject[e.target.value];
    console.log(country);
    output.innerHTML = '<h1>'+country.name+'</h1>';
    output.innerHTML += '<p><strong>Native Name</strong>: '+country.nativeName+'</p>';
    output.innerHTML += '<p><strong>Population</strong>: '+country.population+'</p>';
    document.querySelector('img').src = country.flag;
    output.innerHTML += '<p><strong>Capital</strong>: '+country.capital+'</p>';
    output.innerHTML += '<p><strong>Region</strong>: '+country.region+'</p>';
    output.innerHTML += '<p><strong>Sub-Region</strong>: '+country.subregion+'</p>';
}
```

![Javascript APIs](./Javascript_APIs_37.png)<file_sep>---
date: "2018-11-05"
title: "Creating a Dashboard displaying Cryptocurrency Data using Node-RED"
categories:
  - Node-RED
---

![<NAME>, Cambodia](./photo-11628186083_7be6d858ce_o.png)

<!-- TOC -->

- [Flow Export](#flow-export)
- [Flow Export](#flow-export-1)
- [Working with the Node-RED Dashboard](#working-with-the-node-red-dashboard)
  - [Adding Text Outputs](#adding-text-outputs)
    - [Flow Export](#flow-export-2)
  - [Adding a Graph](#adding-a-graph)
    - [Flow Export](#flow-export-3)

<!-- /TOC -->

![Cryptocurrency Dashboard](./crypto_dash_01.png)

We will use the STS [Binance](https://www.binance.com/) node to get access to a Cryptocurrency API. The node is available for [Node-RED](https://www.npmjs.com/package/node-red-contrib-binance).
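If the node is missing from your palette, it can be installed from the Node-RED user directory - a sketch assuming the default `~/.node-red` location:

```bash
# A sketch: install the Binance node into a default Node-RED setup
cd ~/.node-red
npm install node-red-contrib-binance
# restart Node-RED afterwards so the new nodes show up in the palette
```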
The following flow will get us the exchange price for Etherium: ## Flow Export ```json [{"id":"3e6606c0.272afa","type":"inject","z":"d20a8b82.28b458","name":"","topic":"ETHUSDT","payload":"","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":101,"y":122,"wires":[["c51a6ca4.fed82"]]},{"id":"c51a6ca4.fed82","type":"binance-get-price","z":"d20a8b82.28b458","name":"","ticker":"","x":253,"y":122,"wires":[["7d28d821.7a2098"]]},{"id":"7d28d821.7a2098","type":"debug","z":"d20a8b82.28b458","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":414,"y":122,"wires":[]}] ``` We start with setting an __Inject Node__ to an _empty string_ and its topic to __ETHUSDT__ ([exchange chart](https://www.binance.com/en/trade/ETH_USDT)) and connecting it to __getPrice Node__ - a __Debug Node__ will give us the result in form of a string - e.g. `294.50000000`. Let’s continue the example shown above but extend it to get all market info. For that we’ll need the __getAllPrices Node__: ## Flow Export ```json [{"id":"c458d9ea.5aa5c8","type":"inject","z":"d20a8b82.28b458","name":"","topic":"","payload":"","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":88,"y":202,"wires":[["777eaa32.d87b94"]]},{"id":"8d15769d.939e18","type":"debug","z":"d20a8b82.28b458","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":411,"y":202,"wires":[]},{"id":"777eaa32.d87b94","type":"binance-get-all-prices","z":"d20a8b82.28b458","name":"","x":241,"y":203,"wires":[["8d15769d.939e18"]]}] ``` You’ll get back a JSON object: ```json {"ETHBTC":"0.04154500","LTCBTC":"0.00881100","BNBBTC":"0.00158410","NEOBTC":"0.00293500","QTUMETH":"0.01625400","EOSETH":"0.02107000","SNTETH":"0.00014760","BNTETH":"0.00586300","BCCBTC":"0.07934100","GASBTC":"0.00099600","BNBETH":"0.03815800","BTCUSDT":"7087.54000000","ETHUSDT":"294.56000000","OAXETH":"0.00060790","DNTETH":"0.00008768","MCOETH":"0.01587500","ICNETH":"0.00162160","MCOBTC":"0.00065600","WTCBTC":"0.00058470","WTCETH":"0.01403500","LRCBTC":"0.00001880","LRCETH":"0.00045696","QTUMBTC":"0.00067500","YOYOBTC":"0.00000389","OMGBTC":"0.00062600","OMGETH":"0.01506100","ZRXBTC":"0.00011409","ZRXETH":"0.00274185","STRATBTC":"0.00023200","STRATETH":"0.00556200","SNGLSBTC":"0.00000358","SNGLSETH":"0.00008614","BQXBTC":"0.00007643","BQXETH":"0.00184060","KNCBTC":"0.00008018","KNCETH":"0.00193560","FUNBTC":"0.00000264","FUNETH":"0.00006301","SNMBTC":"0.00000867","SNMETH":"0.00020999","NEOETH":"0.07071400","IOTABTC":"0.00011167","IOTAETH":"0.00269772","LINKBTC":"0.00004652","LINKETH":"0.00112169","XVGBTC":"0.00000216","XVGETH":"0.00005220","SALTBTC":"0.00009110","SALTETH":"0.00219200","MDABTC":"0.00010141","MDAETH":"0.00242000","MTLBTC":"0.00009700","MTLETH":"0.00233400","SUBBTC":"0.00002510","SUBETH":"0.00060521","EOSBTC":"0.00087520","SNTBTC":"0.00000615","ETCETH":"0.04462400","ETCBTC":"0.00185300","MTHBTC":"0.00000350","MTHETH":"0.00008508","ENGBTC":"0.00011371","ENGETH":"0.00275400","DNTBTC":"0.00000365","ZECBTC":"0.02178200","ZECETH":"0.52429000","BNTBTC":"0.00024263","ASTBTC":"0.00001546","ASTETH":"0.00037070","DASHBTC":"0.02800500","DASHETH":"0.67228000","OAXBTC":"0.00002511","ICNBTC":"0.00006779","BTGBTC":"0.00314000","BTGETH":"0.07550700","EVXBTC":"0.00006646","EVXETH":"0.00160670","REQBTC":"0.00000681","REQETH":"0.00016531","VIBBTC":"0.00000601","VIBETH":"0.00014417","TRXBTC":"0.00000378","TRXETH":"0.00009076","POWRBTC":"0.00002953","POWRETH":"0.00071345","ARKBTC":"0.00
012980","ARKETH":"0.00311500","YOYOETH":"0.00009421","XRPBTC":"0.00004887","XRPETH":"0.00117541","MODBTC":"0.00014340","MODETH":"0.00344900","ENJBTC":"0.00000609","ENJETH":"0.00014682","STORJBTC":"0.00004390","STORJETH":"0.00106740","BNBUSDT":"11.23240000","YOYOBNB":"0.00249000","POWRBNB":"0.01868000","KMDBTC":"0.00019090","KMDETH":"0.00461800","NULSBNB":"0.14727000","RCNBTC":"0.00000308","RCNETH":"0.00007473","RCNBNB":"0.00198800","NULSBTC":"0.00023190","NULSETH":"0.00559237","RDNBTC":"0.00006464","RDNETH":"0.00157000","RDNBNB":"0.04075000","XMRBTC":"0.01470900","XMRETH":"0.35541000","DLTBNB":"0.00441000","WTCBNB":"0.37010000","DLTBTC":"0.00000699","DLTETH":"0.00016860","AMBBTC":"0.00002448","AMBETH":"0.00058887","AMBBNB":"0.01529000","BCCETH":"1.91000000","BCCUSDT":"562.40000000","BCCBNB":"49.92000000","BATBTC":"0.00003213","BATETH":"0.00077556","BATBNB":"0.02031000","BCPTBTC":"0.00001419","BCPTETH":"0.00034189","BCPTBNB":"0.00896000","ARNBTC":"0.00004817","ARNETH":"0.00116856","GVTBTC":"0.00092160","GVTETH":"0.02213700","CDTBTC":"0.00000174","CDTETH":"0.00004205","GXSBTC":"0.00021910","GXSETH":"0.00526400","NEOUSDT":"20.78000000","NEOBNB":"1.85800000","POEBTC":"0.00000153","POEETH":"0.00003718","QSPBTC":"0.00000638","QSPETH":"0.00015348","QSPBNB":"0.00404300","BTSBTC":"0.00001702","BTSETH":"0.00041000","BTSBNB":"0.01064000","XZCBTC":"0.00183800","XZCETH":"0.04432100","XZCBNB":"1.16100000","LSKBTC":"0.00068370","LSKETH":"0.01652900","LSKBNB":"0.43470000","TNTBTC":"0.00000312","TNTETH":"0.00007581","FUELBTC":"0.00000212","FUELETH":"0.00005109","MANABTC":"0.00001031","MANAETH":"0.00024796","BCDBTC":"0.00156600","BCDETH":"0.03776000","DGDBTC":"0.00812500","DGDETH":"0.19500000","IOTABNB":"0.07054000","ADXBTC":"0.00002882","ADXETH":"0.00069670","ADXBNB":"0.01819000","ADABTC":"0.00001470","ADAETH":"0.00035386","PPTBTC":"0.00066080","PPTETH":"0.01593800","CMTBTC":"0.00001461","CMTETH":"0.00035100","CMTBNB":"0.00915000","XLMBTC":"0.00003264","XLMETH":"0.00078604","XLMBNB":"0.02059000","CNDBTC":"0.00000286","CNDETH":"0.00006910","CNDBNB":"0.00179900","LENDBTC":"0.00000210","LENDETH":"0.00005054","WABIBTC":"0.00003138","WABIETH":"0.00075479","WABIBNB":"0.01966000","LTCETH":"0.21271000","LTCUSDT":"62.47000000","LTCBNB":"5.57000000","TNBBTC":"0.00000181","TNBETH":"0.00004378","WAVESBTC":"0.00031590","WAVESETH":"0.00760000","WAVESBNB":"0.20090000","GTOBTC":"0.00001286","GTOETH":"0.00030992","GTOBNB":"0.00813000","ICXBTC":"0.00012810","ICXETH":"0.00308500","ICXBNB":"0.08101000","OSTBTC":"0.00000540","OSTETH":"0.00013037","OSTBNB":"0.00340900","ELFBTC":"0.00005959","ELFETH":"0.00143409","AIONBTC":"0.00008900","AIONETH":"0.00215100","AIONBNB":"0.05746000","NEBLBTC":"0.00034150","NEBLETH":"0.00826000","NEBLBNB":"0.21602000","BRDBTC":"0.00005417","BRDETH":"0.00130950","BRDBNB":"0.03405000","MCOBNB":"0.41402000","EDOBTC":"0.00013380","EDOETH":"0.00322500","WINGSBTC":"0.00002155","WINGSETH":"0.00052170","NAVBTC":"0.00004040","NAVETH":"0.00097500","NAVBNB":"0.02565000","LUNBTC":"0.00052290","LUNETH":"0.01255300","TRIGBTC":"0.00002580","TRIGETH":"0.00062000","TRIGBNB":"0.01633000","APPCBTC":"0.00001518","APPCETH":"0.00036750","APPCBNB":"0.00940000","VIBEBTC":"0.00000540","VIBEETH":"0.00013010","RLCBTC":"0.00005800","RLCETH":"0.00140000","RLCBNB":"0.03700000","INSBTC":"0.00004730","INSETH":"0.00114300","PIVXBTC":"0.00016890","PIVXETH":"0.00407500","PIVXBNB":"0.10727000","IOSTBTC":"0.00000226","IOSTETH":"0.00005441","CHATBTC":"0.00000228","CHATETH":"0.00005509","STEEMBTC":"0.00013830","STEEMETH":"0.00334000","ST
EEMBNB":"0.08701000","NANOBTC":"0.00043570","NANOETH":"0.01049300","NANOBNB":"0.27510000","VIABTC":"0.00014000","VIAETH":"0.00335500","VIABNB":"0.08719000","BLZBTC":"0.00002039","BLZETH":"0.00049054","BLZBNB":"0.01328000","AEBTC":"0.00016730","AEETH":"0.00404700","AEBNB":"0.10721000","NCASHBTC":"0.00000104","NCASHETH":"0.00002502","NCASHBNB":"0.00065200","POABTC":"0.00001421","POAETH":"0.00034231","POABNB":"0.00896000","ZILBTC":"0.00000660","ZILETH":"0.00015844","ZILBNB":"0.00415400","ONTBTC":"0.00038400","ONTETH":"0.00923600","ONTBNB":"0.24198000","STORMBTC":"0.00000129","STORMETH":"0.00003138","STORMBNB":"0.00081600","QTUMBNB":"0.42757000","QTUMUSDT":"4.79800000","XEMBTC":"0.00001578","XEMETH":"0.00037939","XEMBNB":"0.00998000","WANBTC":"0.00017850","WANETH":"0.00430000","WANBNB":"0.11361000","WPRBTC":"0.00000369","WPRETH":"0.00008884","QLCBTC":"0.00000870","QLCETH":"0.00020965","SYSBTC":"0.00001522","SYSETH":"0.00036521","SYSBNB":"0.00964000","QLCBNB":"0.00553500","GRSBTC":"0.00009288","GRSETH":"0.00224200","ADAUSDT":"0.10418000","ADABNB":"0.00928000","CLOAKBTC":"0.00036890","CLOAKETH":"0.00883800","GNTBTC":"0.00002444","GNTETH":"0.00058441","GNTBNB":"0.01528000","LOOMBTC":"0.00001641","LOOMETH":"0.00039616","LOOMBNB":"0.01035000","XRPUSDT":"0.34659000","BCNBTC":"0.00000031","BCNETH":"0.00000747","BCNBNB":"0.00019700","REPBTC":"0.00292200","REPETH":"0.07040000","REPBNB":"1.83300000","TUSDBTC":"0.00014137","TUSDETH":"0.00340892","TUSDBNB":"0.08969000","ZENBTC":"0.00278200","ZENETH":"0.06724000","ZENBNB":"1.76300000","SKYBTC":"0.00066600","SKYETH":"0.01612000","SKYBNB":"0.42000000","EOSUSDT":"6.20510000","EOSBNB":"0.55220000","CVCBTC":"0.00001934","CVCETH":"0.00046587","CVCBNB":"0.01216000","THETABTC":"0.00001492","THETAETH":"0.00035719","THETABNB":"0.00943000","XRPBNB":"0.03085000","TUSDUSDT":"1.00100000","IOTAUSDT":"0.79170000","XLMUSDT":"0.23181000","IOTXBTC":"0.00000216","IOTXETH":"0.00005221","QKCBTC":"0.00000562","QKCETH":"0.00013538","AGIBTC":"0.00000715","AGIETH":"0.00017260","AGIBNB":"0.00454000","NXSBTC":"0.00012290","NXSETH":"0.00297100","NXSBNB":"0.08070000","ENJBNB":"0.00386700","DATABTC":"0.00000564","DATAETH":"0.00013640","ONTUSDT":"2.72600000","TRXUSDT":"0.02670000","ETCUSDT":"13.13030000","ETCBNB":"1.16900000","ICXUSDT":"0.90870000","SCBTC":"0.00000091","SCETH":"0.00002189","SCBNB":"0.00057000","NPXSBTC":"0.00000028","NPXSETH":"0.00000677","KEYBTC":"0.00000106","KEYETH":"0.00002562","NASBTC":"0.00028230","NASETH":"0.00681100","NASBNB":"0.17885000","MFTBTC":"0.00000117","MFTETH":"0.00002814","MFTBNB":"0.00074000","DENTBTC":"0.00000043","DENTETH":"0.00001018","ARDRBTC":"0.00001670","ARDRETH":"0.00039800","ARDRBNB":"0.01038000","NULSUSDT":"1.64500000","HOTBTC":"0.00000012","HOTETH":"0.00000288","VETBTC":"0.00000251","VETETH":"0.00006076","VETUSDT":"0.01783000","VETBNB":"0.00159000","DOCKBTC":"0.00000257","DOCKETH":"0.00006148","POLYBTC":"0.00003207","POLYBNB":"0.02059000","PHXBTC":"0.00000264","PHXETH":"0.00006355","PHXBNB":"0.00166600","HCBTC":"0.00044210","HCETH":"0.01060800"} ``` ## Working with the Node-RED Dashboard ### Adding Text Outputs We now create a flow that triggers a __getBookTicker Node__ to get both the Bid and Ask Price for Bitcoin: #### Flow Export ```json 
[{"id":"96779e0a.9337b","type":"inject","z":"d20a8b82.28b458","name":"","topic":"BTCUSDT","payload":"","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":93,"y":308,"wires":[["9c77f53a.6ce3a8"]]},{"id":"48fa2713.832348","type":"ui_text","z":"d20a8b82.28b458","group":"cdbbe0de.60a4a","order":0,"width":0,"height":0,"name":"Bid Price","label":"Bid Price","format":"{{msg.payload.bidPrice}}","layout":"row-spread","x":401,"y":272,"wires":[]},{"id":"7a50b2bc.54ba6c","type":"ui_text","z":"d20a8b82.28b458","group":"cdbbe0de.60a4a","order":0,"width":0,"height":0,"name":"Ask Price","label":"Ask Price","format":"{{msg.payload.askPrice}}","layout":"row-spread","x":401,"y":343,"wires":[]},{"id":"9c77f53a.6ce3a8","type":"binance-get-book-ticker","z":"d20a8b82.28b458","name":"","ticker":"","x":251,"y":308,"wires":[["48fa2713.832348","7a50b2bc.54ba6c"]]},{"id":"cdbbe0de.60a4a","type":"ui_group","z":"","name":"Bitcoin (USDT)","tab":"4b188e36.81675","order":1,"disp":true,"width":"6","collapse":false},{"id":"4b188e36.81675","type":"ui_tab","z":"","name":"Cryptocurrency","icon":"fa-bitcoin"}]
```

The result of the _Ticker Node_ is a JSON Object:

```json
{"symbol":"BTCUSDT","bidPrice":"7076.23000000","bidQty":"0.12495100","askPrice":"7079.97000000","askQty":"0.70947800"}
```

We can feed this into two __Text Nodes__ to display both the `{{msg.payload.bidPrice}}` and `{{msg.payload.askPrice}}` and embed them into our [Node-RED Dashboard](https://github.com/mpolinowski/nodered-dashboard-getting-started).

### Adding a Graph

Let's add a graph UI node, take the output from the __getBookTicker Node__ and prepare it so that the graph node can display both the ask price and the bid price on the same graph. We add two __Change Nodes__ and name them _isolateBidPrice_ and _isolateAskPrice_. Change nodes allow you to take input messages and change the output based on a set of rules. To be able to use the chart properly, we need to deliver a msg.payload with a chartable value, and a msg.topic set to the name of the chartable value.
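As an aside, the same transformation could also be done in a single __Function Node__ instead of two Change nodes - a minimal sketch, not part of the exported flow below (property names follow the getBookTicker output shown above):

```js
// A sketch: split one getBookTicker message into two chartable messages
const bid = { payload: parseFloat(msg.payload.bidPrice), topic: "bidPrice" };
const ask = { payload: parseFloat(msg.payload.askPrice), topic: "askPrice" };
// returning an array of messages sends both on the same output
return [[bid, ask]];
```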
#### Flow Export ```json [{"id":"96779e0a.9337b","type":"inject","z":"d20a8b82.28b458","name":"","topic":"BTCUSDT","payload":"","payloadType":"str","repeat":"10","crontab":"","once":true,"onceDelay":0.1,"x":93,"y":352,"wires":[["9c77f53a.6ce3a8"]]},{"id":"48fa2713.832348","type":"ui_text","z":"d20a8b82.28b458","group":"cdbbe0de.60a4a","order":0,"width":0,"height":0,"name":"Bid Price","label":"Bid Price","format":"{{msg.payload.bidPrice}}","layout":"row-spread","x":453,"y":305,"wires":[]},{"id":"7a50b2bc.54ba6c","type":"ui_text","z":"d20a8b82.28b458","group":"cdbbe0de.60a4a","order":0,"width":0,"height":0,"name":"Ask Price","label":"Ask Price","format":"{{msg.payload.askPrice}}","layout":"row-spread","x":447,"y":402,"wires":[]},{"id":"9c77f53a.6ce3a8","type":"binance-get-book-ticker","z":"d20a8b82.28b458","name":"","ticker":"","x":251,"y":352,"wires":[["48fa2713.832348","7a50b2bc.54ba6c","7358a49.046bc5c","6813d17b.ecd96","a1197cd4.52fd"]]},{"id":"7358a49.046bc5c","type":"debug","z":"d20a8b82.28b458","name":"","active":false,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":533,"y":351,"wires":[]},{"id":"6813d17b.ecd96","type":"change","z":"d20a8b82.28b458","name":"isolateBidPrice","rules":[{"t":"set","p":"payload","pt":"msg","to":"payload.bidPrice","tot":"msg"},{"t":"set","p":"topic","pt":"msg","to":"bidPrice","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":406,"y":451,"wires":[["ecb25661.95ad88","347dfaf5.e16aa6"]]},{"id":"a1197cd4.52fd","type":"change","z":"d20a8b82.28b458","name":"isolateAskPrice","rules":[{"t":"set","p":"payload","pt":"msg","to":"payload.askPrice","tot":"msg"},{"t":"set","p":"topic","pt":"msg","to":"askPrice","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":407,"y":501,"wires":[["ecb25661.95ad88","347dfaf5.e16aa6"]]},{"id":"ecb25661.95ad88","type":"ui_chart","z":"d20a8b82.28b458","name":"Bitcoin Bid vs. 
Ask Price","group":"cdbbe0de.60a4a","order":0,"width":0,"height":0,"label":"Bitcoin","chartType":"line","legend":"false","xformat":"HH:mm:ss","interpolate":"step","nodata":"Waiting for Data","dot":false,"ymin":"","ymax":"","removeOlder":1,"removeOlderPoints":"","removeOlderUnit":"3600","cutout":0,"useOneColor":false,"colors":["#1e7ada","#aec7e8","#ff7f0e","#2ca02c","#98df8a","#d62728","#ff9896","#9467bd","#c5b0d5"],"useOldStyle":false,"x":645,"y":482,"wires":[[],[]]},{"id":"347dfaf5.e16aa6","type":"debug","z":"d20a8b82.28b458","name":"","active":false,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":628,"y":433,"wires":[]},{"id":"a0b53f1.d5cf8c","type":"inject","z":"d20a8b82.28b458","name":"","topic":"ETHUSDT","payload":"","payloadType":"str","repeat":"10","crontab":"","once":true,"onceDelay":0.1,"x":91,"y":670,"wires":[["893d2e8e.27716"]]},{"id":"893d2e8e.27716","type":"binance-get-book-ticker","z":"d20a8b82.28b458","name":"","ticker":"","x":249,"y":670,"wires":[["2983c037.4f66f","2b7997e3.09eb48","42cdd723.9382e8","5416f903.cfad08"]]},{"id":"2b7997e3.09eb48","type":"change","z":"d20a8b82.28b458","name":"isolateAskPrice","rules":[{"t":"set","p":"payload","pt":"msg","to":"payload.askPrice","tot":"msg"},{"t":"set","p":"topic","pt":"msg","to":"askPrice","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":434,"y":695,"wires":[["126ae99d.4686e6"]]},{"id":"2983c037.4f66f","type":"change","z":"d20a8b82.28b458","name":"isolateBidPrice","rules":[{"t":"set","p":"payload","pt":"msg","to":"payload.bidPrice","tot":"msg"},{"t":"set","p":"topic","pt":"msg","to":"bidPrice","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":433,"y":645,"wires":[["126ae99d.4686e6"]]},{"id":"126ae99d.4686e6","type":"ui_chart","z":"d20a8b82.28b458","name":"Bitcoin Bid vs. Ask Price","group":"50447509.e5dd5c","order":3,"width":0,"height":0,"label":"Etherum","chartType":"line","legend":"false","xformat":"HH:mm:ss","interpolate":"step","nodata":"Waiting for Data","dot":false,"ymin":"","ymax":"","removeOlder":1,"removeOlderPoints":"","removeOlderUnit":"3600","cutout":0,"useOneColor":false,"colors":["#1e7ada","#99befd","#ff7f0e","#2ca02c","#98df8a","#d62728","#ff9896","#9467bd","#c5b0d5"],"useOldStyle":false,"x":654,"y":665,"wires":[[],[]]},{"id":"5416f903.cfad08","type":"ui_text","z":"d20a8b82.28b458","group":"50447509.e5dd5c","order":1,"width":0,"height":0,"name":"Ask Price","label":"Ask Price","format":"{{msg.payload.askPrice}}","layout":"row-spread","x":422,"y":742,"wires":[]},{"id":"42cdd723.9382e8","type":"ui_text","z":"d20a8b82.28b458","group":"50447509.e5dd5c","order":2,"width":0,"height":0,"name":"Bid Price","label":"Bid Price","format":"{{msg.payload.bidPrice}}","layout":"row-spread","x":424,"y":600,"wires":[]},{"id":"cdbbe0de.60a4a","type":"ui_group","z":"","name":"Bitcoin (USDT)","tab":"4b188e36.81675","order":1,"disp":true,"width":"6","collapse":false},{"id":"50447509.e5dd5c","type":"ui_group","z":"","name":"Etherum (USDT)","tab":"4b188e36.81675","disp":true,"width":"6","collapse":false},{"id":"4b188e36.81675","type":"ui_tab","z":"","name":"Cryptocurrency","icon":"fa-bitcoin"}] ``` Now that we have two messages arriving at the chart node, it will use the msg.topic field to distinguish between the two and will apply a series colour to the data series that arrive, in our case dark blue to series 1 (bidPrice)and light blue to series 2 (askPrice). 
The last thing we do is change the inject node to repeat every 10 seconds so we don't have to keep clicking on it. Deploy and take a look at the dashboard, which should look like the screenshot at the top of this article.
<file_sep>---
date: "2019-03-31"
title: "Introduction to TensorFlow 2 Beta"
categories:
  - Machine Learning
  - Python
---

![Shanghai, China](./photo-456tdsfggd_67gfh6dgdf4_d.jpg)

## Installation of Tensorflow

You can install TensorFlow directly through `pip3 install tensorflow` or with GPU support `pip3 install tensorflow-gpu` (make sure you have [Python v3](https://www.python.org/downloads/) installed). I have [Anaconda](https://docs.anaconda.com/anaconda/install/windows/) set up on my computer and am going to use it for this instead. If you already have Anaconda installed, make sure that it is up to date (make sure that you start the [Anaconda Prompt](https://docs.anaconda.com/anaconda/user-guide/getting-started/#write-a-python-program-using-anaconda-prompt-or-terminal) with Admin privileges):

```bash
conda update conda
conda update anaconda
```

Anaconda allows us to create a virtual environment in which we can run our TensorFlow program. To create the environment with the name `py3-TF2` run the following command in the Anaconda Prompt - hit `y` when prompted:

```bash
conda create --name py3-TF2 python=3
conda activate py3-TF2
```

![Introduction to Tensorflow 2 Beta](./tensorflow_01.png)

We can now continue installing TensorFlow inside this virtual environment. At the time of writing TensorFlow 2 is still in Beta and the exact version needs to be specified to prevent pip from using version 1 - please check the [latest version](https://www.tensorflow.org/install/) before running the command below:

```bash
pip install tensorflow==2.0.0-beta1
```

<!-- pip install matplotlib -->

To be able to use the virtual environment in [Jupyter Notebook](https://docs.anaconda.com/anaconda/user-guide/getting-started/#run-python-in-a-jupyter-notebook) we need to install `ipykernel` and `nb_conda_kernels` inside it:

```bash
pip install ipykernel
conda install nb_conda_kernels
```

Open a new Python 3 project file inside Jupyter Notebook and verify that Tensorflow is up-and-running:

```py
import tensorflow as tf
tf.__version__
```

![Introduction to Tensorflow 2 Beta](./tensorflow_02.png)

Your research environment is now ready for use!

## Convolutional Neural Networks ([CNNs](https://www.tensorflow.org/beta/tutorials/images/intro_to_cnns))

* Classify an Image (e.g. a plane)
* Classify and Segment (getting the outline of an object - e.g. a plane - inside an image for further analysis)
* Recognize an Image (e.g. a Sukhoi SU-25T)

[Underfitting](https://missinglink.ai/guides/neural-network-concepts/neural-network-bias-bias-neuron-overfitting-underfitting/): Our model has been trained on a data set that is too small. It cannot be used to generalize or recognize. If our data set is too small we can use [Transfer Learning](http://cs231n.github.io/transfer-learning/) to adapt an external model by retraining its outer layer with our training data.

[Overfitting](https://missinglink.ai/guides/neural-network-concepts/neural-network-bias-bias-neuron-overfitting-underfitting/): The model was trained with a very specific data set and achieves a high accuracy on that data. But it fails to generalize and will only be useful with your specific sample data.
## Picking a model

There are three different types of Machine Learning:

* __Supervised Learning__: We train our model with a set of labeled data before letting it have a go in the wild.
* __Unsupervised Learning__: The model trains itself with unlabeled data and splits the set into a given number of groups based on similarity. The groups can afterwards be labeled by us based on the goal we are pursuing.
* __Reinforced Learning__: We let the model train unsupervised but add a reward system to make sure that the training goes into the direction we need it to go.

In the following we will concentrate on __Supervised Learning__.

### Linear Model

The simplest model is always the Linear Model where we just have to feed the learning process a few x- and y-values and have it interpolate data points in between:

```bash
f(x) = xw + b
```

With `x` being the input, `w` the __Weight__ and `b` the __Bias__ for our model. Training the model means finding values for the weight and bias so that the value of __y__ - with a given set of values for __x__ - comes as close to the observed values as possible.

An example would be a model that calculates the rent for an apartment based on the size of the apartment:

```bash
rent = size * weight + bias
```

When we know more than one factor that would affect the price of the apartment, we can simply add them up:

```bash
Rent = (Size*Weight) + (Proximity to Subway*Weight) - (Proximity to City Center*Weight) + Bias
```

To assess the quality of our linear model we can use the squared loss (__L2-norm__) that measures the squared distance of each datapoint from the position it should have according to our model. The smaller the sum over all those distances, the more accurately our model represents the given dataset.

### Example: Simple Linear Regression

#### Creating the Model using Numpy

As an example we will create fake data with a linear relationship using `numpy` to help us to create and train a linear model. We will then use `matplotlib` and `mplot3d` to visualize the results we are getting:

```py
import numpy as np

observations = 1000

xs=np.random.uniform(low=-10, high=10, size=(observations,1))
zs=np.random.uniform(-10, 10,(observations,1))

inputs = np.column_stack((xs,zs))
```

Note that the `size` is defined by the number of observations times the number of variables for our linear function - here we only use one variable `x` or `z`. The resulting matrix `inputs` consists of 2 columns each holding 1000 random values.

We now need to create a target for our algorithm - a function that our model should find given the random but linear dataset, e.g.:

```bash
f(x,y) = 3x - 4z + 7 + noise
```

The weights 3 and -4 as well as the bias 7 are randomly chosen and the noise is again generated using Numpy:

```py
noise=np.random.uniform(-1,1,(observations,1))

targets = 3*xs - 4*zs + 7 + noise
```

We can now plot this data and will receive a plane inside a three-dimensional space:

![Introduction to Tensorflow 2 Beta](./tensorflow_03.png)

We can now set an initial range for the algorithm to pick weights and biases from at random to find a good fit:

```py
init_range=0.1

weights=np.random.uniform(-init_range,init_range,size=(2,1))

biases=np.random.uniform(-init_range,init_range,size=1)
```

In this example our initial weights and biases will be picked randomly from the interval [-0.1,0.1]. The last preparation needed is setting a learning rate - the smaller the number, the smaller the increments that are used by the learning algorithm. This will lead to a more accurate value but will slow the algorithm down.
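Written out, the squared loss from above and the gradient descent update rule that the training loop below implements look like this - with N the number of observations, X the input matrix, y the model outputs, t the targets and η the learning rate:

```latex
L = \frac{1}{2N} \sum_{i=1}^{N} (y_i - t_i)^2

w \leftarrow w - \eta \, \frac{1}{N} X^{T} (y - t)
\qquad
b \leftarrow b - \eta \, \frac{1}{N} \sum_{i=1}^{N} (y_i - t_i)
```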
In the example we will set the learning rate to a value of:

```py
learning_rate = 0.02
```

#### Training the Model

We can now use a for loop to iterate over our data (in TensorFlow one such full pass over the data is called an __Epoch__ - see further down), calculate outputs and compare them to the targets using the loss function. Every iteration should refine the weights and biases of our model and minimize the result of the loss function for the next run.

```py
for i in range (100):
    outputs=np.dot(inputs,weights)+biases
    deltas=outputs-targets

    loss=np.sum(deltas**2)/2/observations

    print(loss)

    deltas_scaled=deltas/observations

    weights = weights-learning_rate*np.dot(inputs.T,deltas_scaled)
    biases = biases-learning_rate*np.sum(deltas_scaled)
```

This loop runs 100 times, optimizing the value for our weights and biases with each run. Printing the result of the loss function shows us a smaller value with each run:

![Introduction to Tensorflow 2 Beta](./tensorflow_04.png)

In the initialization step we can see that the algorithm starts out with the following weights and biases:

```py
print(weights)
print(biases)

[[-0.06156192]
 [-0.02018205]]
[0.05114518]
```

After training the algorithm for 100 iterations those values change to:

```py
print(weights,biases)

[[ 3.00537904]
 [-4.00032605]] [6.06949437]
```

![Introduction to Tensorflow 2 Beta](./tensorflow_05.png)

This is already very close to our targets of 3 & -4 and 7 - but not close enough yet. We can increase the number of iterations in the learning step to improve the outcome. Or simply rerun the training step to add another 100 iterations:

```py
print(weights,biases)

[[ 3.00076145]
 [-4.00183613]] [6.86818993]
```

![Introduction to Tensorflow 2 Beta](./tensorflow_06.png)

Plotting the Outputs of our Model against the Target value shows us a linear function at an angle close to 45 degrees. Our model, almost perfectly, represents the training data:

![Introduction to Tensorflow 2 Beta](./tensorflow_07.png)

## Introduction to TensorFlow

We can start by creating the same dataset as before, only this time we will store the generated test dataset in a file format that supports the storage of tensor data (n-dimensional arrays) - `.npz` - with the help of Numpy:

```py
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf

observations = 1000

xs = np.random.uniform(low=-10, high=10, size=(observations,1))
zs = np.random.uniform(-10, 10, (observations,1))

generated_inputs = np.column_stack((xs,zs))

noise = np.random.uniform(-1, 1, (observations,1))

generated_targets = 3*xs - 4*zs + 7 + noise

np.savez('tf-model-data-example', inputs=generated_inputs, targets=generated_targets)
```

Running this code will create a file `tf-model-data-example.npz` that holds our training data. In the next step we can load this data set with Numpy and use a Keras function to calculate the output (y) of our function (`output=np.dot(input,weights)+bias`):

```py
training_data=np.load('tf-model-data-example.npz')

input_size=2
output_size=1

model=tf.keras.Sequential([
    tf.keras.layers.Dense(output_size)
])

model.compile(optimizer='sgd', loss='mean_squared_error')

model.fit(training_data['inputs'],training_data['targets'],epochs=100,verbose=2)
```

You can display the Weights and Bias using the following code:

```py
weights=model.layers[0].get_weights()[0]
bias=model.layers[0].get_weights()[1]

weights
bias
```

Those values should now be close to our target as defined above `targets = 3*xs - 4*zs + 7 + noise` - Weights `3` & `-4` and `7` as the Bias.
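As a quick sanity check we can also let Keras compute the final loss over the entire training set - a minimal sketch using the standard `evaluate` method:

```py
# A sketch: returns the mean squared error loss over the given data
loss_value = model.evaluate(training_data['inputs'], training_data['targets'], verbose=0)
print(loss_value)
```

Since the targets contain uniform noise from [-1, 1], the mean squared error cannot drop much below the variance of that noise (about 0.33).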
### Making Predictions

We can now use our model to make predictions `model.predict_on_batch(data)` for output values:

```py
model.predict_on_batch(training_data['inputs'])
```

This will show you the outputs that have been calculated for the training data and previously compared to the target values to calculate the loss function. Plotting those values against the targets will again result in a line graph at an angle close to 45 degrees:

```py
plt.plot(np.squeeze(model.predict_on_batch(training_data['inputs'])),np.squeeze(training_data['targets']))
plt.xlabel('outputs')
plt.ylabel('targets')
plt.show()
```

### Customizing your Model

In the Numpy model earlier we set an initial range (`init_range=0.1`) to define the interval from which the initial weights and biases are drawn. But with TensorFlow we left everything at its default, letting Keras choose for us - let's take control.

#### Adding Initializers

Here is how you add kernel and bias initializers to `tf.keras.layers.Dense(output_size, kernel_initializer, bias_initializer)`:

```py
model=tf.keras.Sequential([
    tf.keras.layers.Dense(
        output_size,
        kernel_initializer=tf.random_uniform_initializer(minval=-0.1, maxval=0.1),
        bias_initializer=tf.random_uniform_initializer(minval=-0.1, maxval=0.1)
    )
])
```

#### Setting the Learning rate

We can also set the learning rate for our model, which in our Numpy model we defined with `learning_rate = 0.02`. In TensorFlow this is done by customizing the optimizer - we choose the `SGD` optimizer (Stochastic Gradient Descent optimizer) that supports a couple of modifiers, including the __Learning Rate__, __Momentum__, __Decay__, etc.

```py
custom_optimizer=tf.keras.optimizers.SGD(learning_rate=0.02)

model.compile(optimizer=custom_optimizer, loss='mean_squared_error')
```

![Introduction to Tensorflow 2 Beta](./tensorflow_08.png)<file_sep>---
date: "2019-01-19"
title: "Kubernetes Cluster Monitoring & Logging"
categories:
  - LINUX
  - Docker
  - Kubernetes
---

![<NAME>, Cambodia](./photo-11627898645_5f0761ff9e_o.png)

<!-- TOC -->

- [Prometheus and Grafana](#prometheus-and-grafana)
  - [Helm Installation](#helm-installation)
  - [Prometheus Installation](#prometheus-installation)
- [The ELK Stack](#the-elk-stack)
  - [Installation](#installation)

<!-- /TOC -->

## Prometheus and Grafana

[Prometheus](https://prometheus.io) is an open-source monitoring solution that can be used with the visualization frontend [Grafana](https://grafana.com) to display your clusters health status. To install them we are going to use the [Helm Package Manager](https://helm.sh) for Kubernetes.

### Helm Installation

Helm helps you manage Kubernetes applications - Helm Charts help you define, install, and upgrade even the most complex Kubernetes application. Charts are easy to create, version, share, and publish - so start using Helm and stop the copy-and-paste.

To install Helm, head over to their [Github Page](https://github.com/helm/helm/) and grab the link to the install binaries - in my case the latest version is [Helm v2.12.3](https://github.com/helm/helm/releases/tag/v2.12.3). Use this link to download the binaries to your CentOS server:

```bash
wget https://storage.googleapis.com/kubernetes-helm/helm-v2.12.3-linux-amd64.tar.gz
tar zxvf helm-v2.12.3-linux-amd64.tar.gz
```

The unzipped folder contains a file called __helm__ that needs to be copied into your path: `cp helm /usr/local/bin/`. You can type `helm` to verify that it is working:

---

![A Kubernetes Cluster & Microservices](./kubernetes_logging_04.png)

---

We can now type `helm init` to add the __Tiller Pod__ to our cluster.
You can verify that Helm is connected by running `helm version` (it should show both the client and the server - Tiller - version) and `kubectl get pods -n kube-system`:

---

![A Kubernetes Cluster & Microservices](./kubernetes_logging_05.png)

---

Before we can install packages, first run a `helm repo update` to make sure that you are up to date. And to allow Helm/Tiller to install packages to the default namespace in Kubernetes, we first have to run these three commands (__optional__ - the installation below will create a custom namespace _monitoring_ that is not going to cause any issues):

```bash
kubectl create serviceaccount --namespace kube-system tiller
kubectl create clusterrolebinding tiller-cluster-rule --clusterrole=cluster-admin --serviceaccount=kube-system:tiller
kubectl patch deploy --namespace kube-system tiller-deploy -p '{"spec":{"template":{"spec":{"serviceAccount":"tiller"}}}}'
```

### Prometheus Installation

To install Prometheus we are going to use a repository by CoreOS called [Prometheus Operator](https://github.com/helm/charts/tree/master/stable/prometheus-operator) that provides easy monitoring definitions for Kubernetes services and deployment and management of Prometheus instances.

<!--
__Deprecation Warning__ Check [stable/prometheus-operator](https://github.com/helm/charts/tree/master/stable/prometheus-operator)!

To install it with Helm we need [those 3 commands](https://github.com/coreos/prometheus-operator/tree/master/helm)

```bash
helm repo add coreos https://s3-eu-west-1.amazonaws.com/coreos-charts/stable/
helm install coreos/prometheus-operator --name prometheus-operator --namespace monitoring
helm install coreos/kube-prometheus --name kube-prometheus --namespace monitoring
```

> If you need to uninstall those packages later, you need to use the __purge__ flag: `helm delete --purge prometheus-operator` and `helm delete --purge kube-prometheus`
-->

To install the chart with the release name `test-release`:

```bash
helm install --name test-release stable/prometheus-operator --namespace monitoring
```

The command deploys prometheus-operator on the Kubernetes cluster in the default configuration. The [configuration section](https://github.com/helm/charts/tree/master/stable/prometheus-operator#configuration) lists the parameters that can be configured during installation. The default installation includes Prometheus Operator, Alertmanager, Grafana, and configuration for scraping Kubernetes infrastructure.

To uninstall/delete the prometheus-operator deployment:

```bash
helm delete --purge test-release
kubectl delete crd prometheuses.monitoring.coreos.com
kubectl delete crd prometheusrules.monitoring.coreos.com
kubectl delete crd servicemonitors.monitoring.coreos.com
kubectl delete crd alertmanagers.monitoring.coreos.com
```

You can verify the successful installation with `kubectl get all -n monitoring`:

---

![A Kubernetes Cluster & Microservices](./kubernetes_logging_06.png)

---

We can also verify that the Prometheus web interface is working - but as you can see above, the service `kube-prometheus` only has an internal port set (__ClusterIP__). We can edit this default configuration with:

```bash
kubectl edit svc kube-prometheus -n monitoring
```

This, by default, will open the service configuration in Vim - you can install __Nano__ and run `export EDITOR=nano` to prevent that from happening to you.
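If you would rather avoid the interactive editor altogether, the service type can also be switched with a patch command - a sketch using the service name from above (this lets Kubernetes assign a random node port; for a fixed `nodePort: 30088` the ports list still needs to be edited as shown below):

```bash
# A sketch: switch the service type to NodePort without opening an editor
kubectl patch svc kube-prometheus -n monitoring -p '{"spec": {"type": "NodePort"}}'
```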
---

![A Kubernetes Cluster & Microservices](./kubernetes_logging_07.png)

---

We added a `nodePort: 30088` and set the type from __ClusterIP__ to __NodePort__ to expose the port for us. Saving the file will automatically verify your edit and re-apply the service configuration, if no error was found. You can access the web interface now by typing in your Master Server IP address followed by the port __30088__:

---

![A Kubernetes Cluster & Microservices](./kubernetes_logging_08.png)

---

But we are going to use Grafana as it offers a lot more as a frontend for Prometheus - I am going to revert the changes I made to the service file to close the port __30088__ again and let's use it for the Grafana service instead, applying the same __NodePort__ edit there:

```bash
kubectl edit svc kube-prometheus-grafana -n monitoring
```

And you should now be greeted by the beautiful Grafana user interface:

---

![A Kubernetes Cluster & Microservices](./kubernetes_logging_09.png)

---

## The ELK Stack

We built a Kubernetes cluster with a couple of microservices in the previous step and saw that we can get access to Kubernetes logs for each pod by first finding out its name and then using the `logs` command:

```bash
kubectl get pods
kubectl logs webapp-774b78689-l9ftr
```

---

![A Kubernetes Cluster & Microservices](./kubernetes_logging_01.png)

---

But it is really cumbersome to do when you have a large number of pods running. Also some information might be logged inside the container itself and is ephemeral in nature. The most common solution for a distributed logging system is the [ElasticStack](https://www.elastic.co) - also called the ELK stack as it is traditionally built on three blocks:

* [Elasticsearch](https://hub.docker.com/_/elasticsearch)
* [Logstash](https://hub.docker.com/_/logstash)
* [Kibana](https://hub.docker.com/_/kibana)

We are going to replace Logstash with [Fluentd](https://www.fluentd.org). __Fluentd__ is an open source data collector, which lets you unify the data collection and consumption for a better use and understanding of data. We are going to use it to connect to our pods, find and collect their active logs, and provide a structured JSON stream that we can feed into the Elasticsearch database. FluentD will have to be __installed on every node__ inside our cluster - this is done by configuring it as a [DaemonSet](https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/) instead of a _ReplicaSet_.

__Elasticsearch__ is a distributed, RESTful search and analytics engine and will be used to store the data collected by FluentD in a NoSQL fashion. The distributed nature of Elasticsearch allows us to easily combine one or more instances of the database into a cluster. It is recommended to have __at least 2 instances__ running to ensure fail-over security. The Elasticsearch pod will also be created in a special kind of _ReplicaSet_ called a [StatefulSet](https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/). This ensures that the pods will not be assigned random names, but will always be called by their assigned pod name (__elasticsearch-logging__) followed by an incremental number `-0`, `-1`, etc - this is needed for pods that need to be clustered.

And last we have the __Kibana__ frontend that lets you visualize the Elasticsearch data. So that hopefully we can extract some useful information from it - without having to sift through Gigabytes of hard-to-read Kubernetes logs. The frontend will only be used when we log in to check our logs.
It is therefore sufficient to __only run 1 instance__ as it is not critical infrastructure.

### Installation

We are going to use a ready-to-go configuration from Kubernetes to [install FluentD, Kibana and Elasticsearch](https://github.com/kubernetes/kubernetes/tree/master/cluster/addons/fluentd-elasticsearch). This repository contains both the YAML config files for Kubernetes as well as the information for building the required images for the three components. We only need to download the YAML files into the directory where we stored the cluster configuration. Once you have downloaded all 6 files, apply them to your cluster:

---

![A Kubernetes Cluster & Microservices](./kubernetes_logging_02.png)

---

```bash
kubectl apply -f .
kubectl get all -n kube-system
```

---

![A Kubernetes Cluster & Microservices](./kubernetes_logging_03.png)

---

__TBC__

https://github.com/helm/charts/tree/master/stable/elastic-stack

https://github.com/kiwigrid/helm-charts/tree/master/charts/fluentd-elasticsearch
<file_sep>---
date: "2019-08-06"
title: "MQTT Networks with Node-RED"
categories:
  - MQTT
  - Node-RED
  - Smarthome
  - IoT
---

import GifContainer from "../../src/components/ImageContainer";

![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Node-RED Configuration](#node-red-configuration)
  - [Node-RED JSON Flows](#node-red-json-flows)
- [Node-RED User Interface](#node-red-user-interface)

<!-- /TOC -->

## Node-RED Configuration

We prepared Node-RED flows for you that give you access to all camera functions through the MQTT Broker. All you need to do is click on the flow you want to use ([see below](#node-red-json-flows)), copy it and paste it into the Node-RED import dialogue:

---

![Node-RED with your INSTAR IP Camera](./Node-RED_MQTT_01.png)

---

### Node-RED JSON Flows

* [Alarm Menu](https://wiki.instar.com/Node-RED_Flows/MQTT-alarm-menu.json)
* [Features Menu](https://wiki.instar.com/Node-RED_Flows/MQTT-features-menu.json)
* [Multimedia Menu](https://wiki.instar.com/Node-RED_Flows/MQTT-multimedia-menu.json)
* [Network Menu](https://wiki.instar.com/Node-RED_Flows/MQTT-network-menu.json)
* [System Menu](https://wiki.instar.com/Node-RED_Flows/MQTT-system-menu.json)
* [Recording Task Menu](https://wiki.instar.com/Node-RED_Flows/MQTT-task-menu.json)

__Note__: It makes sense to delete every sequence inside those flows that you are not going to use.

Now double-click the first MQTT Node (the first on the left) and assign your INSTAR MQTT Broker to Node-RED:

---

![Node-RED with your INSTAR IP Camera](./Node-RED_MQTT_02.png)

---

Type in your camera's IP address as MQTT Server. Choose the MQTT Server Port that you have set in the MQTT Menu inside your camera's WebUI - the default value is `1883`.

---

![Node-RED with your INSTAR IP Camera](./Node-RED_MQTT_03.png)

---

If you want to use the TLS encryption, choose the SSL Port instead - default is `8883`. Now you have to open your camera's MQTT Menu and download the Certificate (Client Key). Then upload the certificate to Node-RED and save it:

---

![Node-RED with your INSTAR IP Camera](./Node-RED_MQTT_04.png)

---

Now add the user login that you have set up in the MQTT Menu:

---

![Node-RED with your INSTAR IP Camera](./Node-RED_MQTT_05.png)

---

Click on __Add__ to add the INSTAR MQTT Broker to Node-RED and make sure that every MQTT Node of the flow is using the broker.
After clicking on __Deploy__ all MQTT Nodes should show that they successfully connected: --- ![Node-RED with your INSTAR IP Camera](./Node-RED_MQTT_06.png) --- ## Node-RED User Interface You are currently using the Node-RED admin panel under an URL looking something like this: `http://<IP Address>:1880/#flow/b7397920.044be8`. To switch to the Node-RED dashboard simply add a __ui__ to the end of it like `http://<IP Address>:1880/ui/`: --- ![Node-RED with your INSTAR IP Camera](./Node-RED_MQTT_07.png) --- Every flow that you have imported above will be on it's own board. Clicking on toggles and swiping sliders allows you to change the settings on your camera. __Note__ that the changes happen instantly, but some settings require a restart of your camera to become active - e.g. when changing a camera port, WDR settings, etc. As the WebUI is using the HTTP interface changes will not be reflected there unless you reload the menu. <GifContainer gifUrl="/assets/gif/Node-RED_MQTT_Camera_05.gif" alt="Node-RED with your INSTAR IP Camera" /><file_sep>--- date: "2019-09-24" title: "Installing MotionEye on CentOS8 with Podman" categories: - LINUX - Smarthome - IoT - Docker --- ![Shanghai, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Installation through Podman on CentOS8](#installation-through-podman-on-centos8) - [Access MotionEye and Create a Camera](#access-motioneye-and-create-a-camera) - [Video Recording Persistence](#video-recording-persistence) <!-- /TOC --> ## Installation of MotionEye through Podman on CentOS8 The automatically built images are available on [Docker Hub](https://hub.docker.com/r/ccrisan/motioneye/). Download the image with one of the following command: ```bash podman pull ccrisan/motioneye:master-amd64 ``` Or on an non-internet device - run the docker or podman command on your PC with an internet connection, e.g. on a Windows PC: ```bash docker pull ccrisan/motioneye:master-amd64 docker save -o motioneye.docker ccrisan/motioneye:master-amd64 ``` And copy the `motioneye.docker` file to your CentOS system and run: ```bash podman load -i motioneye.docker podman run ccrisan/motioneye:master-amd64 ``` To run the container use the following command (__see correction below!__): ```bash podman run --name="motioneye" \ -p 8765:8765 \ --hostname="motioneye" \ -v /etc/localtime:/etc/localtime:ro \ -v /opt/motioneye:/etc/motioneye \ --restart="always" \ --detach=true \ ccrisan/motioneye:master-amd64 ``` But I ran into some security-related issues with SELinux and permissions on `/opt/motioneye/` and `/opt/motioneye/lib`. Make sure to run `chmod` with the necessary rights (`chmod -R 777 /opt/motioneye` to open it up completely - some might prefer `755` instead) and `chcon -Rt svirt_sandbox_file_t /opt/motioneye`. I also added `--security-opt label=disable \` tag and the re-stream port `8081` for my camera. Additionally I added a [motioneye.conf template](https://raw.githubusercontent.com/ccrisan/motioneye/78a89fbc2e2d2c284408f8eb1037c8598b4e715a/extra/motioneye.conf.sample) to the `/opt/motioneye` directory on my host system. ```bash podman run --name="motioneye" \ -p 8765:8765 \ --hostname="motioneye" \ -v /etc/localtime:/etc/localtime:ro \ -v /opt/motioneye:/etc/motioneye \ --security-opt label=disable \ --restart="always" \ --detach=true \ ccrisan/motioneye:master-amd64 ``` > Currently the video recordings from MotionEye are not persisted. 
To find out where those are stored we first have to run the container, check the location and revisit this run command ([see below](#video-recording-persistence)).

Add additional port mappings with the `-p` parameter if you want to use the streaming feature of motion: `-p 8765:8765 -p 8081:8081`; for added cameras, the numbering of ports used for streaming starts from `8081` (the second camera will use port `8082`, etc.). Ports used for streaming can later be changed in motionEye (Advanced Settings -> Video Streaming -> Streaming Port) but should always match the ones that are being exposed from Docker. If you are using additional services that make use of ports in the 808x range, the default mapping can be edited to avoid conflicts by mapping higher port numbers that are not in use by other services (i.e. `-p 8765:8765 -p 58081:8081 -p 58082:8082`).

Change the bind path `/etc/motioneye` according to your needs - it contains the configuration files for motionEye. The bound file `/etc/localtime` is necessary for a proper timezone configuration inside the container using the timezone of the host.

To forward a video device of your host use an additional parameter like the following:

```bash
--device=/dev/video0
```

> Check the [Podman Commands](https://github.com/containers/libpod/blob/master/commands.md) for more details.

### Access MotionEye and Create a Camera

Now that MotionEye is running we can access it on your server's IP address + the default port `8765` that we forwarded out of the container to all network interfaces when we ran the container.

![MotionEye CentOS8 Podman](./MotionEye_CentOS8_Podman_01.png)

Click on the drop down menu marked with the red arrow to add a camera. The RTSP streaming URL for your camera is its IP address + the stream that you want to extract (in decreasing resolution) `/11`, `/12` or `/13` - e.g.

```bash
rtsp://192.168.2.116/11
```

The directory that will store our video recordings is given by the marked root directory:

```bash
/var/lib/motioneye/{Camera Name}
```

Apply your settings and you should see your camera's live video.

### Video Recording Persistence

At the moment all video recordings that we create will be stored inside the container and thus be deleted once we shut it down. To prevent this from happening we log into the container, locate the media storage directory and mount a volume to persist this data.

```bash
podman ps -a

CONTAINER ID  IMAGE                                      COMMAND               STATUS            PORTS                   NAMES
b2352521ed1a  localhost/ccrisan/motioneye:master-amd64   /bin/sh -c test -...  Up 6 minutes ago  0.0.0.0:8765->8765/tcp  motioneye
```

Now we know the container ID is `b2352521ed1a` (we could also use the container name - since we set it to `motioneye`).

```bash
[root@CentOS8 ~]# podman exec -ti b2352521ed1a /bin/bash

root@motioneye:/# find / -name motioneye
/etc/motioneye
/usr/local/lib/python2.7/dist-packages/motioneye
/usr/local/share/motioneye
/usr/share/motioneye
/var/lib/motioneye

root@motioneye:/# ll /var/lib/motioneye
total 0
drwxrwxrwx. 3 root root 21 Jan 29 15:32 ./
drwxr-xr-x. 1 root root 23 Jan 29 00:56 ../
drwxr-xr-x. 2 root root  6 Jan 29 15:32 Camera1/
```

We can see that the directory `/var/lib/motioneye` was created and holds a folder for our camera's recordings. 
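If the container already holds recordings you want to keep, you could copy them out to the host before removing it - a sketch using `podman cp` with the paths from the `find` output above:

```bash
# Copy the recordings directory from the (still running) container
# to the host folder we are about to mount as a volume
podman cp motioneye:/var/lib/motioneye /opt/motioneye/lib
```
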
We can now stop and delete the container and mount this directory as a volume to our host system to persist the data: ```bash podman stop motioneye podman rm motioneye podman run --name="motioneye" \ -p 8765:8765 \ -p 8081:8081 \ -p 8082:8082 \ --hostname="motioneye" \ -v /etc/localtime:/etc/localtime:ro \ -v /opt/motioneye:/etc/motioneye \ -v /opt/motioneye/lib:/var/lib/motioneye \ --security-opt label=disable \ --restart="always" \ --detach=true \ ccrisan/motioneye:master-amd64 ``` Camera re-stream ports collided with the default ports used by ioBroker => ```bash podman run --name="motioneye" \ -p 8765:8765 \ -p 7777:7777 \ -p 7778:7778 \ --hostname="motioneye" \ -v /etc/localtime:/etc/localtime:ro \ -v /opt/motioneye:/etc/motioneye \ -v /opt/motioneye/lib:/var/lib/motioneye \ --security-opt label=disable \ --restart="always" \ --detach=true \ ccrisan/motioneye:master-amd64 ```<file_sep>--- date: "2019-08-11" title: "MQTT Networks with Home Assistant" categories: - MQTT - Smarthome - IoT --- import GifContainer from "../../src/components/ImageContainer"; ![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Home Assistant Installation on a Raspberry Pi](#home-assistant-installation-on-a-raspberry-pi) - [MQTT Binding](#mqtt-binding) - [Testing our MQTT Service](#testing-our-mqtt-service) - [Adding a UI Switch](#adding-a-ui-switch) - [Switch Component](#switch-component) - [Configuring the User Interface](#configuring-the-user-interface) - [Adding your Cameras Live Video](#adding-your-cameras-live-video) - [Adding a UI Push Button](#adding-a-ui-push-button) - [Automations](#automations) - [Testing our Service](#testing-our-service) - [Day/Night Switching Detection Areas](#daynight-switching-detection-areas) - [Using Home Assistant as the Alarmserver for your Camera](#using-home-assistant-as-the-alarmserver-for-your-camera) <!-- /TOC --> ## Home Assistant Installation on a Raspberry Pi Following the [installation instruction](https://www.home-assistant.io/docs/installation/raspberry-pi/) and the [SystemD Service Setup](https://www.home-assistant.io/docs/autostart/systemd/) it turned out to be a breeze to install HASS on our Raspberry Pi 3 with [Raspbian Buster](https://www.raspberrypi.org/downloads/raspbian/). ### MQTT Binding We noticed that now you are able to add an MQTT broker already during the initial setup. There is no longer the need for doing this inside the configuration YAML files: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_01.png) --- When you access the UI for the first time click on adding an extension and choose MQTT from the list. Now type in your INSTAR IP camera IP address (e.g. `192.168.2.165`) followed by the port `1883` and your INSTAR MQTT Broker login. --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_02.png) --- If you already installed Home Assistant, go to __Settings__ and __Integrations__ and select to add the _MQTT Broker_ there: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_03.png) ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_04.png) --- ### Testing our MQTT Service To test if everything is working, we need to go to __Developer Tools__ and __MQTT__. Here we can setup a subscription for the topic (__Listen to a topic__) `instar/000389888811/status/alarm/area1/enable` (please replace the __000389888811__ part with the MAC address of your camera). 
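The same check also works outside of Home Assistant - a sketch using the mosquitto command line clients (assuming they are installed; IP address, port and login are placeholders for your camera's MQTT settings):

```bash
# Watch the status topic in one terminal ...
mosquitto_sub -h 192.168.2.165 -p 1883 -u admin -P instar \
  -t "instar/000389888811/status/alarm/area1/enable" -v

# ... and activate the detection area from a second terminal
mosquitto_pub -h 192.168.2.165 -p 1883 -u admin -P instar \
  -t "instar/000389888811/alarm/area1/enable" -m '{"val":"1"}'
```
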
---

![Home Assistant with your INSTAR IP Camera](./Home_Assistant_05.png)

---

In the screenshot you can see that we first received an update that the value for this topic was 0: `{"val":"0"}`. That means that alarm detection area 1 was deactivated. Now by publishing the value `{"val":"1"}` on the topic `instar/000389888811/alarm/area1/enable` the area gets activated. You can verify this inside your camera's web user interface. You can also deactivate the area again inside the WebUI and see that the MQTT topic switches back to `{"val":"0"}`.

## Adding a UI Switch

We now know that we are able to subscribe to MQTT status topics on our camera and to update these topics via the corresponding command topic. All we need to do now is to add a __Switch Component__ that does this for us and can be assigned to our Home Assistant dashboard.

### Switch Component

To keep our main `configuration.yaml` uncluttered we will create a separate file `switches.yaml` that contains all our MQTT related switches and import it into the configuration by adding the following line: `switch: !include switches.yaml`. We can now add our first switch components - e.g. to activate and deactivate the motion detection areas:

```yaml
- platform: mqtt
  name: 'Alarm Area 1'
  state_topic: 'instar/000389888811/status/alarm/area1/enable'
  command_topic: 'instar/000389888811/alarm/area1/enable'
  qos: 1
  payload_on: '{"val":"1"}'
  payload_off: '{"val":"0"}'
  optimistic: false
  retain: false

- platform: mqtt
  name: 'Alarm Area 2'
  state_topic: 'instar/000389888811/status/alarm/area2/enable'
  command_topic: 'instar/000389888811/alarm/area2/enable'
  qos: 1
  payload_on: '{"val":"1"}'
  payload_off: '{"val":"0"}'
  optimistic: false
  retain: false

- platform: mqtt
  name: 'Alarm Area 3'
  state_topic: 'instar/000389888811/status/alarm/area3/enable'
  command_topic: 'instar/000389888811/alarm/area3/enable'
  qos: 1
  payload_on: '{"val":"1"}'
  payload_off: '{"val":"0"}'
  optimistic: false
  retain: false

- platform: mqtt
  name: 'Alarm Area 4'
  state_topic: 'instar/000389888811/status/alarm/area4/enable'
  command_topic: 'instar/000389888811/alarm/area4/enable'
  qos: 1
  payload_on: '{"val":"1"}'
  payload_off: '{"val":"0"}'
  optimistic: false
  retain: false
```

For each component - that we assign the switch template to inside our main configuration - we have to define the platform we want to use it on. Here this is the __mqtt__ platform that we added to Home Assistant. We have both a __state\_topic__ and a __command\_topic__. The first one takes the status topic and defines the state our switch is in. The command topic is the one that we use to update the state and trigger our camera to change its corresponding internal state - e.g. switch our alarm area on or off. Here you can use any of the MQTT Topics that belong to functions that are supported by your camera model.

The message payload is in this case either `{"val":"1"}`, to switch the area on, or `{"val":"0"}` to deactivate the area. Those are the __payload\_on__ and __payload\_off__ values for our switch. Note that you have to surround each one of those with _single-tick quotation marks_.

Now we are also able to set some MQTT magic. The first one is `qos` and stands for __Quality of Service__ - where `0` stands for _fire-and-forget_ (your client might lose a status update if it does not receive the message), `1` means that it is verified that a status update is received and `2` is not relevant for us here (with a qos value of 2 it is made sure that every update is received but only received once - which is important e.g. 
when you are recording time series from measuring sensors). `optimistic` has to be used when your device does not have a separate `state_topic` - here you have to assume that the command you just sent worked and the state was updated. With `retain` you can hold on to the value of a topic even if the client or server cannot be reached. But we don't need this for our setup. ### Configuring the User Interface Now we need to add a new __View__ for our camera by switching the UI into the _configuration mode_: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_06.png) --- Now click on the __+__ as highlighted in the screenshot below and add a view for your camera: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_07.png) --- Switch to the new view and click on the big __+__ button to add a card that can contain our MQTT switches: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_08.png) --- Choose an __Entity Card__ and select all the switches we just added to our configuration. They should show up automatically - if not, try reloading Home Assistant: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_09.png) ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_10.png) --- Quit the configuration mode to get back to the regular UI and you should be able to see your new card with all 4 alarm area switches. Try switching your alarm areas from your cameras Web User Interface - once you apply your settings there all switched inside Home Assistant should react to the state change immediately. <GifContainer gifUrl="/assets/gif/Home_Assistant_12.gif" alt="Home Assistant with your INSTAR IP Camera" /> ## Adding your Cameras Live Video ## Adding a UI Push Button Some of our camera's functions don't have 2 states (on/off) - we just need a button that we can push and it should jump back to idle right after that. Examples are: * Manual Alarm Trigger: `alarm/pushalarm` * Go to Preset Position: `features/ptz/preset` * Move Up/Down/Left/Right: `features/ptz/move` etc. This can be done in Home Assistant with a script. Make sure that your `scripts.yaml` file exist inside the __.homeassistant__ folder and is linked into your main `configuration.yaml` - `script: !include scripts.yaml`. Then open the scripts file and add the following lines: ```yaml 9010_pushalarm: sequence: - service: mqtt.publish data_template: topic: instar/000389888811/alarm/pushalarm payload: '{"val":"1"}' qos: 1 9010_gotopos1: sequence: - service: mqtt.publish data_template: topic: instar/000389888811/features/ptz/preset payload: '{"val":"0"}' qos: 1 9010_moveright: sequence: - service: mqtt.publish data_template: topic: instar/000389888811/features/ptz/move payload: '{"val":"right"}' qos: 1 9010_movestop: sequence: - service: mqtt.publish data_template: topic: instar/000389888811/features/ptz/move payload: '{"val":"stop"}' qos: 1 ``` Choose a name for each script that helps you identify which camera you are addressing, e.g. `9010_pushalarm` and choose the MQTT topics that you want to add in form of buttons to your Home Assistant UI. 
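Before wiring them into the UI you can verify that a script fires by calling it as a service through Home Assistant's REST API - a sketch, assuming a long-lived access token (created in your Home Assistant user profile) and using the document's `<IP Address>` placeholder style:

```bash
# Every script is exposed as a service in the script domain -
# POSTing to it runs the script once (YOUR_TOKEN is a placeholder)
curl -X POST \
  -H "Authorization: Bearer YOUR_TOKEN" \
  -H "Content-Type: application/json" \
  http://<IP Address>:8123/api/services/script/9010_pushalarm
```
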
Note that the __move command__ will move your camera's PTZ head continuously until you hit the __stop command__ - don't forget to add the stop button :) Just like with our [switches earlier](#configuring-the-user-interface) we now have to activate the configuration mode inside the Home Assistant Lovelace UI and click on the __+__ button to add a new __Entity__: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_13.png) ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_14.png) --- Now select the script you want to assign to this button from the __Entity List__ and name the card. Unfortunately, the UI does not yet give us the option to add a `name` and icon to the selected script. But if you click on the two wavy brackets - highlighted here (red arrow): --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_15.png) --- You can add them manually into the entity definition: ```yaml entities: - entity: script.9010_pushalarm icon: 'mdi:bell' name: IN-9010 FHD show_header_toggle: false title: Trigger Alarm type: entities ``` --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_16.png) --- You can choose your icon from the [Material Design Icons](https://cdn.materialdesignicons.com/4.5.95/). Note that the button we chose here - `icon: 'mdi:bell'` - is called `mdi-bell` in the Material Design documentation and this name has to be changed accordingly for Home Assistant to recognize the icon. --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_17.png) --- Here we added all Pan-Tilt, Preset Position and the Manual Alarm Trigger command to the Lovelace UI. ## Automations Now that we are able to manually trigger functions on our camera we now want to find a way to automate those processes through Home Assistant. To do this we will first add a prototype service to our `scripts.yaml` file. This service should be able to receive a __MQTT Topic__ (`target`) and __MQTT Payload__ (`message`) from an automation in the `automations.yaml` file. This can be done by adding the following lines into the script file: `scripts.yaml` ```yaml # MQTT Publish Service send_mqtt_command: sequence: - service: mqtt.publish data_template: topic: "{{ target }}" payload: "{{ message }}" qos: 1 ``` Now make sure that the `automations.yaml` file exists in the _.homeassistant_ directory and is linked into the main `configuration.yaml`: `automation: !include automations.yaml`. Then switch to the Lovelace UI, open the __Configuration Panel__ and select __Automation__: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_18.png) --- ### Testing our Service Click on the red __+__ button to add a new automation. Fill out the form as follows: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_19.png) ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_20.png) --- To test our automation we want to set up a MQTT topic `hass/automation/test` that, when it receives a payload of `{"val":"1"}` will call our prototype MQTT service we created above and passes down a __message__ and a __topic__ to it: ```json { "message": "{\"val\":\"1\"}", "target": "instar/000389888811/features/ptz/preset" } ``` Note that to be able to send a JSON formated payload inside this JSON expression, we have to escape the quotation marks with a backslash. By sending the payload `1` to `/features/ptz/preset` we have our camera moving to __preset position 2__. 
We can test this using __MQTT.fx__: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_21.png) --- Clicking on publish will trigger our automation, that then triggers our service telling our camera to move to preset 2. When you check the `automations.yaml` file inside the `.homeassistant` directory, you will see that Home Assistant created the automation for us as follows: ```yaml - id: '1571128164029' alias: TEST MQTT Automation trigger: - payload: '{"val":"1"}' platform: mqtt topic: hass/automation/test condition: [] action: - alias: '' data: message: '{"val":"1"}' target: instar/000389888811/features/ptz/preset service: script.send_mqtt_command ``` ### Day/Night Switching Detection Areas Now that we have proven that our concept is sound, we can continue and build our first meaningful automation. For this we can, again, use our prototype MQTT service and have an automation - triggered by a schedule or timed events like sunrise/sunset - pass it the necessary MQTT topics and payloads to switch our detection areas on or off: --- ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_22.png) ![Home Assistant with your INSTAR IP Camera](./Home_Assistant_23.png) --- This is the __Day Automation__ that is triggered by the __Sunrise__ and will update the MQTT topics to activate Alarm Detection Area 1 & 2, while deactivating Areas 3 & 4. Note that this automation combines 4 actions that all will be let loose on our MQTT Publishing Service every morning by sunrise. The __Night Automation__ is the opposite - triggered by __Sunset__ and deactivates Areas 1 & 2, while activating Areas 3 & 4. A quick look into the `automations.yaml` file shows us the two automations that were created through the Lovelace UI: ```yaml - id: '1571131738957' alias: Switch to Night Areas trigger: - event: sunset platform: sun condition: [] action: - data: message: '{"val":"0"}' target: instar/000389888811/alarm/area1/enable service: script.send_mqtt_command - data: message: '{"val":"0"}' target: instar/000389888811/alarm/area2/enable service: script.send_mqtt_command - data: message: '{"val":"1"}' target: instar/000389888811/alarm/area3/enable service: script.send_mqtt_command - data: message: '{"val":"1"}' target: instar/000389888811/alarm/area4/enable service: script.send_mqtt_command - id: '1571131880630' alias: Switch to Day Areas trigger: - event: sunrise platform: sun condition: [] action: - data: message: '{"val":"1"}' target: instar/000389888811/alarm/area1/enable service: script.send_mqtt_command - data: message: '{"val":"1"}' target: instar/000389888811/alarm/area2/enable service: script.send_mqtt_command - data: message: '{"val":"0"}' target: instar/000389888811/alarm/area3/enable service: script.send_mqtt_command - data: message: '{"val":"0"}' target: instar/000389888811/alarm/area4/enable service: script.send_mqtt_command ``` You can now wait for the next Sunrise or Sunset to see if your automation is working (or just use the Home Assistant Developer Tools inside the Lovelace UI to trigger this event).<file_sep>--- date: "2017-12-11" title: "Getting started with Python" categories: - Python --- ![Port Vila, Vanuatu](./photo-34445490202_b13f40bd9d_o.png) <!-- TOC depthFrom:2 depthTo:4 --> - [Hello World](#hello-world) - [User Input](#user-input) - [Variables](#variables) - [Data Types](#data-types) - [Strings](#strings) - [Numbers](#numbers) - [Booleans](#booleans) - [Lists](#lists) - [Set](#set) - [Tuples](#tuples) - [Ranges](#ranges) - [Dictionaries](#dictionaries) - [Conversion between 
Data Types](#conversion-between-data-types) <!-- /TOC --> ## Hello World ```python print("Hello World") ``` ## User Input ```python user_says = input ("Please enter a string: ") print(user_says) ``` `user_says` is a Python variable that is used to store data, in this case it is assigned the string from the input function. We can then print out the variable to see if it worked. ## Variables ![Python](./python_01.png) ## Data Types * mutable data type * lists * dictionaries * sets * immutable data types * strings * numbers * tuples * frozensets ### Strings ![Python](./python_02.png) ![Python](./python_03.png) ![Python](./python_04.png) ![Python](./python_05.png) ### Numbers ![Python](./python_06.png) ### Booleans ![Python](./python_07.png) ### Lists ![Python](./python_08.png) ![Python](./python_09.png) ![Python](./python_10.png) ### Set Sets = unordered lists of __unique items__. ![Python](./python_11.png) You can create a set from a list to remove duplicates. ![Python](./python_12.png) ![Python](./python_13.png) ![Python](./python_14.png) Just as list before sets are mutable - you can add or remove elements at will. To create an immutable set from a list you have to use __FrozenSets__: ![Python](./python_15.png) ### Tuples Tuples are immutable list - elements cannot be added or removed once the tuples was created. ![Python](./python_16.png) Tuples allow you to map values to variables by assigning a tuple, made up of variables, to a tuple, made up of values: ![Python](./python_17.png) ### Ranges ![Python](./python_18.png) ### Dictionaries Dictionaries are an unordered list of key-value pairs. Every key has to be unique and should be of an immutable type - strings, numbers or tuples. ![Python](./python_19.png) ## Conversion between Data Types ![Python](./python_20.png) ![Python](./python_21.png) <file_sep>--- date: "2019-09-26" title: "Kali Linux with Docker for Windows" categories: - LINUX --- ![<NAME>, Hongkong](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Installing Kali](#installing-kali) - [Installing Tools](#installing-tools) - [Committing the Image](#committing-the-image) - [Starting Kali Containers](#starting-kali-containers) <!-- /TOC --> ## Installing Kali see available docker images on [Docker Hub](https://hub.docker.com/u/kalilinux) ```bash docker pull kalilinux/kali-rolling ``` ```bash docker run -it kalilinux/kali-rolling /bin/bash ``` ## Installing Tools ```bash apt update && apt full-upgrade && apt auto-remove && apt auto-clean ``` ```bash apt-get install kali-linux-full ``` See [available packages](https://www.kali.org/news/kali-linux-metapackages/) * kali-linux * kali-linux-all * kali-linux-forensic * kali-linux-full * kali-linux-gpu * kali-linux-pwtools * kali-linux-rfid * kali-linux-sdr * kali-linux-top10 * kali-linux-voip * kali-linux-web * kali-linux-wireless Checked contained programs in packages: ```bash apt-cache show kali-linux-wireless |grep Depends ``` ## Committing the Image ```bash docker ps -a ``` ```bash docker start {containerID} docker attach {containerID} docker commit {containerID} kali docker images docker rm {containerID} ``` ## Starting Kali Containers ```bash docker run -ti -rm kali /bin/bash ``` ```bash docker run -it --rm -p 4444:4444 kali ```<file_sep>--- date: "2019-09-07" title: "Magento 2 Docker Development" categories: - Docker --- ![Siam Reap, Cambodia](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Docker on Windows](#docker-on-windows) - [Setting up the Environment](#setting-up-the-environment) - [Installing 
Magento](#installing-magento) - [Webmin](#webmin) - [MySQL](#mysql) - [Magento &amp; Apache](#magento-amp-apache) - [PHPMyAdmin](#phpmyadmin) - [Configuring Magento](#configuring-magento) <!-- /TOC --> I want to try set up a Magento 2 Store in Docker on a Windows 10 PC. ## Docker on Windows Start by [installing Docker](https://docs.docker.com/docker-for-windows/install/) on your Windows PC. Note that you need: * Windows 10 64-bit: Pro, Enterprise, or Education (Build 15063 or later). * Hyper-V and Containers Windows features must be enabled. After the installation sign in with your [Docker Hub](https://hub.docker.com) ID. Start the Docker Settings and select a drive you want to be available for your docker containers - this is the drive we need to store our source code on: ![Docker and Magento2 on Windows 10](./Magento2_Docker_01.png) ### Setting up the Environment Now we need to create a `docker-compose.yml` file that will help us setting up the Magento2/LAMP environment: ```yaml mysql: image: mysql:5.7 ports: - 3306:3306 environment: MYSQL_ROOT_PASSWORD: <PASSWORD> volumes: - ./src/db:/var/lib/mysql container_name: mysql_5.7 phpmyadmin: image: phpmyadmin/phpmyadmin:4.8 ports: - 8080:80 links: - mysql environment: PMA_HOST: mysql PMA_PORT: 3306 container_name: phpmyadmin_4.8 apache2: image: phpcuong/magento2-apache2:3.0 ports: - 80:80 - 10000:10000 - 443:443 links: - mysql volumes: - ./src/html:/var/www/html container_name: magento2.3 ``` Create two folders named `html` and `db` and point the two volumes declared above to those directories. In my case there is a `src` folder in the same directory as the compose YAML file. This folder contains both the db and html folder. Those instruction will set up a MySQL database, PHPMyAdmin and a Ubuntu container with Apache2, PHP 7.1.26, Webmin, Composer and Git. Now open PowerShell in the directory where you created the YAML file and run the command `docker-compose up -d` to set everything up: ![Docker and Magento2 on Windows 10](./Magento2_Docker_02.png) Once Docker finishes you can run `docker images` to see the downloaded images and `docker ps -a` to see that all 3 are now running in the background: ![Docker and Magento2 on Windows 10](./Magento2_Docker_03.png) ## Installing Magento We can now verify that the virtual environment inside the Docker container full fills the requirements for Magento 2.2.6: * Apache 2.4 * MySQL 5.7 * PHP 7.0.13 or higher To do this run the following command `docker exec -it magento2.3 bin/bash` to access the Docker container that we want to use to install Magento in (__Note__ that you can get the container name `magento2.3` from the screenshot above): ![Docker and Magento2 on Windows 10](./Magento2_Docker_04.png) ### Webmin As mentioned above this container comes with [Webmin](http://www.webmin.com) pre-installed. Webmin is a web-based interface for system administration for Unix. You can setup user accounts, Apache, DNS and much more. Webmin removes the need to manually edit Unix configuration files like /etc/passwd, and lets you manage a system from the console or remotely. The Webmin dashboard is exposed on Port `10000` and can accessed via localhost on your Windows machine: ``` https://127.0.0.1:10000 ``` * username: root * password: <PASSWORD> ![Docker and Magento2 on Windows 10](./Magento2_Docker_05.png) You can use it to make changes to your virtual server take care of outdated packages. 
__Note__ that changes you make here will be wiped if you rebuild the container:

![Docker and Magento2 on Windows 10](./Magento2_Docker_06.png)

### MySQL

To check your MySQL version run the same kind of command as before to enter the container: `docker exec -it mysql_5.7 bin/bash`.

Note that you now have to run `mysql -u root -p` and enter the `MYSQL_ROOT_PASSWORD` you defined in the compose YAML file to enter the database:

![Docker and Magento2 on Windows 10](./Magento2_Docker_07.png)

We can now create a database for Magento called `magento_2_3_3` - rename it depending on the [version](https://devdocs.magento.com/release/) you wish to install:

```sql
create database magento_2_3_3;
show databases;
```

![Docker and Magento2 on Windows 10](./Magento2_Docker_08.png)

### Magento & Apache

You can download the Magento Source code from the [Tech Resources](https://magento.com/tech-resources/download) page. Unzip all files to the `src/html` folder we created earlier.

While we wait for those files to be unzipped we can already set up a host name for our shop, e.g. `magento-2-3-3.instar.com`. To do this we can edit the Windows __hosts__ file in __\Windows\System32\drivers\etc\\__ and add the following line:

```
127.0.0.1 magento-2-3-3.instar.com
```

So now when we type `magento-2-3-3.instar.com` into our browser's address bar the address will be resolved to `localhost`.

Now go back to the __Webmin__ interface, point a virtual server to the Magento HTML code and assign the address that you just created your host for:

![Docker and Magento2 on Windows 10](./Magento2_Docker_09.png)

To activate the new configuration we need to restart Apache:

```
docker exec -it magento2.3 bin/bash
service apache2 restart
```

Apache is now hosting our Magento site on `magento-2-3-3.instar.com` and port `80`. Access it through your web browser and start the configuration wizard:

![Docker and Magento2 on Windows 10](./Magento2_Docker_10.png)

__Note__ that the Magento admin panel can now be reached under `http://magento-2-3-3.instar.com/admin_1kji1i` with the password that you set during the setup process:

![Docker and Magento2 on Windows 10](./Magento2_Docker_11.png)

To deploy the frontend run the following command from the `magento2.3` container inside the Magento source folder (the path depends on how you named the folder after extracting the Magento source):

```
docker exec -it magento2.3 bin/bash
cd /var/www/html/Magento-CE-2.3.3
php bin/magento setup:static-content:deploy -f
```

![Docker and Magento2 on Windows 10](./Magento2_Docker_12.png)

You can find the frontend source code that is being deployed in `\src\html\Magento-CE-2.3.3\pub\static\frontend\Magento\blank\en_US`. 
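If the storefront still shows up unstyled after deploying, it is usually a caching issue. The `cache:clean` and `cache:flush` commands are part of the standard Magento CLI - a quick sketch, run from inside the same container:

```bash
# Enter the container and clear the Magento caches from the source root
docker exec -it magento2.3 bin/bash
cd /var/www/html/Magento-CE-2.3.3
php bin/magento cache:clean
php bin/magento cache:flush
```
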
The frontend is now available on port `80` with the set URL:

![Docker and Magento2 on Windows 10](./Magento2_Docker_13.png)

### PHPMyAdmin

We can now access the PHPMyAdmin interface over port `8080` with the domain name we set up `magento-2-3-3.instar.com`:

* username: `root`
* password: `<PASSWORD>` (as set by MYSQL_ROOT_PASSWORD)

![Docker and Magento2 on Windows 10](./Magento2_Docker_14.png)

The installation process populated our database successfully:

![Docker and Magento2 on Windows 10](./Magento2_Docker_15.png)

## Configuring Magento

Start by opening the Admin Dashboard through the URL and Login you set in the steps before:

![Docker and Magento2 on Windows 10](./Magento2_Docker_16.png)
<file_sep>---
date: "2018-01-28"
title: "How to wrap your Source Code into a Docker Container"
categories:
  - Javascript
  - Node
  - Docker
---

![Shanghai, China](./photo-19196703263_69f9f0df5f_o.png)

As an example, we are going to use [express-generator](https://expressjs.com/en/starter/generator.html) to scaffold a [Node.js](https://nodejs.org) Web App and wrap its source code into a Docker Container.

[Github Repository](https://github.com/mpolinowski/express-generator-dockerrized)

<!-- TOC -->

- [Create a Node.js Web App](#Create-a-Nodejs-Web-App)
- [Creating a Dockerfile for our App](#Creating-a-Dockerfile-for-our-App)
- [FROM](#FROM)
- [ENV](#ENV)
- [COPY](#COPY)
- [WORKDIR](#WORKDIR)
- [RUN](#RUN)
- [EXPOSE](#EXPOSE)
- [ENTRYPOINT](#ENTRYPOINT)
- [Building our custom Docker Image](#Building-our-custom-Docker-Image)

<!-- /TOC -->

## Create a Node.js Web App

We want to use express-generator to generate a basic Node Web App. We first need to install the generator globally on our machine:

```bash
npm install -g express-generator
```

We then run express-generator to scaffold an app for us, using the [EJS Templating Engine](http://ejs.co) - check out [their website](https://expressjs.com/en/starter/generator.html) for more options - and put the source code into a folder named _express-generator-dockerrized_:

```bash
express --view=ejs express-generator-dockerrized
```

To install dependencies we need to enter the created directory and run _npm install_:

```bash
cd express-generator-dockerrized && npm install
```

We can test our web app by running _npm start_ and accessing _http://localhost:3000_ with a web browser:

![Express App in Docker Container](./express-dockerrized_01.png)

## Creating a Dockerfile for our App

The [Dockerfile](https://docs.docker.com/engine/reference/builder/) is a text file that contains all the commands that the [docker build](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#build-context) command uses, in order, to build our Docker image.

```dockerfile
FROM node:latest
MAINTAINER <NAME>

ENV NODE_ENV=production
ENV PORT=3000

COPY . /app
WORKDIR /app

RUN npm install

EXPOSE 3000

ENTRYPOINT ["npm", "start"]
```

### FROM

We are using the [node](https://hub.docker.com/_/node/) Docker image as a base for our app. By just writing _[FROM](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#from)_ __node__ we will use the @latest version of Node.js available on the [Docker Hub](https://hub.docker.com) - [specify a version](https://hub.docker.com/r/library/node/tags/) when required - e.g. `node:latest`, `node:9.10.2`

### ENV

The [Environment Variable](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#env) can be used to set, e.g. the Node Environment. 
Our app, as created by express-generator, also allows us to change the port the application is running on, depending on the environment variable defined here (see _./bin/www_ for how this is done).

### COPY

[Copy](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#add-or-copy) copies the source code into a folder inside the container. The given example __COPY . /app__ copies everything from the current directory into an _/app_ directory that is created inside the container.

### WORKDIR

The [Working Directory](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#workdir) sets the context where scripts are run. We are going to add an `npm install` step that needs to be run in the directory that contains our _package.json_.

### RUN

The most common use case for RUN in a _Node Application_ is `npm install`, to install our app dependencies once our source code is copied into the container.

### EXPOSE

This [exposes](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#expose) a port that our app will be reached through, once the container is running. When a __PORT__ was defined as an _Environment Variable_ (see above), you can also use this value by setting __EXPOSE $PORT__.

### ENTRYPOINT

The [Entrypoint](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#entrypoint) allows you to configure a container that will run as an executable. Point it to the image's main command. Since we installed our app through _express-generator_, the main file to run would be __./bin/www__. In our case, the file would be executed by `node ./bin/www`. But checking the generated _package.json_ file, we can see that we already have an NPM Script for that, which allows us to simply call __npm start__ instead.

## Building our custom Docker Image

First we need to make sure that our Docker Image does not get unnecessary files - especially the __node\_modules__ and __.git__ folders should be excluded. Just like with a [.gitignore](https://www.gitignore.io) file, which excludes folders and files from git commits, we can create a __.dockerignore__ file:

```
.git
Dockerfile*
node_modules
```

Now we can run the __docker build__ command.

```bash
docker build -f Dockerfile -t expressapp/node .
```

This will run the `build` function of Docker using our Dockerfile (__Note__: you don't have to add the `-f` flag when naming your file _Dockerfile_. The flag is useful when you have more than one Dockerfile for different scenarios and have to give them different names accordingly.). The `-t` flag adds a tag to the Docker image - this can be your Docker Hub username, or a build version, for internal use. The `.` in the end signifies that docker should search for your code in the current working directory.

![Express App in Docker Container](./express-dockerrized_02.png)

You can check if the image was built by typing:

```
docker images
```

![Express App in Docker Container](./express-dockerrized_03.png)

And run the container by typing:

```
docker run -d -p 8080:3000 expressapp/node
```

This command will run your container in `-d` daemon mode in the background and set the external port of your app to 8080 (the internal port is set by the express app to 3000). 
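Since the image defines its defaults via ENV, you could also override them at run time without rebuilding - a sketch using docker's `-e` flag (port 4000 is just an arbitrary example):

```bash
# Override the PORT environment variable baked into the image and map
# the new internal port to 8080 on the host
docker run -d -e PORT=4000 -p 8080:4000 expressapp/node
```
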
You can verify that your app is running by typing: ``` docker ps ``` ![Express App in Docker Container](./express-dockerrized_04.png) You now have your app running inside the container and can access it by opening http://localhost:8080 inside your web browser: ![Express App in Docker Container](./express-dockerrized_05.png) You can stop and delete the container (the docker image stays in place - so you can always restart it) by: ``` docker stop c165 docker rm c165 ``` The __c165__ are the first characters of the Container ID and can be used as a selector - of course, the ID will differ every time you rerun the container. ![Express App in Docker Container](./express-dockerrized_06.png) If you want to remove the complete image, you first have to check the image ID (just as shown above, you just need to type the first few characters to select the image - e.g. _9190_) by typing `docker images`, then delete it with `docker rmi 9190`<file_sep>--- date: "2019-09-08" title: "Elasticsearch 7 with Docker Compose" categories: - Docker - Elasticsearch --- ![ShenZhen, China](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) Let's run a __Elasticsearch 7.5__ as a single node cluster using Docker Compose with XPack disabled. To run the Elasticsearch 7 Docker image as a single node, you have to set `discovery.type` to `single-node`. At startup, the bootstrap checks are bypassed. The single node will elect itself as the master node and will not join a cluster with any other node. A complete docker-compose.yml example to run a single node Elasticsearch 7 Cluster including Kibana: ```yaml version: '3.7' services: # Elasticsearch Docker Images: https://www.docker.elastic.co/ elasticsearch: image: docker.elastic.co/elasticsearch/elasticsearch:7.5.0 container_name: elasticsearch environment: - xpack.security.enabled=false - discovery.type=single-node ulimits: memlock: soft: -1 hard: -1 nofile: soft: 65536 hard: 65536 cap_add: - IPC_LOCK volumes: - elasticsearch-data:/usr/share/elasticsearch/data ports: - 9200:9200 - 9300:9300 kibana: container_name: kibana image: docker.elastic.co/kibana/kibana:7.5.0 environment: - ELASTICSEARCH_HOSTS=http://elasticsearch:9200 ports: - 5601:5601 depends_on: - elasticsearch volumes: elasticsearch-data: driver: local ``` Start Elasticsearch and Kibana using Docker Compose: ```bash docker-compose up -d ``` Your Elasticsearch node will startup now, and after a couple of seconds, you can reach it at `http://localhost:9200/`. Kibana should be running at `http://localhost:5601` now. To shut down Elasticsearch and Kibana run: ```bash docker-compose down ``` In case you also would like to remove the docker volume while shutting down run: ```bash docker-compose down -v ```<file_sep>--- date: "2020-05-04" title: "Neo4j Corona Tracing" categories: - Databases --- ![Shenzhen, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Installing Neo4j Desktop](#installing-neo4j-desktop) - [Creating your Database](#creating-your-database) - [Importing Data](#importing-data) - [Working with your Data](#working-with-your-data) <!-- /TOC --> ## Installing Neo4j Desktop Download the latest version of the [Neo4j Desktop](https://neo4j.com/download-thanks-desktop/) AppImage and make it executable: ```bash chmod a+x neo4j-desktop-offline-1.2.9-x86_64.AppImage ``` Then run it from your Terminal `./neo4j-desktop-offline-1.2.9-x86_64.AppImage`. ### Creating your Database 1. Click on __New Project__, create the database and click on __Manage__ to set it up. 
![Neo4j Desktop Application](./Neo4j_Corona_Tracing_01.png) 2. Click on __Open Folder__, enter the __import__ directory. 3. Copy your [Corona Tracing Data](https://gist.github.com/mpolinowski/a4e4844b2fd52394c7161e8a1a5f1937) as `.csv` file into the directory. ![Neo4j Desktop Application](./Neo4j_Corona_Tracing_02.png) __Alternative__: If you are [running Neo4j inside a Docker Container](https://mpolinowski.github.io/getting-to-know-neo-4-j) copy this file to `/opt/neo4j/import` and use the web client to run the import. ![Neo4j Desktop Application](./Neo4j_Corona_Tracing_04.png) 4. Now start the database and open the __Neo4j Explorer__. ![Neo4j Desktop Application](./Neo4j_Corona_Tracing_03.png) ### Importing Data Run the following Cypher query to add your data to your database: ```bash LOAD CSV WITH HEADERS FROM "file:///coronaTracing.csv" AS line WITH line MERGE (Infected:Person{name:line.Infected, Addr:line.InfectedAddress}) MERGE (Contact:Person{name:line.Contact, Addr:line.ContactAddress}) MERGE (Infected)-[info:Info{ `Contact Info`:line.ContactInfo, `Relationship`:line.`Relationship`, Location: CASE WHEN line.Geography IS NOT NULL THEN line.Geography ELSE '' END }]->(Contact) ``` ![Neo4j Desktop Application](./Neo4j_Corona_Tracing_05.png) ## Working with your Data 1. How many potential infections (query for relationship `Info` = event with the potential of an infection) ? ```bash MATCH (person:Person)-[rel:Info]->(:Person) RETURN COUNT (rel) ``` 2. How many persons have been traced from `Cordon M` ? ```bash MATCH (person:Person) WHERE person.Addr = 'CordonM' RETURN person ``` 3. How many infected persons has `Person A` been in contact with ? ```bash MATCH (carrier)-[r:Info]->(person:Person{name:'PersonA'}) RETURN carrier, person, r ``` 4. How many were potentially infected by `Person J` ? ```bash MATCH (carrier:Person{name:'PersonJ'})-[r:Info]->(person:Person) RETURN COUNT (person) AS Infected ``` 4.1. Omit potential double-count: ```bash MATCH (carrier:Person{name:'PersonJ'})-[r:Info]->(person:Person) RETURN COUNT (DISTINCT person) AS Infected ``` 5. Select all and order by number of potential infection event - find __Super Spreader__: ```bash MATCH (carrier:Person)-[rel:Info]->(person:Person) RETURN carrier, COUNT(carrier) AS Infected ORDER BY Infected DESC LIMIT 10 ``` _Switch results from `Graph` to `Table` to see the persons who are most likely to be super spreader_ 6. Select list of persons with highest number of potential infections: ```bash MATCH (n)-[r:Info]->(m) WITH n, COUNT((n)-[]->()) as num ORDER BY num DESC LIMIT 10 MATCH (n)-[i]->(b) RETURN n,i,b ``` 7. Show the person that exposed the most: ```bash MATCH (carrier:Person)-[rel:Info]->(person:Person) RETURN person, COUNT(*) AS Infections ORDER BY Infections DESC LIMIT 1 ``` 8. What carriers had contact with the person who was exposed the most ? ```bash MATCH (carrier:Person)-[rel:Info]->(person:Person) WITH person, COUNT(*) AS Infections ORDER BY Infections DESC LIMIT 1 MATCH (person:Person)<-[r]-(p) RETURN person, r, p ``` 9. Who in `Cordon A` did not infect anyone ? ```bash MATCH (a) WHERE NOT(a:Person)-[:Info]->() AND a.Addr = "CordonA" RETURN a ``` 10. Select carrier that infected the most of their own cordon: ```bash MATCH (carrier:Person)-[r]->(person:Person) WHERE carrier.Addr = person.Addr WITH carrier, COUNT(*) AS Infections ORDER BY Infections DESC LIMIT 2 MATCH (p:Person)<-[r]-(carrier) RETURN p,r,carrier ``` 11. 
Show the path of the infection between `Person K` and `Person L`:

```bash
MATCH path=(p1:Person{name:'PersonK'})-[*]->(p2:Person{name:'PersonL'})
RETURN path
```
<file_sep>---
date: "2019-08-07"
title: "MQTT Networks with ioBroker"
categories:
  - MQTT
  - Smarthome
  - IoT
---

import GifContainer from "../../src/components/ImageContainer";

![<NAME>, Cambodia](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Install ioBroker on CentOS 8](#install-iobroker-on-centos-8)
- [Setting up Node.js](#setting-up-nodejs)
- [ioBroker Install Script](#iobroker-install-script)
- [Connecting to the MQTT Broker](#connecting-to-the-mqtt-broker)
- [Node-RED Adapter](#node-red-adapter)
- [Example Flow](#example-flow)
- [MQTT Adapter](#mqtt-adapter)
- [Visualization](#visualization)
- [JSON Flow for Node-RED](#json-flow-for-node-red)
- [Live Video](#live-video)
- [Alarmserver and Webhooks](#alarmserver-and-webhooks)
- [Contact ioBroker when your Camera triggers an Alarm](#contact-iobroker-when-your-camera-triggers-an-alarm)

<!-- /TOC -->

## Install ioBroker on CentOS 8

If you are running a different system, please follow the [official documentation](https://github.com/ioBroker/ioBroker/wiki/Installation) to get started.

### Setting up Node.js

We will start by installing [ioBroker](https://github.com/ioBroker/ioBroker/wiki/Installation) with the [official Install Script](https://www.iobroker.net/#en/download) on [Node.js v10](https://nodejs.org/en/). Running `yum module list nodejs` tells us what version of Node.js will be installed by default. The output - as shown below - shows us that we will get __Version 10__. 
```bash CentOS-8 - AppStream Name Stream Profiles Summary nodejs 10 [d] common [d], development, minimal, s2i Javascript runtime Hint: [d]efault, [e]nabled, [x]disabled, [i]nstalled ``` So all we need to do is running the following command to set up our environment: ```bash yum module install nodejs ``` ### ioBroker Install Script Now all you need to do is to run the following script: ```bash curl -sL https://iobroker.net/install.sh | bash - ``` --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_01.png) --- After the script finishes you will be able to access the ioBroker interface on your servers IP address with the port `8081`: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_02.png) --- ## Connecting to the MQTT Broker ioBroker offers a couple of adapters that you can use to connect the software to your camera's MQTT interface. A few examples are: ### Node-RED Adapter You can [install Node-RED](https://github.com/ioBroker/ioBroker.node-red/blob/master/README.md) as a adapter directly from the ioBroker UI: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_03.png) --- You can set the Node-RED webserver port - default `1880`, a user login for the interface and a URL root path, e.g. `/nodered`: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_04.png) --- The Node-RED adapter should be up and running after saving your settings: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_05.png) --- Now you can access the Node-RED UI via your servers IP address, with the port, root URL and the login that you set earlier - e.g. `http://192.168.2.111:1880/nodered/`: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_06.png) --- #### Example Flow Start with an __MQTT Input__ node and add the Status Topic for the function you want to switch - e.g. 
`instar/000389888811/status/alarm/area1/enable` would return the current state (__on__ `{"val":"1"}` or __off__ `{"val":"0"}`) of the first alarm detection area: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_07.png) --- Now add a dashboard switch and assign it the two possible states `{"val":"1"}` or `{"val":"0"}` that the detection area can take on: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_08.png) --- And finish the sequence by adding a __MQTT Output__ node with the add the corresponding Command Topic `instar/000389888811/alarm/area1/enable`: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_09.png) --- The resulting flow looks like this and can be imported from the JSON code below: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_10.png) --- ```json [{"id":"b99baf44.25272","type":"mqtt in","z":"c2e5509e.eb938","name":"area1","topic":"instar/000389888811/status/alarm/area1/enable","qos":"1","datatype":"auto","broker":"b0e646ce.236148","x":70,"y":68,"wires":[["9158f257.9f7fc"]]},{"id":"7ec111e1.da923","type":"mqtt out","z":"c2e5509e.eb938","name":"area1","topic":"instar/000389888811/alarm/area1/enable","qos":"1","retain":"","broker":"b0e646ce.236148","x":340,"y":68,"wires":[]},{"id":"9158f257.9f7fc","type":"ui_switch","z":"c2e5509e.eb938","name":"Alarm Area 1","label":"Alarm Area 1","tooltip":"","group":"bb4da677.e6f318","order":0,"width":0,"height":0,"passthru":false,"decouple":"false","topic":"","style":"","onvalue":"{\"val\":\"1\"}","onvalueType":"str","onicon":"","oncolor":"","offvalue":"{\"val\":\"0\"}","offvalueType":"str","officon":"","offcolor":"","x":204,"y":68,"wires":[["7ec111e1.da923"]]},{"id":"b0e646ce.236148","type":"mqtt-broker","z":"","name":"IN-9010FHD","broker":"192.168.2.165","port":"8883","tls":"4a5bc5a5.6ceddc","clientid":"","usetls":true,"compatmode":false,"keepalive":"60","cleansession":true,"birthTopic":"","birthQos":"0","birthPayload":"","closeTopic":"","closeQos":"0","closePayload":"","willTopic":"","willQos":"0","willPayload":""},{"id":"bb4da677.e6f318","type":"ui_group","z":"","name":"Alarm Area 1","tab":"b5b7854e.b8db58","disp":true,"width":"6","collapse":false},{"id":"4a5bc5a5.6ceddc","type":"tls-config","z":"","name":"","cert":"","key":"","ca":"","certname":"instar-cert.cert","keyname":"","caname":"","servername":"","verifyservercert":true},{"id":"b5b7854e.b8db58","type":"ui_tab","z":"","name":"MQTT","icon":"dashboard","disabled":false,"hidden":false}] ``` You can access the Node-RED dashboard by adding a `ui` to the URL your Node-RED is using - e.g. `http://192.168.2.111:1880/nodered/` -> `http://192.168.2.111:1880/nodered/ui`: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_11.png) --- ### MQTT Adapter To be able to communicate with our camera using the MQTT Protocol directly through ioBroker, we can also install the MQTT Broker/Client Adapter from the lists of available adapters. You can use this adapter to both publish and subscribe to MQTT messages (Client) as well as to act as your primary MQTT Server for all your INSTAR Full HD cameras. --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_12.png) --- After the installation, you will be greeted by the configuration screen. As you can see in the screenshot below, we did not opt to use the server, but we want to use the INSTAR MQTT Broker on our camera. 
If you want ioBroker to be the default MQTT Server, choose `Server/broker` instead of `Client/subscriber`. The server runs on the default port 1883 and - in our case - requires the login that we set in our Broker Configuration.

---

![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_13.png)

---

We only want to connect one camera with the MAC address __00:03:89:88:88:11__ to ioBroker - so we know for sure that every message that is meant to control our camera starts with `instar/000389888811/#`, and we can set a general wildcard subscription here.

---

![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_14.png)

---

With everything set we can now save our configuration and activate the MQTT adapter:

---

![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_15.png)

---

The MQTT Broker installed on your camera retains the latest status update for each MQTT topic and will push all updates into the ioBroker object tree - because we created the wildcard subscription `instar/000389888811/#` above:

---

![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_16.png)

---

So we now know what state the camera is in and can use this in one of the [UI / Visualization](/Advanced_User/IOBroker_on_Raspberry_Pi/Visualisation/) options that ioBroker offers. But the interesting part is still missing - the __Command Topics__ that allow us to switch functions on our camera on or off. The ioBroker Vis UI requires the [RAW Topics](/Advanced_User/INSTAR_MQTT_Broker/) to connect with our camera, and we can use [MQTT.fx](/Advanced_User/INSTAR_MQTT_Broker/Windows_macOS_and_LINUX_Apps/) to add them to the ioBroker object tree:

<GifContainer gifUrl="/assets/gif/ioBroker_INSTAR_MQTT_Broker.gif" alt="ioBroker with your INSTAR IP Camera" />

Connect [MQTT.fx](/Advanced_User/INSTAR_MQTT_Broker/Windows_macOS_and_LINUX_Apps/) with your MQTT broker and publish the topics that you want to add. For example, publishing the number `1` under the topic `instar/000389888811/alarm/area1/sensitivity/raw` will set the sensitivity for [Alarm Area 1](/Web_User_Interface/1080p_Series/Alarm/Areas/) to the value `1`. Updating the topic `instar/000389888811/alarm/area1/enable/raw` will enable (`1`) or disable (`0`) the area. Publishing a topic will automatically add it to the object tree, giving us access to those command topics in the visualization of our choice:

---

![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_20.png)

---

## Visualization

The vis adapter allows you to build your personal admin dashboard inside ioBroker. Head over to the Adapter tab and install Visualization to get started:

---

![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_17.png)

---

You can open the __Vis__ configuration panel on port `8082` (by default) or you can click on this button in the Instances overview:

---

![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_18.png)

---

ioBroker will greet you with a demo view like this:

---

![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_19.png)

---

We can now create a new view and drag a __Radio Button__ widget onto the canvas. Just assign the MQTT RAW Topics that you want to be updated when the button is used:

---

![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_21.png)

---

Clicking the button will either send a `1` or `0` to the chosen topic - in my case `instar/000389888811/alarm/area2/enable/raw`. 
Once the camera - with the MAC address _00:03:89:88:88:11_ - receives this update, it will activate or deactivate the motion detection area 2. To change the sensitivity of this area we can use a __Horizontal Slider__ widget and assign it the corresponding MQTT RAW Topics - in my case `instar/000389888811/alarm/area2/sensitivity/raw`: --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_22.png) --- To test your widgets you can now switch to the Vis dashboard by clicking on the arrow button in the top right. __Note__ that you need a license to use the Visualization that is free for private use. <GifContainer gifUrl="/assets/gif/ioBroker_INSTAR_MQTT_Broker_23.gif" alt="ioBroker with your INSTAR IP Camera" /> The animation above shows the ioBroker Vis dashboard on the right and the Node-RED dashboard - we created in a previous step - on the left. Activating the alarm area or changing it's sensitivity on the Vis dashboard is automatically pushed to Node-RED and reflected on the dashboard. ### JSON Flow for Node-RED ```json [{"id":"b99baf44.25272","type":"mqtt in","z":"c2e5509e.eb938","name":"area2","topic":"instar/000389888811/status/alarm/area2/enable","qos":"1","datatype":"auto","broker":"b0e646ce.236148","x":70,"y":60,"wires":[["9158f257.9f7fc"]]},{"id":"7ec111e1.da923","type":"mqtt out","z":"c2e5509e.eb938","name":"area2","topic":"instar/000389888811/alarm/area2/enable","qos":"1","retain":"false","broker":"b0e646ce.236148","x":658,"y":60,"wires":[]},{"id":"9158f257.9f7fc","type":"ui_switch","z":"c2e5509e.eb938","name":"Alarm Area 2","label":"Alarm Area 2","tooltip":"","group":"bb4da677.e6f318","order":0,"width":0,"height":0,"passthru":false,"decouple":"true","topic":"","style":"","onvalue":"{\"val\":\"1\"}","onvalueType":"str","onicon":"","oncolor":"","offvalue":"{\"val\":\"0\"}","offvalueType":"str","officon":"","offcolor":"","x":517,"y":60,"wires":[["7ec111e1.da923"]]},{"id":"2df6850f.0faa9a","type":"mqtt in","z":"c2e5509e.eb938","name":"area2","topic":"instar/000389888811/status/alarm/area2/sensitivity","qos":"1","datatype":"auto","broker":"b0e646ce.236148","x":70,"y":160,"wires":[["f6880ca3.226b4"]]},{"id":"2b08d32a.75d44c","type":"mqtt out","z":"c2e5509e.eb938","name":"area2","topic":"instar/000389888811/alarm/area2/sensitivity","qos":"1","retain":"false","broker":"b0e646ce.236148","x":854,"y":161,"wires":[]},{"id":"4d19ae3.73a4a5","type":"change","z":"c2e5509e.eb938","name":"","rules":[{"t":"set","p":"payload","pt":"msg","to":"payload.val","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":347,"y":161,"wires":[["20fc6702.9f70c8"]]},{"id":"f6880ca3.226b4","type":"json","z":"c2e5509e.eb938","name":"","property":"payload","action":"","pretty":false,"x":194,"y":161,"wires":[["4d19ae3.73a4a5"]]},{"id":"d054f844.49d928","type":"function","z":"c2e5509e.eb938","name":"Transform","func":"msg.payload = '{\"val\":\"'+msg.payload+'\"}';\nreturn msg;","outputs":1,"noerr":0,"x":710,"y":161,"wires":[["2b08d32a.75d44c"]]},{"id":"20fc6702.9f70c8","type":"ui_slider","z":"c2e5509e.eb938","name":"","label":"Area 
Sensitivity","tooltip":"","group":"bb4da677.e6f318","order":1,"width":0,"height":0,"passthru":false,"outs":"all","topic":"","min":0,"max":"100","step":1,"x":544,"y":161,"wires":[["d054f844.49d928"]]},{"id":"1f9ab059.2a762","type":"json","z":"c2e5509e.eb938","name":"","property":"payload","action":"","pretty":false,"x":192,"y":60,"wires":[["1a86a3d8.30000c"]]},{"id":"1a86a3d8.30000c","type":"change","z":"c2e5509e.eb938","name":"","rules":[{"t":"set","p":"payload","pt":"msg","to":"payload.val","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":343,"y":60,"wires":[[]]},{"id":"b0e646ce.236148","type":"mqtt-broker","z":"","name":"IN-9010FHD","broker":"192.168.2.165","port":"8883","tls":"4a5bc5a5.6ceddc","clientid":"","usetls":true,"compatmode":false,"keepalive":"60","cleansession":true,"birthTopic":"","birthQos":"0","birthPayload":"","closeTopic":"","closeQos":"0","closePayload":"","willTopic":"","willQos":"0","willPayload":""},{"id":"bb4da677.e6f318","type":"ui_group","z":"","name":"Alarm Area 2","tab":"b5b7854e.b8db58","disp":true,"width":"6","collapse":false},{"id":"4a5bc5a5.6ceddc","type":"tls-config","z":"","name":"","cert":"","key":"","ca":"","certname":"instar-cert.cert","keyname":"","caname":"","servername":"","verifyservercert":true},{"id":"b5b7854e.b8db58","type":"ui_tab","z":"","name":"MQTT","icon":"dashboard","disabled":false,"hidden":false}] ``` ## Live Video You can add your cameras live video with an __Image Widget__. All you need to add is the snapshot path for your camera and a desired refresh interval. For a camera on IP _192.168.2.165_ and the login _admin/instar_ the snapshot paths are (ordered in decreasing resolution): ```bash http://192.168.2.165/tmpfs/snap.jpg?usr=admin&pwd=<PASSWORD> http://192.168.2.165/tmpfs/auto.jpg?usr=admin&pwd=<PASSWORD> http://192.168.2.165/tmpfs/auto2.jpg?usr=admin&pwd=<PASSWORD> ``` --- ![ioBroker with your INSTAR IP Camera](./ioBroker_INSTAR_MQTT_Broker_24.png) --- To add a live video stream you can also use tools like MotionEye to re-stream the video directly to your ioBroker Vis. * [MotionEye Tutorial](/motion-eye-video-surveillance) ## Alarmserver and Webhooks ### Contact ioBroker when your Camera triggers an Alarm The easiest way to connect your cameras alarmserver to ioBroker is through the Node-RED adapter. Please check out our tutorial: * [Alarmserver Queries for your Smarthome](/webhooks-with-url-queries)<file_sep>--- date: "2020-05-06" title: "GraphQL API and GraphiQl" categories: - Databases - Javascript --- ![Bakthapur, Nepal](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [test](#test) <!-- /TOC --> ## test <file_sep>--- date: "2018-11-21" title: "Creating Node-RED Admin Panel using node-red-dashboard" categories: - IoT - Node-RED --- ![Harbin, China](./photo-34443677922_8f09e47dd3_o.png) <!-- TOC --> - [Introduction](#introduction) - [Adding a Dashboard](#adding-a-dashboard) - [Adding another Input](#adding-another-input) - [Adding a custom UI Widget](#adding-a-custom-ui-widget) - [Using external Libraries](#using-external-libraries) - [Export](#export) <!-- /TOC --> ## Introduction Before we look a how the chart node works, let's configure the inject node to send a timestamp every 5 seconds by setting the payload to timestamp and the repeat field to an interval of 5 seconds. ![node-red-dashboard](./node-red-dash_01.png) This will act as our repeating trigger. 
Now we need to set up the function node to generate a random number – we’ll use a simple JS math function to do this:

```js
msg.payload = Math.round(Math.random()*100);
return msg;
```

## Adding a Dashboard

We now have a random number generator to simulate a sensor input - let's install [node-red-dashboard](https://flows.nodered.org/node/node-red-dashboard) to visualize the result. We can install the node by clicking the top-right hamburger menu, choosing __Manage Palette__ and searching for `node-red-dashboard`:

![node-red-dashboard](./node-red-dash_02.png)

The __chart node__ is now available to be added to our flow:

![node-red-dashboard](./node-red-dash_03.png)

We can [configure](http://www.steves-internet-guide.com/node-red-dashboard/) the chart node:

![node-red-dashboard](./node-red-dash_04.png)

And access it via `http://127.0.0.1:1880/ui/`:

![node-red-dashboard](./node-red-dash_05.png)

We added the Graph to the Home tab - you can use the [dashboard settings](https://diyprojects.io/node-red-module-dashboard-gui/) to add more tabs or add more Groups to one tab:

![node-red-dashboard](./node-red-dash_06.png)

Let’s create a gauge to show the last data value sent. Drag a __gauge node__ from the UI palette and wire it to the Random Number function node. Then double-click to open it up and let’s configure it:

![node-red-dashboard](./node-red-dash_07.png)

We’ll use the same Tab, Home, and we’ll also add it to the same group – “Sensor Chart[Home]”. The Min and Max fields allow you to set the min and max values the gauge will show. Make sure the max is set to 100, which is the most that the random number function node will generate.

![node-red-dashboard](./node-red-dash_08.png)

Let’s use a couple of the other UI nodes, a slider node and a text node, to show the same data on a slider and as a text string. But this time we want to add them to a separate group on the same Home tab:

![node-red-dashboard](./node-red-dash_09.png)

![node-red-dashboard](./node-red-dash_10.png)

![node-red-dashboard](./node-red-dash_11.png)

## Adding another Input

To simulate another Sensor Input, we can simply add another random number generator by editing our __function node__ :

```js
var msg = {};
var msg1 = {};

msg.payload = Math.round(Math.random()*100);
msg.topic = 'Line';

msg1.payload = Math.round(Math.random()*100);
msg1.topic = 'Line1';

return [msg, msg1];
```

![node-red-dashboard](./node-red-dash_12.png)

The __function node__ now has 2 outputs - let's connect our __graph and gauge node__ to the first and the __slider and text node__ to the second. And create a second __graph node__ in a new _group_ that is fed both data sources:

![node-red-dashboard](./node-red-dash_13.png)

Et voilà!

![node-red-dashboard](./node-red-dash_14.png)

## Adding a custom UI Widget

Let's clone our __function node__, connect the clone to the same timestamp __inject node__ the original is connected to and change the random number generator as follows:

```js
var min = Math.round(Math.random()*1000);
var max = Math.round((Math.random()*10000)+1000);

msg.payload = {
    factory: {
        machine00: {
            min: min,
            max: max,
            failProb: 20,
            name: "GE 5200"
        },
        machine01: {
            min: min*5,
            max: max*2,
            failProb: 10,
            name: "BB SQ100"
        }
    }
};

return msg;
```

We use the usual Math.random function to generate some fake data. However, instead of just passing this data in as the payload, we insert the data into a JSON structure called factory. Factory has two elements - machine00 and machine01 - which in turn have four elements: min, max, failProb and name.
We now need to create a UI __template node__ and set it up to display on our dashboard.

![node-red-dashboard](./node-red-dash_15.png)

Add the following markup to the __template node__:

```html
<div layout="row" layout-align="start center">
  <span flex>RPM Min</span>
  <span flex>RPM Max</span>
  <span flex>Failure</span>
  <span flex>Type</span>
</div>
<div layout="row" layout-align="start center" ng-repeat="machine in msg.payload.factory">
  <span flex style="color: green">{{machine.min}}</span>
  <span flex style="color: red">{{machine.max}}</span>
  <span flex style="color: black">{{machine.failProb}}%</span>
  <span flex style="color: black">{{machine.name}}</span>
</div>
```

![node-red-dashboard](./node-red-dash_16.png)

The first \<div\> sets out the labels for the columns, defining them with the \<span flex\> tag so they will fit the space nicely. The second \<div\> then uses the ng-repeat command to iterate through the JSON structure in the payload and display the data in the columns. Remember, we named each set of data associated with a machine machine00 and machine01 – so we iterate through those. For each machine element, we use the sub-elements min, max, failProb and name to fill the data fields. Note how we also use a style directive to set the colour for the text. The result is shown below.

![node-red-dashboard](./node-red-dash_17.png)

### Using external Libraries

We want to build a custom widget using a 3rd party JavaScript charting library, [morris.js](http://morrisjs.github.io/morris.js/). For this example we'll use a __donut chart__ since it's not available in the Node-RED dashboard widget set. We’ll create a very simple flow for this example consisting of a timer to trigger action every 5 seconds, a __function node__ to generate the fake data and the __template node__ to display the data. The function node is as simple as possible. It generates a random number and then uses that as a seed to calculate fake values for the different areas that consume power.

```js
var seed = Math.round(Math.random() * 1000);

msg.payload = {
    officeW: seed,
    storageW: seed / 4,
    conW: seed / 12,
    parkW: seed / 8,
    serverW: seed / 2
};

return msg;
```

The template node is a little more complex than the previous examples because we are pulling in an external charting library (morris.js) and its dependencies, and we are writing some JavaScript code to use the incoming data (from the function node) to draw the [donut chart](http://morrisjs.github.io/morris.js/donuts.html).
```html
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/morris.js/0.5.1/morris.css">
<script src="https://cdnjs.cloudflare.com/ajax/libs/raphael/2.1.0/raphael-min.js"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.9.0/jquery.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/morris.js/0.5.1/morris.min.js"></script>

<div id="chart-example" style="height: 250px;"></div>

<h3 align="center">Power Consumption [W] </h3>

<script>
(function(scope) {
  var timer = setInterval(function() { //check that the Morris libs are loaded, if not wait
    if (!window.Morris) return;
    clearInterval(timer);

    var myData;

    var myDonut = Morris.Donut({
      element: 'chart-example',
      data: [
        {label: "Office", value: 1 },
        {label: "Storage", value: 1 },
        {label: "Confernce", value: 1 },
        {label: "Parking", value: 1 },
        {label: "Server", value: 1 }
      ]
    });

    scope.$watch('msg', function (msg) { //watch for an incoming NR msg
      if (msg) {
        myData = msg;
        myDonut.setData([
          {label: "Office", value: myData.payload.officeW },
          {label: "Storage", value: myData.payload.storageW },
          {label: "Confernce", value: myData.payload.conW },
          {label: "Parking", value: myData.payload.parkW },
          {label: "Server", value: myData.payload.serverW }
        ])
      };
    }); //close out the watch
  }, 100); // close out the setInterval
})(scope);
</script>
```

To update the donut chart, we look at the incoming Node-RED message and use its payload as the new chart data. To do this, we use a watch function to look for changes to the scope variable msg, which contains the incoming Node-RED message.

A good overview is available on the [Angular docs](https://docs.angularjs.org/guide/concepts) page. Briefly, Angular implements a Model-View-Controller (MVC) pattern in which the model (the data) is separate from the view (the DOM). Controllers (code), Expressions and Directives manipulate the data, which dynamically updates the view (the DOM) so you see the changes.

So coming back to our example, the `scope.$watch()` call near the end of the script tells the template node to look at the scope (all our data) and watch for changes to the variable msg. When msg changes, in this case because a new Node-RED message has arrived, the function executes, extracts the new data from msg.payload and uses it to call the setData() function on the Morris.Donut object (myDonut), which dynamically updates the donut chart. All of the above is wrapped in a function that is passed the scope so that it can be accessed by the watch method.
![node-red-dashboard](./node-red-dash_18.png) ## Export ```json [{"id":"aafc0f94.6fc66","type":"tab","label":"dashboard","disabled":false,"info":""},{"id":"43533492.9531bc","type":"inject","z":"aafc0f94.6fc66","name":"","topic":"","payload":"","payloadType":"date","repeat":"5","crontab":"","once":false,"onceDelay":0.1,"x":89,"y":67,"wires":[["48b6e9f0.d271a8","b9bbdc1.c9bbe2","cb239309.e7f31"]]},{"id":"48b6e9f0.d271a8","type":"function","z":"aafc0f94.6fc66","name":"random","func":"var msg = {};\nvar msg1 = {};\n\nmsg.payload = Math.round(Math.random()*100);\nmsg.topic = 'Line';\n\nmsg1.payload = Math.round(Math.random()*100);\nmsg1.topic = 'Line1';\n\nreturn [msg, msg1];","outputs":2,"noerr":0,"x":170,"y":160,"wires":[["68ff1fd1.ffce4","69a616b4.778708","9417dbf0.ced388"],["9417dbf0.ced388","8381759d.688758","942e1691.340b98"]]},{"id":"68ff1fd1.ffce4","type":"ui_chart","z":"aafc0f94.6fc66","name":"Sensor Input","group":"5f5d8461.9b6d0c","order":0,"width":0,"height":0,"label":"Sensor Input","chartType":"line","legend":"false","xformat":"HH:mm:ss","interpolate":"bezier","nodata":"Waiting for Data","dot":false,"ymin":"","ymax":"","removeOlder":"5","removeOlderPoints":"","removeOlderUnit":"60","cutout":0,"useOneColor":false,"colors":["#1f77b4","#aec7e8","#ff7f0e","#2ca02c","#98df8a","#d62728","#ff9896","#9467bd","#c5b0d5"],"useOldStyle":false,"x":359.5,"y":82,"wires":[[],[]]},{"id":"69a616b4.778708","type":"ui_gauge","z":"aafc0f94.6fc66","name":"","group":"5f5d8461.9b6d0c","order":0,"width":0,"height":0,"gtype":"gage","title":"Gauge","label":"units","format":"{{value}}","min":0,"max":"100","colors":["#00b500","#e6e600","#ca3838"],"seg1":"","seg2":"","x":340,"y":125,"wires":[]},{"id":"8381759d.688758","type":"ui_slider","z":"aafc0f94.6fc66","name":"","label":"slider","group":"a4cc4c7a.38c8d","order":0,"width":0,"height":0,"passthru":true,"topic":"","min":0,"max":"100","step":1,"x":349,"y":313,"wires":[[]]},{"id":"942e1691.340b98","type":"ui_text","z":"aafc0f94.6fc66","group":"a4cc4c7a.38c8d","order":0,"width":0,"height":0,"name":"","label":"Value","format":"{{msg.payload}}","layout":"row-spread","x":350,"y":357,"wires":[]},{"id":"9417dbf0.ced388","type":"ui_chart","z":"aafc0f94.6fc66","name":"Sensor Input","group":"89145363.61cc9","order":0,"width":0,"height":0,"label":"Sensor Input","chartType":"line","legend":"false","xformat":"HH:mm:ss","interpolate":"bezier","nodata":"Waiting for Data","dot":false,"ymin":"","ymax":"","removeOlder":"5","removeOlderPoints":"","removeOlderUnit":"60","cutout":0,"useOneColor":false,"colors":["#1f77b4","#aec7e8","#ff7f0e","#2ca02c","#98df8a","#d62728","#ff9896","#9467bd","#c5b0d5"],"useOldStyle":false,"x":379,"y":204,"wires":[[],[]]},{"id":"b9bbdc1.c9bbe2","type":"function","z":"aafc0f94.6fc66","name":"random","func":"min = Math.round(Math.random()*1000);\nmax = Math.round((Math.random()*10000)+1000);\n\nmsg.payload = {\n factory: {\n machine00: {\n min: min,\n max: max,\n failProb: 20,\n name: \"GE 5200\"\n },\n machine01: {\n min: min*5,\n max: max*2,\n failProb: 10,\n name: \"BB SQ100\"\n }\n }\n}\n\nreturn msg;","outputs":1,"noerr":0,"x":176,"y":446,"wires":[["61f38b83.ebb474"]]},{"id":"61f38b83.ebb474","type":"ui_template","z":"aafc0f94.6fc66","group":"7a9b7006.b60ac","name":"","order":0,"width":"6","height":"2","format":"<div layout=\"row\" layout-align=\"start center\">\n <span flex>RPM [min]</span>\n <span flex>RPM [max]</span>\n <span flex>Failure</span>\n <span flex>Type</span>\n <br/>\n</div>\n<div layout=\"row\" layout-align=\"start center\" 
ng-repeat=\"machine in msg.payload.factory\">\n <span flex style=\"color: green\">{{machine.min}}</span>\n <span flex style=\"color: red\">{{machine.max}}</span>\n <span flex style=\"color: black\">{{machine.failProb}}%</span>\n <span flex style=\"color: black\">{{machine.name}}</span>\n</div>","storeOutMessages":true,"fwdInMessages":true,"templateScope":"local","x":361,"y":446,"wires":[[]]},{"id":"cb239309.e7f31","type":"function","z":"aafc0f94.6fc66","name":"random","func":"seed = Math.round(Math.random() * 1000);\n\nmsg.payload = {\n officeW: seed,\n storageW: seed / 4,\n conW: seed / 12,\n parkW:seed / 8,\n serverW: seed / 2\n};\n\nreturn msg;","outputs":1,"noerr":0,"x":177,"y":516,"wires":[["d34891c8.a107c"]]},{"id":"d34891c8.a107c","type":"ui_template","z":"aafc0f94.6fc66","group":"7a9b7006.b60ac","name":"","order":0,"width":"6","height":"6","format":"<link rel=\"stylesheet\" href=\"https://cdnjs.cloudflare.com/ajax/libs/morris.js/0.5.1/morris.css\">\n <script src=\"https://cdnjs.cloudflare.com/ajax/libs/raphael/2.1.0/raphael-min.js\"></script>\n <script src=\"https://ajax.googleapis.com/ajax/libs/jquery/1.9.0/jquery.min.js\"></script>\n <script src=\"https://cdnjs.cloudflare.com/ajax/libs/morris.js/0.5.1/morris.min.js\"></script>\n\n<div id=\"chart-example\" style=\"height: 250px;\"></div>\n\n<h3 align=\"center\">Power Consumption [W] </h3>\n\n<script>\n\n(function(scope) {\n var timer = setInterval(function() { //check that the Morris libs are loaded, if not wait\n if (!window.Morris) return;\n clearInterval(timer);\n \n var myData;\n \n var myDonut = Morris.Donut({\n element: 'chart-example',\n data: [\n {label: \"Office\", value: 1 },\n {label: \"Storage\", value: 1 },\n {label: \"Confernce\", value: 1 },\n {label: \"Parking\", value: 1 },\n {label: \"Server\", value: 1 }\n ]\n });\n \n scope.$watch('msg', function (msg) { //watch for an incoming NR msg\n if (msg) {\n \n myData = msg;\n myDonut.setData([\n {label: \"Office\", value: myData.payload.officeW },\n {label: \"Storage\", value: myData.payload.storageW },\n {label: \"Confernce\", value: myData.payload.conW },\n {label: \"Parking\", value: myData.payload.parkW },\n {label: \"Server\", value: myData.payload.serverW }\n ])\n };\n }); //close out the watch \n }, 100); // close out the setInterval \n})(scope);\n\n\n</script>","storeOutMessages":true,"fwdInMessages":true,"templateScope":"local","x":363,"y":517,"wires":[[]]},{"id":"5f5d8461.9b6d0c","type":"ui_group","z":"","name":"Sensor Chart","tab":"e98cb2c4.c173b","order":1,"disp":true,"width":"6","collapse":false},{"id":"a4cc4c7a.38c8d","type":"ui_group","z":"","name":"Sensor Slider","tab":"e98cb2c4.c173b","order":2,"disp":true,"width":"6","collapse":false},{"id":"89145363.61cc9","type":"ui_group","z":"","name":"Both Sensors","tab":"e98cb2c4.c173b","order":3,"disp":true,"width":"6","collapse":false},{"id":"7a9b7006.b60ac","type":"ui_group","z":"","name":"Custom Widget","tab":"e98cb2c4.c173b","order":4,"disp":true,"width":"6","collapse":false},{"id":"e98cb2c4.c173b","type":"ui_tab","z":"","name":"Home","icon":"dashboard"}] ```<file_sep>--- date: "2017-07-07" title: "Windows Control" categories: - Windows --- ![<NAME>](./photo-11627014666_359f04f9db_o.png) > This is my Angry-Blog - every time I run into an undesired Windows feature, I want to use this space to unload my emotional ballast. My wording might therefore be slightly more colorful than usual. Readers discretion is advised. 
<!-- TOC -->

- [Prevent Auto-Reboot](#prevent-auto-reboot)
- [Making Room for an Update](#making-room-for-an-update)

<!-- /TOC -->

## Prevent Auto-Reboot

Windows Updates are important and sometimes require you to reboot. Some operating systems are able to gracefully shut down, reboot and restore all programs that were running - __WINDOWS CANNOT__. But still, Windows keeps rebooting... And every time I keep my PC running overnight there is a perfectly good reason for me to do that!

Here is how to stop your PC from automatically restarting after installing updates. First I tried the task scheduler:

1. Open __Start__.
2. Search for `Task Scheduler` and click the result to open the tool.
3. Right-click the __Reboot task__ and select Properties (note that it was already set to disabled).

![Windows 10 Whisperer](./win10_01.png)

4. Go to the __Triggers__ tab, uncheck __enabled__ and confirm (this should prevent the task from being executed).

![Windows 10 Whisperer](./win10_02.png)

![Windows 10 Whisperer](./win10_03.png)

5. But it seems that my administrator account does not have the rights to do that (?????????????????)

![Windows 10 Whisperer](./win10_04.png)

6. Deleting the complete task (right-click on __Reboot__ and choose __delete__) also fails.

![Windows 10 Whisperer](./win10_05.png)

7. Use __Run__ (Windows key + R keyboard shortcut to open the Run command), type in `%windir%\System32\Tasks\Microsoft\Windows\UpdateOrchestrator` and confirm.

![Windows 10 Whisperer](./win10_06.png)

8. Rename the Reboot file to `Reboot.bak` and create a folder called `Reboot`.

![Windows 10 Whisperer](./win10_07.png)

![Windows 10 Whisperer](./win10_08.png)

## Making Room for an Update

Temporarily deactivate hibernation (this removes the hibernation file `hiberfil.sys` from your C drive) so an update has a chance to run through - the first command disables hibernation, the second re-enables it afterwards:

```bash
powercfg -h off
powercfg -h on
```

<file_sep>---
date: "2017-12-19"
title: "Umlaute"
categories:
  - Windows
---

![Harbin, China](./photo-33795787813_cf3d8edcc6_o.png)

<!-- TOC -->

- [Hacking your US keyboard](#hacking-your-us-keyboard)
- [Solving 1st World Problems by reading Usermanuals](#solving-1st-world-problems-by-reading-usermanuals)

<!-- /TOC -->

# Hacking your US keyboard

### Solving 1st World Problems by reading Usermanuals

I finally figured out how to write German Umlaute on a US keyboard, after studying the Windows 10 documentation for a while.

__tl;dr__ Switch your keyboard from ENG (US) or (UK) to ENG (International). Then type a quotation mark + the letter you want to change into an Umlaut.

```
"+a = ä
"+A = Ä
"+o = ö
"+O = Ö
"+u = ü
"+U = Ü
```

```
ALT+s = ß
```

```
LALT+0176(NUMPAD!) = ° (degrees)
```

![Windows 10](./Umlaute_01.png)

Go to Settings and choose Time&Language

![Windows 10](./Umlaute_02.png)

Add a language

![Windows 10](./Umlaute_03.png)

Choose English

![Windows 10](./Umlaute_04.png)

English (United States)

![Windows 10](./Umlaute_05.png)

Go back to the screen from step 2 and click on options

![Windows 10](./Umlaute_06.png)

Add a keyboard

![Windows 10](./Umlaute_07.png)

Choose United States International. You can then go back, make this language your default and delete your previous language pack.
<file_sep>--- date: "2019-09-12" title: "Magento 2 and Elasticsearch" categories: - LINUX - Elasticsearch - Magento --- ![Patan, Nepal](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Install prerequisites and Elasticsearch](#install-prerequisites-and-elasticsearch) - [Installing OpenJDK 8](#installing-openjdk-8) - [Install Elasticsearch 6.x](#install-elasticsearch-6x) - [Configure NGINX and Elasticsearch](#configure-nginx-and-elasticsearch) - [Set up NGINX as a proxy](#set-up-nginx-as-a-proxy) - [Configure Elasticsearch within Magento](#configure-elasticsearch-within-magento) - [Reindexing catalog search and refreshing the full page cache](#reindexing-catalog-search-and-refreshing-the-full-page-cache) <!-- /TOC --> ## Install prerequisites and Elasticsearch ### Installing OpenJDK 8 Start by verifying that Java hasn't been installed yet on your system: ```bash java -version -bash: java: command not found ``` Java LTS version 8 is not available in the official Debian Buster repositories. You can still install it by enabling the AdoptOpenJDK repository which provides prebuilt OpenJDK packages ([check alternatives](https://stackoverflow.com/questions/57031649/how-to-install-openjdk-8-jdk-on-debian-10-buster)): ```bash apt update apt install apt-transport-https ca-certificates wget dirmngr gnupg software-properties-common ``` Import the repository’s GPG key using the following wget command: ```bash wget -qO - https://adoptopenjdk.jfrog.io/adoptopenjdk/api/gpg/key/public | sudo apt-key add - ``` Add the AdoptOpenJDK APT repository to your system: ```bash sudo add-apt-repository --yes https://adoptopenjdk.jfrog.io/adoptopenjdk/deb/ ``` Once the repository is enabled, update apt sources and install Java 8 using the following commands: ```bash sudo apt update sudo apt install adoptopenjdk-8-hotspot ``` Finally, verify the installation by checking the Java version. The output should look something like this:: ```bash java -version openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode) ``` ### Install Elasticsearch 6.x Download and install the public signing key: ```bash wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add - ``` Save the repository definition to `/etc/apt/sources.list.d/elastic-6.x.list`: ```bash echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-6.x.list ``` You can install the Elasticsearch Debian package with: ```bash apt update && sudo apt install elasticsearch ``` To configure Elasticsearch to start automatically when the system boots up, run the following commands: ```bash sudo /bin/systemctl daemon-reload sudo /bin/systemctl enable elasticsearch.service ``` Elasticsearch can be started and stopped as follows: ```bash systemctl start elasticsearch.service systemctl stop elasticsearch.service ``` These commands provide no feedback as to whether Elasticsearch was started successfully or not. Instead, this information will be written in the log files located in /var/log/elasticsearch/. 
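If the service does not come up, you can follow those logs from the shell - assuming the default cluster name `elasticsearch`, which also names the main log file:

```bash
# follow the journal entries of the systemd unit
journalctl --unit elasticsearch --follow

# or tail the main log file directly
tail -f /var/log/elasticsearch/elasticsearch.log
```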
You can also check the service status:

```bash
systemctl status elasticsearch.service

● elasticsearch.service - Elasticsearch
   Loaded: loaded (/lib/systemd/system/elasticsearch.service; enabled; vendor preset: enabled)
   Active: active (running) since Mon 2020-01-13 07:47:05 CET; 8s ago
     Docs: http://www.elastic.co
 Main PID: 30088 (java)
    Tasks: 30 (limit: 4915)
   Memory: 1.2G
   CGroup: /system.slice/elasticsearch.service
           ├─30088 /bin/java -Xms1g -Xmx1g -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccup
           └─30180 /usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/bin/controller

Jan 13 07:47:05 Magento2 systemd[1]: Started Elasticsearch.
```

You can now access Elasticsearch by:

```bash
curl -X GET "localhost:9200/?pretty"

{
  "name" : "NPsrvIo",
  "cluster_name" : "elasticsearch",
  "cluster_uuid" : "aCUd90SnRCynWMZCw-AAhg",
  "version" : {
    "number" : "6.8.6",
    "build_flavor" : "default",
    "build_type" : "deb",
    "build_hash" : "3d9f765",
    "build_date" : "2019-12-13T17:11:52.013738Z",
    "build_snapshot" : false,
    "lucene_version" : "7.7.2",
    "minimum_wire_compatibility_version" : "5.6.0",
    "minimum_index_compatibility_version" : "5.0.0"
  },
  "tagline" : "You Know, for Search"
}
```

Elasticsearch is configured to only be accessible from localhost:

![Magento2 and Elasticsearch](./magento_elasticsearch_01.png)

You can change this in the __Network Section__ of `/etc/elasticsearch/elasticsearch.yml`:

```yml
# ---------------------------------- Network -----------------------------------
#
# Set the bind address to a specific IP (IPv4 or IPv6):
#
#network.host: 192.168.0.1
#
# Set a custom port for HTTP:
#
#http.port: 9200
#
# For more information, consult the network module documentation.
```

By default the `network.host` is set to `127.0.0.1` - setting it to `0.0.0.0` will open Elasticsearch up to all your network interfaces. But be aware that Elasticsearch 6 does not have a [free User Authentication](/securing-elasticsearch-with-read-only-rest) pre-installed!

## Configure NGINX and Elasticsearch

### Set up NGINX as a proxy

Create a new file `/etc/nginx/sites-available/elasticsearch.conf` with the following content:

```bash
server {
    listen 8080;

    location /_cluster/health {
      proxy_pass http://localhost:9200/_cluster/health;
    }
}
```

Symlink the configuration file into `sites-enabled` and restart NGINX:

```bash
ln -s /etc/nginx/sites-available/elasticsearch.conf /etc/nginx/sites-enabled
nginx -t
service nginx restart
```

Verify the proxy works by running `netstat -plnt` and by opening the URL `http://your-server.de:8080/_cluster/health` in your browser:

![Magento2 and Elasticsearch](./magento_elasticsearch_02.png)

We now have [Magento 2.3.3 running on Port 80](/magento-2-installation-with-nginx-on-debian-buster) and our health check for Elasticsearch on Port 8080 - both available on all network interfaces `0.0.0.0`. Meanwhile, the Elasticsearch API ports 9200/9300 are securely bound to `127.0.0.1` and cannot be accessed from the internet.

### Configure Elasticsearch within Magento

1. Log in to the Magento Admin as an administrator.
2. Click __Stores__ > __Configuration__ > __Catalog__ > __Catalog Search__.
3. From the Search Engine list, select the correct Elasticsearch version as the following figure shows.
![Magento2 and Elasticsearch](./magento_elasticsearch_03.png) | __Option__ | __Description__ | |---|---| | __Elasticsearch Server Hostname__ | Enter the fully qualified hostname or IP address of the machine running Elasticsearch - as we are running Elasticsearch locally, we can use `localhost` | | __Elasticsearch Server Port__ | Enter the Elasticsearch web server proxy port - for us `9200` | | __Elasticsearch Index Prefix__ | Enter the Elasticsearch index prefix. If you use a single Elasticsearch instance for more than one Magento installation | | __Enable Elasticsearch HTTP Auth__ | Click Yes only if you enabled authentication for your Elasticsearch server - we did not. | 4. Click Test Connection. ![Magento2 and Elasticsearch](./magento_elasticsearch_04.png) ### Reindexing catalog search and refreshing the full page cache After you change Magento’s Elasticsearch configuration, you must reindex the catalog search index and refresh the full page cache using the Admin or command line. To refresh the cache using the Admin: 1. In the Admin, click System > Cache Management. 2. Select the checkbox next to Page Cache. 3. From the Actions list in the upper right, click Refresh. 4. Flush the Magento Cache ![Magento2 and Elasticsearch](./magento_elasticsearch_05.png) Enter the following command to reindex the catalog search index only: ```bash bin/magento indexer:reindex catalogsearch_fulltext ``` Enter the following command to reindex all indexers: ```bash bin/magento indexer:reindex ``` Wait until reindexing completes. ![Magento2 and Elasticsearch](./magento_elasticsearch_06.png) <!-- Installing Elasticsuite To make use of Elasticsearch in Magento 2 we need to install the [Elasticsuite Plugin](https://github.com/Smile-SA/elasticsuite.) Plugins can be installed with the `bin/elasticsearch-plugin` tool of Elastic Search. You have to go to your Elastic Search installation directory. Locate your "Home" directory ( Look for Settings -> Path -> Home in the following command output for value ): ```bash curl localhost:9200/_nodes/settings?pretty=true ``` ![Docker and Magento2 on Windows 10](./magento_elasticsearch_04x.png) Elasticsearch Container The default directory `home` for Elasticsearch is `/usr/share/elasticsearch`. We now have to enter the Elasticsearch container: ```bash docker exec -it elasticsearch7.5 /bin/bash or docker exec -t -i ContainerID /bin/bash ``` And enter the `home directory` and install the following plugins: ```bash bin/elasticsearch-plugin install analysis-phonetic bin/elasticsearch-plugin install analysis-icu ``` ![Docker and Magento2 on Windows 10](./magento_elasticsearch_05x.png) To restart Elasticsearch we now have to restart the `elasticsearch` container. ```bash docker restart elasticsearch5.7 ``` Magento Container We can now install the latest version of ElasticSuite through the Magento composer CLI: ```bash composer require smile/elasticsuite ~2.8.0 ``` And we end up with a scary error message - I tried to log in with my Magento admininistrator credentials, but [apparently that was wrong](https://devdocs.magento.com/guides/v2.3/install-gde/prereq/connect-auth.html): ![Docker and Magento2 on Windows 10](./magento_elasticsearch_06x.png) Access Keys To download packages from the [Magento Marketplace](https://marketplace.magento.com/) we first have to sign in, select __My Profile__ and click on __Access Keys__ in the Marketplace tab. ![Docker and Magento2 on Windows 10](./magento_elasticsearch_07x.png) Click __Create a New Access Key__. 
Enter a specific name for the keys (e.g., the name of the developer receiving the keys) and click __OK__. New public and private keys are now associated with your account that you can click to copy. Save this information or keep the page open when working with your Magento project. Use the __Public key as your username__ and the __Private key as your password__! But even so I kept receiving the error message: ```json [Composer\Downloader\TransportException] Invalid credentials for 'https://repo.magento.com/packages.json', aborting. ``` The solution was that my installation only had a sample `auth.json` file in the root directory of the Magento content. You need to create a copy and fill out your credentials inside. Afterwards Composer no longer asks for your login: ![Docker and Magento2 on Windows 10](./magento_elasticsearch_08x.png) Enable the module and run ElasticSuite If your Magento instance is already installed, you have to enable the ElasticSuite modules using : ```bash bin/magento module:enable Smile_ElasticsuiteCore Smile_ElasticsuiteCatalog Smile_ElasticsuiteSwatches Smile_ElasticsuiteCatalogRule Smile_ElasticsuiteVirtualCategory Smile_ElasticsuiteThesaurus Smile_ElasticsuiteCatalogOptimizer Smile_ElasticsuiteTracker Smile_ElasticsuiteAnalytics ``` ![Docker and Magento2 on Windows 10](./magento_elasticsearch_09x.png) As recommend run `bin/magento setup:upgrade` and `bin/magento setup:di:compile` afterwards. Then configure the ElasticSearch server to point to your ElasticSearch cluster (optional if using default server location, http://localhost:9200) and reimport the config : ```bash bin/magento config:set -l smile_elasticsuite_core_base_settings/es_client/servers elasticsearch7.5:9200 bin/magento config:set -l smile_elasticsuite_core_base_settings/es_client/enable_https_mode 0 bin/magento config:set -l smile_elasticsuite_core_base_settings/es_client/enable_http_auth 0 bin/magento config:set -l smile_elasticsuite_core_base_settings/es_client/http_auth_user "" bin/magento config:set -l smile_elasticsuite_core_base_settings/es_client/http_auth_pwd "" bin/magento app:config:import ``` Process First Indexing Once you have completed all previous points, you can process a full reindexing of the catalogsearch_fulltext index. ```bash bin/magento index:reindex catalogsearch_fulltext bin/magento index:reindex elasticsuite_categories_fulltext bin/magento index:reindex elasticsuite_thesaurus ``` Once the reindex is done, you are all set and can view your frontend catalog and search pages being rendered via Smile Elastic Suite ! Commit the Changes Now that we've modified the container we have to commit the changes. To commit the changes and create a new image based on said changes, issue the command: ```bash docker commit CONTAINER_ID elasticsearch7.5-template docker commit CONTAINER_ID magento2.3.3-template ``` Where CONTAINER_ID is the ID given to you when you initially ran the container. 
![Docker and Magento2 on Windows 10](./magento_elasticsearch_xx.png) ```yaml mysql: image: mysql:5.7 ports: - 3306:3306 environment: MYSQL_ROOT_PASSWORD: <PASSWORD> volumes: - ./src/db:/var/lib/mysql container_name: mysql_5.7 phpmyadmin: image: phpmyadmin/phpmyadmin:4.8 ports: - 8080:80 links: - mysql environment: PMA_HOST: mysql PMA_PORT: 3306 container_name: phpmyadmin_4.8 apache2: image: magento2.3.3-template ports: - 80:80 - 10000:10000 - 443:443 links: - mysql - elasticsearch volumes: - ./src/html:/var/www/html container_name: magento2.3 elasticsearch: image: elasticsearch7.5-template container_name: elasticsearch7.5 environment: - xpack.security.enabled=false - discovery.type=single-node ulimits: memlock: soft: -1 hard: -1 nofile: soft: 65536 hard: 65536 cap_add: - IPC_LOCK volumes: - ./src/elasticsearch-data:/usr/share/elasticsearch/data ports: - 9200:9200 - 9300:9300 ``` --><file_sep>--- date: "2016-08-11" title: "Node Express Static" categories: - Javascript - Node - Express --- ![<NAME>, Cambodia](./photo-11626620916_804d27ae54_o.png) ## Node/Express Wiki/Knowledgebase **Bootstrap/Accelerated Mobile Pages** This code is part of a training in web development with **Node.js** and **Express /Generator**. Goal of this course is to quickly set up a node/express environment to serve static HTML/CSS/JS content. <!-- TOC --> - [Node/Express Wiki/Knowledgebase](#nodeexpress-wikiknowledgebase) - [1 Install Node.js and Express.js to develop our Web Application](#1-install-nodejs-and-expressjs-to-develop-our-web-application) - [2 Preparing the Site Structure](#2-preparing-the-site-structure) - [3 Install and Configure Gulp.js](#3-install-and-configure-gulpjs) - [4 Install NGINX on a CentOS 7 web server](#4-install-nginx-on-a-centos-7-web-server) - [5 Install Node.js on a CentOS 7 web server](#5-install-nodejs-on-a-centos-7-web-server) - [6 Clone Repo from Git](#6-clone-repo-from-git) - [7 Run the app as a service (PM2)](#7-run-the-app-as-a-service-pm2) - [8 Install Java](#8-install-java) - [Public Signing Key](#public-signing-key) - [Install Java 8](#install-java-8) - [9 Install Elasticsearch](#9-install-elasticsearch) - [Public Signing Key](#public-signing-key-1) - [Install Elasticsearch](#install-elasticsearch) - [Access-Control-Allow-Origin](#access-control-allow-origin) - [Set-up Elasticsearch Service](#set-up-elasticsearch-service) - [10 Install Kibana](#10-install-kibana) - [Create and edit a new yum repository file for Kibana](#create-and-edit-a-new-yum-repository-file-for-kibana) - [Install Kibana with this command:](#install-kibana-with-this-command) - [Set Elasticsearch Connection URL](#set-elasticsearch-connection-url) - [Install Kibana Service](#install-kibana-service) - [Secure Kibana with NGINX](#secure-kibana-with-nginx) - [Securing Kibana in a Nginx server block](#securing-kibana-in-a-nginx-server-block) - [Install Sense](#install-sense) <!-- /TOC --> ![Express.js Website Tutorial](./node-express-static_01.png) ### 1 Install Node.js and Express.js to develop our Web Application ___ * Install [Node.js](https://nodejs.org/en/download/). * Install express-generator globally to set up our node/express scaffolding: *npm install -g express-generator* * Apply the generator with the EJS templating engine (*-e*) and give your app an name (*e.g. 
express-static*): *express -e express-static*
* Switch to the created express-static folder and install dependencies: *cd express-static && npm install*
* Start the app with: DEBUG=my-application ./bin/www

### 2 Preparing the Site Structure
___

The following folders and files will be created in the **dev** folder. Later [Gulp.js](#4-install-and-configure-gulpjs) will be used to create a distilled version ready for deployment in the **build** folder.

* Add *partials/content* to the *views* folder
* Add *partials/template* to views and add *head.ejs*, *header.ejs*, *footer.ejs* and *jsdefaults.ejs*
* Copy content from the *head* section of your web site (*index.ejs*) to *head.ejs*
* Reference the *head.ejs* file inside the *head* section of index.ejs: *<% include partials/template/head.ejs %>*
* Add *<script src="/javascripts/script.js"></script>* to *jsdefaults.ejs* and reference it in *index.ejs* at the end of body: *<% include partials/template/jsdefaults.ejs %>*
* Copy all JS script tags to *jsdefaults.ejs*
* Copy content from the *footer* section of your web site (*index.ejs*) to *footer.ejs*
* Reference the *footer.ejs* file inside the *footer* section of index.ejs: *<% include partials/template/footer.ejs %>*
* Repeat with other partials that you might want to reuse on other pages!
* Copy all your website's pages (EJS files) into the content folder

<br/>

__Now let's try to build this:__

![INSTAR Wiki Knowledgebase](./node-express-static_01.png)

### 3 Install and Configure Gulp.js
___

* **Step One** — Install [Gulp](https://github.com/gulpjs/gulp/blob/master/docs/getting-started.md) globally:

```
npm install --global gulp-cli
```

* **Step Two** — Install [Gulp 4](https://github.com/gulpjs/gulp/tree/4.0) into your Project - cd to the project directory and:

```
npm install --save-dev gulpjs/gulp#4.0
```

* **Step Three** — Create a gulpfile.babel.js at the root of your project:

Node already supports a lot of ES2015; to avoid compatibility problems we suggest installing Babel and renaming your gulpfile.js to gulpfile.babel.js.

```
npm install --save-dev babel-register babel-preset-es2015
```

Then create a .babelrc file with the preset configuration.

```
{
  "presets": [ "es2015" ]
}
```

Now install all Gulp dependencies that you want to use:

```
npm install --save-dev gulp-babel gulp-uglify gulp-rename gulp-clean-css gulp-htmlclean gulp-newer gulp-imagemin del
```

Now write the gulpfile.babel.js and import all gulp dependencies...

```javascript
import gulp from 'gulp';
import babel from 'gulp-babel';
import uglify from 'gulp-uglify';
import rename from 'gulp-rename';
import cleanCSS from 'gulp-clean-css';
import cleanHTML from 'gulp-htmlclean';
import newer from 'gulp-newer';
import imagemin from 'gulp-imagemin';
import del from 'del';
```

... and write your Gulp Tasks.
* **Step Four** — Define your source and destination directories:

```javascript
const paths = {
  views: {
    src: 'dev/views/**/*.ejs',
    dest: 'build/views/',
  },
  images: {
    src: 'dev/public/images/**/*.{jpg,jpeg,png}',
    dest: 'build/public/images/',
  },
  styles: {
    src: 'dev/public/stylesheets/**/*.css',
    dest: 'build/public/stylesheets/',
  },
  scripts: {
    src: 'dev/public/javascripts/**/*.js',
    dest: 'build/public/javascripts/',
  },
};
```

* **Step Five** — Add a Gulp Task [using imagemin](https://www.npmjs.com/package/gulp-imagemin) to compress your images:

```javascript
export function images() {
  return gulp.src(paths.images.src)
    .pipe(newer(paths.images.dest)) // Pass through newer files only
    .pipe(imagemin())
    .pipe(gulp.dest(paths.images.dest));
}
```

Run the task with:

```
gulp images
```

to compress all images in ./dev/public/images and save them in ./build/public/images.

* **Step Six** — Add a Gulp Task to minify CSS, EJS/HTML and JS:

Now we have to create minify jobs for each file type (add more tasks if needed):

```javascript
// Minify EJS files
export function views() {
  return gulp.src(paths.views.src)
    .pipe(newer(paths.views.dest)) // Pass through newer files only
    .pipe(cleanHTML())
    .pipe(gulp.dest(paths.views.dest));
}

// Minify CSS files
export function styles() {
  return gulp.src(paths.styles.src)
    .pipe(newer(paths.styles.dest)) // Pass through newer files only
    .pipe(cleanCSS()) // Pass in options to the stream
    .pipe(rename({
      basename: 'main',
      suffix: '.min',
    }))
    .pipe(gulp.dest(paths.styles.dest));
}

// Minify JS files
export function scripts() {
  return gulp.src(paths.scripts.src, { sourcemaps: true })
    .pipe(newer(paths.scripts.dest)) // Pass through newer files only
    .pipe(babel())
    .pipe(uglify())
    .pipe(gulp.dest(paths.scripts.dest));
}

// Minify JS files - classic gulp.task syntax, equivalent to the scripts() task above
gulp.task('routes', function() {
  return gulp.src(paths.scripts.src)
    .pipe(newer(paths.scripts.dest))
    .pipe(uglify())
    .pipe(gulp.dest(paths.scripts.dest));
});
```

All those tasks can be triggered individually - e.g.:

```
gulp views
```

But to make it more convenient, we will create a combined task - that will also watch for changes:

```javascript
export function watch() {
  gulp.watch(paths.views.src, views);
  gulp.watch(paths.images.src, images);
}
```

You can create a build task to create a fresh build:

```javascript
const clean = () => del(['build']); // wipe the old build folder first
const build = gulp.series(clean, gulp.parallel(views, images, styles, scripts));
export { build };
```

```
gulp build
```

This task will grab all files from the dev folder, minify/compress them and save them in the build folder.
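A typical development loop with the tasks above - assuming they are all exported from your `gulpfile.babel.js` - then looks like this:

```bash
gulp build   # wipe the build folder and run views, images, styles and scripts in parallel
gulp watch   # keep re-processing views and images whenever a source file changes
```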
To get Nginx running, type:

```
sudo systemctl start nginx
```

Test:

```
http://server_domain_name_or_IP/
```

* **Step Four** — Nginx as a Service

To enable Nginx to start when your system boots, enter the following command:

```
sudo systemctl enable nginx
```

Always test your config after changes:

```
nginx -t
```

And try to reload instead of restarting - in case of an error the reload will fail and the old config keeps running:

```
service nginx reload
```

To restart the Nginx service, enter the following command:

```
service nginx restart
```

### 5 Install Node.js on a CentOS 7 web server
___

* **Step One** — Download the Node.js Source

Choose your version and download the source:

```
curl --silent --location https://rpm.nodesource.com/setup_7.x | bash -
```

* **Step Two** — Install Node.js

Then install, as root:

```
sudo yum -y install nodejs
```

```
sudo yum install -y gcc-c++ make
```

### 6 Clone Repo from Git
___

* **Step One** — Install Git

After this line you will have Git installed on your CentOS server:

```
sudo yum install -y git
```

Putting your Git code on your server:

```
cd /opt/
sudo mkdir apps
sudo chown your_app_user_name apps
git clone https://github.com/INSTAR-Deutschland/express-static.git apps
cd apps
npm install
```

Update an existing repo by changing into its directory and running:

```
git pull origin master
```

### 7 Run the app as a service (PM2)
___

* **Step One** — Daemonization

Now we will install PM2, which is a process manager for Node.js applications. PM2 provides an easy way to manage and daemonize applications (run them as a service). We will use Node Packaged Modules (NPM), which is basically a package manager for Node modules that installs with Node.js, to install PM2 on our app server.

Use this command to install PM2:

```
sudo npm install pm2@latest -g
```

To update PM2 to the latest version and refresh the version in memory:

```
sudo npm install pm2@latest -g
pm2 update
```

* **Step Two** — Manage Application with PM2

The first thing you will want to do is use the pm2 start command to run your application, app.js, in the background. With Node apps the entry point is the app.js (or index.js). In case you used Express-Generator to do your app scaffolding, use the www file in the /bin directory instead:

```
pm2 start app.js
```

This also adds your application to PM2's process list, which is printed every time you start an application:

| App name | id | mode | pid | status | restart | uptime | mem | watching |
| ------------- |:-------------:| :-----:| :-----:| :-----:| :-----:| :-----:| :-----:| -----:|
| app | 0 | fork | 9495 | online | 0 | 0s | 36.4 MB | disabled |

Repeat for all your node apps and save:

```
pm2 save
```

Applications that are running under PM2 will be restarted automatically if the application crashes or is killed, but an additional step needs to be taken to get the application to launch on system startup (boot or reboot). Luckily, PM2 provides an easy way to do this, the startup subcommand.

Once you started all apps, type the following to make sure that they restart after a server restart. You must also specify the init system you are running on, which is centos, in our case:

```
pm2 startup [ubuntu | ubuntu14 | ubuntu12 | centos | centos6 | arch | oracle | amazon | macos | darwin | freebsd | systemd | systemv | upstart | launchd | rcd]
```

The startup subcommand generates and configures a startup script to launch PM2 and its managed processes on server boots.
We won't specify our OS and let pm2 decide what to do:

```
sudo pm2 startup
```

Save the current process list so it can be restored later:

```
pm2 save
```

Bring back previously saved processes (via pm2 save):

```
pm2 resurrect
```

Disabling startup system:

```
pm2 unstartup
```

* **Step Three** — Other PM2 Commands (Optional)

Stop an application with this command (specify the PM2 App name or id):

```
sudo pm2 stop app
```

Restart an application with this command (specify the PM2 App name or id):

```
sudo pm2 restart app
```

Will reload with 0s downtime (for NETWORKED apps):

```
sudo pm2 reload all
```

Will remove process 0 from the pm2 list:

```
pm2 delete 0
```

The list of applications currently managed by PM2 can also be looked up with the list subcommand:

```
pm2 list
```

More information about a specific application can be found by using the info subcommand (specify the PM2 App name or id):

```
pm2 info app
```

The PM2 process monitor can be pulled up with the monit subcommand. This displays the application status, CPU, and memory usage:

```
pm2 monit
```

### 8 Install Java
___

#### Public Signing Key

Download Oracle Java 8:

```
cd ~
wget --no-cookies --no-check-certificate --header "Cookie: gpw_e24=http%3A%2F%2Fwww.oracle.com%2F; oraclelicense=accept-securebackup-cookie" "http://download.oracle.com/otn-pub/java/jdk/8u73-b02/jdk-8u73-linux-x64.rpm"
```

#### Install Java 8

```
sudo yum -y localinstall jdk-8u73-linux-x64.rpm
```

Now Java should be installed at /usr/java/jdk1.8.0_73/jre/bin/java, and linked from /usr/bin/java. You may delete the archive file that you downloaded earlier:

```
rm ~/jdk-8u*-linux-x64.rpm
```

### 9 Install Elasticsearch

#### Public Signing Key

Download and install the public signing key:

```
rpm --import https://packages.elastic.co/GPG-KEY-elasticsearch
```

Add the following in your /etc/yum.repos.d/ directory in a file with a .repo suffix, for example elasticsearch.repo:

```
[elasticsearch-5.x]
name=Elasticsearch repository for 5.x packages
baseurl=https://artifacts.elastic.co/packages/5.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md
```

#### Install Elasticsearch

```
sudo yum install elasticsearch
```

#### Access-Control-Allow-Origin

Restrict outside access to your Elasticsearch instance (port 9200):

```
sudo vi /etc/elasticsearch/elasticsearch.yml

-> network.host: localhost
```

To get rid of the following error:

```
Failed to load http://localhost:9200/: No 'Access-Control-Allow-Origin' header is present on the requested resource. Origin 'http://localhost:8000' is therefore not allowed access.
```

Add the following lines to elasticsearch.yml:

```
http.cors:
  enabled: true
  allow-origin: /https?:\/\/localhost(:[0-9]+)?/
```

#### Set-up Elasticsearch Service

To configure Elasticsearch to start automatically when the system boots up, run the following commands:

```
sudo /bin/systemctl daemon-reload
sudo /bin/systemctl enable elasticsearch.service
```

Elasticsearch can be started and stopped as follows:

```
sudo systemctl start elasticsearch.service
sudo systemctl stop elasticsearch.service
```

| Type | Description | Location RHEL/CentOS |
| ------------- |:-------------:| -----:|
| home | Home of elasticsearch installation. | /usr/share/elasticsearch |
| bin | Binary scripts including elasticsearch to start a node. | /usr/share/elasticsearch/bin |
| conf | Configuration files elasticsearch.yml and logging.yml. | /etc/elasticsearch |
| conf | Environment variables including heap size, file descriptors. 
| /etc/sysconfig/elasticsearch | | data | The location of the data files of each index / shard allocated on the node. | /var/lib/elasticsearch | | logs | Log files location | /var/log/elasticsearch | | plugins | Plugin files location. Each plugin will be contained in a subdirectory. | /usr/share/elasticsearch/plugins | | repo | Shared file system repository locations. | Not configured | | script | Location of script files. | /etc/elasticsearch/scripts | ### 10 Install Kibana ___ #### Create and edit a new yum repository file for Kibana ``` sudo vi /etc/yum.repos.d/kibana.repo ``` Add the following repository configuration: ``` [kibana-5.x] name=Kibana repository for 5.x packages baseurl=https://artifacts.elastic.co/packages/5.x/yum gpgcheck=1 gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch enabled=1 autorefresh=1 type=rpm-md ``` #### Install Kibana with this command: ``` sudo yum install kibana ``` #### Set Elasticsearch Connection URL ``` sudo vi /opt/kibana/config/kibana.yml -> elasticsearch.url: "http://localhost:9200" ``` #### Install Kibana Service To configure Kibana to start automatically when the system boots up, run the following commands: ``` sudo /bin/systemctl daemon-reload sudo /bin/systemctl enable kibana.service ``` Kibana can be started and stopped as follows: ``` sudo systemctl start kibana.service sudo systemctl stop kibana.service ``` #### Secure Kibana with NGINX Use NGINX to securely access Kibana and use htpasswd to create an admin user: ``` sudo yum -y install httpd-tools sudo htpasswd -c /etc/nginx/htpasswd.users admin ``` Add your password. #### Securing Kibana in a Nginx server block ``` sudo vi /etc/nginx/nginx.conf ``` Find the default server block (starts with server {), the last configuration block in the file, and delete it. When you are done, the last two lines in the file should look like this: ``` include /etc/nginx/conf.d/*.conf; } ``` Now we will create an Nginx server block in a new file: ``` sudo vi /etc/nginx/conf.d/kibana.conf ``` Paste the following code block into the file. 
Be sure to update the server_name to match your server's name: ``` server { listen 80; server_name example.com; auth_basic "Restricted Access"; auth_basic_user_file /etc/nginx/htpasswd.users; location / { proxy_pass http://localhost:5601; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection 'upgrade'; proxy_set_header Host $host; proxy_cache_bypass $http_upgrade; } } ``` Now start and enable Nginx to put our changes into effect: ``` sudo systemctl start nginx sudo systemctl enable nginx ``` #### Install Sense Once Kibana is installed, you can install Sense running the following command from your /opt/kibana folder: ``` ./bin/kibana plugin --install elastic/sense ``` You will now need to start Kibana: ``` ./bin/kibana ``` The apps are now available via: ``` http://localhost:5601/app/kibana http://localhost:5601/app/sense ``` Use Sense to feed Elasticsearch with mappings/postings (see wiki-data.json)<file_sep>--- date: "2019-02-09" title: "Zoneminder Dockerized" categories: - IoT - Smarthome --- ![Furano, Japan](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Installation of Docker on CentOS](#installation-of-docker-on-centos) - [Zoneminder and MySQL](#zoneminder-and-mysql) - [Accessing the Zoneminder applications](#accessing-the-zoneminder-applications) - [Configuring ZoneMinder](#configuring-zoneminder) - [FirewallD](#firewalld) - [Adding a INSTAR 1080p Camera](#adding-a-instar-1080p-camera) - [Adding your cameras MJPEG Stream](#adding-your-cameras-mjpeg-stream) - [Adding your cameras RTSP Stream](#adding-your-cameras-rtsp-stream) - [Accessing your Camera](#accessing-your-camera) <!-- /TOC --> ZoneMinder is intended for use in single or multi-camera video security applications, including commercial or home CCTV, theft prevention and child, family member or home monitoring and other domestic care scenarios such as nanny cam installations. It supports capture, analysis, recording, and monitoring of video data coming from one or more video or network cameras attached to a Linux system. ZoneMinder also support web and semi-automatic control of Pan/Tilt/Zoom cameras using a variety of protocols. It is suitable for use as a DIY home video security system and for commercial or professional video security and surveillance. It can also be integrated into a home automation system via X.10 or other protocols. If you're looking for a low cost CCTV system or a more flexible alternative to cheap DVR systems, then why not give ZoneMinder a try? ## Installation of Docker on CentOS Docker can be installed on Linux, Windows and macOS. In this example we will use CentOS Linux - for other operating systems, please [check the official documentation](https://docs.docker.com/install/). First we need to add the repository to pull the Docker code from - type `nano /etc/yum.repos.d/virt7-docker-common-release.repo` and add: ```yaml [virt7-docker-common-release] name=virt7-docker-common-release baseurl=http://cbs.centos.org/repos/virt7-docker-common-release/x86_64/os/ gpgcheck=0 ``` Then install [Docker](https://www.docker.com/) on Centos server: ```bash yum update yum -y install --enablerepo=virt7-docker-common-release docker systemctl enable docker systemctl start docker ``` ## Zoneminder and MySQL We will use the [Zoneminder](https://hub.docker.com/r/quantumobject/docker-zoneminder) from [QuantumObject](https://github.com/QuantumObject) that requires a MySQL v5.7 server. 
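Optionally you can pull both images up front so the first `docker run` does not pause for the download - the image tags are the same ones used in the commands below:

```bash
docker pull mysql/mysql-server:5.7
docker pull quantumobject/docker-zoneminder
```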
To run with MySQL in a separate container use the commands below:

```bash
docker network create net
docker run -d -e TZ=Europe/Berlin -e MYSQL_USER=zmuser -e MYSQL_PASSWORD=<PASSWORD> -e MYSQL_DATABASE=zm -e MYSQL_ROOT_PASSWORD=<PASSWORD> -e MYSQL_ROOT_HOST=% --net net --name db mysql/mysql-server:5.7
echo "wait until MySQL startup..."
```

---

![ZoneMinder & Docker](./Zoneminder_01.png)

---

Then download and run the ZoneMinder container:

```bash
docker run -d --shm-size=4096m -e TZ=Europe/Berlin -e ZM_DB_HOST=db --net net --name zm -p 80:80 quantumobject/docker-zoneminder
```

---

![ZoneMinder & Docker](./Zoneminder_02.png)

---

You can verify that both containers are running with `docker ps`:

---

![ZoneMinder & Docker](./Zoneminder_03.png)

---

## Accessing the Zoneminder applications

After that, open your server's address in a browser, using the port assigned by Docker:

---

![ZoneMinder & Docker](./Zoneminder_04.png)

---

```
http://host_ip:port/zm/
```

Then log in with the default login/password: <PASSWORD>. Please change the password right away and check the on-line [documentation](http://www.zoneminder.com/wiki/index.php/Documentation) to configure ZoneMinder.

To access the container from the server it is running on:

```bash
docker exec -it container_id /bin/bash
```

## Configuring ZoneMinder

### FirewallD

Before adding a camera make sure that the ports 80 and 554 are accessible on your server. If you are running [FirewallD](https://firewalld.org/documentation/) use the following commands to open these ports:

```bash
firewall-cmd --permanent --add-port=554/tcp
firewall-cmd --permanent --add-port=80/tcp
firewall-cmd --reload
```

### Adding an INSTAR 1080p Camera

Open the __Console__ tab and click on __Add__:

---

![ZoneMinder & Docker](./Zoneminder_05.png)

---

On the __General__ tab give your camera a name and choose __Remote__ as __Source Type__:

---

![ZoneMinder & Docker](./Zoneminder_06.png)

---

#### Adding your camera's MJPEG Stream

For the MJPEG stream choose:

| | |
|---|---|
| Protocol | HTTP |
| Host Name | username:password@camera-ip |
| Port | 80 |
| Path | /tmpfs/snap.jpg |
| Width | 1920 |
| Height | 1080 |

---

![ZoneMinder & Docker](./Zoneminder_07.png)

---

#### Adding your camera's RTSP Stream

For the RTSP stream choose:

| | |
|---|---|
| Protocol | RTSP |
| Method | RTP/RTSP |
| Host Name | username:password@camera-ip |
| Port | 554 |
| Path | /11 |
| Width | 1920 |
| Height | 1080 |

---

![ZoneMinder & Docker](./Zoneminder_08.png)

---

## Accessing your Camera

---

![ZoneMinder & Docker](./Zoneminder_09.png)

![ZoneMinder & Docker](./Zoneminder_10.png)

---<file_sep>---
date: "2020-01-12"
title: "Web traffic redirection with Node and Express on CentOS8"
categories:
- LINUX
---

![Shenzhen, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->
- [Installing Node JS](#installing-node-js)
- [Installing Greenlock Express](#installing-greenlock-express)
- [Installing Process Monitor 2](#installing-process-monitor-2)
<!-- /TOC -->

## Installing Node JS

Follow [the instructions](https://github.com/nodesource/distributions/blob/master/README.md#rpm) for your version of Node.js:

```bash
# As root
curl -sL https://rpm.nodesource.com/setup_14.x | bash -
# No root privileges
curl -sL https://rpm.nodesource.com/setup_14.x | sudo bash -
```

Run the following to install Node.js 14.x and npm:

```bash
sudo yum install -y nodejs
```

You may also need development tools to build native addons:

```bash
sudo yum install gcc-c++ make
```

## Installing Greenlock Express

[Greenlock
Express](https://www.npmjs.com/package/greenlock-express) is a Web Server with Fully Automated HTTPS and renewals.

```bash
mkdir /opt/web-redirection
cd /opt/web-redirection
npm init
npm install greenlock-express@v4
```

## Installing Process Monitor 2

[PM2](https://pm2.keymetrics.io) is a daemon process manager that will help you manage and keep your application online 24/7:

```bash
npm install pm2 -g
```<file_sep>---
date: "2019-01-05"
title: "Sensors Data and Grafana"
categories:
- LINUX
- IoT
- Databases
---

![Hongkong](./photo-19453645663_6dzgdxff_o.png)

<!-- TOC -->
- [Installing Grafana](#installing-grafana)
<!-- /TOC -->

[Grafana](https://grafana.com/grafana) allows you to query, visualize, alert on and understand your metrics no matter where they are stored. Create, explore, and share dashboards with your team and foster a data driven culture. The Grafana project can also be found on [Github](https://github.com/grafana/grafana).

## Installing Grafana

Since I am working on a Raspberry Pi 2 (the same would be true for a v3) with Raspbian installed I will grab the [download link](https://grafana.com/grafana/download?platform=arm) for Grafana for [Ubuntu & Debian(ARMv7)](https://grafana.com/docs/installation/debian/):

__Correction: it turned out I was using a single core version of the Raspberry Pi 2 that requires the ARMv6 build - see below__

```bash
wget https://dl.grafana.com/oss/release/grafana_6.1.6_armhf.deb
sudo apt-get install -y adduser libfontconfig1
sudo dpkg -i grafana_6.1.6_armhf.deb
```

In my case _libfontconfig1_ was missing dependencies after the installation and the installation of Grafana failed - I was able to fix this issue by running `sudo apt --fix-broken install` and restarting the Grafana installation.

We can use SystemD to start the Grafana Service:

```bash
sudo systemctl daemon-reload
sudo systemctl start grafana-server
sudo systemctl status grafana-server
```

Unfortunately I am seeing error messages here - and as stated above, I was [using the wrong build of Grafana](https://community.grafana.com/t/grafana-wont-start-on-raspberry-pi/8525/3)...

```bash
09:11:51 raspberrypi systemd[1]: Stopped Grafana instance.
09:11:51 raspberrypi systemd[1]: grafana-server.service: Start request repeated too quickly.
09:11:51 raspberrypi systemd[1]: Failed to start Grafana instance.
09:11:51 raspberrypi systemd[1]: grafana-server.service: Unit entered failed state.
09:11:51 raspberrypi systemd[1]: grafana-server.service: Failed with result ‘signal’.
```

So once again, with more feeling (and the correct version [Ubuntu & Debian(ARMv6)](https://grafana.com/grafana/download?platform=arm)):

```bash
sudo systemctl stop grafana-server
sudo apt-get remove --auto-remove grafana
wget https://dl.grafana.com/oss/release/grafana-rpi_6.1.6_armhf.deb
sudo apt-get install -y adduser libfontconfig1
sudo dpkg -i grafana-rpi_6.1.6_armhf.deb
sudo /bin/systemctl daemon-reload
sudo /bin/systemctl enable grafana-server
sudo /bin/systemctl start grafana-server
```

This time everything worked as planned:

![Grafana](./grafana_01.png)

I had to wait for a few minutes for the service to come online - on a RasPi v3 this should be less of an issue... I am now able to access the Grafana frontend on the IP address of my Pi and the port 3000 - `http://192.168.2.64:3000/login`.
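If the page does not come up right away, it is worth checking that the service is actually listening before suspecting the network - a quick check, assuming the default port 3000 from `/etc/grafana/grafana.ini` was not changed:

```bash
# verify that grafana-server is listening on its default port
sudo ss -tlnp | grep 3000
# and follow the service logs while it starts up
sudo journalctl -u grafana-server -f
```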
The default login is __user = admin__ and __password = <PASSWORD>__ - but you will be asked to change this right away:

![Grafana](./grafana_02.png)

We are greeted by a wizard that leads us through the initial set up process - we have to start by adding a data source. I am going to use the data I started storing inside a [MariaDB database in another article](/zigbee2mqtt-xiaomi-fhem/#adding-sql-logging) and choose MySQL:

![Grafana](./grafana_03.png)

For this project I created a database called `fhem`. MariaDB is running on the same Pi and can be accessed over `localhost:3306` by the user `fhemuser` with the password `<PASSWORD>`. Clicking on __Save & Test__ at the bottom of the page tells me that the __Database Connection is OK__:

![Grafana](./grafana_04.png)

Back on the __Home Screen__ I now click on __New Dashboard__:

![Grafana](./grafana_05.png)

Here I can choose a __Visualization__ and select a __Graph__. On the empty coordinate system I can click on the __Panel Title__ to add a data source for the graph:

![Grafana](./grafana_06.png)

My [database has two tables](/zigbee2mqtt-xiaomi-fhem/#adding-sql-logging) - one with the __current values__ and one table that collects the __historic values__ for all readings. And both have 7 columns - `TIMESTAMP`, `DEVICE`, `TYPE`, `EVENT`, `READING`, `VALUE`, `UNIT`:

![Grafana](./grafana_07.png)

For my graph I am interested in the __history table__ and I want to begin to create a plot of the _Temperature Sensor Reading_ from the `temp_hum_sensor` device. Grafana helps with creating the corresponding SQL query. But I am receiving an error because the numeric reading is stored as _type string_:

![Grafana](./grafana_08.png)

But we can re-write the SQL query using __CAST__ to convert the data type of our `VALUES` to `DECIMAL`:

```sql
SELECT
  UNIX_TIMESTAMP(`TIMESTAMP`) as time_sec,
  CAST(`VALUE` AS DECIMAL(10, 6)) as value,
  'temperature' as metric
FROM `history`
WHERE $__timeFilter(`TIMESTAMP`)
  AND `DEVICE` = 'temp_hum_sensor'
  AND `READING` = 'temperature'
ORDER BY `TIMESTAMP` ASC
```

![Grafana](./grafana_09.png)

Continuing through the wizard on the left allows me to further customize the graph:

![Grafana](./grafana_10.png)

Now we can return to the home screen and save our first dashboard:

![Grafana](./grafana_11.png)<file_sep>---
date: "2018-10-27"
title: "Red Hat Certified Engineer (RHCE) Exam"
categories:
- LINUX
---

![<NAME>](./photo-11628105264_1caf6ec237_o.jpg)

This exam and the exam objectives provided here are based on the Red Hat® Enterprise Linux® 7.0 version of the exam. The performance-based Red Hat Certified Engineer exam (EX300) tests to determine if your knowledge, skill, and ability meet those required of a senior system administrator responsible for Red Hat Enterprise Linux (RHEL) systems.
[Red Hat Website](https://www.redhat.com/en/services/training/ex300-red-hat-certified-engineer-rhce-exam)

<!-- TOC -->
- [Network Time Protocol Service](#network-time-protocol-service)
- [Setting up FirewallD](#setting-up-firewalld)
- [Predefined Zones](#predefined-zones)
- [Installation](#installation)
- [Setting rules for applications](#setting-rules-for-applications)
- [Predefined Services](#predefined-services)
- [Adding Ports to Firewall Rules](#adding-ports-to-firewall-rules)
- [Creating a Custom Service](#creating-a-custom-service)
- [Creating a Custom Zone](#creating-a-custom-zone)
- [Setting Rich Rules](#setting-rich-rules)
- [NAT and Port Forwarding](#nat-and-port-forwarding)
- [Samba File sharing](#samba-file-sharing)
- [Email Server](#email-server)
- [Installing Postfix and Dovecot](#installing-postfix-and-dovecot)
- [Setting up SSL certificate](#setting-up-ssl-certificate)
- [Postfix Configuration](#postfix-configuration)
- [Network Interface Bonding](#network-interface-bonding)
- [Types of Bonding](#types-of-bonding)
- [Enabling Bonding on CentOS 7](#enabling-bonding-on-centos-7)
- [Configure multiple IP addresses for bond0](#configure-multiple-ip-addresses-for-bond0)
- [Network Interface Teaming](#network-interface-teaming)
<!-- /TOC -->

## Network Time Protocol Service

NTP is a protocol which runs over UDP port 123 at the transport layer and allows computers to synchronize time over networks. This service is handled by __chronyd__ on CentOS 7 and higher. We can edit this service via `nano /etc/chrony.conf`:

---

![Red Hat Certified Engineer](./CentOS_15.png)

---

As you can see, the service is pre-configured with 4 NTP servers - these can be replaced as needed:

```
server 0.centos.pool.ntp.org iburst
server 1.centos.pool.ntp.org iburst
server 2.centos.pool.ntp.org iburst
server 3.centos.pool.ntp.org iburst
```

The service can then be enabled with `systemctl enable chronyd.service`, restarted with `systemctl restart chronyd.service` and its status checked with `systemctl status chronyd.service`. To check the current output of the NTP service type `chronyc tracking`:

---

![Red Hat Certified Engineer](./CentOS_16.png)

---

Current sources can be checked by `chronyc sources -v`:

---

![Red Hat Certified Engineer](./CentOS_17.png)

---

To check your currently selected timezone run `timedatectl | grep "Time zone"`. To set a different timezone, e.g. __UTC__, run `timedatectl set-timezone UTC`.

## Setting up FirewallD

Firewalld provides a dynamically managed firewall with support for network/firewall __zones__ that define the trust level of network connections or interfaces.

### Predefined Zones

These are the zones provided by firewalld sorted according to the default trust level of the zones from untrusted to trusted - to see all available zones type `firewall-cmd --get-zones` / to see the configuration of a specific zone, e.g. __public__, type `firewall-cmd --zone=public --list-all`

* __drop__: Any incoming network packets are dropped, there is no reply. Only outgoing network connections are possible.
* __block__: Any incoming network connections are rejected with an icmp-host-prohibited message for IPv4 and icmp6-adm-prohibited for IPv6. Only network connections initiated within this system are possible.
* __public__: For use in public areas. You do not trust the other computers on networks to not harm your computer. Only selected incoming connections are accepted.
* __external__: For use on external networks with masquerading enabled especially for routers.
You do not trust the other computers on networks to not harm your computer. Only selected incoming connections are accepted.
* __dmz__: For computers in your demilitarized zone that are publicly-accessible with limited access to your internal network. Only selected incoming connections are accepted.
* __work__: For use in work areas. You mostly trust the other computers on networks to not harm your computer. Only selected incoming connections are accepted.
* __home__: For use in home areas. You mostly trust the other computers on networks to not harm your computer. Only selected incoming connections are accepted.
* __internal__: For use on internal networks. You mostly trust the other computers on the networks to not harm your computer. Only selected incoming connections are accepted.
* __trusted__: All network connections are accepted.

### Installation

[FirewallD](https://firewalld.org/documentation/) is most likely already installed on your system. You can check the service status by typing `systemctl status firewalld`. If the service is missing, install FirewallD via `yum install -y firewalld` + `systemctl enable firewalld`:

---

![Red Hat Certified Engineer](./CentOS_18.png)

---

You can check the default zone that is configured for your system by typing `firewall-cmd --get-default-zone` or check active zones in general via `firewall-cmd --get-active-zone`:

---

![Red Hat Certified Engineer](./CentOS_19.png)

---

### Setting rules for applications

#### Predefined Services

A firewalld service can be a list of local ports and destinations and additionally also a list of firewall helper modules automatically loaded if a service is enabled. The use of predefined services makes it easier for the user to enable and disable access to a service. To see all predefined services that come with FirewallD type `firewall-cmd --get-services`:

---

![Red Hat Certified Engineer](./CentOS_20.png)

---

If we want to allow __http__ traffic through our firewall, all we have to do is to activate the corresponding service for the active zone - in our case this is __public__: `firewall-cmd --zone=public --add-service=http`. You can check if adding the service was successful by typing `firewall-cmd --zone=public --list-services`:

---

![Red Hat Certified Engineer](./CentOS_21.png)

---

To make a change persist after a reboot, you need to add the __--permanent__ flag to the add-command, e.g. `firewall-cmd --zone=public --permanent --add-service=http`. To list all permanently active services type `firewall-cmd --zone=public --permanent --list-services`.

#### Adding Ports to Firewall Rules

A simple port number or port range can be used in cases where no additional changes are needed. For example, opening port 80/tcp allows access to a local http server on the standard port - which would be identical to using the __http service__ we activated above: `firewall-cmd --permanent --zone=public --add-port=80/tcp`. This can be tested by typing `firewall-cmd --zone=public --permanent --list-ports`. To add a range of ports, define the beginning and end port like this: `firewall-cmd --permanent --zone=public --add-port=40999-41001/udp`.

#### Creating a Custom Service

Adding ports manually is highly flexible but might make you lose track after a while - which port belonged to that application I just deleted? Grouping ports for applications in services makes it much easier to handle deployments.
All predefined services can be found under `/usr/lib/firewalld/services` in form of __XML Files__ that we can use as templates for our services: --- ![Red Hat Certified Engineer](./CentOS_22.png) --- We can copy a file `cp ssh.xml example.xml` and edit the copy according to our needs: --- ![Red Hat Certified Engineer](./CentOS_23.png) --- Now reload FirewallD `firewall-cmd --reload`, add the example services you created `firewall-cmd --zone=public --add-service=example` and check if it is running `firewall-cmd --zone=public --list-services`: --- ![Red Hat Certified Engineer](./CentOS_24.png) --- #### Creating a Custom Zone To configure or add zones you can either use one of the firewalld interfaces to handle and change the configuration. These are the graphical configuration tool firewall-config, the command line tool firewall-cmd or the D-BUS interface. Or you can create or copy a zone file in one of the configuration directories. `/usr/lib/firewalld/zones` is used for default and fallback configurations and `/etc/firewalld/zones` is used for user created and customized configuration files. A new zone can be added with the following command, e.g. __example zone__: `firewall-cmd --permanent --new-zone=example`. To use your custom zone, you first need to reload FirewallD `firewall-cmd --reload`. Just as with custom services, we can use the predefined zones in `/usr/lib/firewalld/zones` as a template for our custom zone: --- ![Red Hat Certified Engineer](./CentOS_25.png) --- Or use the command `firewall-cmd --zone=example --permanent --add-service=http` to manually add default services to your zone. To assign your custom zone to the correct network interface - in my case __enp0s3__ (use `ip addr` to find your interface name) - type `firewall-cmd --zone=example --permanent --change-interface=enp0s3`. To change the default zone to your custom zone type `firewall-cmd --set-default-zone=example`. ### Setting Rich Rules With the [rich language](https://firewalld.org/documentation/man-pages/firewalld.richlanguage.html) more complex firewall rules can be created in an easy to understand way. The language uses keywords with values and is an abstract representation of ip*tables rules. The rich language extends the [current zone elements](https://firewalld.org/documentation/man-pages/firewalld.zone.html) (service, port, icmp-block, icmp-type, masquerade, forward-port and source-port) with additional source and destination addresses, logging, actions and limits for logs and actions. __Allow Traffic__: Allow traffic from an _10.0.0.0/24_ network to a specific client with an IP address of _192.168.2.110/24_ via the ports _8080_ to _8090_. ```bash firewall-cmd --permanent --zone=example --add-rich-rule='rule family=ipv4 source address=10.0.0.0/24 destination address=192.168.2.110/24 port port=8080-8090 protocol=tcp accept' ``` You can test your rich rules via `firewall-cmd --permanent --zone=example --list-rich-rules`. To __remove__ a rich rule type: ```bash firewall-cmd --permanent --zone=example --remove-rich-rule='rule family=ipv4 source address=10.0.0.0/24 destination address=192.168.2.110/24 port port=8080-8090 protocol=tcp accept' ``` __Reject Traffic__: We want to block all traffic that we receive from the IP address _192.168.2.44/24_. ```bash firewall-cmd --permanent --zone=example --add-rich-rule='rule family=ipv4 source address=192.168.2.44/24 reject' ``` This rule will now actively reject all incoming traffic from the given IP address (not only silently dropping it). 
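Just like the allow rule above, this reject rule can be removed again by passing the exact same rule string to `--remove-rich-rule` and reloading the firewall:

```bash
firewall-cmd --permanent --zone=example --remove-rich-rule='rule family=ipv4 source address=192.168.2.44/24 reject'
firewall-cmd --reload
```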
__Limit Sessions__: Rate-limit incoming _SSH_ connections to 10 new connections per minute:

```bash
firewall-cmd --permanent --zone=example --add-rich-rule='rule service name=ssh limit value=10/m accept'
```

__Logging Sessions__: Log all connections from a specific IP range:

```bash
firewall-cmd --permanent --zone=example --add-rich-rule='rule family=ipv4 source address="192.168.0.0/24" service name="ssh" log prefix="ssh" level="info" limit value="50/m" accept'
```

### NAT and Port Forwarding

__Masquerading a Connection__:

```
firewall-cmd --permanent --zone=example --add-masquerade
firewall-cmd --permanent --zone=example --add-rich-rule='rule family=ipv4 source address=192.168.2.0/24 masquerade'
```

__Forwarding a Port__:

```
firewall-cmd --permanent --zone=example --add-forward-port=port=22:proto=tcp:toport=2222:toaddress=10.0.0.10
firewall-cmd --permanent --zone=example --query-forward-port=port=22:proto=tcp:toport=2222:toaddress=10.0.0.10
firewall-cmd --permanent --zone=example --list-all
```

```
firewall-cmd --permanent --zone=example --add-rich-rule='rule family=ipv4 source address=192.168.2.0/24 forward-port port=22 protocol=tcp to-port=2222 to-addr=10.0.0.0/24'
```

## Samba File sharing

[Samba](https://www.samba.org/samba/docs/SambaIntro.html) is the standard Windows interoperability suite of programs for Linux and Unix. Samba is an important component to seamlessly integrate Linux/Unix Servers and Desktops into Active Directory environments. It can function either as a domain controller or as a regular domain member.

To install Samba on your CentOS 7 server run the following command: `yum install samba samba-client samba-common`. To allow Samba through your firewall, we first have to activate the corresponding service in FirewallD `firewall-cmd --zone=public --permanent --add-service=samba` and reload the firewall `firewall-cmd --reload` to load the service.

To use anonymous file sharing, we first need to create the folder from which files will be shared:

```
mkdir -p /srv/samba/anonymous
chmod -R 0775 /srv/samba/anonymous
chcon -t samba_share_t /srv/samba/anonymous
```

You can find the configuration file for the Samba service under `/etc/samba/smb.conf` (make a copy before starting to edit it: `cp /etc/samba/smb.conf /etc/samba/smb.conf.bak`). The file should look like this:

---

![Red Hat Certified Engineer](./CentOS_26.png)

---

```
[global]
workgroup = WORKGROUP
server string = Samba Server %v
netbios name = centos
security = user
map to guest = bad user
dns proxy = no

[sambashare]
comment= Samba Share
path= /srv/samba/anonymous
browsable = yes
writable = yes
guest ok = yes
read only = no
create mode = 0775
directory mode = 0775
force user = root
```

You can test your configuration with the `testparm` command:

---

![Red Hat Certified Engineer](./CentOS_27.png)

---

__Note__: The `rlimit_max: increasing rlimit_max (1024) to minimum Windows limit (16384)` warning can be ignored as Samba already adjusts the limit automatically. To do this manually add the line `* - nofile 16384` to `/etc/security/limits.conf`.
Now run the Samba Service with ```bash systemctl enable smb.service systemctl enable nmb.service systemctl restart smb.service systemctl restart nmb.service ``` The share should now be discoverable on the Windows Network Search: --- ![Red Hat Certified Engineer](./CentOS_28.png) --- As well as under Android with Apps like the [X-plore File Manager](https://play.google.com/store/apps/details?id=com.lonelycatgames.Xplore): --- ![Red Hat Certified Engineer](./CentOS_29.jpg) --- ## Email Server To use an email service we first have to set a hostname for our CentOS server. You can check for your servers hostname with `hostnamectl`: --- ![Red Hat Certified Engineer](./CentOS_30.png) --- For internal networking, change the host that is associated with the main IP address for your server found at `/etc/hosts` - make sure to set a static IP address for your server first ( -> `nano /etc/sysconfig/network-scripts/ifcfg-enp3s0`) : --- ![Red Hat Certified Engineer](./CentOS_31.png) --- To set a hostname use `hostnamectl set-hostname your-new-hostname`, e.g. : ``` hostnamectl set-hostname instar.centos.mail systemctl reboot ``` ### Installing Postfix and Dovecot [Postfix](http://www.postfix.org/postconf.5.html) architecture is based on a loose composition of services that receive emails and pass them on to other services (with services like __SMTP__ on the receiving outer edge). Postfix itself implements the core requirements to receive, route, and deliver mail, and relies on third-party extensions to do the rest. [Dovecot](https://wiki2.dovecot.org) is an open source IMAP and POP3 email server for Linux/UNIX-like systems, written with security primarily in mind. Dovecot is an excellent choice for both small and large installations. It's fast, simple to set up, requires no special administration and it uses very little memory. ``` yum install -y postfix dovecot ``` ### Setting up SSL certificate For SSL, you need a certificate and a private key. In this tutorial, we're going to assume that the certificate is saved in `/etc/ssl/certs/mailcert.pem` and the key is saved in `/etc/ssl/private/mail.key`. Make sure the key is only readable by the root user! Creating a self-signed test certificate is as easy as executing: ```bash openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout /etc/ssl/private/mail.key -out /etc/ssl/certs/mailcert.pem ``` and leaving the default values in by just hitting enter on all questions asked. Most CAs will require you to submit a certificate signing request. (CSR) You can generate one like this: ```bash openssl req -nodes -days 365 -newkey rsa:2048 -keyout /etc/ssl/private/mail.key -out mailcert.csr ``` ### Postfix Configuration Postfix has two main config files: `/etc/postfix/main.cf`, which specifies what you would think of as config options, and `/etc/postfix/master.cf`, which specifies the services postfix should run. First, configure the master.cf file (`nano /etc/postfix/master.cf`). Uncomment the "smtpd" instance called "submission" that will take mail from trusted clients for delivery to the world at large, which we don't allow for anyone else. And add options to enable SASL : ``` submission inet n - n - - smtpd -o syslog_name=postfix/submission -o smtpd_tls_wrappermode=no -o smtpd_tls_security_level=encrypt -o smtpd_sasl_auth_enable=yes -o smtpd_reject_unlisted_recipient=no -o smtpd_recipient_restrictions=permit_sasl_authenticated,reject -o milter_macro_daemon_name=ORIGINATING -o smtpd_sasl_type=dovecot -o smtpd_sasl_path=private/auth ``` The -o ... 
options override the settings that are taken from defaults or defined in the config, which we'll set later. This enables the "submission" daemon with TLS to secure the outer connection, and dovecot-mediated SASL to check the username and password of connecting clients. The important detail is one that can't be seen: The `smtpd_recipient_restrictions` is missing `reject_unauth_destination`, which is present as a default and restricts relaying.

---

![Red Hat Certified Engineer](./CentOS_32.png)

---

Now open the main configuration file `nano /etc/postfix/main.cf` and delete its content - better make a copy before you do that: `cp /etc/postfix/main.cf /etc/postfix/main.cf.bak`. Let's first set the network information:

<!-- tbc -> https://www.digitalocean.com/community/tutorials/how-to-set-up-a-postfix-e-mail-server-with-dovecot -->

## Network Interface Bonding

Use network teaming or bonding to combine two or more network interfaces for higher throughput and redundancy or load balancing.

### Types of Bonding

The behaviour of bonded interfaces depends on the __mode__ they are set to - either __hot-standby__ or __load-balancing__.

* __mode=0 (balance-rr)__: The RR stands for _Round-Robin_ and it is a pure __load-balancing__ and __fault tolerance__ mode. It transfers packets in sequential order, from the first to the last available interface.
* __mode=1 (active-backup)__: In this mode _only one interface is active_. The next one is only activated if the first one fails. The _bond's MAC address is only visible externally on one port_. This setup only provides __fault tolerance__.
* __mode=2 (balance-xor)__: All network interfaces of the bond are used. But source and destination MAC addresses are paired (the source MAC address is XOR'd with the destination MAC address) - packets sent to a specific destination will always be sent from the same source interface. You get __load-balancing__ and __fault tolerance__.
* __mode=3 (broadcast)__: The broadcast mode transmits everything on all slave interfaces and provides only __fault tolerance__.
* __mode 4 (802.3ad)__: The 802.3ad mode is known as Dynamic Link Aggregation mode. It creates aggregation groups that share the same speed and duplex settings. This mode requires a switch that supports IEEE 802.3ad Dynamic link aggregation. It offers __fault tolerance__ by using all slave interfaces.
* __mode 5 (balance-tlb)__: This is called Adaptive transmit __load balancing__. The outgoing traffic is distributed according to the current load and queue on each slave interface. Incoming traffic is received by the current slave.
* __mode 6 (balance-alb)__: This is Adaptive load balancing mode. This includes balance-tlb + receive load balancing (rlb) for IPV4 traffic. The receive load balancing is achieved by ARP negotiation. The bonding driver intercepts the ARP Replies sent by the server on their way out and overwrites the src hw address with the unique hw address of one of the slaves in the bond such that different clients use different hw addresses for the server.

### Enabling Bonding on CentOS 7

__Bonding__ is not enabled by default and has to be loaded using the __modprobe__ command. __modprobe__ is an intelligent command for listing, inserting as well as removing modules from the kernel. It searches in the module directory _/lib/modules/$(uname -r)_ for all the modules and related files. To insert a module, simply provide its name as follows:

```bash
modprobe bonding
```

You can add the __--first-time__ flag to be alerted if loading the module fails.
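For example - running the command a second time with the flag set fails with an error instead of returning silently:

```bash
modprobe --first-time bonding
```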
To remove a module use the __-r__ flag:

---

![Red Hat Certified Engineer](./CentOS_01.png)

---

Let's create a configuration file to have the bonding module loaded at boot by typing `nano /etc/modules-load.d/bonding.conf` and adding the following lines:

```bash
# Load the bonding kernel module at boot
bonding
```

---

![Red Hat Certified Engineer](./CentOS_02.png)

---

We can reboot the system now and check if the module is being loaded with `lsmod | grep bonding`:

---

![Red Hat Certified Engineer](./CentOS_03.png)

---

We can check which driver version the module is using with `modinfo bonding` - in our case it is _v3.7.1_:

---

![Red Hat Certified Engineer](./CentOS_04.png)

---

To create the __Bond Interface__, _cd_ into `/etc/sysconfig/network-scripts/` - this folder contains the configuration (_ifconfig_) files for all your network interfaces. As seen below, this machine has a default LAN interface `enp3s0`, which defines the LAN IP address, gateway, DNS server, etc:

---

![Red Hat Certified Engineer](./CentOS_05.png)

---

We can now create a config file for our bonding interface by typing `nano ifcfg-bond0`:

```
DEVICE=bond0
TYPE=Bond
NAME=bond0
BONDING_MASTER=yes
BOOTPROTO=none
ONBOOT=yes
IPADDR=192.168.2.110
PREFIX=24
NETMASK=255.255.255.0
GATEWAY=192.168.2.1
DNS1=192.168.2.1
BONDING_OPTS="mode=1 miimon=100"
```

The interface is now set up to be loaded on boot, and the boot protocol is set to none to _deactivate DHCP_. Make sure to change the network information according to your local network setup. The type of bonding is set to __Mode 1__ as specified earlier. __miimon__ specifies the MII link monitoring frequency in milliseconds. This determines how often the link state of each slave is inspected for link failures. A value of zero disables MII link monitoring. A value of 100 is a good starting point.

---

![Red Hat Certified Engineer](./CentOS_06.png)

---

We now have to add the NIC bonding inside our network interface configuration file `nano ifcfg-enp3s0` - we need to add the following lines:

```
MASTER=bond0
SLAVE=yes
```

As well as remove the IP configuration, which is now handled by our bonding interface:

---

![Red Hat Certified Engineer](./CentOS_07.png)

---

We now have to repeat this step for the secondary network interface of the system - which in my case is a wifi interface on a different network `ifcfg-TP-Link_BB2D_5G` and gave me some problems. _The interface was not added to the bond with the configuration below - the installation later worked on a virtual machine with 2 LAN interfaces that were on the same network. Right now I am not sure what the limitation is that I hit here - is the Wifi module deactivated once Ethernet is connected?_:

---

![Red Hat Certified Engineer](./CentOS_08.png)

---

We can now restart the network service with `systemctl restart network.service` and check the status with `systemctl status network.service`:

---

![Red Hat Certified Engineer](./CentOS_09.png)

---

The network service is back up and we can check if the Master/Slave configuration worked by typing `ip link show`:

---

![Red Hat Certified Engineer](./CentOS_10.png)

---

As we can see the configuration is working - the Ethernet interface is in a slave configuration to our `bond0` interface. But the Wifi interface __was not added__.
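Besides `ip link show`, the bonding driver also exposes its state through sysfs - a quick way to check the configured mode and the currently active slave (assuming the bond is named `bond0` as above):

```bash
cat /sys/class/net/bond0/bonding/mode
cat /sys/class/net/bond0/bonding/active_slave
```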
We can get more information about our bond interface under `cat /proc/net/bonding/bond0`:

---

![Red Hat Certified Engineer](./CentOS_11.png)

---

### Configure multiple IP addresses for bond0

Right now the bonding interface uses only one IP address - to assign multiple IP addresses we need to create an alias for the bond0 interface. To create an alias for bond0, copy the existing configuration file (ifcfg-bond0) to a new configuration file (ifcfg-bond0:1) and edit the file:

```bash
cp ifcfg-bond0 ifcfg-bond0:1
nano ifcfg-bond0:1
```

---

![Red Hat Certified Engineer](./CentOS_12.png)

---

As you see we made the following changes to the configuration file to assign `192.168.2.118` as a secondary IP address to our bond:

```
DEVICE=bond0:1
IPADDR=192.168.2.118
```

Save your changes and reload the network service `systemctl restart network`.

## Network Interface Teaming

The term __Network Teaming__ has been chosen to refer to this new implementation of the concept. The existing bonding driver is unaffected - Network Teaming is offered as an alternative in Red Hat Enterprise Linux 7 and does not replace bonding. It is implemented with a small kernel driver and a user-space daemon, __teamd__. Software modules, called runners, implement load balancing and active-backup logic, such as roundrobin. The following runners are available to teamd:

| Runner | Explanation |
|---|---|
| broadcast | A simple runner which transmits packets from all ports |
| roundrobin | A simple runner which transmits packets in a round-robin fashion from each of the ports. |
| activebackup | This is a failover runner which watches for link changes and selects an active port for data transfers |
| loadbalance | This runner monitors traffic and uses a hash function to try to reach a perfect balance when selecting ports for packet transmission. |
| lacp | Implements the 802.3ad Link Aggregation Control Protocol. Can use the same transmit port selection possibilities as the load balance runner. |

We can install __teamd__ with the `yum -y install teamd` command - but it will most likely already be installed on your CentOS 7 server. Now go to `cd /etc/sysconfig/network-scripts` and create a file __ifcfg-team0__:

```
DEVICE=team0
NAME=team0
DEVICETYPE=Team
TEAM_CONFIG='{"runner": {"name": "activebackup"}, "link_watch": {"name": "ethtool"}}'
BOOTPROTO=none
IPADDR=192.168.2.110
PREFIX=24
GATEWAY=192.168.2.1
DEFROUTE=yes
IPV4_FAILURE_FATAL=no
IPV6INIT=yes
IPV6_AUTOCONF=yes
IPV6_DEFROUTE=yes
IPV6_FAILURE_FATAL=no
IPV6_ADDR_GEN_MODE=stable-privacy
ONBOOT=yes
```

---

![Red Hat Certified Engineer](./CentOS_13.png)

---

We now need to add three lines to our Ethernet interface config file `nano ifcfg-enp3s0`:

```
TEAM_MASTER=team0
TEAM_PORT_CONFIG='{"prio":100}'
DEVICETYPE=TeamPort
```

---

![Red Hat Certified Engineer](./CentOS_14.png)

---

Now edit your secondary network interface by adding the following 3 lines - note that the __Priority__ is set to a lower value than in our first interface above:

```
TEAM_MASTER=team0
TEAM_PORT_CONFIG='{"prio":99}'
DEVICETYPE=TeamPort
```

We can now load our new team configuration by typing `ifdown ifcfg-team0; ifup ifcfg-team0`. The teaming can be verified with `teamnl team0 ports` and `teamdctl team0 state`.
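To see the activebackup runner actually fail over, you can take the currently active port down and watch the state change - a quick test, assuming `enp3s0` is the active port (run it from a local console, not over SSH on that interface):

```bash
ip link set enp3s0 down
teamdctl team0 state
ip link set enp3s0 up
```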
This process can be simplified by using __nmcli__:

```
nmcli con add type team con-name team0 ifname team0 ip4 192.168.2.110 gw4 192.168.2.1
nmcli con add type team-slave con-name enp0s3 ifname enp0s3 master team0
```<file_sep>---
date: "2018-11-23"
title: "Node-RED Flows and how to Import them"
categories:
- IoT
- Node-RED
---

![Abashiri, Japan](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->
- [Import the Flow into Node-RED](#import-the-flow-into-node-red)
<!-- /TOC -->

Node-RED allows you to export/import your flows in a JSON format. This makes it very easy to share your work.

## Import the Flow into Node-RED

Download this JSON data into a text editor and edit the default parameters - IP addresses, logins, etc. - to match your personal setup. You can simply search & replace them. To import the flow to your Node-RED installation, open the hamburger menu in the top right, choose __Import__ and __Clipboard__:

![Alarmserver Queries in Node-RED](./Node-RED_Flow_Import_01.png)

Copy & paste the data into the text field and click __Import__:

![Alarmserver Queries in Node-RED](./Node-RED_Flow_Import_02.png)

And you are all set!<file_sep>// Grab the DOM elements the script works with
const btn = document.querySelector("button");
const output = document.querySelector("#output");
const intake = document.querySelector("input");
// Random User Generator API endpoint
const baseUrl = "https://randomuser.me/api/";

btn.addEventListener("click", getInput);

// Read the requested number of users from the input field and fetch them
function getInput() {
    let userNumber = intake.value;
    let url = baseUrl + "?results=" + userNumber;
    fetch(url)
        .then(function (response) {
            return response.json();
        })
        .then(function (data) {
            outputHTML(data);
        });
}

// Append the last and first name of every returned user to the output element
function outputHTML(data) {
    console.log(data.results);
    for (let i = 0; i < data.results.length; i++) {
        output.innerHTML += "<br>" + data.results[i].name.last + ", " + data.results[i].name.first + "<br>";
    }
}
<file_sep>---
date: "2019-06-10"
title: "Windows Server 2019 - Scripting and Sharing"
categories:
- Windows
---

![Wan Chai, Hong Kong](./photo-fd6d_gj62544ethg4_d.jpg)

<!-- TOC -->
- [Windows Powershell](#windows-powershell)
- [Running Powershell Commands](#running-powershell-commands)
- [Powershell Scripting](#powershell-scripting)
- [Creating a Samba Share](#creating-a-samba-share)
<!-- /TOC -->

## Windows Powershell

We can access the Microsoft Windows Powershell directly from the [Windows Admin Center](/getting-started-with-windows-server-2019#windows-admin-center). Connect the Admin Center to your Windows Server and search for __Powershell__ under __Tools__ - you will be asked to log in with your account.

### Running Powershell Commands

You can run all [Powershell Commands](https://devblogs.microsoft.com/scripting/table-of-basic-powershell-commands/) directly from this web interface, e.g. creating a new user on your server with `New-LocalUser -Name Peter`:

![Windows Server 2019](./Windows_Server_2019_01.png)

Search for __Local user & groups__ under tools inside the Admin Center and _right-ctrl-click_ it to open the user management in a new tab.
You will see that the user has been created:

![Windows Server 2019](./Windows_Server_2019_02.png)

Working with the filesystem works the same way - listing directories, creating folders and files and copying them to other locations:

```powershell
Get-ChildItem -Path C:\
New-Item -Path 'C:\scripts' -ItemType Directory
New-Item -Path 'C:\scripts\test' -ItemType Directory
New-Item -Path 'C:\scripts\test\helloworld.ps1' -ItemType File
Copy-Item -Path 'C:\scripts\test\helloworld.ps1' -Destination 'C:\scripts\hello.ps1'
Remove-Item -Path 'C:\scripts\test\helloworld.ps1'
```

![Windows Server 2019](./Windows_Server_2019_03.png)

Run the following commands to get an overview of all processes and services:

```powershell
Get-Process
Get-Service
```

### Powershell Scripting

We can also run local shell scripts through Powershell on our server. Those scripts can be stored on our server with the `.ps1` file extension. Here is a __Hello World__:

```powershell
echo 'Hello World'
sleep 10
```

Save the file with the name `helloworld.ps1`, navigate into the directory and run the script as `.\helloworld.ps1`:

![Windows Server 2019](./Windows_Server_2019_04.png)

__Hello World__ will be displayed for 10 seconds and then disappear.

An example for a useful shell script would be:

```powershell
# Simple wrapper class for the directories we want to back up
Class DirToBackup {
    [String]$path

    DirToBackup([String]$path) {
        $this.path = $path
    }
}

# File that lists everything that should be excluded from the backup
$defaultListOfExcluded = "C:\inetpub\wwwroot\just_testing\listOfBackupExcluded.txt"
# Source and target base paths
$pathFromPrefix = "C:\inetpub\wwwroot\just_testing\test_data\"
$pathToPrefix = "C:\inetpub\wwwroot\just_testing\backup_data\"

Write-Output "Plug external disk drive."
pause

# Directories below the source path that should be backed up
$dirsToBackup = @(
    New-Object DirToBackup "backup"
    New-Object DirToBackup "development"
)

# Create each target directory and copy the changed files over
$dirsToBackup | ForEach-Object {
    mkdir -Path $($pathToPrefix + $_.path) -Force
    xcopy $($pathFromPrefix + $_.path) $($pathToPrefix + $_.path) /D /S /Y /H /EXCLUDE:$defaultListOfExcluded
}

pause
```

The file `listOfBackupExcluded.txt` in the same directory as our script lists all files, file extensions and directories that we want to exclude from our backup:

```
\build\
\.gitignore\
.secret\
\notes
\node_modules\
```

This will exclude the directories `build` and `node_modules`, the `.gitignore` file, every file with the extension `.secret` and files whose names start with `notes`. The script will then copy the content of the directory `test_data` to `backup_data`. Ideally `backup_data` would be on an external drive - which is why we added a prompt for the user to plug in the backup drive. But for testing we will leave both directories on the same hard drive.

The main part of the script is a loop that goes through all `dirsToBackup` elements and executes the `mkdir` and `xcopy` functions. `mkdir` creates the directories in the target location. The `-Force` flag prevents the script from failing when a directory already exists.

There are a few `xcopy` flags that are very important:

* __/D__ — Copies only those files whose source time is newer than the destination time.
* __/S__ — Copies directories and subdirectories except empty ones.
* __/Y__ — Suppresses prompting to confirm you want to overwrite an existing destination file.
* __/H__ — Copies hidden and system files also.

![Windows Server 2019](./Windows_Server_2019_05.png)

## Creating a Samba Share

This time we want to use the [Microsoft Remote Desktop App](https://www.microsoft.com/en-us/p/microsoft-remote-desktop/9wzdncrfj3ps) to connect to our Windows Server and enable the __Samba Share Functionality__.
To be able to connect we first have to ensure that __Remote Desktop__ is enabled on our server: ![Windows Server 2019](./Windows_Server_2019_06.png) Now we can __Add a Desktop__ on our host machine inside __Remote Desktop__. Type in the server name or IP address and add the user account you want to use to connect to the remote server: ![Windows Server 2019](./Windows_Server_2019_07.png) Then click to connect to the remote server. If the connection fails, try to ping your server from your command prompt, e.g. `ping WINSERVER2019`. You should be able to get a ping here - if not you need to troubleshoot your network: ![Windows Server 2019](./Windows_Server_2019_08.png) Accept the self-signed certificate we used for our server and you should be able to see the server desktop in front of you: ![Windows Server 2019](./Windows_Server_2019_10.png) On the server right-click the directory you want to share, choose __Properties__, __Sharing__ __Advanced Sharing__: ![Windows Server 2019](./Windows_Server_2019_11.png) Name the share and click on __Permissions__: ![Windows Server 2019](./Windows_Server_2019_12.png) Add the user you want to use to connect to the share: ![Windows Server 2019](./Windows_Server_2019_13.png) And give him the __Full Control__ over the share: ![Windows Server 2019](./Windows_Server_2019_14.png) Everything is set up and you should be able to discover the share from your host machine and connect to it with the user you specified: ![Windows Server 2019](./Windows_Server_2019_15.png) Back on your host machine right-click __This PC__ and click on __Add a network location__ and follow the assistant: ![Windows Server 2019](./Windows_Server_2019_16.png) ![Windows Server 2019](./Windows_Server_2019_17.png) ![Windows Server 2019](./Windows_Server_2019_18.png) Type in the network address as follows: 1. 2 x `\` 2. Your server name in capital letters, e.g. `WINSERVER2019` followed by a `\` 3. The name you gave the share, e.g. `WinServer2019` ![Windows Server 2019](./Windows_Server_2019_19.png) After confirming you will be asked to sign in - use the user login that you specified in the share options and click on __Finish__: ![Windows Server 2019](./Windows_Server_2019_20.png) ![Windows Server 2019](./Windows_Server_2019_21.png) The share should now be listed under __Network__: ![Windows Server 2019](./Windows_Server_2019_22.png)<file_sep>--- date: "2017-07-22" title: "Food Caloric Table App" categories: - Javascript - React --- import GifContainer from "../../src/components/ImageContainer"; ![Port Vila, Vanuatu](./photo-34221441540_627d018c4b_o.png) > This app is based on the [food-lookup-demo](https://www.fullstackreact.com/articles/using-create-react-app-with-a-server/) from fullstackreact.com. > > Our sample app will be a simple food nutrition lookup table. The data driving the app is supplied by the USDA's [National Nutrient Database](https://www.ars.usda.gov/northeast-area/beltsville-md/beltsville-human-nutrition-research-center/nutrient-data-laboratory/docs/usda-national-nutrient-database-for-standard-reference/). 
[Github](https://github.com/mpolinowski/caloric-burn)

<!-- TOC -->
- [Server Setup](#server-setup)
- [Server Dependencies Installation](#server-dependencies-installation)
- [Test the Server](#test-the-server)
- [Frontend Client Setup](#frontend-client-setup)
- [create-react-app](#create-react-app)
- [react-scripts](#react-scripts)
- [Concurrently](#concurrently)
- [React Interface](#react-interface)
- [Setting up the proxy](#setting-up-the-proxy)
- [Test your App](#test-your-app)
- [Deployment](#deployment)
<!-- /TOC -->

First, git clone this repository and cd into that directory. This is where the server lives (server.js). Inside of the db folder is a sqlite database containing the nutrition data.

# Server Setup

We now use [Node.js](https://nodejs.org/en/) and the Node Package Manager to install all dependencies for our app. Make sure that you install the latest version of node first. Then use your Terminal, or [Git Bash](https://git-scm.com) under Windows, to run the following npm commands.

## Server Dependencies Installation

Use

```bash
npm install
```

to install all dependencies & dev-dependencies in a development environment (the default). Later you can use

```bash
npm install --production
```

or set the NODE_ENV environment variable to production to avoid installing dev-dependencies.

## Test the Server

Let's boot the server:

```bash
npm run server
```

This server provides a single API endpoint, /api/food. It expects a single parameter, q, the food we are searching for. You can test it with your browser or use the CURL command inside your console:

```bash
curl localhost:3001/api/food?q=mcflurry
```

Now that we understand how this endpoint works, let's build the front-end application. Kill the server with CTRL+C.

# Frontend Client Setup

Ensure that you have create-react-app installed globally:

```bash
npm install -g create-react-app
```

## create-react-app

At the top-level directory of the project we'll create our client app. We want the React app to be in a folder called client, so we'll just use that name in the create-react-app command (if you are working inside a clone of this repository, please make sure to move the existing /client folder out of the way first! Don't delete it completely - you will need some of its files later on):

```bash
create-react-app client
```

This creates a new directory with the following file structure:

```bash
ls client

README.md
node_modules/
package.json
public/
src/
```

Taking a look at client/package.json, we can see that we just installed react, react-dom, and react-scripts to the /client directory:

```json
{
  "name": "client",
  "version": "0.1.0",
  "private": true,
  "dependencies": {
    "react": "^15.6.1",
    "react-dom": "^15.6.1",
    "react-scripts": "1.0.12"
  },
  "scripts": {
    "start": "react-scripts start",
    "build": "react-scripts build",
    "test": "react-scripts test --env=jsdom",
    "eject": "react-scripts eject"
  }
}
```

Inside that directory, we can now run several commands:

> - npm start
> - Starts the development server.
>
> - npm run build
> - Bundles the app into static files for production.
>
> - npm test
> - Starts the test runner.
>
> - npm run eject
> - Removes this tool and copies build dependencies, configuration files and scripts into the app directory. If you do this, you can’t go back!

### react-scripts

react-scripts is an NPM package specifically for use with create-react-app. It's the "black box" which contains the essentials:

> - Dependencies
> - Like Babel, ESLint, and Webpack.
> - Configuration > - Config files for Webpack, Babel and ESLint, both for development and production. > - Scripts > - For instance, the command react-scripts start runs a script shipped with this package. It's responsible for ultimately booting the > Webpack development server. To see it in action, we can run npm start from inside of this folder: ```bash cd client && npm start ``` This will launch a Webpack dev server and should also open localhost:3000 in your browser: ![Javascript Tutorial](./boilerplate-page.png) We have our API server in the top-level directory and we were able to boot that. And we have our client app down here in client and we're able to boot a server for this. So the user will direct their browser to localhost:3000, hitting the Webpack dev server. But then how will the React app communicate with our API server? create-react-app provides a mechanism for working with an API server in development. We can have the Webpack development server proxy requests intended for our API server, like this: ![Javascript Tutorial](./flow-diagram.png) In this flow, React makes an API request to localhost:3000, the Webpack development server. And then the development server simply proxies that request to the API server, negating any CORS issues. we need to: 1. launch both the Webpack dev server and the API server in order to run the app locally. 1. we need to get the Webpack dev server to proxy requests intended for our API server. ## Concurrently [Concurrently](https://github.com/kimmobrunfeldt/concurrently) is a utility for running multiple processes. Taking a look at our package.json file inside the top-level directory will show you that we already installed Concurrently as a dev-dependency earlier. We want concurrently to execute two commands, one to boot the API server and one to boot the Webpack development server. You boot multiple commands by passing them to concurrently in quotes like this: ```bash concurrently "npm run server" "cd client && npm start" ``` However, the && operator is not cross-platform (doesn't work on Windows). As such, we've included a start-client.js script with the project. This script will boot the client from the top-level directory in a manner that is cross-platform. Ultimately, we'll want to boot concurrently like this: ```bash concurrently "npm run server" "npm run client" ``` This will be our start command. Let's add the start and client commands to our package.json now: ```json "scripts": { "start": "concurrently \"npm run server\" \"npm run client\"", "server": "node server.js", "client": "node start-client.js" }, ``` For start, we execute both commands, escaping the quotes because we're in a JSON file. For client, we execute the start-client.js script with node. Now we can boot both servers by running **npm start**. ## React Interface Now we will add the food lookup React components which will make requests against our API server. The components are located in the **/client/src** folder. You can copy them over the auto-generated content by create-react-app, overwriting the original *App.js*, *index.js* and *index.css* files. We use [Semantic UI](https://semantic-ui.com/introduction/getting-started.html) for styling the app - the files can be found in **/client/src/semantic** as well as **/client/semantic.json** inside this repository - just copy them into your client directory. It's loaded inside of **/client/src/index.js**. **/client/src/index.css** contains a few margins. 
Changing the value of the search bar (the FoodSearch component) ultimately calls search() on Client. **Client.js** contains a Fetch call to our API endpoint: ```javascript function search(query, cb) { return fetch(`api/food?q=${query}`, { accept: "application/json" }) .then(checkStatus) .then(parseJSON) .then(cb); } ``` This is the one touch point between our React web app and the API server. Notice how the URL *does not include* the base **localhost:3001**. That's because, as noted earlier, we want this request to be made to the Webpack development server. Thanks to the configuration established by create-react-app, the Webpack dev server will infer what traffic to proxy. It will proxy a request if the URL is not recognized or if the request is not loading static assets (like HTML/CSS/JS). We just need to instruct Webpack to use the proxy. ## Setting up the proxy To have the Webpack development server proxy our API requests to our API server, we just need to add the following line to client/package.json: ```json // Inside client/package.json "proxy": "http://localhost:3001/", ``` # Test your App Our React app is ready and in place in client/. We have concurrently setup to boot both our Webpack dev server and our API server together. And we've specified the route that Webpack should proxy API traffic to. Let's boot both servers: ```bash npm start ``` <GifContainer gifUrl="/assets/gif/usage-demo.gif" alt="Javascript Tutorial" /> # Deployment create-react-app comes with a build command that you can use to create a static bundle of the React app: ```bash cd client npm run build ``` This produces a build/ folder which you can serve with any static asset host. It contains all your app's HTML, JavaScript, and CSS files. This command is intended for production use. It does all kinds of optimization, to create a fast loading static page.<file_sep>--- date: "2019-09-17" title: "Creating Magento 2 Modules" categories: - Magento --- ![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Creating Custom Modules](#creating-custom-modules) - [Module Install Scripts](#module-install-scripts) - [Module Updates](#module-updates) - [Model Layer](#model-layer) - [Controller Layer](#controller-layer) - [View Layer](#view-layer) - [Blocks](#blocks) <!-- /TOC --> ![Magento 2 Modules](./Magento2_Modules_00.png) ## Creating Custom Modules The default directory for Magento 2 modules is `/var/www/html/magento/app/code`. If the theme you are using already comes with custom module you will be able to find them there. Start by creating a folder there for your personal modules and add another folder inside this one named __SampleModule__. To create a new module start with a `registration.php` file: ```php <?php \Magento\Framework\Component\ComponentRegistrar::register( \Magento\Framework\Component\ComponentRegistrar::MODULE, 'INSTAR_SampleModule', __DIR__ ); ``` This tells Magento to register your component, that is of type __Module__ and will be called `INSTAR_SampleModule`. Now continue by creating a directory `etc` inside the __SampleModule__ folder and add a `module.xml` file: ```xml <?xml version="1.0"?> <config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:Module/etc/module.xsd"> <module name="INSTAR_SampleModule" setup_version="1.0.0"> <sequence> <module name="Magento_Catalog"/> </sequence> </module> </config> ``` This file declares our __INSTAR\_SampleModule__ and it's dependency on the __Magento\_Catalog__ module. 
This makes sure that it is ensured that the latter is installed and loaded before Magento tries loading our module. To enable the module go to the Magento root and run the command: ```bash cd /var/www/html/magento php bin/magento module:enable INSTAR_SampleModule The following modules have been enabled: - INSTAR_SampleModule To make sure that the enabled modules are properly registered, run 'setup:upgrade'. Cache cleared successfully. Generated classes cleared successfully. Please run the 'setup:di:compile' command to generate classes. Info: Some modules might require static view files to be cleared. To do this, run 'module:enable' with the --clear-static-content option to clear them. ``` You can verify that the module is now enabled by: ```bash cat /var/www/html/magento/app/etc/config.php <?php return [ 'modules' => [ ... 'INSTAR_SampleModule' => 1, ... ] ]; ``` The order in which modules are listed here, is the order in which they are loaded - modules lower in the list can modify configurations set by earlier modules. We now have to upgrade our database. The following command will go through all loaded modules and check if changes to the database have to be made: ```bash php bin/magento setup:upgrade ``` We can check our database to see if our module was added to the `setup_module` table: ```sql mysql> USE magento; Reading table information for completion of table and column names You can turn off this feature to get a quicker startup with -A Database changed mysql> SELECT * FROM setup_module WHERE module = 'INSTAR_SampleModule'; +---------------------+----------------+--------------+ | module | schema_version | data_version | +---------------------+----------------+--------------+ | INSTAR_SampleModule | 1.0.0 | 1.0.0 | +---------------------+----------------+--------------+ 1 row in set (0.00 sec) ``` ## Module Install Scripts For installing a database schema and subsequently data for our module, we need to create installation scripts. For this create a folder called `Setup` next to the `etc` folder in your module root directory and add a file called `InstallSchema.php`: ```php <?php namespace INSTAR\SampleModule\Setup; use Magento\Framework\Setup\InstallSchemaInterface; use Magento\Framework\Setup\ModuleContextInterface; use Magento\Framework\Setup\SchemaSetupInterface; use Magento\Framework\DB\Ddl\Table; class InstallSchema implements InstallSchemaInterface { /** * {@inheritdoc} */ public function install(SchemaSetupInterface $setup, ModuleContextInterface $context) { $setup->startSetup(); $table = $setup->getConnection()->newTable( $setup->getTable('instar_sample_item') )->addColumn( 'id', Table::TYPE_INTEGER, null, ['identity' => true, 'nullable' => false, 'primary' => true], 'Item ID' )->addColumn( 'name', Table::TYPE_TEXT, 255, ['nullable' => false], 'Item Name' )->addIndex( $setup->getIdxName('instar_sample_item', ['name']), ['name'] )->setComment( 'Sample Items' ); $setup->getConnection()->createTable($table); $setup->endSetup(); } } ``` This defines the `instar_sample_item` database table for our module - consisting of an integer `id` as primary key and `name` of type text with a max. length of 255 characters. We can now add a second script to fill our new table with some default data for our module. 
For this create a second file called `InstallData.php`: ```php <?php namespace INSTAR\SampleModule\Setup; use Magento\Framework\Setup\InstallDataInterface; use Magento\Framework\Setup\ModuleContextInterface; use Magento\Framework\Setup\ModuleDataSetupInterface; class InstallData implements InstallDataInterface { /** * {@inheritdoc} */ public function install(ModuleDataSetupInterface $setup, ModuleContextInterface $context) { $setup->startSetup(); $setup->getConnection()->insert( $setup->getTable('instar_sample_item'), [ 'name' => 'Item 1' ] ); $setup->getConnection()->insert( $setup->getTable('instar_sample_item'), [ 'name' => 'Item 2' ] ); $setup->endSetup(); } } ``` This will add two items with the names `Item 1` and `Item 2` to our table. The `id` field will be created by MySQL automatically. To try the installation scripts we first have to delete the MySQL table that has already been created in the earlier step: ```sql DELETE FROM setup_module WHERE module = 'INSTAR_SampleModule'; ``` And re-run the Magento database upgrade script to add the module: ```bash php bin/magento setup:upgrade ``` Once the upgrade is done your scripts should have been executed and the `instar_sample_item` table been created: ```sql mysql> SELECT * FROM instar_sample_item; +----+--------+ | id | name | +----+--------+ | 1 | Item 1 | | 2 | Item 2 | +----+--------+ 2 rows in set (0.00 sec) ``` ## Module Updates To update our database table during a module update we need to create two more scripts - `UpgradeSchema.php` and `UpgradeData.php`. Let's start with the first: ```php <?php namespace INSTAR\SampleModule\Setup; use Magento\Framework\DB\Ddl\Table; use Magento\Framework\Setup\UpgradeSchemaInterface; use Magento\Framework\Setup\ModuleContextInterface; use Magento\Framework\Setup\SchemaSetupInterface; class UpgradeSchema implements UpgradeSchemaInterface { /** * {@inheritdoc} */ public function upgrade(SchemaSetupInterface $setup, ModuleContextInterface $context) { $setup->startSetup(); if (version_compare($context->getVersion(), '1.0.1', '<')) { $setup->getConnection()->addColumn( $setup->getTable('instar_sample_item'), 'description', [ 'type' => Table::TYPE_TEXT, 'nullable' => true, 'comment' => 'Item Description' ] ); } $setup->endSetup(); } } ``` This script extends the `instar_sample_item` with an optional column `description` of type text. The script will first check if the installed version is smaller `1.0.1` and only add the column if this condition is true. Now we can add our default data to the new column with the `UpgradeData.php` script: ```php <?php namespace INSTAR\SampleModule\Setup; use Magento\Framework\Setup\ModuleContextInterface; use Magento\Framework\Setup\ModuleDataSetupInterface; use Magento\Framework\Setup\UpgradeDataInterface; class UpgradeData implements UpgradeDataInterface { /** * {@inheritdoc} */ public function upgrade(ModuleDataSetupInterface $setup, ModuleContextInterface $context) { $setup->startSetup(); if (version_compare($context->getVersion(), '1.0.1', '<')) { $setup->getConnection()->update( $setup->getTable('instar_sample_item'), [ 'description' => 'Default description' ], $setup->getConnection()->quoteInto('id = ?', 1) ); } $setup->endSetup(); } } ``` This will add the string `Default description` into the `description` field of __Item 1__ (`id=?, 1`). The second item is not going to receive a default description, which is ok since we defined this field as optional (`'nullable' => true`). 
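The `quoteInto('id = ?', 1)` helper safely binds the value `1` into the condition string, so the update above boils down to this plain SQL statement (shown here only to illustrate the binding):

```sql
UPDATE instar_sample_item SET description = 'Default description' WHERE id = 1;
```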
Again, this script only executes if the installed version is older than version `1.0.1`. To simulate that we just updated the code for our module, we need to edit the `./etc/module.xml` file and change the setup version to `1.0.1`:

```xml
<?xml version="1.0"?>
<config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:Module/etc/module.xsd">
    <module name="INSTAR_SampleModule" setup_version="1.0.1">
        <sequence>
            <module name="Magento_Catalog"/>
        </sequence>
    </module>
</config>
```

Now we can re-run the database upgrade script. This time the install scripts won't be executed - only the update scripts should run and add the additional column to our database table `instar_sample_item`:

```bash
php bin/magento setup:upgrade
```

We can verify the update by checking the table:

```sql
mysql> SELECT * FROM instar_sample_item;
+----+--------+---------------------+
| id | name   | description         |
+----+--------+---------------------+
|  1 | Item 1 | Default description |
|  2 | Item 2 | NULL                |
+----+--------+---------------------+
2 rows in set (0.00 sec)
```

Or by reading the installed version of our module:

```sql
mysql> SELECT * FROM setup_module WHERE module = 'INSTAR_SampleModule';
+---------------------+----------------+--------------+
| module              | schema_version | data_version |
+---------------------+----------------+--------------+
| INSTAR_SampleModule | 1.0.1          | 1.0.1        |
+---------------------+----------------+--------------+
1 row in set (0.00 sec)
```

## Model Layer

The most basic model is the `ResourceModel` - a model that communicates directly with your database. To create a resource model for your module create the following file in the module root, `Model/ResourceModel/Item.php` (note the capital `I` - the file name has to match the class name, otherwise Magento's autoloader cannot find it):

```php
<?php

namespace INSTAR\SampleModule\Model\ResourceModel;

use Magento\Framework\Model\ResourceModel\Db\AbstractDb;

class Item extends AbstractDb
{
    protected function _construct()
    {
        $this->_init('instar_sample_item', 'id');
    }
}
```

To create the model we have to add another file `Item.php` one folder up, pointing to our resource model:

```php
<?php

namespace INSTAR\SampleModule\Model;

use Magento\Framework\Model\AbstractModel;

class Item extends AbstractModel
{
    protected function _construct()
    {
        $this->_init(\INSTAR\SampleModule\Model\ResourceModel\Item::class);
    }
}
```

Now we can create a collection inside the resource model in a sub-folder __Item__:

```php
<?php

namespace INSTAR\SampleModule\Model\ResourceModel\Item;

use Magento\Framework\Model\ResourceModel\Db\Collection\AbstractCollection;
use INSTAR\SampleModule\Model\Item;
use INSTAR\SampleModule\Model\ResourceModel\Item as ItemResource;

class Collection extends AbstractCollection
{
    protected $_idFieldName = 'id';

    protected function _construct()
    {
        $this->_init(Item::class, ItemResource::class);
    }
}
```

## Controller Layer

Let's start by adding routing both for our backend and frontend. For the frontend create the following file inside your module root, `./etc/frontend/routes.xml`:

```xml
<?xml version="1.0"?>
<config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:App/etc/routes.xsd">
    <router id="standard">
        <route id="instar" frontName="instar">
            <module name="INSTAR_SampleModule"/>
        </route>
    </router>
</config>
```

The router ID is set to `standard` - this means this route can be accessed on the base URL of your Magento installation __my-magento/:frontName__ e.g. `http://my-server.com/instar`.
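On the standard router the full URL pattern is `/:frontName/:controller/:action`, with missing parts defaulting to `index` - so both of the following resolve to the same action of the controller we create below (a sketch of the mapping):

```
http://my-server.com/instar               ->  frontName "instar", controller "Index", action "Index"
http://my-server.com/instar/index/index   ->  identical, spelled out explicitly
```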
And do the same for your backend routing in `./etc/adminhtml/routes.xml`: ```xml <?xml version="1.0"?> <config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:App/etc/routes.xsd"> <router id="admin"> <route id="instar" frontName="instar"> <module name="INSTAR_SampleModule"/> </route> </router> </config> ``` The router ID is set to `admin` - this means this route can be accessed on the admin URL of your Magento installation __my-magento/:adminURL/:frontName__ e.g. `http://my-server.com/admin_khf32984/instar` The controller itself is now stored in `./Controller` in the module root directory. Inside this folder create another directory called `Index` and add a file `Index.php` to control our frontend: ```php <?php namespace INSTAR\SampleModule\Controller\Index; use Magento\Framework\Controller\ResultFactory; class Index extends \Magento\Framework\App\Action\Action { public function execute() { /** @var \Magento\Framework\Controller\Result\Raw $result */ $result = $this->resultFactory->create(ResultFactory::TYPE_RAW); $result->setContents('Hello from the Frontend!'); return $result; } } ``` For the backend create another folder right next to `Index` and add the following file `./Controller/Adminhtml/Index/Index.php`: ```php <?php namespace INSTAR\SampleModule\Controller\Adminhtml\Index; use Magento\Framework\Controller\ResultFactory; class Index extends \Magento\Backend\App\Action { public function execute() { /** @var \Magento\Framework\Controller\Result\Raw $result */ $result = $this->resultFactory->create(ResultFactory::TYPE_RAW); $result->setContents('Hello from the Backend!'); return $result; } } ``` Now flush the Magento cache (all XML files are cached by Magento - changing them always requires a flush): ```bash bin/magento cache:flush ``` To test that the routing is working we can open the URLs inside our browser: * http://my-server/instar * http://my-server/admin/instar __Note__: a security feature in Magento will prevent you from accessing the Admin URL - for testing purposes you can deactivate this feature (re-activate afterwards): ![Magento 2 Modules](./Magento2_Modules_01.png) ![Magento 2 Modules](./Magento2_Modules_02.png) ## View Layer ### Blocks The default directory for building blocks is `./Block` inside your module root. Create this folder and add a file called `Hello.php`: ```php <?php namespace INSTAR\SampleModule\Block; use Magento\Framework\View\Element\Template; use INSTAR\SampleModule\Model\ResourceModel\Item\Collection; use INSTAR\SampleModule\Model\ResourceModel\Item\CollectionFactory; class Hello extends Template { private $collectionFactory; public function __construct( Template\Context $context, CollectionFactory $collectionFactory, array $data = [] ) { $this->collectionFactory = $collectionFactory; parent::__construct($context, $data); } /** * @return \INSTAR\SampleModule\Model\Item[] */ public function getItems() { return $this->collectionFactory->create()->getItems(); } } ``` This block uses a template that has to be created in `./view/frontend/templates`. Create those folders and add `hello.phtml`: ```php <?php /** @var \INSTAR\SampleModule\Block\Hello $block */ ?> <?php foreach ($block->getItems() as $item): ?> <p> <?php echo $item->getName(); ?>: <?php echo $item->getDescription(); ?> </p> <?php endforeach; ?> ``` This template uses our Hello building block that provides the collection of our database table. It then loops over the results and prints the description field of each item inside the table. 
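Note that `getName()` and `getDescription()` are not declared anywhere in our `Item` class - `AbstractModel` resolves such calls through its magic `__call()` handler, which maps camel-case getters to the underlying table columns. The two calls below are therefore equivalent:

```php
<?php
// magic getter, resolved at runtime by AbstractModel::__call()
$item->getDescription();

// explicit equivalent: read the raw 'description' column value
$item->getData('description');
```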
Now we have to define where we want to render the output. This is done in `./view/frontend/layout/instar_index_index.xml` - the file name is composed of the route ID you set in `./etc/frontend/routes.xml` ("instar"), the controller folder `./Controller/Index` ("index") and the action class `./Controller/Index/Index.php` ("index"):

```xml
<?xml version="1.0"?>
<page xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" layout="1column" xsi:noNamespaceSchemaLocation="urn:magento:framework:View/Layout/etc/page_configuration.xsd">
    <body>
        <referenceContainer name="content">
            <block name="instar_hello" class="INSTAR\SampleModule\Block\Hello" template="hello.phtml"/>
        </referenceContainer>
    </body>
</page>
```

Now flush the Magento cache (all XML files are cached by Magento - changing them always requires a flush):

```bash
bin/magento cache:flush
```

If you now visit the frontend URL `/instar` in your browser, you will see that nothing has changed. This is because our controller `Controller/Index/Index.php` is still set to output RAW data - `ResultFactory::TYPE_RAW` - which means it ignores the layout we just created. Let's change that:

```php
<?php

namespace INSTAR\SampleModule\Controller\Index;

use Magento\Framework\Controller\ResultFactory;

class Index extends \Magento\Framework\App\Action\Action
{
    public function execute()
    {
        return $this->resultFactory->create(ResultFactory::TYPE_PAGE);
    }
}
```

![Magento 2 Modules](./Magento2_Modules_03.png)<file_sep>---
date: "2019-08-04"
title: "MQTT Android Dashboards"
categories:
  - MQTT
  - IoT
---

![Shanghai, China](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [MQTT Dash (IoT, Smarthome)](#mqtt-dash-iot-smarthome)
- [Mqtt Dashboard IoT and Node-RED controller](#mqtt-dashboard-iot-and-node-red-controller)

<!-- /TOC -->

There are a couple of MQTT apps available on the [Google Play Store](https://play.google.com/store/apps/collection/cluster?clp=ggEGCgRNUVRU:S:ANO1ljJ06xs&gsr=CgmCAQYKBE1RVFQ%3D:S:ANO1ljJL5rU) that allow you to control your INSTAR Full HD IP camera. They all offer, more or less, the same functions and similar interfaces. In the following we are going to take a look at two of them - [MQTT Dash (IoT, Smarthome)](https://play.google.com/store/apps/details?id=net.routix.mqttdash) and [MQTT Dashboard - IoT and Node-RED controller](https://play.google.com/store/apps/details?id=com.app.vetru.mqttdashboard).

![INSTAR MQTT on Android](./MQTT_Dash_Android_01.png)

## MQTT Dash (IoT, Smarthome)

![INSTAR MQTT on Android](./MQTT_Dash_Android_02.png)

Once you have [downloaded](https://play.google.com/store/apps/details?id=net.routix.mqttdash), installed and started the app, click on the __Plus__ icon in the top right to add your MQTT broker - in our case, our INSTAR Full HD camera.

![INSTAR MQTT on Android](./MQTT_Dash_Android_03.jpg)

First, name your broker and type in the IP address of your camera. The default port to communicate with the MQTT broker on your camera is __1883__. Add the username and password you set for your broker.

![INSTAR MQTT on Android](./MQTT_Dash_Android_04.png)

Once you have saved your settings you will see your new broker on the front page of your dashboard. Select it to continue.

![INSTAR MQTT on Android](./MQTT_Dash_Android_05.png)

We can now add a UI element to interact with our camera. In this first example we choose a __Switch/Button__.

![INSTAR MQTT on Android](./MQTT_Dash_Android_06.jpg)

After naming your switch, you have to add the MQTT topic that you want to update every time the switch is used.
In this case we choose to activate/deactivate the __Alarm Area 1__ of our camera. The corresponding MQTT topic for this function is `alarm/area1/enable`. To access this topic on our broker we have to prefix it with the keyword `instar` and the __LAN MAC address__ of our camera - in my case this is `000389888811` (you can find your camera's LAN MAC address in the web user interface). Now we have to __subscribe__ to the __status topic__ and __publish__ updates to the __command topic__ (every time a topic is updated and the corresponding function is triggered on our camera, we receive a status update from the MQTT broker confirming that everything worked). The resulting topics look like this:

* __Command Topic__: `instar/000389888811/alarm/area1/enable` (_for publishing_)
* __Status Topic__: `instar/000389888811/status/alarm/area1/enable` (_for subscriptions_)

In some cases we have to use a different __Command Topic__ that allows us to send the message payload raw, without the JSON formatting. So we are just sending `1` as the message payload instead of `{"val":"1"}`. For this we have to append `raw` to the topic that we want to update:

* __Command Topic__ (RAW): `instar/000389888811/alarm/area1/enable/raw` (_for publishing_)

The switch can have two states - area 1 is either active or not. You can check what values are available for a specific MQTT topic in our MQTT API documentation. The topic `alarm/area1/enable` can either be `{"val":"0"}` or `{"val":"1"}`. We have to assign the first one to the _OFF state_ of our switch and the second to the _ON state_.

![INSTAR MQTT on Android](./MQTT_Dash_Android_07.png)

Save those settings and repeat them for all 4 areas (tip: press & hold the switch for area 1 to clone it 3 times - then you just have to edit each clone and replace every `area1` with `area2`, `area3` or `area4`, respectively).

![INSTAR MQTT on Android](./MQTT_Dash_Android_08.png)

To show a different UI element we now want to add the alarm detection area __Sensitivity__ to our UI. For this click on __Plus__ and select __Range/progress__.

![INSTAR MQTT on Android](./MQTT_Dash_Android_09.jpg)

The sensitivity can be accessed via the MQTT topic `alarm/area1/sensitivity`. The value here can be between `{"val":"1"}` and `{"val":"100"}`. To extract the number from this JSON expression we have to define the __JSON Path__ `$.val`. Now add the minimal (1) and maximal (100) value and make sure that precision is set to `0`! Note that we are using the status topic `instar/000389888811/status/alarm/area1/sensitivity`, which gives us the current state in JSON format. For the command topic we have to use `instar/000389888811/alarm/area1/sensitivity/raw` - the __raw__ at the end means that we are able to send our data non-JSON formatted.

![INSTAR MQTT on Android](./MQTT_Dash_Android_10.png)

After saving your settings you can again clone the UI element for all 4 areas.

![INSTAR MQTT on Android](./MQTT_Dash_Android_11.png)

In a last step we can now add our camera's live image to our dashboard.
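(Side note: if a switch does not seem to react, you can watch the same topics from any PC with the Mosquitto command-line clients - a quick sketch; replace the host address and the broker credentials with your own, they are placeholders here:)

```bash
# follow every status update the camera publishes
mosquitto_sub -h 192.168.2.116 -p 1883 -u admin -P instar -t 'instar/000389888811/status/#' -v

# toggle alarm area 1 through the raw command topic
mosquitto_pub -h 192.168.2.116 -p 1883 -u admin -P instar -t 'instar/000389888811/alarm/area1/enable/raw' -m '1'
```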
![INSTAR MQTT on Android](./MQTT_Dash_Android_12.jpg) Here we simply have to add our cameras snapshot path (just add your IP address and camera login): * `http://IP-Address:Port/tmpfs/snap.jpg?usr=admin&pwd=<PASSWORD>` Snapshot (1080p) * `http://IP-Address:Port/tmpfs/auto.jpg?usr=admin&pwd=<PASSWORD>` Snapshot (320p) * `http://IP-Address:Port/tmpfs/auto2.jpg?usr=admin&pwd=<PASSWORD>` Snapshot (160p) ![INSTAR MQTT on Android](./MQTT_Dash_Android_13.png) After setting the update interval and saving your settings, you should now be able to see your cameras live image on your MQTT dashboard. If you don't like the arrangement of the UI elements on your dash, click on the lock icon in the top right. When unlocked, you are able to rearrange each element. ## Mqtt Dashboard IoT and Node-RED controller ![INSTAR MQTT on Android](./MQTT_Dashboard_Android_01.png) As mentioned above, the functions as well as the UI of all those MQTT apps is very similar. Just click to add your broker. ![INSTAR MQTT on Android](./MQTT_Dashboard_Android_02.png) Add your cameras MQTT service. ![INSTAR MQTT on Android](./MQTT_Dashboard_Android_03.png) Now add a switch (__Toogle__) to switch on/off your cameras alarm areas. ![INSTAR MQTT on Android](./MQTT_Dashboard_Android_04.png) Add the MQTT Topic and the available values this switch can have. ![INSTAR MQTT on Android](./MQTT_Dashboard_Android_05.png) For the Area sensitivity we can use the __Progress__ UI element. And again, add the MQTT Topic and the available values this switch can have. ![INSTAR MQTT on Android](./MQTT_Dashboard_Android_06.png)<file_sep>--- date: "2020-06-19" title: "Salt Mine & Orchestrate" categories: - LINUX --- ![<NAME>, Hong Kong](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [](#) <!-- /TOC --> ## <file_sep>--- date: "2019-06-13" title: "Windows Server 2019 - Docker Daemon" categories: - Windows - Docker --- ![Shanghai, China](./photo-f654_gfdgbg4e345g4_sf.png) <!-- TOC --> - [Installing Docker](#installing-docker) - [Downloading Docker Manually](#downloading-docker-manually) <!-- /TOC --> Windows containers allow users to package applications with their dependencies and leverage operating system-level virtualization to provide fast, fully isolated environments on a single system. Learn how to use Windows containers with our quick start guides, deployment guides, and samples. ## Installing Docker Docker is required in order to work with Windows containers. Docker consists of the Docker Engine and the Docker client. To install Docker, we'll use the OneGet provider PowerShell module. The provider will enable the containers feature on your machine and install Docker, which will require a reboot. 1. Install the OneGet PowerShell module. ```powershell Install-Module -Name DockerMsftProvider -Repository PSGallery -Force ``` 2. Use OneGet to install the latest version of Docker. ```powershell Install-Package -Name docker -ProviderName DockerMsftProvider ``` 3. When the installation is complete, reboot the computer. ```powershell Restart-Computer -Force ``` In countries that fears the internet you might end up with a screaming powershell at this point though: ![Windows Server 2019](./Windows_Server_2019_01.png) ### Downloading Docker Manually Use the [following guide](https://docs.docker.com/install/windows/#use-a-script-to-install-docker-ee) if you wanted to install the Docker Engine - Enterprise manually, via a script, or on air-gapped systems. 1. 
In a PowerShell command prompt, download the installer archive on a machine that has a connection. ```powershell # On an online machine, download the zip file. Invoke-WebRequest -UseBasicParsing -OutFile docker-19.03.3.zip https://download.docker.com/components/engine/windows-server/19.03/docker-19.03.3.zip ``` This installs the latest version of docker at the moment - `19.03.3`. If you need to download a specific Docker EE Engine release, all URLs can be found on this [JSON index](https://dockermsft.blob.core.windows.net/dockercontainer/DockerMsftIndex.json). 2. In a PowerShell command prompt, use the following commands to extract the archive, register, and start the Docker service. ```powershell # Stop Docker service if eralier version of Docker is already installed Stop-Service docker # Extract the archive. Expand-Archive docker-19.03.3.zip -DestinationPath $Env:ProgramFiles -Force # Clean up the zip file. Remove-Item -Force docker-19.03.3.zip # Install Docker. This requires rebooting. $null = Install-WindowsFeature containers Restart-Computer -Force # Add Docker to the path for the current session. $env:path += ";$env:ProgramFiles\docker" # Optionally, modify PATH to persist across sessions. $newPath = "$env:ProgramFiles\docker;" + [Environment]::GetEnvironmentVariable("PATH", [EnvironmentVariableTarget]::Machine) [Environment]::SetEnvironmentVariable("PATH", $newPath, [EnvironmentVariableTarget]::Machine) # Register the Docker daemon as a service. dockerd --register-service # Start the Docker service. Start-Service docker ``` 3. Test your Docker EE installation by running the `hello-world` container. ```powershell docker pull hello-world:nanoserver docker images docker container run hello-world:nanoserver ``` ![Windows Server 2019](./Windows_Server_2019_02.png)<file_sep>--- date: "2019-02-11" title: "Loxone Miniserver" categories: - IoT - Smarthome --- ![Shenzhen, China](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Homeautomation Miniserver](#homeautomation-miniserver) - [Installation of the Miniserver](#installation-of-the-miniserver) - [Smart Home App and WebUI](#smart-home-app-and-webui) - [Control INSTAR IP camera via the Loxone Miniserver](#control-instar-ip-camera-via-the-loxone-miniserver) - [Example 1: Triggering an alarm](#example-1-triggering-an-alarm) - [Example 2: Go to a saved position](#example-2-go-to-a-saved-position) - [Display your camera video on alarm](#display-your-camera-video-on-alarm) - [Integrating the alarm output relay of the camera](#integrating-the-alarm-output-relay-of-the-camera) - [Integrating the alarm input of the camera](#integrating-the-alarm-input-of-the-camera) - [Use the Loxone Miniserver as alarm server](#use-the-loxone-miniserver-as-alarm-server) <!-- /TOC --> ## Homeautomation Miniserver The home automation [miniserver from Loxone](https://www.loxone.com/dede/produkte/hausautomation-miniserver/) connects all smart components in your apartment or house. All strings run together to the Miniserver to enable communication between the individual components: Light, shading, heating, buttons as well as your INSTAR IP camera. ![Loxone Homeautomation](./Loxone_01.png) ### Installation of the Miniserver 1. First, the Miniserver must be installed and connected in your distributor. The LAN cable must be [connected](https://www.loxone.com/dede/kb/inbetriebnahme-miniserver/) to the network router (e.g. Internet W-LAN router) and the miniserver must be connected to power. After approx. 
1 minute the Miniserver should [be operational](https://www.loxone.com/dede/kb/checkliste-miniserver-startet-nicht-mehr/): __Miniserver: The left LED flashes green regularly. The right LED is off.__ 1. To configure the Miniserver you need a PC on which the program [Loxone Config](https://www.loxone.com/dede/produkte/loxone-config/) is installed. This PC must be connected to the same network router (W-LAN or LAN) to which the Miniserver was connected. 2. To create a new project in the software by clicking on __New Project__, select the miniserver and enter the appropriate project data in the following input masks. 3. By clicking on the arrow below the _Connect_ button in the software, the search dialog can be opened. After the [Miniserver appears](https://www.loxone.com/dede/kb/miniserver-erstkonfiguration/#diagnosenetzwerk), it must be selected and enter the user data - __Default: user name: `admin` and password: `<PASSWORD>`__. By clicking on _Setup_ you get an overview of the configuration. After confirming this, you will come to the __Network Settings__ - specify here a static IP address that is available on your local network. 4. As a last point of the initial configuration the just created project must be saved in the Miniserver: ![Loxone Homeautomation](./Loxone_01.png) ### Smart Home App and WebUI From this point it is possible to use the [Smart Home App](https://www.loxone.com/dede/produkte/apps/) to connect to the Miniserver with your mobile phone, tablet, PC as long as you are in your home network. Download the apps directly from the [here](https://www.loxone.com/dede/support/downloads/), or in the respective APP Store and log in to the Miniserver with the user data that you specified in the steps before during the initial configuration. ![Loxone Homeautomation](./Loxone_02.png) The web interface of the miniserver is via the local IP of the server in your network and the login you entered in the setup for the item __Visualizations__: ![Loxone Homeautomation](./Loxone_03.png) ### Control INSTAR IP camera via the Loxone Miniserver In order to control our IP camera via the Loxone server, we need to create a virtual output and assign it a button in the UI and a CGI command for the camera: #### Example 1: Triggering an alarm ![Loxone Homeautomation](./Loxone_04.png) 1. Access your __Project__ on the Loxone Miniserver and click on __Virtual Outputs__. 2. Now a button will appear at the top of the menu bar where you can create a __Virtual Output__. 3. Clicking on this button opens the configuration page where you can assign a camera to the output via its IP address. 4. Then click __Virtual Exit Command__ to assign the output a command that you want to send to the camera when you press the button. ![Loxone Homeautomation](./Loxone_05.png) 1. Enter a name for the function you want to use with the CGI command. In the example we take the command that triggers an alarm on the camera. __Please note__ that this command only works with INSTAR Full HD cameras. However, if you have a camera from another series, you can easily exchange the command for another CGI command for your camera. The command requires the username and password of the administrator user to be appended to the back of the camera - in the example, this is _username = admin_ and _password = <PASSWORD>_ (if you select a CGI command here that can turn something on or off on the camera) these are to be entered accordingly with __command at ON__ and at __command at OFF__, with the respective parameter.) 
``` /param.cgi?cmd=pushhostalarm&-usr=admin&-pwd=<PASSWORD> ``` 2. Then click on the exit, hold down the left mouse button and drag the object onto the work surface on the right. 3. Afterwards we can create a __Virtual input__ with which we want to trigger the virtual output. ![Loxone Homeautomation](./Loxone_06.png) 1. Click __Virtual input__ to create an input. 2. In the opening configuration window you can give a __name__ to the input, select it in the __Visualization__ (WebUI) and as __Input Type__ choose __key__ (if you have chosen another CGI command next to the one on the camera) and turns off, you should instead choose a __switch__ here). 3. Then pull the Input onto the work area. 4. And connect the two elements. 5. Save the change by clicking on the floppy disk icon in the top left corner and transfer the program with the __Save in Miniserver__ button. ![Loxone Homeautomation](./Loxone_07.png) Now when we access the web interface of the miniserver again, we can see that a button has appeared there. If we select this and press it, an __Audio Alarm__ is triggered on our camera and thus all programmed alarm actions are started - in the example the Alarm server was contacted. #### Example 2: Go to a saved position For the second example, we can now recycle a lot of the previous one. ![Loxone Homeautomation](./Loxone_08.png) 1. First click on the __Virtual output__ that we created for our camera. 2. And then on the __Virtual Output command__ to add another command. ![Loxone Homeautomation](./Loxone_09.png) 1. Here you can store the stored position 1 for the __command for ON__ and the stored position 2 for __command for OFF__: ``` /param.cgi?cmd=preset&-act=goto&-number=0&-usr=admin&-pwd=<PASSWORD> /param.cgi?cmd=preset&-act=goto&-number=1&-usr=admin&-pwd=instar ``` 2. And drag the configured output to the work area. ![Loxone Homeautomation](./Loxone_10.png) 1. Now click on __Virtual Inputs__. 2. And on __Virtual Inputs__. 3. As display in the visualization (WebUI) we select a __Switch__ here. 4. Then pull the entrance onto the work surface. 5. And connect both outputs (from the virtual input) to the previously created virtual output. 6. Save the settings. 7. And transfer the program to the miniserver. ![Loxone Homeautomation](./Loxone_11.png) 1. Click the button in the WebUI to pan the camera between the two positions. ### Display your camera video on alarm To display the camera image in the Loxone Miniservers WebUI, we can add a __Custom Intercom__ from the list of __Networks__: ![Loxone Homeautomation](./Loxone_12.png) In the configuration of the _Intercoms_ enter the JPG path of your camera - this is the IP address of the camera followed by `/tmpfs/snap.jpg` and the administrator login to the camera (example s. below) and then drag it to the desktop: ``` http://192.168.2.116/tmpfs/snap.jpg?usr=admin&pwd=<PASSWORD> ``` ![Loxone Homeautomation](./Loxone_13.png) For testing, we create again a __Virtual input__ in the form of a _Button_ and connect it to our _Intercom_. Save the setting and transfer the program to the Miniserver: ![Loxone Homeautomation](./Loxone_14.png) We now have a virtual bell button that when pressed shows the live video of the camera: ![Loxone Homeautomation](./Loxone_15.png) ![Loxone Homeautomation](./Loxone_16.png) Now all you have to do is replace this virtual bell with Loxone physical accessories. 
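(Tip: before wiring any CGI command into a virtual output, you can dry-run it from a shell - a sketch using the example camera address and login from above:)

```bash
# pan the camera to stored position 1 - the camera answers the CGI call
# directly, so you can confirm login and command syntax before using Loxone
curl "http://192.168.2.116/param.cgi?cmd=preset&-act=goto&-number=0&-usr=admin&-pwd=instar"
```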
In the following example we have connected the [motion detector](https://shop.loxone.com/dede/bewegungsmelder.html) and the [Touch Control Element](https://shop.loxone.com/dede/loxone-touch.html) with the _Intercom_. If you operate the control or are detected by the motion detector, the bell is triggered immediately and all displays running the Loxone software show you the live video of the embedded camera:

![Loxone Homeautomation](./Loxone_17.png)

To the output on the right side of the _Intercom_ you can connect further devices that should be triggered when the bell rings. In the example we have connected the orientation light of the Touch control. The end result is:

1. You press the virtual bell button in the WebUI __OR__ you click on the Loxone Touch control __OR__ you trigger the Loxone motion detector and the _Intercom module_ is activated.
2. When activated, the miniserver WebUI and the Loxone Smartphone Apps will automatically display the video image of the camera and play a ringtone.
3. As an example for another action we have connected the orientation light of the Touch control with the _Intercom_ and see that it is activated when the _Intercom_ is activated (e.g. you could install a gong here).

---
<br/><br/>

## Integrating the alarm output relay of the camera

Most INSTAR IP cameras have an alarm output:

1. For indoor cameras (such as the INSTAR IN-8015 Full HD), this is usually ports 1 and 2 of the four green connectors on the rear panel of the camera.
2. For outdoor cameras, you will usually find the connection on the cable harness of the camera.

We would now like to use this alarm output to report an alarm on the camera to the [Loxone Miniserver](https://www.loxone.com/dede/kb/inbetriebnahme-miniserver/) and trigger a reaction there. In the following example we take the IN-8015 Full HD indoor camera and connect it to the [digital input (potential free)](https://www.loxwiki.eu/display/LOX/Potentialfrei+und+potentialbehaftet) of the Loxone Miniserver:

![Loxone Homeautomation](./Loxone_18.png)

In the [Loxone Config](https://www.loxone.com/dede/produkte/loxone-config/) software, we can now drag the __first digital input__ onto the work surface and - for testing - connect it to the orientation light of the [Touch control](https://shop.loxone.com/dede/loxone-touch.html) we had previously used:

![Loxone Homeautomation](./Loxone_19.png)

If we now trigger an alarm on the camera via the [Loxone WebUI or App](#example-1-triggering-an-alarm) we want the camera to send a pulse via the __alarm output__ to the __digital input__ of the Loxone Miniserver. We can set this in the alarm actions in the WebUI of the camera:

![Loxone Homeautomation](./Loxone_20.png)

The __duration in seconds__ indicates how long the alarm output relay remains closed after an alarm. The 5s set here mean that the orientation lamp lights up for `5s` after an alarm and then goes out again.

---
<br/><br/>

## Integrating the alarm input of the camera

We can now look at the opposite direction and address the alarm input of the INSTAR IP camera via the [Loxone Miniserver](https://www.loxone.com/dede/kb/inbetriebnahme-miniserver/).
Therefore we connect the first [digital output](https://www.loxwiki.eu/display/LOX/Potentialfrei+und+potentialbehaftet) of the miniserver with the alarm input of the IN-8015 Full HD: ![Loxone Homeautomation](./Loxone_21.png) In the [Loxone Config](https://www.loxone.com/dede/produkte/loxone-config/) software we now have to add a __button module__. Make sure that you are on the _My Project_ tab (1) and then select the button under __Insert button__ in the Operation category and place it on the desktop: ![Loxone Homeautomation](./Loxone_22.png) Then pull the first __Digital output__ onto the work surface and connect it to the output of the button: ![Loxone Homeautomation](./Loxone_23.png) You can now use any actuator or sensor as an input - as well as virtual inputs that can be operated via the Loxone App or WebUI. In the example we have - as before - connected the [Motion detector](https://shop.loxone.com/dede/bewegungsmelder.html) and the [Touch control](https://shop.loxone.com/dede/loxone-touch.html) as well as our virtual bell button to the input of the button. If one of these three elements is now triggered, a pulse is sent to the alarm input of the camera. We can now activate the alarm input again in the Alarm actions in the camera's WebUI: ![Loxone Homeautomation](./Loxone_24.png) Note that the relay works in the [Loxone Miniserver](https://www.loxone.com/dede/kb/inbetriebnahme-miniserver/) __N.O.__ (_normally open_). If you switch the input mode to N.C., you would receive a continuous alarm accordingly! Accept these settings and trigger one of the 3 inputs (motion detector, touch control or the virtual bell) and you will see an alarm input in the Logbook of your camera: ![Loxone Homeautomation](./Loxone_25.png) The alarm input then triggers all alarm actions that you have activated in the Alarm actions in the camera's WebUI - in the example above, the Alarm server was contacted. --- <br/><br/> ## Use the Loxone Miniserver as alarm server All INSTAR HD and Full HD cameras have an Alarm Server Function which can be used to contact an external alarm server via a web request (_HTTP GET_ to the [REST API of the server](https://www.loxwiki.eu/display/LOX/REST+Webservice)) in case of an alarm on the camera. As receiver for the alarm server request of the camera we can use in the [Loxone Config](https://www.loxone.com/dede/produkte/loxone-config/) software a __Virtual input__: ![Loxone Homeautomation](./Loxone_26.png) 1. select the __Virtual inputs__ for this. 2. click on __Virtual input__ to create a new one. 3. give the new input a name - this will be needed afterwards for the alarm server request (you should use a simple name, without spaces - or special characters). 4. drag the new input onto the desktop. You can now connect the new input to anything you want to trigger by the alarm event on the camera. We will use the bell/Intercom we created in a [previous step](#display-your-camera-video-on-alarm): ![Loxone Homeautomation](./Loxone_27.png) Connect the input to the input of the _Intercom Module_, save the configuration and transfer it to the Loxone Miniserver. To use this input, we must now configure the Alarm Server on the camera. Therefore we have to know that the __Virtual Input__ in the Loxone Miniserver can be reached via the [REST API](https://www.loxwiki.eu/display/LOX/REST+Webservice) under the path `/dev/sps/io/<Name of the Virtual Input>/`. This path must be preceded by the IP address of the Loxone Miniserver - e.g. 
`192.168.2.58` - and the state we want to switch the input to must be appended at the end: * `http://192.168.2.58/dev/sps/io/alarmserver/ON` (permanently on) * `http://192.168.2.58/dev/sps/io/alarmserver/OFF` (permanently off) * `http://192.168.2.58/dev/sps/io/alarmserver/Pulse` (briefly on and then off again) We want the camera to send a pulse to the alarm server only for a short time and it will immediately jump back into the __OFF state__ and _armed_ again for the next alarm. We therefore select the last of the three commands above and enter it into the Web interface of our camera: ![Loxone Homeautomation](./Loxone_28.png) Please note that you have to enter the login below to connect to the miniserver via Loxone Config! Accept these settings and trigger an alarm on the camera - e.g. with the [virtual button we created before](#example-1-triggering-an-alarm): ![Loxone Homeautomation](./Loxone_29.png) The alarm is triggered on the camera and the alarm server is contacted. The Loxone Miniserver receives the message and activates the intercom - on all screens where the [Loxone WebUI or App](#smart-home-app-and-webui) is opened, the live video of the triggering camera is shown to you immediately.<file_sep>--- date: "2019-03-05" title: "Setting up an OKD Cluster" categories: - LINUX - Docker - OpenShift --- ![<NAME>](./photo-875sdfgd_67456dfdj_o.jpg) <!-- TOC --> - [System and environment requirements](#system-and-environment-requirements) - [SELinux requirements](#selinux-requirements) - [Optional: Configuring Core Usage](#optional-configuring-core-usage) - [DNS Requirements](#dns-requirements) - [Packages Requirements](#packages-requirements) - [OpenShift Installation](#openshift-installation) - [Enable NetworkManager and Docker Services](#enable-networkmanager-and-docker-services) - [Install Ansible and Clone Openshift-Ansible on the Master Node](#install-ansible-and-clone-openshift-ansible-on-the-master-node) - [Generate SSH Keys on the Master Node](#generate-ssh-keys-on-the-master-node) - [Creating the OpenShift Inventory File](#creating-the-openshift-inventory-file) <!-- /TOC --> OKD brings together Docker and Kubernetes, and provides an API to manage these services. OKD allows you to create and manage containers. Containers are standalone processes that run within their own environment, independent of the operating system and the underlying infrastructure. OKD helps you to develop, deploy, and manage container-based applications. It provides you with a self-service platform to create, modify, and deploy applications on demand, thus enabling faster development and release life cycles. ## System and environment requirements * __Master hosts__ In a highly available OKD cluster with external etcd, a master host needs to meet the minimum requirements and have 1 CPU core and 1.5 GB of memory for each 1000 pods. Therefore, the recommended size of a master host in an OKD cluster of 2000 pods is the minimum requirements of 2 CPU cores and 16 GB of RAM, plus 2 CPU cores and 3 GB of RAM, totaling 4 CPU cores and 19 GB of RAM. * __Node hosts__: The size of a node host depends on the expected size of its workload. As an OKD cluster administrator, you need to calculate the expected workload and add about 10 percent for overhead. For production environments, allocate enough resources so that a node host failure does not affect your maximum capacity. --- 1. `/var/lib/openshift` * Used for etcd storage only when in single master mode and etcd is embedded in the atomic-openshift-master process. 
* Less than 10 GB.
   * Will grow slowly with the environment. Only storing metadata.

2. `/var/lib/etcd`
   * Used for etcd storage when in multi-master mode or when etcd is made standalone by an administrator.
   * Less than 20 GB.
   * Will grow slowly with the environment. Only storing metadata.

3. `/var/lib/docker`
   * When the runtime is docker, this is the mount point. Storage used for active container runtimes (including pods) and storage of local images (not used for registry storage). The mount point should be managed by docker-storage rather than manually.
   * 50 GB for a node with 16 GB memory. An additional 20-25 GB for every additional 8 GB of memory.
   * Growth is limited by the capacity for running containers.

4. `/var/lib/containers`
   * When the runtime is CRI-O, this is the mount point. Storage used for active container runtimes (including pods) and storage of local images (not used for registry storage).
   * 50 GB for a node with 16 GB memory. An additional 20-25 GB for every additional 8 GB of memory.
   * Growth is limited by the capacity for running containers.

5. `/var/lib/origin/openshift.local.volumes`
   * Ephemeral volume storage for pods. This includes anything external that is mounted into a container at runtime. Includes environment variables, kube secrets, and data volumes not backed by persistent storage PVs.
   * Varies.
   * Minimal if pods requiring storage are using persistent volumes. If using ephemeral storage, this can grow quickly.

6. `/var/log`
   * Log files for all components.
   * 10 to 30 GB.
   * Log files can grow quickly; size can be managed by growing disks or by using logrotate.

---

### SELinux requirements

Security-Enhanced Linux (SELinux) must be enabled on all of the servers before installing OKD or the installer will fail. Also, configure `SELINUX=enforcing` and `SELINUXTYPE=targeted` in the `/etc/selinux/config` file:

```conf
# This file controls the state of SELinux on the system.
# SELINUX= can take one of these three values:
#     enforcing - SELinux security policy is enforced.
#     permissive - SELinux prints warnings instead of enforcing.
#     disabled - No SELinux policy is loaded.
SELINUX=enforcing
# SELINUXTYPE= can take one of three values:
#     targeted - Targeted processes are protected,
#     minimum - Modification of targeted policy. Only selected processes are protected.
#     mls - Multi Level Security protection.
SELINUXTYPE=targeted
```

### Optional: Configuring Core Usage

By default, OKD masters and nodes use all available cores in the system they run on. You can choose the number of cores you want OKD to use by setting the `GOMAXPROCS` environment variable. For example, run the following before starting the server to make OKD only run on one core:

```bash
# export GOMAXPROCS=1
```

### DNS Requirements

Adding entries to the `/etc/hosts` file on each host is - according to the official documentation - not enough on its own.
In the example below all three hosts - one master and two minions - are resolved by their domain names `in-centos-master`, `in-centos-minion1`, `in-centos-minion2`:

```
# The following lines are desirable for IPv4 capable hosts
127.0.0.1 localhost.localdomain localhost
127.0.0.1 localhost4.localdomain4 localhost4

# The following lines are desirable for IPv6 capable hosts
::1 localhost.localdomain localhost
::1 localhost6.localdomain6 localhost6

# Kubernetes Cluster
192.168.127.12 in-centos-master master
172.16.17.32 in-centos-minion1 minion1
172.16.17.32 in-centos-minion2 minion2
```

You can test that the nodes are able to reach each other by sending a ping: `ping master`, `ping minion1`, `ping minion2`. You can set those hostnames on each node with the following command:

```bash
hostnamectl set-hostname in-centos-master # for the master node, etc.
```

<!--
This file is not copied into containers running on the platform. Key components of OKD run themselves inside of containers and use the following process for name resolution:

* By default, containers receive their DNS configuration file `/etc/resolv.conf` from their host.
* OKD then sets the pod's first name server to the IP address of the node.

As of OKD 1.2, __dnsmasq__ is automatically configured on all masters and nodes. The pods use the nodes as their DNS, and the nodes forward the requests. By default, __dnsmasq__ is configured on the nodes to listen on port 53, therefore the nodes cannot run any other type of DNS application.
-->

### Packages Requirements

On a fresh CentOS-minimal install you will have to install the following packages on all nodes:

```bash
yum install -y wget git zile nano net-tools docker bind-utils iptables-services bridge-utils bash-completion kexec-tools sos psacct openssl-devel httpd-tools NetworkManager python-cryptography python2-pip python-devel python-passlib java-1.8.0-openjdk-headless "@Development Tools"
```

## OpenShift Installation

Add the EPEL release (and disable it by default) and the CentOS OpenShift mirror:

```bash
rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
sed -i -e "s/^enabled=1/enabled=0/" /etc/yum.repos.d/epel.repo
nano /etc/yum.repos.d/Openshift.repo
```

Add the following configuration (note the `openshift-origin311` path, matching the 3.11 release we install below):

```yaml
[openshift]
name=CentOS-OpenShift
baseurl=http://mirror.centos.org/centos/7/paas/x86_64/openshift-origin311
gpgcheck=0
enabled=1
```

And check that both have been added with:

```bash
yum repolist
```

### Enable NetworkManager and Docker Services

```bash
systemctl start NetworkManager
systemctl enable NetworkManager
systemctl status NetworkManager

systemctl start docker
systemctl enable docker
systemctl status docker
```

### Install Ansible and Clone Openshift-Ansible on the Master Node

Now we can install [Ansible](https://docs.ansible.com/) from the EPEL repository - note that you need to enable it explicitly if you followed along and disabled it earlier:

```bash
yum -y --enablerepo=epel install ansible pyOpenSSL
git clone https://github.com/openshift/openshift-ansible.git
cd openshift-ansible && git fetch && git checkout release-3.11
```

Now we have the [OpenShift-Ansible](https://github.com/openshift/openshift-ansible) package cloned and checked out at the latest stable version 3.11.
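Before going further it is worth confirming that the Ansible version EPEL installed is one the `release-3.11` branch supports - the branch README documents the supported range (Ansible 2.6.x at the time of writing; treat the exact bound as an assumption and check the repository). A quick check, assuming you cloned into the current directory:

```bash
ansible --version                      # should report a 2.x release supported by release-3.11
git rev-parse --abbrev-ref HEAD        # run inside openshift-ansible/, confirms release-3.11
```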
### Generate SSH Keys on the Master Node We can now generate an SSH key on our master and copy it to the minion nodes: ```bash ssh-keygen -f ~/.ssh/id_rsa -N '' ``` This will generate a key in `~/.ssh/id_rsa.pub` that you have to copy to each node to be able to ssh into those nodes without a password: ```bash ssh-copy-id -i ~/.ssh/id_rsa.pub master -p 22 ssh-copy-id -i ~/.ssh/id_rsa.pub minion1 -p 22 ssh-copy-id -i ~/.ssh/id_rsa.pub minion2 -p 22 ``` Now verify that you are able to access each node from the master without having to use your password to login: ```bash ssh root@master -p 22 ssh root@minion1 -p 22 ssh root@minion2 -p 22 ``` ## Creating the OpenShift Inventory File [Playbooks](https://docs.ansible.com/ansible/latest/user_guide/playbooks_intro.html) are a completely different way to use ansible than in ad-hoc task execution mode, and are particularly powerful. Simply put, playbooks are the basis for a really simple configuration management and multi-machine deployment system, unlike any that already exist, and one that is very well suited to deploying complex applications. Playbooks can declare configurations, but they can also orchestrate steps of any manual ordered process, even as different steps must bounce back and forth between sets of machines in particular orders. They can launch tasks synchronously or asynchronously. Ansible works against multiple systems in your infrastructure at the same time. It does this by selecting portions of systems listed in [Ansible’s inventory](https://docs.ansible.com/ansible/latest/user_guide/intro_inventory.html). You can check out `cd ~/openshift-ansible/inventory` for some example files. Based on those files we can create our first own `nano inventory.ini`: ```ini # OpenShift-Ansible host inventory # Create an OSEv3 group that contains the masters and nodes groups [OSEv3:children] masters nodes etcd # Set variables common for all OSEv3 hosts [OSEv3:vars] ansible_ssh_user=root enable_excluders=False enable_docker_excluder=False openshift_enable_service_catalog=False ansible_service_broker_install=False # Debug level for all OpenShift components (Defaults to 2) debug_level=2 containerized=True os_sdn_network_plugin_name='redhat/openshift-ovs-multitenant' openshift_disable_check=disk_availability,docker_storage,memory_availability,docker_image_availability openshift_node_kubelet_args={'pods-per-core': ['10']} deployment_type=origin openshift_deployment_type=origin openshift_release=v3.11.0 openshift_pkg_version=v3.11.0 openshift_image_tag=v3.11.0 openshift_service_catalog_image_version=v3.11.0 openshift_service_broker_image_version=v3.11.0 osm_use_cockpit=true # Router on dedicated Infra Node openshift_hosted_router_selector='region=infra' openshift_master_default_subdomain=apps.test.instar.wiki openshift_public_hostname=master.test.instar.wiki # Image Registry on dedicated Infra Node openshift_hosted_registry_selector='region=infra' # htpasswd authentication with OSAdmin / dmlAjICyfrYXCsEH3NOoeeZMBkbo9G0JJy70z4etiO1dlCoo openshift_master_identity_providers=[{'name': 'htpasswd_auth', 'login': 'true', 'challenge': 'true', 'kind': 'HTPasswdPasswordIdentityProvider', 'filename': '/etc/origin/master/htpasswd'}] openshift_master_htpasswd_users={'OSAdmin': '$apr1$de7aZ7AQ$b0/L6hgDDskpuKekx/kfe.'} # Check http://www.htaccesstools.com/htpasswd-generator/ [masters] in-centos-master openshift_ip=172.16.31.10 openshift_port=45454 [etcd] in-centos-master openshift_ip=172.16.31.10 openshift_port=45454 [nodes] in-centos-master openshift_ip=172.16.31.10 
openshift_port=56721 openshift_schedulable=true in-centos-minion1 openshift_ip=192.168.127.12 openshift_schedulable=true openshift_node_labels="{'region': 'infra', 'zone': 'default'}" in-centos-minion2 openshift_ip=172.16.58.3 openshift_port=35745 openshift_schedulable=true openshift_node_labels="{'region': 'primary', 'zone': 'default'}" ``` We can now use the Ansible Playbook command to check the prerequisites to deploy the OpenShift Cluster: ```bash ansible-playbook -i inventory/inventory.ini playbooks/prerequisites.yml ``` <file_sep>import threading # Create threads for each SSH connection def create_threads(list, function): threads = [] for ip in list: th = threading.Thread(target = function, args = (ip,)) th.start() threads.append(th) for th in threads: th.join()<file_sep>--- date: "2019-01-02" title: "Managing Containers in Kubernetes" categories: - LINUX - Docker - Kubernetes --- ![Shanghai, China](./11627184673_98a1904863_o.jpg) Kubernetes, the open-source system for deploying, managing, and scaling containerized apps, is at the heart of Google Kubernetes Engine. This integration is totally natural. Google developed Kubernetes in-house, and uses it to run its own most popular, global apps. Gmail, YouTube, and even Search run in Kubernetes Engine. [Kubernetes Docs](https://kubernetes.io/docs/home/) Technologies used: * __Kubernetes__: Kubernetes is a manager for cluster of containerized applications. It automates container deployment and scaling containers across clusters of hosts. A Kubernetes cluster is made up of __Nodes__ or __Minions__ (server instances to which pods are deployed), __Pods__ (Pods consist of one or more containers. Those containers are located on the same host machine to facilitate sharing of resources. Pods are assigned unique IPs within each cluster.), __Labels__ (key-value-pairs) can be assigned to Pods or Minions to group them, __Selectors__ represent queries that are made against labels to resolve matching objects, __Controllers__ manage a set of pods and enforce a configuration state and __Containers__ (containerized applications/services executed by the docker runtime). * __Docker__: Docker is the container runtime that we are going to use to run our containerized applications on. * __ectd__: etcd is a distributed key value store that provides a reliable way to store data across a cluster of machines. Kubernetes uses etcd to exchange messages between master and minion server and reporting on the cluster status. <!-- TOC --> - [Network Time Protocol Service](#network-time-protocol-service) - [Setting a Hostname](#setting-a-hostname) - [Install Docker & Kuberenetes on CentOS](#install-docker--kuberenetes-on-centos) - [Install and Configure Master Controller](#install-and-configure-master-controller) - [Kubernetes](#kubernetes) - [etcd](#etcd) - [API Server](#api-server) - [Install and Configure the Minions](#install-and-configure-the-minions) - [Kubernetes](#kubernetes-1) - [Kubelet](#kubelet) - [Testing Kubectl](#testing-kubectl) - [Working with Docker](#working-with-docker) - [Running Containers](#running-containers) - [Docker Files](#docker-files) - [Managing Ports](#managing-ports) - [Working with Kubernetes](#working-with-kubernetes) - [Pod Definitions](#pod-definitions) <!-- /TOC --> ## Network Time Protocol Service NTP- is a protocol which runs over port 123 UDP at Transport Layer and allows computers to synchronize time over networks for an accurate time. This service - by default - is handled by __Chrony.d__ on CentOS 7 and higher. 
But we are going to use the __ntp__ package instead. Using the same service everywhere makes sure that every node inside the cluster is set to the same time - or as close as possible. You can check if __Chrony.d__ is active on your system with the command `systemctl status chronyd.service`, stopping and disabling it with `systemctl stop chronyd.service`, `systemctl disable chronyd.service`. To install __ntp__ run `yum install ntp -y`. Run `systemctl enable ntpd && systemctl start ntpd` to activate the NTP service. You can verify that the service is working with `systemctl status ntpd` and `ntpstat`: --- ![Managing Containers in Kubernetes](./Kubernetes_01.png) --- The NTP daemon (ntpd) should be considered for systems which are normally kept permanently on. Systems which are required to use broadcast or multicast IP, or to perform authentication of packets with the Autokey protocol, should consider using ntpd. Chrony would be considered a best match for the systems which are frequently suspended or otherwise intermittently disconnected from a network (mobile and virtual servers etc). ## Setting a Hostname We now need to make sure that all of our CentOS server can talk to each other via a hostname that is bound to their internal IP address. This can be set in `nano /etc/hosts`: --- ![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_02.png) --- Add all server that you want to connect by their local IP and assign a host name, by which you want to call them - add those lines to all your server's host files: ```yaml 192.168.2.110 in-centos-master 192.168.2.111 in-centos-minion1 ``` You can test if the hostname is used by pinging it: --- ![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_03.png) --- ## Install Docker & Kuberenetes on CentOS First we need to add the repository to pull the Docker code from - type `nano /etc/yum.repos.d/virt7-docker-common-release.repo` and add: ```yaml [virt7-docker-common-release] name=virt7-docker-common-release baseurl=http://cbs.centos.org/repos/virt7-docker-common-release/x86_64/os/ gpgcheck=0 ``` Then install [Docker](https://www.docker.com/), [Kubernetes](https://kubernetes.io) and [etcd](https://coreos.com/etcd/docs/latest/) on the master server as well as all your hosts: ```bash yum update yum -y install --enablerepo=virt7-docker-common-release kubernetes docker etcd systemctl enable docker systemctl start docker ``` ## Install and Configure Master Controller ### Kubernetes Let's start with editing the configuration file for Kubernetes in `nano /etc/kubernetes/config`. We need to add two information: ```yaml KUBE_MASTER="--master=http://in-centos-master:8080" KUBE_ETCD_SERVERS="--etcd-servers=http://in-centos-master:2379" ``` The address / hostname of our master server and the etcd server - which, in our case, also resides on the master server. --- ![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_04.png) --- ### etcd The etcd configuration can be found in `nano /etc/etcd/etcd.conf`. Here we need to change the following lines: ```yaml ETCD_LISTEN_CLIENT_URLS="http://0.0.0.0:2379" ETCD_ADVERTISE_CLIENT_URLS="http://0.0.0.0:2379" ``` Changing those lines from `http://localhost:2379` to `http://0.0.0.0:2379` so we accept messages from every network interface. 
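(Once etcd has been started - which happens in the service section below - you can verify that it really listens on all interfaces; a quick check, assuming the default client port configured above:)

```bash
# etcd answers with its server version on the client port
curl http://in-centos-master:2379/version
```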
--- ![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_05.png) --- ### API Server The Kubernetes API Server configuration can be found in `nano /etc/kubernetes/apiserver` - the following lines need to be changed: ```yaml KUBE_API_ADDRESS="--address=0.0.0.0" KUBE_API_PORT="--port=8080" KUBELET_PORT="--kubelet-port=10250" # KUBE_ADMISSION_CONTROL="--admission-control=NamespaceLifecycle,NamespaceExists,LimitRanger,SecurityContextD$ ``` --- ![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_06.png) --- We now have to make sure that the services on the __Master Server__ are started in a particular order: ```bash systemctl enable etcd kube-apiserver kube-controller-manager kube-scheduler systemctl start etcd kube-apiserver kube-controller-manager kube-scheduler systemctl status etcd kube-apiserver kube-controller-manager kube-scheduler | grep "(running)" | wc -l ``` The output of the last command should be 4 - as we wanted to start four services in total. ## Install and Configure the Minions ### Kubernetes Let's start with editing the configuration file for Kubernetes in `nano /etc/kubernetes/config`. As before we need to add two lines: ```yaml KUBE_MASTER="--master=http://in-centos-master:8080" KUBE_ETCD_SERVERS="--etcd-servers=http://in-centos-master:2379" ``` The address / hostname of our master server and the etcd server - which, in our case, also resides on the master server. ### Kubelet The Kubelet configuration can be found in `nano /etc/kubernetes/kubelet` - the following lines need to be changed: ```yaml KUBELET_ADDRESS="--address=0.0.0.0" KUBELET_PORT="--port=10250" KUBELET_HOSTNAME="--hostname-override=in-centos-minion1" KUBELET_API_SERVER="--api-servers=http://in-centos-master:8080" # KUBELET_POD_INFRA_CONTAINER="--pod-infra-container-image=registry.access.redhat.com/rhel7/pod-infrastructur$ ``` --- ![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_07.png) --- We now have to make sure that the services on the __Minion Server__ are started in this particular order: ```bash systemctl enable kube-proxy kubelet docker systemctl start kube-proxy kubelet docker systemctl status kube-proxy kubelet docker | grep "(running)" | wc -l ``` The output of the last command should be 3 - as we wanted to start three services in total. You can verify that docker is running with `docker --version` or `docker images`. To verify that docker is working, let's pull and run the `hello-world` image: ```bash docker pull hello-world docker run hello-world ``` The image will run, print it's hello world to your console and exit again - that means everything is in perfect order: --- ![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_08.png) --- ## Testing Kubectl kubectl is the control function for Kubernetes. A list of commands that you can try on your master server: ```bash kubectl get nodes //this should list all your minions kubectl describe node in-centos-minion //get info for a specific node ``` ## Working with Docker Get information about your Docker installation with `docker version` and `docker info` - the latter can tell you how many images you downloaded, how many of the are running and how much space and memory you have left on your host system. Further information - e.g. on containers and volumes - can be found in `cd /var/lib/docker`. 
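Two optional helpers when the full `docker info` dump is too noisy - `--format` accepts a Go template to print a single field, and `docker system df` (available in newer Docker releases) summarizes how much space your images, containers and volumes take up:

```bash
# print just the Docker server version
docker info --format '{{.ServerVersion}}'

# summarize disk usage of images, containers and local volumes
docker system df
```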
### Running Containers

Ready-built images can be downloaded from the [DockerHub](https://hub.docker.com/) - an example is the official image for [Ubuntu](https://hub.docker.com/_/ubuntu). Requests for specific versions can be added to the pull command. E.g. the following command pulls the Ubuntu 18.04 Bionic image:

```bash
docker pull ubuntu:bionic
```

All available versions of the selected image are listed on the corresponding DockerHub page. The image you download this way was built according to a [Docker File](https://github.com/tianon/docker-brew-ubuntu-core/blob/185c5e23efaa8c7c857683e6dcf4d886acda3cba/bionic/Dockerfile). Alternatively, you can also use the search command to find all available images:

```bash
docker search ubuntu
```

To start a container from that image run the following command:

```bash
docker run -t -i ubuntu:bionic /bin/bash
```

The `-i` makes it interactive and the `-t` attaches the output to your terminal - to run the image in the background instead use `-d` for detached mode. A full list of the [command options](https://docs.docker.com/engine/reference/commandline/run/#options) can be found in the docker docs. The appended `/bin/bash` is the command you want Docker to run inside the container, which in this case will attach you as the root user to the Ubuntu OS - you can verify this with `df -h` or `ps aux`.

Typing `exit` will both exit you out of the container and stop it as well. You can verify with `docker ps` that no container is running anymore. To restart the container run `docker ps -a` to find out the name that was given to the container - in this case it was __suspicious_lovelace__ - then type `docker restart suspicious_lovelace` to start it again in the background. Use `docker attach suspicious_lovelace` when you need to reconnect your terminal.

---

![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_09.png)

---

To start a new container directly in detached mode run:

```bash
docker run -tid ubuntu:bionic /bin/bash
```

You can see that this spun up a second container from the same image:

---

![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_10.png)

---

You can get more information about the container with the inspect command followed by the name of the container you want to inspect:

```bash
docker inspect flamboyant_lamarr
docker inspect flamboyant_lamarr | grep IP
```

The second command will only show you information related to the IP configuration of the container:

---

![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_11.png)

---

### Docker Files

A Docker File is a recipe for a Docker image. So far we only pulled predefined images from the DockerHub - writing our own Dockerfile allows us to make changes to those images.

Let's start by creating a folder in your home directory named __RunAsUser__ and add a Dockerfile `nano Dockerfile` with the following content:

```dockerfile
# Dockerfile based on the latest CentOS 7 image - non-privileged user entry
FROM centos:latest
MAINTAINER <EMAIL>
RUN useradd -ms /bin/bash user
USER user
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_12.png)

---

The Dockerfile can now be used to build a Docker image with the following command:

```bash
docker build -t centos7/nonroot:v1 /root/docker/builds/RunAsUser
docker run -ti centos7/nonroot:v1 /bin/bash
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_13.png)

---

The container is built, the user __user__ is created and you are connected to the container with the non-privileged account. But we can still connect as the root user. First exit the container and run `docker ps -a` to find out the name that was assigned to it - in this case it was __optimistic\_swirles__. Then start the container up again with `docker start optimistic_swirles`. Then run:

```
docker exec -u 0 -it optimistic_swirles /bin/bash
```

This command connects to the referenced container with the user with id=0 (root) and starts the bash terminal:

---

![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_14.png)

---

Another advantage is that you can now exit the container without stopping it - it will continue running with the default non-privileged __user__.

### Managing Ports

Docker allows us to expose ports to access services provided by applications inside the container. Let's pull the [NGINX](https://hub.docker.com/_/nginx) image and try to access the default landing page:

```bash
docker pull nginx
docker run -d nginx:latest
```

We can now run `docker ps` to find out the assigned container name (__lucid_lichterman__) and run `docker inspect lucid_lichterman` to find out the IP address the container is using (__172.17.0.2__). If you have the [elinks text browser](http://elinks.or.cz/) installed, you can run `elinks http://172.17.0.2` to see the NGINX greeting page:

---

![Red Hat Certified Specialist in Containerized Application Development](./Kubernetes_15.png)

---

But you cannot reach the page over `elinks http://localhost` because the internal port 80 was not exposed to be reachable from outside the Docker network. Let's stop the container with `docker stop lucid_lichterman` and run it again - but this time we are going to forward the internal port 80 to the external port 8081:

```bash
docker run -d -p 8081:80 nginx:latest
```

The website is now reachable via `elinks http://localhost:8081` on our host machine as well as on the local IP address on the local network - e.g. over _http://192.168.2.111:8081_.

## Working with Kubernetes

### Pod Definitions

Create a folder in your home directory on your master server and add `nano nginx.yaml` inside.
This file is going to create a pod with a single container that runs the NGINX image v1.7.9:

```yaml
apiVersion: v1
kind: Pod
metadata:
  name: nginx
spec:
  containers:
  - name: nginx
    image: nginx:1.7.9
    ports:
    - containerPort: 80
```

We can now use __kubectl__ to create the pod based on our configuration file:

```bash
kubectl create -f /root/docker/builds/nginx.yaml
```<file_sep>---
date: "2019-02-15"
title: "IFTTT Alarmserver for IP Cameras"
categories:
  - IoT
  - Smarthome
---

import GifContainer from "../../src/components/ImageContainer";

![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Basic Setup](#basic-setup)
  - [IFTTT Setting up a Webhook](#ifttt-setting-up-a-webhook)
  - [Choosing an Action (Telegram Messenger)](#choosing-an-action-telegram-messenger)
  - [Alarmserver Setup](#alarmserver-setup)

<!-- /TOC -->

## Basic Setup

_If This Then That_, also known as [IFTTT](https://ifttt.com/), is a free web-based service to create chains of simple conditional statements, called applets. Build your own applets to connect web services with your INSTAR IP Camera.

### IFTTT Setting up a Webhook

1. First you need to [create an account](https://ifttt.com/join) on IFTTT and [login to your account](https://ifttt.com/login?wp_=1). Then go to __Create__ and click on __IF + This__:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_01.png)

---

2. We now want to set up a [webhook service](https://ifttt.com/services/maker_webhooks) - which is basically just a web address that our INSTAR Full HD camera can contact in case of an alarm to trigger an IFTTT applet. Search for _web_ and select the __Webhooks Service__:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_02.png)

---

3. Then name the event - this will be part of the URL that we will later have to contact with our camera to trigger the event:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_03.png)

---

4. With the trigger set up we now need to assign an IFTTT service we want to trigger. To do this click on __that__:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_04.png)

---

5. As you can see from the list we have a large variety of services that we can now trigger with the webhook we created:

<GifContainer gifUrl="/assets/gif/IFTTT_for_INSTAR_Alarmserver_05.gif" alt="IFTTT for INSTAR Alarmserver" />

### Choosing an Action (Telegram Messenger)

6. Let's use the __Telegram service__ - when the URL (webhook) for this applet is contacted we want to receive a text message on [Telegram Messenger](https://telegram.org/):

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_06.png)

---

7. Now you need to connect the IFTTT service to your Telegram account to allow the IFTTT bot to send you messages:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_07.png)

---

__Note__: I experienced issues doing this with the IFTTT web interface on a Windows desktop with the Telegram desktop app. Switching to an Android phone solved those issues for me:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_08.png)

---

8. Once the IFTTT bot is connected we can choose that we want to receive a text message when the applet is triggered:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_09.png)

---

9. Now we can configure the text the Telegram message should have. Notice the words that are highlighted in white - these are variables that will be filled with data before the message is sent. __EventName__ will be the name that you choose in step 4 above, __OccuredAt__ will be filled with a time stamp. The __Value 1-3__ below are variables that we can define in the Alarmserver setup in our camera's webUI and that are sent along whenever an alarm is triggered:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_10.png)

---

### Alarmserver Setup

10. Now we have to configure the Alarmserver in our camera's webUI so that the URL for our IFTTT Webhook receives a __POST__ message from our camera every time an alarm is triggered:

```
https://maker.ifttt.com/trigger/instar_alarmserver/with/key/API_KEY
```

__Note__ that this is using the __Event Name__ that we set as name for our Webhook `instar_alarmserver`. To identify our IFTTT account we first need to find out our personal __API_KEY__ and add it at the end of the URL.

To get access to our personal API key we first have to sign in to our IFTTT account and open the following page `https://ifttt.com/services/maker_webhooks/settings`. Copy the alpha-numeric key in the Account Info - e.g. if you find the `URL: https://maker.ifttt.com/use/c3oL9WnAbz8Z08KumJWS`, then `<KEY>` is your API key:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_11.png)

---

We can now test our webhook and the URL query parameters by copying the following URL (just replace your personal `API_Key`) and pasting it into the address bar of a web browser:

```
https://maker.ifttt.com/trigger/instar_alarmserver/with/key/c<KEY>WS?value1=Office&value2=Entrance_Left&value3=Alarm
```

You should get a reply that you triggered the __instar_alarmserver__ applet and shortly afterwards receive a message on Telegram:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_12.png)

---

11. Now we have to copy & paste all the parts from this URL into the Alarmserver configuration in our camera's webUI:

---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_13.png)

---<file_sep>import React from 'react'
import Helmet from 'react-helmet'

export default () => (
  <Helmet>
    <meta http-equiv="refresh" content="0;url=https://mpolinowski.github.io/red-hat-certified-engineer-rhce-exam" />
  </Helmet>
)<file_sep>---
date: "2018-11-10"
title: "Getting started with PostgreSQL in Windows 10"
categories:
  - Databases
  - SQL
---

![<NAME>](./photo-15328454698_e5687fc21d_o.png)

<!-- TOC -->

- [Installation](#installation)
- [Adding some Dummy Data to work with](#adding-some-dummy-data-to-work-with)
  - [SELECT Queries](#select-queries)
    - [ORDER BY](#order-by)
    - [WHERE](#where)
    - [OR](#or)
    - [AND](#and)
    - [LIKE & LOWER](#like--lower)
    - [LIKE & UPPER](#like--upper)
    - [Working with numbers](#working-with-numbers)
- [Adding another Dataset](#adding-another-dataset)
  - [ALTER TABLE & ADD COLUMN](#alter-table--add-column)
  - [UPDATE & SET](#update--set)
  - [VARCHARS](#varchars)
    - [length](#length)
    - [left & right](#left--right)
    - [reverse](#reverse)
  - [Working with Dates](#working-with-dates)
    - [AS](#as)
    - [AGE](#age)
    - [DATE_PART & COUNT](#datepart--count)
  - [Changing the Data Type](#changing-the-data-type)
    - [CAST](#cast)
      - [to_date](#todate)
  - [IS null](#is-null)

<!-- /TOC -->

## Installation

[Download the Version](https://www.postgresql.org/download/) of PostgreSQL that suits your OS. In my case I will choose the installer certified by EnterpriseDB for Windows 64bit. Start the installer and accept the defaults - just add __your own password__ that you will then be able to use with the __username: postgres__ to log in to the pgAdmin Control Panel.
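Before moving on, you can verify the installation from a terminal as well - a minimal check, assuming the PostgreSQL `bin` directory (containing `psql`) is on your PATH and using the password set during the installation:

```bash
psql -U postgres -c "SELECT version();"
```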
## Adding some Dummy Data to work with We are going to use the free available [Consumer Complaint Database](https://www.consumerfinance.gov/data-research/consumer-complaints/) from the Consumer Financial Protection Bureau. Start the __pgAdmin4__ Admin Panel and right-click __Databases__ to create a new db. Give it a name, e.g. `consumer complaints`: ![PostgrSQL](./postgresql_01.png) Now right-click the newly created Database and selct the __Query Tool__. Paste the following in and click on __Execute__ to create the `consumer_complaint` table: ```sql CREATE TABLE consumer_complaints ( date_received varchar, product_name varchar, sub_product varchar, issue varchar, sub_issue varchar, consumer_complaint_narrative varchar, company_public_response varchar, company varchar, state_name varchar, zip_code varchar, tags varchar, consumer_consent_provided varchar, submitted_via varchar, date_sent varchar, company_response_to_consumer varchar, timely_response varchar, consumer_disputed varchar, complaint_id integer ); ``` > Note: If you are running the Astrill VPN client on your System, this will block access to the Query Tool. Add pgAdmin4 to the list of allowed programs that are not forced through the VPN tunnel. Now we can add the data from the earlier downloaded _ConsumerComplaints.csv_ file, by executing the following query (change the path-to-file according to your download folder) ```sql COPY consumer_complaints FROM 'E:\postgresql-getting-started\ConsumerComplaints.csv' DELIMITER ',' CSV HEADER; ``` ![PostgrSQL](./postgresql_02.png) We have now created our schema with 18 columns and copied 65499 data entries from the CSV file into our table. We can check our data by running the following query: ```sql SELECT * FROM consumer_complaints; ``` ### SELECT Queries The `SELECT *` allows you to grab the content of _all columns_ of your table. If you want to select specific columns you can string them together, separated by commas - e.g. `SELECT date_received, issue, state_name, tags`. ![PostgrSQL](./postgresql_03.png) #### ORDER BY ```sql SELECT * FROM consumer_complaints ORDER BY company; ``` ```sql SELECT * FROM consumer_complaints ORDER BY zip_code ASC; ``` ```sql SELECT * FROM consumer_complaints ORDER BY zip_code, date_received DESC; ``` #### WHERE To Filter your results, by adding a condition with the __Where__ statement: ```sql SELECT product_name, issue FROM consumer_complaints WHERE state_name = 'NY'; ``` This query only displays results from the two selected columns inside the referenced table where the _state_name_ equals New York. We can also add some logic to our queries - e.g. if we want to know how many complaints weren't answered on the same day we can write the following SQL query: ```sql SELECT company, product_name, issue FROM consumer_complaints WHERE date_sent != date_received; ``` #### OR ```sql SELECT company, product_name, issue FROM consumer_complaints WHERE state_name = 'NY' OR state_name = 'CA'; ``` #### AND ```sql SELECT company, product_name FROM consumer_complaints WHERE tags = 'Servicemember' AND timely_response = 'Yes'; ``` #### LIKE & LOWER ```sql SELECT product_name FROM consumer_complaints WHERE LOWER(product_name) LIKE '%credit%'; ``` The __LIKE__ condition allows us to use %-wildcards to search for every value of _product\_name_ that contains the word _credit_. The Select query is case sensitive - by adding __LOWER__ we transform all _product\_name_ values to lower-case and then compare it against a lower-case search query. 
The same can be achieved using __UPPER__ instead:

#### LIKE & UPPER

```sql
SELECT company
FROM consumer_complaints
WHERE UPPER(issue) LIKE '%LATE%';
```

#### Working with numbers

Display all companies that are in an area whose zip code starts with `12*`:

```sql
SELECT company, product_name
FROM consumer_complaints
WHERE zip_code LIKE '12___';
```

All zip-codes have 5 digits - we signify the position of our search number by adding an underscore for every digit that is a wildcard - e.g. `__1__` would hit all zip-codes that have a 1 in the middle. Alternatively, you can also use the `%` syntax to get the same result:

```sql
SELECT company, product_name
FROM consumer_complaints
WHERE zip_code LIKE '12%';
```

Using `%1%` instead would give you all the zip-codes that have a number 1 in __any position__.

__COUNT__

```sql
SELECT COUNT(company)
FROM consumer_complaints
WHERE LOWER(company) LIKE '%bell%';
```

Count all instances where a company with the word _bell_ in its name filed a complaint.

## Adding another Dataset

The .csv files used here can be downloaded here ([Section 5](https://www.superdatascience.com/sql/)). Right-click __Databases__ to add a new db and name it `console games`. Then create the following tables with the 2 .csv files you just downloaded:

```sql
CREATE TABLE console_games (
    game_rank integer,
    game_name varchar(1200),
    platform varchar(1200),
    game_year integer,
    genre varchar(20),
    publisher varchar(1200),
    na_sales float8,
    eu_sales float8,
    jp_sales float8,
    other_sales float8
);

COPY console_games FROM 'E:\postgresql-getting-started\ConsoleGames.csv' DELIMITER ',' CSV HEADER;
```

and

```sql
CREATE TABLE console_dates (
    platform_name char(120),
    first_retail_availability date,
    discontinued date,
    units_sold_mill float8,
    platform_comment varchar(120)
);

COPY console_dates FROM 'E:\postgresql-getting-started\ConsoleDates.csv' DELIMITER ',' CSV HEADER;
```

Let's work with our data and add all NA, EU and JP sales together to get a new column with `global_sales`:

### ALTER TABLE & ADD COLUMN

```sql
ALTER TABLE console_games
ADD COLUMN global_sales float8;
```

This will alter the table _console\_games_ and add a column named _global\_sales_ that will receive a number with a decimal point.

We can now fill in the value by a simple addition:

### UPDATE & SET

```sql
UPDATE console_games
SET global_sales = na_sales + eu_sales + jp_sales;
```

Now we can calculate the percentage of the North-American sales from the global sales:

```sql
ALTER TABLE console_games
ADD COLUMN na_sales_percent float8;

UPDATE console_games
SET na_sales_percent = na_sales / global_sales * 100
WHERE global_sales > 0;
```

### VARCHARS

Working with __String Functions__

#### length

```sql
SELECT game_name, length(game_name)
FROM console_games
ORDER BY length(game_name) DESC;
```

Will give us a column with the count of characters inside the name of the game:

![PostgrSQL](./postgresql_04.png)

#### left & right

To only grab the first 4 letters of the publisher name:

```sql
SELECT left(publisher, 4)
FROM console_games
ORDER BY left(publisher, 1) DESC;
```

#### reverse

Will reverse the order of the characters or numbers of the values of the selected column:

```sql
SELECT reverse(genre)
FROM console_games;
```

### Working with Dates

#### AS

```sql
SELECT *, discontinued - first_retail_availability AS days_existed
FROM console_dates;
```

__AS__ will create a __temporary column__ (we need to use ALTER TABLE to add it permanently) with the number of days a console system was available on the market:

![PostgrSQL](./postgresql_05.png)

This can help you to get a quick overview over your data - but isn't very precise e.g.:

```sql
SELECT *, (discontinued - first_retail_availability)/365 AS years_existed
FROM console_dates
ORDER BY years_existed DESC;
```

#### AGE

The more accurate way to calculate it:

```sql
SELECT *, AGE(discontinued, first_retail_availability) AS platform_alive
FROM console_dates
ORDER BY platform_alive DESC;
```

![PostgrSQL](./postgresql_06.png)

#### DATE_PART & COUNT

The PostgreSQL DATE_PART() function allows you to retrieve subfields - e.g. year, month or week - from a date or time value. `DATE_PART('month', ...)` extracts only the month subfield of a date, so subtracting on the month level between a November date and a December date results in `11 - 12 = -1`.

One use case of this function is to check how many entries are from a specific month of the year:

```sql
SELECT COUNT(platform_name)
FROM console_dates
WHERE DATE_PART('month', first_retail_availability) - 11 = 0
OR DATE_PART('month', first_retail_availability) - 12 = 0;
```

This query will __COUNT__ all the console systems that were released in November __OR__ December - just in time for the X-mas present madness.
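The result is shown in the screenshot below. As a side note, the same filter can be written more compactly with the standard SQL `IN` operator - an equivalent query:

```sql
SELECT COUNT(platform_name)
FROM console_dates
WHERE DATE_PART('month', first_retail_availability) IN (11, 12);
```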
![PostgrSQL](./postgresql_07.png)

Or you can check what consoles were released in a specific year:

```sql
SELECT *
FROM console_dates
WHERE DATE_PART('year', first_retail_availability) - 1990 = 0;
```

![PostgrSQL](./postgresql_08.png)

The values of the field must be in a list of permitted values mentioned below:

* century
* decade
* year
* month
* day
* hour
* minute
* second
* microseconds
* milliseconds
* dow
* doy
* epoch
* isodow
* isoyear
* timezone
* timezone_hour
* timezone_minute

### Changing the Data Type

#### CAST

```sql
SELECT CAST(game_year as varchar(4))
FROM console_games
ORDER BY game_year;
```

A shortcut that will do the same:

```sql
SELECT game_year::varchar(4)
FROM console_games
ORDER BY game_year;
```

##### to_date

More useful is the conversion to a date data type:

```sql
SELECT to_date(CAST(game_year as varchar(4)), 'yyyy')
FROM console_games
ORDER BY game_year;
```

### IS null

Finding undefined fields (notice: we have to use __IS__ instead of an equal sign when working with _null_):

```sql
SELECT *
FROM console_games
WHERE game_name IS NULL;
```

Adding a value to an undefined field:

```sql
UPDATE console_games
SET jp_sales = round((na_sales + eu_sales + other_sales) / 3)
WHERE jp_sales IS null;
```<file_sep>---
date: "2019-09-19"
title: "Creating Magento 2 Console Commands"
categories:
  - Magento
---

![TST, Hongkong](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Command Line Interface](#command-line-interface)

<!-- /TOC -->

## Command Line Interface

To add custom commands to the CLI interface, create a folder called `./Console/Command` in your [Custom Magento 2 Module](/creating-magento-2-modules). Here we can create our custom command `AddItem.php`:

```php
<?php

namespace INSTAR\SampleModule\Console\Command;

use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use INSTAR\SampleModule\Model\ItemFactory;
use Magento\Framework\Console\Cli;

class AddItem extends Command
{
    const INPUT_KEY_NAME = 'name';
    const INPUT_KEY_DESCRIPTION = 'description';

    private $itemFactory;

    public function __construct(ItemFactory $itemFactory)
    {
        $this->itemFactory = $itemFactory;
        parent::__construct();
    }

    protected function configure()
    {
        $this->setName('instar:item:add')
            ->addArgument(
                self::INPUT_KEY_NAME,
                InputArgument::REQUIRED,
                'Item name'
            )->addArgument(
                self::INPUT_KEY_DESCRIPTION,
                InputArgument::OPTIONAL,
                'Item description'
            );
        parent::configure();
    }

    protected function execute(InputInterface $input, OutputInterface $output)
    {
        $item = $this->itemFactory->create();
        $item->setName($input->getArgument(self::INPUT_KEY_NAME));
        $item->setDescription($input->getArgument(self::INPUT_KEY_DESCRIPTION));
        $item->setIsObjectNew(true);
        $item->save();

        return Cli::RETURN_SUCCESS;
    }
}
```

Magento 2 uses the Symfony framework to create command line commands. This example imports the necessary modules from Symfony and creates a command that accepts 2 inputs - an item name and description.
We now need to implement this command so that those values can be written to [our modules database table](/creating-magento-2-modules#module-install-scripts) - for this we have to add a __Dependency Injection__ configuration to our module with `./etc/di.xml`: ```xml <?xml version="1.0"?> <config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:ObjectManager/etc/config.xsd"> <type name="Magento\Framework\Console\CommandList"> <arguments> <argument name="commands" xsi:type="array"> <item name="instarAddItem" xsi:type="object">INSTAR\SampleModule\Console\Command\AddItem</item> </argument> </arguments> </type> </config> ``` Now flush the Magento cache (all XML files are cached by Magento - changing them always requires a flush): ```bash bin/magento cache:flush ``` The list of available Magento commands should now contain the new command we created: ```bash root@Magento2 /var/www/html/magento # bin/magento Magento CLI 2.3.3 Usage: command [options] [arguments] Options: -h, --help Display this help message -q, --quiet Do not output any message -V, --version Display this application version --ansi Force ANSI output --no-ansi Disable ANSI output -n, --no-interaction Do not ask any interactive question -v|vv|vvv, --verbose Increase the verbosity of messages: 1 for normal output, 2 for more verbose output and 3 for debug Available commands: help Displays help for a command list Lists commands instar instar:item:add ``` You can now use the command to add Items and append them to your modules database table: ```bash bin/magento instar:item:add "Item 3" "Third item description" ``` You can now verify that the item was added by connecting to your MySQL database, or by opening the [view we created earlier](/creating-magento-2-modules#view-layer): ![Magento2 Module Components](./Magento2_Module_Components_01.png)<file_sep>--- date: "2020-06-16" title: "Salt State" categories: - LINUX --- ![Guangzhou, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Setting up your Minions](#setting-up-your-minions) - [Installing Apache](#installing-apache) - [Configuring Apache](#configuring-apache) - [Jinja Scripts](#jinja-scripts) - [Conditionals](#conditionals) - [Looping](#looping) - [Scripts CleanUP](#scripts-cleanup) - [Working with Dictionaries](#working-with-dictionaries) - [Splitting up our Files](#splitting-up-our-files) - [Using Custom Python Scripts](#using-custom-python-scripts) <!-- /TOC --> ## Setting up your Minions ### Installing Apache We can set the state of an Minion inside a YAML file that contains instructions for Salt. Start by creating a folder `/srv/salt` and run `git init` to version your [minion state](https://docs.saltstack.com/en/master/ref/states/all/index.html). 
To make sure that our minion has [Apache installed](https://docs.saltstack.com/en/master/ref/states/all/salt.states.pkg.html#salt.states.pkg.installed) we will create an `apache.sls` file inside the directory __on your MASTER__: ```yaml install_apache: pkg.installed: - name: apache2 ``` You can now execute this set of instructions on your minion server by running: ```bash sudo salt ubuntuAsus state.sls apache ubuntuAsus: ---------- ID: install_apache Function: pkg.installed Name: apache2 Result: True Comment: The following packages were installed/updated: apache2 Started: 15:15:20.619100 Duration: 28624.3 ms Changes: ---------- apache2: ---------- new: 2.4.41-4ubuntu3 old: apache2-bin: ---------- new: 2.4.41-4ubuntu3 old: apache2-data: Summary for ubuntuAsus ------------ Succeeded: 1 (changed=1) Failed: 0 ------------ Total states run: 1 Total run time: 28.624 s ``` This step had our Minion download a copy of `apache.sls` file from our master and run it's instructions. The master does not need to know how the minion is going to download and install the required software - the OS on our minion makes that decision. Since we have Ubuntu installed on our minion it will run `apt-get update && apt-get install apache2`. ### Configuring Apache We can now also make sure that Apache will be [enabled and activated as a service](https://docs.saltstack.com/en/master/ref/states/all/salt.states.service.html#module-salt.states.service) by adding the following line to our instruction file: ```yaml install_apache: pkg.installed: - name: apache2 enable_apache: service.running: - name: apache2 - enable: True ``` We can also configure Apache to display a landing page on Port 80: ```yaml install_apache: pkg.installed: - name: apache2 enable_apache: service.running: - name: apache2 - enable: True add_landing_page: file.managed: - name: /var/www/html/index.html - contents: | <!doctype html> <body><h1>Salty Dayze, Sailor!</h1></body> ``` Now re-run `state.sls` and the default Apache Landing Page will be overwritten by our __Hello World__: ```bash sudo salt ubuntuAsus state.sls apache test=true sudo salt ubuntuAsus state.sls apache ``` You can visit the website on your minions IP address and port 80. You can get the IP address by running the following command: ```bash sudo salt ubuntuAsus network.ip_addrs ubuntuAsus: - 10.1.88.0 - 172.17.0.1 - 192.168.2.111 ``` ```html curl 192.168.2.111 <!doctype html> <body><h1>Salty Dayze, Sailor!</h1></body> ``` ## Jinja Scripts ### Conditionals The Apache install script in this form will only work for Debian-based operating system - the Apache package is called `apache2` on Ubuntu but `httpd` on CentOS. 
We can use the Jinja script syntax to make our script more robust: ```yaml install_apache: pkg.installed: {% if salt.grains.get('os_family') == 'Debian' %} - name: apache2 {% elif salt.grains.get('os_family') == 'RedHat' %} - name: httpd {% endif %} enable_apache: service.running: {% if salt.grains.get('os_family') == 'Debian' %} - name: apache2 {% elif salt.grains.get('os_family') == 'RedHat' %} - name: httpd {% endif %} - enable: True add_landing_page: file.managed: - name: /var/www/html/index.html - contents: | <!doctype html> <body><h1>Salty Dayze, Sailor!</h1></body> ``` We can test this script with the `state.show_sls` command: ```bash sudo salt '*' state.show_sls apache | less ubuntuAsus: ---------- enable_apache: ---------- __env__: base __sls__: apache service: |_ ---------- name: apache2 |_ ---------- enable: True - running |_ ---------- order: 10001 ``` We can see that our Ubuntu Minion correctly resolved the `apache2` package and not `httpd`. ### Looping To configure Apache we can now create configuration files in the `conf-available` directory and copy them over into the `conf-enabled` directory to start using them: ```yaml mod_status: file.managed: - name: /etc/apache2/conf-available/mod_status.conf - contents: | <Location "/status"> SetHandler server-status </Location> cmd.run: - name: a2enmod status && a2enconf mod_status - creates: /etc/apache2/conf-enabled/mod_status.conf mod_info: file.managed: - name: /etc/apache2/conf-available/mod_info.conf - contents: | <Location "/info"> SetHandler server-info </Location> cmd.run: - name: a2enmod info && a2enconf mod_info - creates: /etc/apache2/conf-enabled/mod_info.conf ``` This configuration script can be compacted by writing a for-loop: ```yaml {% for conf in ['status', 'info'] %} mod_{{ conf }}: file.managed: - name: /etc/apache2/conf-available/mod_{{ conf }}.conf - contents: | <Location "/{{ conf }}"> SetHandler server-{{ conf }} </Location> {% if salt.grains.get('os_family') == 'Debian' %} cmd.run: - name: a2enmod {{ conf }} && a2enconf mod_{{ conf }} - creates: /etc/apache2/conf-enabled/mod_{{ conf }}.conf {% endif %} {% endfor %} ``` The command step is only necessary on Debian systems and can be wrapped into a conditional. We can again test our script: ```bash sudo salt '*' state.show_sls mods | less ubuntuAsus: ---------- mod_info: ---------- __env__: base __sls__: mods ... 
``` ### Scripts CleanUP Commenting your scripts and separating logic from state: __apache.sls__ ```yaml # Install vanilla Apache on Debian/RedHat {% if salt.grains.get('os_family') == 'Debian' %} {% set apache_pkg = 'apache2' %} {% elif salt.grains.get('os_family') == 'RedHat' %} {% set apache_pkg = 'httpd' %} {% endif %} install_apache: pkg.installed: - name: {{ apache_pkg }} enable_apache: service.running: - name: {{ apache_pkg }} # Will be enabled automatically on Debian but has to be enabled manually on RedHat - enable: True # Adding a blank front page add_landing_page: file.managed: - name: /var/www/html/index.html - contents: | <!doctype html> <body><h1>Salty Dayze, Sailor!</h1></body> ``` ### Working with Dictionaries Create a lookup dictionary to assign the correct package and service name for each minion: __apache.sls__ ```yaml # Install vanilla Apache on Debian/RedHat {% set lookup = { 'Debian': { 'pkg': 'apache2', 'srv': 'apache2' }, 'RedHat': { 'pkg': 'httpd', 'srv': 'httpd' } } %} {% set apache = lookup[salt.grains.get('os_family')] %} install_apache: pkg.installed: - name: {{ apache.pkg }} enable_apache: service.running: - name: {{ apache.srv }} # Will be enabled automatically on Debian but has to be enabled manually on RedHat - enable: True # Adding a blank front page add_landing_page: file.managed: - name: /var/www/html/index.html - contents: | <!doctype html> <body><h1>Salty Dayze, Sailor!</h1></body> ``` making this is a little bit more compact by using `grains.filter_by`: ```bash sudo salt '*' grains.filter_by '{Debian: apache2, RedHat: httpd}' ubuntuAsus: apache2 ``` __apache.sls__ ```yaml # Install vanilla Apache on Debian/RedHat {% set apache = salt.grains.filter_by({ 'Debian': { 'pkg': 'apache2', 'srv': 'apache2' }, 'RedHat': { 'pkg': 'httpd', 'srv': 'httpd' } }) %} install_apache: pkg.installed: - name: {{ apache.pkg }} enable_apache: service.running: - name: {{ apache.srv }} # Will be enabled automatically on Debian but has to be enabled manually on RedHat - enable: True # Adding a blank front page add_landing_page: file.managed: - name: /var/www/html/index.html - contents: | <!doctype html> <body><h1>Salty Dayze, Sailor!</h1></body> ``` Again, you can test your script with `sudo salt '*' state.show_sls apache | less`. ### Splitting up our Files We can now break up our configuration file so that every SLS file only does one thing - if possible. 
We will collect the resulting files inside a subdirectory `/srv/salt/apache`: __welcome.sls__ ```yaml # Adding a blank front page add_landing_page: file.managed: - name: /var/www/html/index.html - contents: | <!doctype html> <body><h1>Salty Dayze, Sailor!</h1></body> ``` __map.sls__ ```yaml # Get package/service name per OS version {% set apache = salt.grains.filter_by({ 'Debian': { 'pkg': 'apache2', 'srv': 'apache2' }, 'RedHat': { 'pkg': 'httpd', 'srv': 'httpd' } }) %} ``` __init.sls__ ```yaml # Install vanilla Apache on Debian/RedHat {% from "apache/map.sls" import apache with context %} install_apache: pkg.installed: - name: {{ apache.pkg }} enable_apache: service.running: - name: {{ apache.srv }} # Will be enabled automatically on Debian but has to be enabled manually on RedHat - enable: True ``` __mods.sls__ ```yaml {% for conf in ['status', 'info'] %} mod_{{ conf }}: file.managed: - name: /etc/apache2/conf-available/mod_{{ conf }}.conf - contents: | <Location "/{{ conf }}"> SetHandler server-{{ conf }} </Location> {% if salt.grains.get('os_family') == 'Debian' %} cmd.run: - name: a2enmod {{ conf }} && a2enconf mod_{{ conf }} - creates: /etc/apache2/conf-enabled/mod_{{ conf }}.conf {% endif %} {% endfor %} ``` Again, you can test your script with `sudo salt '*' state.show_sls apache | less` ## Using Custom Python Scripts You can write your own Python modules and execute them with Salt. Start by creating a folder `/srv/salt/_modules` and add your Python scripts - `myUtils.py`: ```python def getDate(): return __salt__['cmd.run']('date') ``` ```bash sudo salt '*' saltutil.sync_modules //sync script with all minions sudo salt '*' myUtils.getDate ubuntuAsus: Tue Aug 4 09:19:49 UTC 2020 ```<file_sep>--- date: "2017-12-07" title: "React Router 4" categories: - Javascript - React --- ![Hongkong](./photo-34445986622_955cf9ae16_o.jpg) [Github](https://github.com/mpolinowski/react-router-4) ![React Router 4](./rr4_01.png) <!-- TOC --> - [01 create-react-app](#01-create-react-app) - [02 react-bootstrap](#02-react-bootstrap) - [03 react-router-dom](#03-react-router-dom) - [04 Nested Routes](#04-nested-routes) - [05 Loading Animations](#05-loading-animations) <!-- /TOC --> ## 01 create-react-app We use the [Facebook React Boilerplate](https://github.com/facebookincubator/create-react-app) to get started: ``` create-react-app react-router-4 ``` ## 02 react-bootstrap And add some [Bootstrap](https://react-bootstrap.github.io/getting-started.html) for styling: ``` npm install --save react-bootstrap ``` We can now add the Bootsrap CSS inside the head of _./public/index.html_ : ```html <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta.2/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous"> ``` And the Javascript at the end of the body tag: ```html <script src="https://code.jquery.com/jquery-3.2.1.slim.min.js" integrity="<KEY>" crossorigin="anonymous"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.12.3/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script> <script src="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta.2/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script> ``` To add a simple Navbar on top, we will now replace the default create-react-app JSX inside _./src/coponents/app.js_ with a Bootstrap Navbar from their [Example list](https://getbootstrap.com/docs/4.0/examples/) (remember to replace all instances of __class__ with __className__!): ```js return ( <div className="App"> <nav 
className="navbar navbar-expand-md navbar-dark bg-dark fixed-top mb">
        <a className="navbar-brand" href="#">React Router 4</a>
        <button className="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarsExampleDefault" aria-controls="navbarsExampleDefault" aria-expanded="false" aria-label="Toggle navigation">
          <span className="navbar-toggler-icon"></span>
        </button>

        <div className="collapse navbar-collapse" id="navbarsExampleDefault">
          <ul className="navbar-nav mr-auto">
            <li className="nav-item active">
              <a className="nav-link" href="#">Page 1 <span className="sr-only">(current)</span></a>
            </li>
            <li className="nav-item">
              <a className="nav-link" href="#">Page 2</a>
            </li>
          </ul>
        </div>
      </nav>

      <div className="jumbotron mt">
        <div className="col-sm-8 mx-auto mt">
          <h1>This is just a Test</h1>
          <p>
            <a className="btn btn-primary" href="#" role="button">View navbar docs &raquo;</a>
          </p>
        </div>
      </div>
    </div>
  );
```

Now start the app with:

```
npm start
```

The Bootstrap Navbar should now show up in our React app on _localhost:3000_:

## 03 react-router-dom

We can now use the [basic example](https://reacttraining.com/react-router/web/example/basic) from the reacttraining.com website to add some routing to our app. First install the web based router - which is now called react-router-dom:

```
npm install --save react-router-dom
```

To add links to our navigation, we will need the [\<NavLink/\> component](https://reacttraining.com/react-router/web/api/NavLink), which is a special version of the \<Link/\> that will add styling attributes to the rendered element when it matches the current URL (activeState). Replace all:

```html
<a href="#"></a>
```

with

```html
<NavLink to="#"></NavLink>
```

and import \<NavLink /\> from react-router-dom:

```js
import { NavLink } from 'react-router-dom'
```

We created two links to two components, aptly named _/page-1_ and _/page-2_, that we now have to create inside the _./src/components_ directory. For \<PageOne /\> we want to use an __ES6 Class Component__ to render some JSX:

```js
import React, {Component} from 'react'
import { Link } from 'react-router-dom'
import { Button } from 'react-bootstrap'

class PageOne extends Component {
  render() {
    return (
      <div className="jumbotron mt">
        [...]
      </div>
    );
  }
}

export default PageOne
```

And for \<PageTwo /\> we use a __Stateless Component__:

```js
import React from 'react'
import { Link } from 'react-router-dom'
import { Button } from 'react-bootstrap'

const PageTwo = () => (
  <div className="jumbotron mt">
    [...]
  </div>
)

export default PageTwo
```

Make sure to import all components in _./src/index.js_:

```js
import App from './App';
import PageOne from './PageOne';
import PageTwo from './PageTwo';
```

As well as adding the Router itself:

```js
import { BrowserRouter as Router, Route, Link } from 'react-router-dom'
```

Now we can copy the navigation (everything inside the \<nav\> tag) from _./src/components/app.js_ to _./src/index.js_ and replace the \<App /\> component that was placed there by create-react-app.
Then wrap the JSX into a \<Router\> tag: ```js render( <Router> <div className="container"> <nav className="navbar navbar-expand-md navbar-dark bg-dark fixed-top mb"> <NavLink className="navbar-brand" to="/"> <img src={Logo} alt="React Router v4" /> </NavLink> <button className="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarsExampleDefault" aria-controls="navbarsExampleDefault" aria-expanded="false" aria-label="Toggle navigation"> <span className="navbar-toggler-icon"></span> </button> <div className="collapse navbar-collapse"> <ul className="navbar-nav mr-auto"> <li className="nav-item"> <NavLink className="nav-link" to="/page-1">Page 1</NavLink> </li> <li className="nav-item"> <NavLink className="nav-link" to="/page-2">Page 2</NavLink> </li> </ul> </div> </nav> <Route exact path="/" component={App}/> <Route path="/page-1" component={PageOne}/> <Route path="/page-2" component={PageTwo}/> </div> </Router>, document.getElementById('root')) ``` ## 04 Nested Routes To create nested routes in React Router v4, we just have to add more routes inside a component. E.g. you have a route that leads the user from _/_ to _/chapter1_, rendering the \<Chapter1 /\> component. To create a route to a nested article inside the chapter, like _/chapter1/subarticle1_, we now have to add further routes with the __${match.url}__ attribute inside the \<Chapter1 /\>: ```js <NavLink to={`${match.url}/subarticle1`} /> <Route path={`${match.url}/subarticle1`} component={SubArticle1}/> ``` So lets add another Link and Route to the Nav component in _./src/index.js_ to render another component, called \<NestedRoutes /\>, under _/nested-routes_. This is going to be our parent component for two nested routes/components: \<PageOneNested /\> and \<PageTwoNested /\> rendered under _./nested-routes/page-1-nested_ and _./nested-routes/page-2-nested_, respectively. ```js import React from 'react' import { Route, NavLink, Link } from 'react-router-dom' import PageOneNested from './PageOneNested' import PageTwoNested from './PageTwoNested' const Topics = ({ match }) => ( <div className="container-fluid"> <div className="row"> <nav className="nav nav-pills nav-fill col-sm-3 col-md-2 d-none d-sm-block bg-light sidebar"> <div className="clearfix"><br/><br/><br/></div> <Link className="nav-item nav-link" to={match.url}> <h3>Nested Routes</h3> <hr/> </Link> <NavLink className="nav-item nav-link" to={`${match.url}/page-1-nested`}> NestOne </NavLink> <NavLink className="nav-item nav-link" to={`${match.url}/page-2-nested`}> NestTwo </NavLink> </nav> <main role="main" className="col-sm-9 ml-sm-auto col-md-10 pt-3"> <div className="clearfix"><br/><br/><br/></div> <h1>Dashboard</h1> <Route path={`${match.url}/page-1-nested`} component={PageOneNested}/> <Route path={`${match.url}/page-2-nested`} component={PageTwoNested}/> <Route exact path={match.url} render={() => ( <div> [...] </div> )}/> </main> </div> </div> ) export default Topics ``` You can create the two files PageOneNested.js and PageTwoNested.js inside _./src/components_. Just copy and paste the content from PageOne.js or PageTwo.js and change the component names, inside the file to PageOneNested and PageTwoNested. ![React Router 4](./rr4_02.png) Clicking on the NestOne or NestTwo link will load the components \<PageOneNested /\> or \<PageTwoNested /\> in the \<main\> area of the screen, under the Dashboard title - awesome ! 
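One optional refinement that is not part of the example above: React Router 4 renders every `<Route>` whose path matches, so if you want exclusive matching plus a catch-all for unknown URLs you can wrap the nested routes in a `<Switch>` - a minimal sketch reusing the components from this chapter:

```js
import { Switch, Route } from 'react-router-dom'

// inside the <main> element of the nested routes component
<Switch>
  <Route path={`${match.url}/page-1-nested`} component={PageOneNested}/>
  <Route path={`${match.url}/page-2-nested`} component={PageTwoNested}/>
  <Route render={() => <h3>Nothing to see here - please pick a page above.</h3>}/>
</Switch>
```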
## 05 Loading Animations

The [react-transition-group](https://github.com/reactjs/react-transition-group/tree/v1-stable) is an easy way to perform animations when a React component enters or leaves the DOM.

```
npm install react-transition-group --save
```

Further reading on animated page transitions with React Router 4:

* [Animated page transitions with React Router 4, ReactTransitionGroup and Animated](https://hackernoon.com/animated-page-transitions-with-react-router-4-reacttransitiongroup-and-animated-1ca17bd97a1a)
* [Animations with ReactTransitionGroup](https://medium.com/appifycanada/animations-with-reacttransitiongroup-4972ad7da286)
* [react-transition-group (v1-stable)](https://github.com/reactjs/react-transition-group/tree/v1-stable)<file_sep>---
date: "2019-01-21"
title: "Kubernetes NGINX https Service"
categories:
  - LINUX
  - Docker
  - Kubernetes
  - NGINX
---

![Shenzhen, China](./photo-34606004425_223f1c6e87_o.jpg)

<!-- TOC -->

- [Generate Certificates and NGINX Configuration](#generate-certificates-and-nginx-configuration)
- [Create a https NGINX Application running in a Kubernetes Cluster](#create-a-https-nginx-application-running-in-a-kubernetes-cluster)

<!-- /TOC -->

__Tutorial based on the official [Kubernetes Staging Examples](https://github.com/mpolinowski/examples/tree/master/staging/https-nginx)__

In this tutorial we create an NGINX web proxy as an end-to-end https service in Kubernetes. It uses an NGINX server block to serve the index page over both http and https, and it will detect changes to NGINX's configuration file, default.conf, mounted as a configmap volume, and reload NGINX automatically.

## Generate Certificates and NGINX Configuration

First generate a self-signed RSA key and certificate that the server can use for TLS:

```bash
mkdir ./nginx
sudo openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout ./nginx/nginx.key -out ./nginx/nginx.crt
```

Fill out the certificate form - you can use your server's domain name, its WAN IP or local IP address as __Common Name__. This will be the address that the certificate is valid for.

![NGINX Webproxy for your Kubernetes Cluster](./kubernetes-nginx_01.png)

Then create your NGINX server configuration file - we are going to use this default file:

```nginx
server {
        listen 80 default_server;
        listen [::]:80 default_server ipv6only=on;

        listen 443 ssl;

        root /usr/share/nginx/html;
        index index.html;

        server_name localhost;
        ssl_certificate /etc/nginx/ssl/tls.crt;
        ssl_certificate_key /etc/nginx/ssl/tls.key;

        location / {
                try_files $uri $uri/ =404;
        }
}
```

![NGINX Webproxy for your Kubernetes Cluster](./kubernetes-nginx_02.png)

## Create a https NGINX Application running in a Kubernetes Cluster

The first thing we'll do is define our endpoint by creating a ConfigMap that stores our Nginx configuration as well as creating a secret from the SSL certs we just created:

```bash
kubectl create secret tls nginxsecret --key ~/nginx/nginx.key --cert ~/nginx/nginx.crt

kubectl create configmap nginxconfigmap --from-file=default.conf
```

![NGINX Webproxy for your Kubernetes Cluster](./kubernetes-nginx_03.png)

Create a Dockerfile for the NGINX container:

```Dockerfile
FROM nginx

COPY index2.html /usr/share/nginx/html/index2.html
RUN chmod +r /usr/share/nginx/html/index2.html
COPY auto-reload-nginx.sh /home/auto-reload-nginx.sh
RUN chmod +x /home/auto-reload-nginx.sh

# install inotify
RUN apt-get update && apt-get install -y inotify-tools
```

This file relies on __two supporting files__:

1. The HTML page that we want to host:

```html
<!DOCTYPE html>
<html>
<head>
<title>Nginx reloaded!</title>
<style>
    body {
        width: 35em;
        margin: 0 auto;
        font-family: Tahoma, Verdana, Arial, sans-serif;
    }
</style>
</head>
<body>
<h1>Nginx has been reloaded!</h1>
<p>If you see this page, the nginx web server has been automatically reloaded, since the config file has been updated using <a href="https://github.com/kubernetes/kubernetes">Kubernetes</a>.</p>

<p>For online documentation and support please refer to
<a href="http://kubernetes.io/">kubernetes.io</a>.<br/></p>

<p>For online documentation and support please refer to
<a href="http://nginx.org/">nginx.org</a>.<br/>
Commercial support is available at
<a href="http://nginx.com/">nginx.com</a>.</p>

<p><em>Thank you for using nginx.</em></p>
</body>
</html>
```

2. And the [shell script](https://github.com/mpolinowski/examples/blob/master/staging/https-nginx/auto-reload-nginx.sh) that we need to automatically reload NGINX whenever the configuration file is changed:

```bash
nginx "$@"
oldcksum=`cksum /etc/nginx/conf.d/default.conf`

inotifywait -e modify,move,create,delete -mr --timefmt '%d/%m/%y %H:%M' --format '%T' \
/etc/nginx/conf.d/ | while read date time; do

    newcksum=`cksum /etc/nginx/conf.d/default.conf`
    if [ "$newcksum" != "$oldcksum" ]; then
        echo "At ${time} on ${date}, config file update detected."
        oldcksum=$newcksum
        nginx -s reload
    fi

done
```

Then create a __Service__ and a __Replication Controller__ using the configuration in `nginx-app.yaml`:

```yaml
apiVersion: v1
kind: Service
metadata:
  name: nginx
  labels:
    app: nginx
spec:
  type: NodePort
  ports:
  - port: 80
    protocol: TCP
    name: http
  - port: 443
    protocol: TCP
    name: https
  selector:
    app: nginx
---
apiVersion: v1
kind: ReplicationController
metadata:
  name: nginx
spec:
  replicas: 1
  template:
    metadata:
      labels:
        app: nginx
    spec:
      volumes:
      - name: secret-volume
        secret:
          secretName: nginxsecret
      - name: configmap-volume
        configMap:
          name: nginxconfigmap
      containers:
      - name: nginxhttps
        image: ymqytw/nginxhttps:1.5
        command: ["/home/auto-reload-nginx.sh"]
        ports:
        - containerPort: 443
        - containerPort: 80
        livenessProbe:
          httpGet:
            path: /index.html
            port: 80
          initialDelaySeconds: 30
          timeoutSeconds: 1
        volumeMounts:
        - mountPath: /etc/nginx/ssl
          name: secret-volume
        - mountPath: /etc/nginx/conf.d
          name: configmap-volume
```

Create both by running the following Kubernetes command:

```bash
kubectl create -f nginx-app.yaml
```

![NGINX Webproxy for your Kubernetes Cluster](./kubernetes-nginx_04.png)

Then, find the node port that Kubernetes is using for http and https traffic (the service is named `nginx` in the YAML above):

```bash
kubectl get service nginx -o json
```

![NGINX Webproxy for your Kubernetes Cluster](./kubernetes-nginx_05.png)

You can test that the service is up and running by accessing your external cluster IP address followed by the port from above:

![NGINX Webproxy for your Kubernetes Cluster](./kubernetes-nginx_06.png)

If everything worked, you will see the default NGINX start page after accepting the self-signed certificate above:

![NGINX Webproxy for your Kubernetes Cluster](./kubernetes-nginx_07.png)

Then we will update the configmap by changing `index.html` to `index2.html`:

```bash
kubectl create configmap nginxconfigmap --from-file=default.conf -o yaml --dry-run\
    | sed 's/index.html/index2.html/g' | kubectl apply -f -
```

Wait a few seconds to let the change propagate and verify that Nginx has been reloaded with the new configuration.
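You can also check this from the command line - a quick sketch, where the placeholders stand for your cluster IP and the https node port found with `kubectl get service` above; `-k` tells curl to accept the self-signed certificate:

```bash
curl -k https://<cluster-ip>:<https-node-port>/
```

If the reload worked, the response is the _"Nginx has been reloaded!"_ page from `index2.html`: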
![NGINX Webproxy for your Kubernetes Cluster](./kubernetes-nginx_08.png)<file_sep>--- date: "2019-09-13" title: "Magento 2 and Varnish 6" categories: - LINUX - Magento --- ![<NAME>, Hongkong](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Install Varnish 6 on Debian 10](#install-varnish-6-on-debian-10) - [Configure NGINX](#configure-nginx) - [Modify the Varnish system configuration](#modify-the-varnish-system-configuration) - [Modify default.vcl](#modify-defaultvcl) - [Configure Magento to use Varnish](#configure-magento-to-use-varnish) - [Export a Varnish Configuration File](#export-a-varnish-configuration-file) - [The Varnishing of the TopMenu](#the-varnishing-of-the-topmenu) <!-- /TOC --> <!-- ## Install Varnish 5 on Debian 10 Add the packagecloud GPG key so that the repository will be authenticated and verified: ```bash wget https://packagecloud.io/varnishcache/varnish5/gpgkey -O - | sudo apt-key add - ``` Make sure you have these packages for using https repositories: ```bash sudo apt-get install apt-transport-https debian-archive-keyring -y ``` Add the Varnish 5 repository from packagecloud for Debian Buster (10): ```bash echo "deb https://packagecloud.io/varnishcache/varnish5/debian/ buster main" | sudo tee -a /etc/apt/sources.list.d/varnishcache_varnish5.list echo "deb-src https://packagecloud.io/varnishcache/varnish5/debian/ buster main" | sudo tee -a /etc/apt/sources.list.d/varnishcache_varnish5.list apt update ``` __NO RELEASE OF VARNISH 5 AVAILABLE FOR DEBIAN BUSTER__ Have to wait for Magento update to support for Varnish 6..... --> ## Install Varnish 6 on Debian 10 In order to get [Varnish](https://varnish-cache.org/docs/6.0/installation/install.html) up and running type sudo apt-get install varnish. Enter the following command to display the version of Varnish you are running: ```bash varnishd -V varnishd (varnish-6.1.1 revision efc2f6c1536cf2272e471f5cff5f145239b19460) Copyright (c) 2006 <NAME> AS Copyright (c) 2006-2015 Varnish Software AS ``` ## Configure NGINX Configure your web server to listen on a port other than the default port `80` because Varnish responds directly to incoming HTTP requests, not the web server. In the sections that follow, we use port `8080` as an example: ```bash nano /etc/nginx/sites-available/magento.conf ``` Change the server to listen to port `8080`: ```js upstream fastcgi_backend { server unix:/run/php/php7.2-fpm.sock; } server { listen 8080 default_server; server_name your-server.de; set $MAGE_ROOT /var/www/html/magento; include /var/www/html/magento/nginx.conf.sample; } ``` Test and reload NGINX: ```bash nginx -t service nginx reload ``` ## Modify the Varnish system configuration As a user with root privileges, open your Vanish configuration file in a text editor: ```bash nano /etc/default/varnish ``` Set the Varnish listen port to 80: ```js VARNISH_LISTEN_PORT=80 ``` Make sure that DAEMON_OPTS contains the correct listening port for the `-a` parameter: ```js DAEMON_OPTS="-a :80 \ -T localhost:6082 \ -f /etc/varnish/default.vcl \ -S /etc/varnish/secret \ -s malloc,256m" ``` Save your changes to the Varnish configuration file and exit the text editor. ## Modify default.vcl This section discusses how to provide minimal configuration so Varnish returns HTTP response headers. This enables you to verify Varnish works before you configure Magento to use Varnish. 1. 
Back up default.vcl:

```bash
cp /etc/varnish/default.vcl /etc/varnish/default.vcl.bak
```

Open `/etc/varnish/default.vcl` in a text editor, locate the `backend default` block and replace the value of .host with the fully qualified hostname or IP address and listen port of the Varnish backend or origin server; that is, the server providing the content Varnish will accelerate. Replace the value of .port with the web server's listen port:

```js
backend default {
    .host = "my.domain.com";
    .port = "8080";
}
```

Save your changes to default.vcl, exit the text editor and restart Varnish:

```bash
service varnish restart
```

__Unfortunately__ this did not change the port Varnish was running on:

```bash
netstat -tulpn | grep varnish
tcp        0      0 0.0.0.0:6081            0.0.0.0:*               LISTEN      1634/varnishd
tcp        0      0 127.0.0.1:6082          0.0.0.0:*               LISTEN      1634/varnishd
tcp6       0      0 :::6081                 :::*                    LISTEN      1634/varnishd
```

[Following this guide](https://serverfault.com/questions/824389/varnish-daemon-not-listening-on-configured-port/824399), applying changes to the default service is best done by creating a new file `/etc/systemd/system/varnish.service.d/customexec.conf`:

```bash
# create the drop in directory
mkdir /etc/systemd/system/varnish.service.d

# create the drop in file. The name is irrelevant, as long as it ends in .conf
nano /etc/systemd/system/varnish.service.d/customexec.conf
```

Here you only need to add the settings you want to change - everything else will be loaded from the default definition file.

```bash
[Service]
ExecStart=/usr/sbin/varnishd -j unix,user=vcache -F -a :80 -T localhost:6082 -f /etc/varnish/default.vcl -S /etc/varnish/secret -s malloc,256m
```

Afterwards, tell systemctl to reload its config files and to restart the service:

```bash
systemctl daemon-reload
service varnish restart
```

Now I received an error message:

```bash
service varnish restart

Failed to restart varnish.service: Unit varnish.service has a bad unit file setting.
See system logs and 'systemctl status varnish.service' for details.

systemctl status varnish.service

Warning: The unit file, source configuration file or drop-ins of varnish.service changed on disk
● varnish.service - Varnish HTTP accelerator
   Loaded: bad-setting (Reason: Unit varnish.service has a bad unit file setting.)
  Drop-In: /etc/systemd/system/varnish.service.d
           └─customexec.conf
   Active: active (running) since Fri 2020-02-07 11:10:15 CET; 33min ago
     Docs: https://www.varnish-cache.org/docs/6.1/
           man:varnishd
 Main PID: 1634 (varnishd)
    Tasks: 217 (limit: 4915)
   Memory: 71.4M
   CGroup: /system.slice/varnish.service
           ├─1634 /usr/sbin/varnishd -j unix,user=vcache -F -a :6081 -T localhost:6082 -f /etc/v
           └─1646 /usr/sbin/varnishd -j unix,user=vcache -F -a :6081 -T localhost:6082 -f /etc/v

Feb 07 11:10:15 Magento2 systemd[1]: Started Varnish HTTP accelerator.
Feb 07 11:10:16 Magento2 varnishd[1634]: Debug: Version: varnish-6.1.1 revision efc2f6c1536cf227
Feb 07 11:10:16 Magento2 varnishd[1634]: Debug: Platform: Linux,4.19.0-6-amd64,x86_64,-junix,-sm
Feb 07 11:10:16 Magento2 varnishd[1634]: Version: varnish-6.1.1 revision efc2f6c1536cf2272e471f5
Feb 07 11:10:16 Magento2 varnishd[1634]: Platform: Linux,4.19.0-6-amd64,x86_64,-junix,-smalloc,-
Feb 07 11:10:16 Magento2 varnishd[1634]: Debug: Child (1646) Started
Feb 07 11:10:16 Magento2 varnishd[1634]: Child (1646) Started
Feb 07 11:10:16 Magento2 varnishd[1634]: Info: Child (1646) said Child starts
Feb 07 11:10:16 Magento2 varnishd[1634]: Child (1646) said Child starts
Feb 07 11:43:32 Magento2 systemd[1]: varnish.service: Service has more than one ExecStart= setti
```

The duplicated `ExecStart` line is in `/lib/systemd/system/varnish.service`:

```
cat /lib/systemd/system/varnish.service

[Unit]
Description=Varnish HTTP accelerator
Documentation=https://www.varnish-cache.org/docs/6.1/ man:varnishd

[Service]
Type=simple
LimitNOFILE=131072
LimitMEMLOCK=82000
ExecStart=/usr/sbin/varnishd -j unix,user=vcache -F -a :6081 -T localhost:6082 -f /etc/varnish/default.vcl -S /etc/varnish/secret -s malloc,256m
ExecReload=/usr/share/varnish/varnishreload
ProtectSystem=full
ProtectHome=true
PrivateTmp=true
PrivateDevices=true

[Install]
WantedBy=multi-user.target
```

Note that the drop-in should have an empty `ExecStart=` to prevent this problem:

```bash
[Service]
ExecStart=
ExecStart=/usr/sbin/varnishd -j unix,user=vcache -F -a :80 -T localhost:6082 -f /etc/varnish/default.vcl -S /etc/varnish/secret -s malloc,256m
```

Varnish is now running on port 80:

```bash
netstat -tulpn | grep varnish
tcp        0      0 0.0.0.0:80              0.0.0.0:*               LISTEN      2972/varnishd
tcp        0      0 127.0.0.1:6082          0.0.0.0:*               LISTEN      2972/varnishd
tcp6       0      0 :::80                   :::*                    LISTEN      2972/varnishd
```

And I am able to access the Magento store again on port 80. You can check that your shop is served by Varnish by querying the HTTP header of your page:

```bash
curl -I http://my.domain.com/

HTTP/1.1 200 OK
Server: nginx/1.14.2
Date: Fri, 07 Feb 2020 12:46:23 GMT
Content-Type: text/html; charset=UTF-8
Set-Cookie: X-Magento-Vary=6b1086e51dc44ada73095007287c835c2e8a8cb2; expires=Fri, 07-Feb-2020 13:46:23 GMT; Max-Age=3600; path=/; HttpOnly
X-Magento-Cache-Control: max-age=86400, public, s-maxage=86400
X-Magento-Cache-Debug: MISS
X-Magento-Tags: store,cms_b,FPC
Pragma: no-cache
Cache-Control: max-age=0, must-revalidate, no-cache, no-store
Expires: Thu, 07 Feb 2019 12:46:23 GMT
X-Content-Type-Options: nosniff
X-XSS-Protection: 1; mode=block
X-Frame-Options: SAMEORIGIN
Content-Encoding: gzip
Vary: Accept-Encoding
X-Varnish: 98400
Age: 0
Via: 1.1 varnish (Varnish/6.1)
Connection: keep-alive
```

> Before you can look at headers, you must set Magento to developer mode. You can also use the [magento deploy:mode:set](https://devdocs.magento.com/guides/v2.3/config-guide/cli/config-cli-subcommands-mode.html#change-to-developer-mode) command.

Make sure Varnish is running, then enter the following command on the Varnish server and click on a link on your website:

```bash
varnishlog
```

## Configure Magento to use Varnish

1. Log in to the Magento Admin as an administrator.
2. Click __Stores__ > __Configuration__ > __Advanced__ > __System__ > __Full Page Cache__.
3. From the Caching Application list, click Varnish Caching.
4. Enter a value in the TTL for public content field.
5. Expand Varnish Configuration and enter the following information:
| Field | Description |
| -- | -- |
| Access list | Enter the fully qualified hostname, IP address, or Classless Inter-Domain Routing (CIDR) notation IP address range for which to invalidate content. [More information](https://www.varnish-cache.org/docs/3.0/tutorial/purging.html) |
| Backend host | Enter the fully qualified hostname or IP address and listen port of the Varnish backend or origin server; that is, the server providing the content Varnish will accelerate. Typically, this is your web server. [More information](https://www.varnish-cache.org/docs/trunk/users-guide/vcl-backends.html) |
| Backend port | Origin server's listen port. |
| Grace period | The grace period determines how long Varnish serves stale content if the backend is not responsive. The default value is 300 seconds. |

![Magento 2 with Varnish 6](./Magento_Varnish_01.png)

6. Click Save Config.

## Export a Varnish Configuration File

1. Click one of the export buttons to create a `varnish.vcl` you can use with Varnish.

![Magento 2 with Varnish 6](./Magento_Varnish_02.png)

2. Upload the file to `/etc/varnish/varnish.vcl`
3. It is recommended to change the value of `acl purge` to the IP address of the Varnish host.

```bash
acl purge {
  "localhost";
}
```

4. Edit `/etc/default/varnish` (e.g. with `nano`) to add the new configuration file:

```js
DAEMON_OPTS="-a :80 \
             -T localhost:6082 \
             -f /etc/varnish/varnish.vcl \
             -S /etc/varnish/secret \
             -s malloc,256m"
```

5. Static files should not be cached by default, but if you want to cache them, you can edit the section `Static files caching` in the VCL to have the following content:

```bash
# Static files should not be cached by default
return (pass);

# But if you use a few locales and don't use CDN you can enable caching static files by commenting previous line (#return (pass);) and uncommenting next 3 lines
#unset req.http.Https;
#unset req.http./* */;
#unset req.http.Cookie;
```

6. And reload Varnish and NGINX:

```bash
systemctl daemon-reload
service varnish restart
service nginx restart
```

7. Now that you're using the `varnish.vcl` generated for you by Magento, you can perform some final verifications to make sure Varnish is working.

```bash
curl -I -v --location-trusted 'http://my.domain.com'
```

## The Varnishing of the TopMenu

After switching to Varnish I noticed that my _topmenu_ module was no longer loading. Checking the page source showed me that the [Edge Side Include](https://varnish-cache.org/docs/6.0/users-guide/esi.html) was failing:

![Varnish 6 on Magento 2](./Varnish_ESI_Problems_01.png)

The [problem in my case](https://github.com/magento/magento2/issues/3421) was a `ttl="3600"` parameter given in the menu component that has to be deleted:

```xml
<?xml version="1.0"?>
<page xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:View/Layout/etc/page_configuration.xsd">
    <body>
        <referenceContainer name="div.sidebar.main">
            <block class="TemplateMonster\Megamenu\Block\Html\Topmenu" name="catalog.sidebarnav" template="TemplateMonster_Megamenu::html/topmenu.phtml" ifconfig="megamenu/config/megamenu_general_show_left" before="-" ttl="3600" />
        </referenceContainer>
        <referenceBlock name="catalog.topnav" template="TemplateMonster_Megamenu::html/topmenu.phtml" />
    </body>
</page>
```

I found this block in `themeXXX\app\code\TemplateMonster\Megamenu\view\frontend\layout\default.xml` in the template source.
And on the server inside the Magento installation at `/vendor/magento/module-theme/view/frontend/layout/default.xml`.

There might also be issues with [HTTPS on the backend side](https://support.magento.com/hc/en-us/articles/360028757791-Top-navigation-panel-does-not-load-on-storefront):

```bash
nano /etc/default/varnish
```

In the DAEMON_OPTS variable, add `-p feature=+esi_ignore_https`, `-p feature=+esi_ignore_other_elements` and `-p feature=+esi_disable_xml_check`. This would look like:

```js
DAEMON_OPTS="-a :80 \
             -p feature=+esi_ignore_other_elements \
             -p feature=+esi_disable_xml_check \
             -p feature=+esi_ignore_https \
             -T localhost:6082 \
             -f /etc/varnish/varnish.vcl \
             -S /etc/varnish/secret \
             -s malloc,256m"
```

When you change this, you need to run `service varnish restart` for the changes to take effect.<file_sep>---
date: "2016-06-01"
title: "Node/Express with MongoDB"
categories:
  - Javascript
  - Node
  - Databases
---

![Hongkong](./photo-34445934842_9cbfb7dfcb_o.jpg)

[Github Repository](https://github.com/mpolinowski/node_express_git)

> This code is part of a training in web development with [Node.js](https://nodejs.org/en/). [EJS](http://ejs.co) will be used as the template engine for rendering HTML out of [Express](https://expressjs.com). The library application will use [MongoDB](https://www.mongodb.com) to store information about books and authors - but will also employ the [GoodReads API](https://www.goodreads.com/api) to provide more details on each. [Passport.js](http://www.passportjs.org) is used for local security.

* This is Part I and will guide us through the basic [Node.js](https://nodejs.org/en/) and [Express](https://expressjs.com) setup
* In [Part II](/node-express-mongodb-part-ii/) we will add [Bower.js](https://bower.io/) and the [Gulp.js](https://gulpjs.com/) taskrunner to manage our style dependencies and auto-restart our development environment using [Nodemon.js](https://nodemon.io)
* In [Part III](/node-express-mongodb-part-iii/) we will add the magic of the [EJS](http://ejs.co) templating engine
* [Part IV](/node-express-mongodb-part-iv/) deals with advanced Express routing options
* [Part V](/node-express-mongodb-part-v/) deals with [Passport.js](http://www.passportjs.org) user authentication and [MongoDB](https://www.mongodb.com)

<!-- TOC -->

- [01 Install Node.js and Express.js to serve our Web Application](#01-install-nodejs-and-expressjs-to-serve-our-web-application)
- [02 Add Start Script](#02-add-start-script)
- [03 Add Routing (Hello World)](#03-add-routing-hello-world)
- [04 Serve Static Files](#04-serve-static-files)
- [05 Add Bower to the Project](#05-add-bower-to-the-project)
- [06 Add Gulp to the Project](#06-add-gulp-to-the-project)
  - [Inject Bower Dependencies with Wiredep](#inject-bower-dependencies-with-wiredep)
  - [Inject with Gulp-Inject](#inject-with-gulp-inject)
  - [Auto-restart with Nodemon](#auto-restart-with-nodemon)
- [07 Add a Templating Engine - EJS](#07-add-a-templating-engine---ejs)
- [08 Adding a Page Navigation with Routing](#08-adding-a-page-navigation-with-routing)
- [09 Adding a Router for the Listview of our Book Page](#09-adding-a-router-for-the-listview-of-our-book-page)
  - [Adding a Route to Render](#adding-a-route-to-render)
  - [Cleaning up the App File with Routers](#cleaning-up-the-app-file-with-routers)
- [10 Creating a Single Book by ID Route & View](#10-creating-a-single-book-by-id-route--view)
- [11 Cleaning up our routes by creating a variable for the NAV element](#11-cleaning-up-our-routes-by-creating-a-variable-for-the-nav-element)
- [12 Adding MongoDB](#12-adding-mongodb)
  - [Download and Install MongoDB](#download-and-install-mongodb)
  - [Create adminRoutes to populate the Database](#create-adminroutes-to-populate-the-database)
- [14 Use the MongoDB Response](#14-use-the-mongodb-response)
  - [Select Many](#select-many)
  - [Select One](#select-one)
- [15 Creating a SignIn Form on Index.ejs](#15-creating-a-signin-form-on-indexejs)
- [16 Creating the Authentication Route](#16-creating-the-authentication-route)
- [17 Adding Passport.js Middleware](#17-adding-passportjs-middleware)
- [18 Authentication with Local Strategy](#18-authentication-with-local-strategy)
- [19 Saving the User to MongoDB](#19-saving-the-user-to-mongodb)
- [20 User SignIn from mongoDB](#20-user-signin-from-mongodb)
- [21 Verifying User in DB](#21-verifying-user-in-db)

<!-- /TOC -->

## 01 Install Node.js and Express.js to serve our Web Application

First install [Node.js](https://nodejs.org/en/download/) and initialize the project with `npm init`. Then `npm install express --save` in the app directory. Create an _app.js_ and run the Express webserver on a specified port:

```javascript
var express = require('express');
var app = express();
var port = 3000;

app.listen(port, function(err){
  console.log('running server on port ' + port);
});
```

Running the app with node app.js should give you the console log that the webserver is up and running on the specified port.

## 02 Add Start Script

Now we will add a npm script to the _package.json_ file to start our app:

```json
{
  "name": "node-express",
  "version": "1.0.0",
  "description": "Library App",
  "main": "app.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "start": "node app.js"
  }
}
```

The line _"start": "node app.js"_ allows us to use the npm start command instead of having to define our starting point like before - _node app.js_

## 03 Add Routing (Hello World)

When accessing the home route (http://localhost:3000/), we want to send a Hello World, to test our routing. Then we add another route - /books - to the _app.js_ file:

```javascript
var express = require('express');
var app = express();
var port = 3000;

app.get('/', function(req, res){
  res.send('Hello World')
});

app.get('/books', function(req, res){
  res.send('Hello World from the books route')
});

app.listen(port, function(err){
  console.log('running server on port ' + port);
});
```

## 04 Serve Static Files

We first add two new folders to our project - public/css & public/js and a src/views folder. We download a free Bootstrap theme from a page like bootstrapzero.com and put the css/js files into the public folder. The html file has to be located in the views folder. The Express middleware is used to serve the content of our public folder, by adding the line app.use(express.static('public')); to the app.js file. The static html file - index.html - from our template, will be served by app.use(express.static('src/views'));.

```javascript
var express = require('express');
var app = express();
var port = 3000;

app.use(express.static('public'));
app.use(express.static('src/views'));

app.get('/', function(req, res){
  res.send('Hello World')
});

app.get('/books', function(req, res){
  res.send('Hello World from the books route')
});

app.listen(port, function(err){
  console.log('running server on port ' + port);
});
```

Through the public route, we are now able to access the css/js files by typing in e.g.
http://localhost:3000/css/styles.css into our browser (the bootstrap components of the template will not be used - we will use Bower to add them later - __DELETE THOSE FILES FOR [Next Part](/node-express-mongodb-part-ii/)__. The index.html is accessible by http://localhost:3000/index.html.

## 05 Add Bower to the Project

First install Bower globally with _npm install bower -g_. Then do a bower init in the app directory (creation of bower.json). We now add a new file to tell Bower to install directly into our public directory:

__.bowerrc__

```json
{
  "directory": "public/lib"
}
```

Next we bower install bootstrap font-awesome --save to get the latest stable versions (jQuery is added automatically as a Bootstrap dependency). They will be installed to the lib directory in our public folder. The bootstrap/jquery/font-awesome files can now be added to the template index.html by linking e.g. `/lib/bootstrap/dist/css/bootstrap.min.css`.

## 06 Add Gulp to the Project

### Inject Bower Dependencies with Wiredep

First install Gulp globally with npm install -g gulp. Then install it to the app directory via _npm install --save-dev gulp_ (as a development dependency). We now want to inject dependencies (css/js) into our views automatically with wiredep - _npm install --save-dev wiredep_. We now add a new file to tell Gulp what to do: ignore node_modules, only use files from the src directory, and add dependencies with wiredep.

__gulpfile.js__

```javascript
var gulp = require('gulp');

var jsFiles = ['*.js', 'src/**/*.js'];

gulp.task('inject', function() {
    var wiredep = require('wiredep').stream; /* Use wiredep to inject css/js dependencies to views e.g. bootstrap */

    var options = {
        bowerJson: require('./bower.json'), /* Tell wiredep to check dependencies from the bower.json file e.g. bootstrap */
        directory: './public/lib', /* Tell wiredep to find dependencies in the lib directory. It will search for the json file - e.g. ./public/lib/bootstrap/.bower.json */
        ignorePath: '../../public' /* The path to the css/js files has to be given relative to the public folder - e.g. (../../public/)/lib/bootstrap/dist/css/bootstrap.min.css */
    };

    return gulp.src('./src/views/*.html')
        .pipe(wiredep(options))
        .pipe(gulp.dest('./src/views'));
});
```

Bootstrap 3 now uses LESS - we have to override the defaults to grab the CSS files instead and add them to our index.html. The main overrides can be added to the global bower.json file. This way the bower.json file inside public/lib/bootstrap and public/lib/font-awesome will be ignored.

__bower.json__

```json
{
  "name": "node-express",
  "description": "node express test",
  "main": "app.js",
  "authors": [
    "[object Object]"
  ],
  "license": "MIT",
  "homepage": "",
  "ignore": [
    "**/.*",
    "node_modules",
    "bower_components",
    "test",
    "tests"
  ],
  "dependencies": {
    "bootstrap": "^3.3.6",
    "font-awesome": "^4.6.1"
  },
  "overrides": {
    "bootstrap": {
      "main": [
        "dist/js/bootstrap.js",
        "dist/css/bootstrap.min.css",
        "dist/less/bootstrap.less"
      ]
    },
    "font-awesome": {
      "main": [
        "less/font-awesome.less",
        "css/font-awesome.min.css",
        "scss/font-awesome.scss"
      ]
    }
  }
}
```

__index.html__

We now have to add `<!-- bower:css -->` / `<!-- endbower -->` and `<!-- bower:js -->` / `<!-- endbower -->` comment markers to our index.html template to inject the Bower css/js dependencies when the command `gulp inject` is run.
```html
<!DOCTYPE html>
<html lang="en">
<head>
  <meta http-equiv="content-type" content="text/html; charset=UTF-8">
  <meta charset="utf-8">
  <title>LibraryApp</title>
  <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
  <!--bower:css-->
  <link rel="stylesheet" href="/lib/bootstrap/dist/css/bootstrap.min.css" /> <!-- Will be automatically injected with the command "gulp inject" -->
  <link rel="stylesheet" href="/lib/font-awesome/css/font-awesome.min.css" /> <!-- Will be automatically injected with the command "gulp inject" -->
  <!--endbower-->
  <!-- bower:js -->
  <script src="/lib/jquery/dist/jquery.js"></script> <!-- Will be automatically injected with the command "gulp inject" -->
  <script src="/lib/bootstrap/dist/js/bootstrap.js"></script> <!-- Will be automatically injected with the command "gulp inject" -->
  <!-- endbower -->
</head>
```

### Inject with Gulp-Inject

After injecting the Bower dependencies, we now have to inject our own css and js files from the public folder. We will use Gulp-Inject to perform this task. First do a npm install --save-dev gulp-inject, to install Gulp-Inject as a development dependency. We now add Gulp-Inject to our gulpfile.js:

__gulpfile.js__

```javascript
var gulp = require('gulp');

var jsFiles = ['*.js', 'src/**/*.js'];

gulp.task('inject', function() {
    var wiredep = require('wiredep').stream;
    var inject = require('gulp-inject'); /* Use gulp-inject to inject our personal css/js dependencies to views */

    var injectSrc = gulp.src(['./public/css/*.css', /* Tell gulp-inject where our personal css/js dependencies are located */
                              './public/js/*.js'
                             ], {
                                read: false /* We only need the path not content */
                             });

    var injectOptions = {
        ignorePath: '/public' /* Tell gulp-inject to use a path relative to /public */
    };

    var options = {
        bowerJson: require('./bower.json'),
        directory: './public/lib',
        ignorePath: '../../public'
    };

    return gulp.src('./src/views/*.html')
        .pipe(wiredep(options))
        .pipe(inject(injectSrc, injectOptions)) /* Use gulp-inject to inject our personal css/js dependencies to views */
        .pipe(gulp.dest('./src/views'));
});
```

We now have to add `<!-- inject:css -->` / `<!-- endinject -->` and `<!-- inject:js -->` / `<!-- endinject -->` comment markers to our index.html template to inject our own css/js dependencies when the command `gulp inject` is run.
```html
<!DOCTYPE html>
<html lang="en">
<head>
  <meta http-equiv="content-type" content="text/html; charset=UTF-8">
  <meta charset="utf-8">
  <title>LibraryApp</title>
  <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
  <!--bower:css-->
  <link rel="stylesheet" href="/lib/bootstrap/dist/css/bootstrap.min.css" /> <!-- Will be automatically injected with the command "gulp inject" -->
  <link rel="stylesheet" href="/lib/font-awesome/css/font-awesome.min.css" /> <!-- Will be automatically injected with the command "gulp inject" -->
  <!--endbower-->
  <!-- bower:js -->
  <script src="/lib/jquery/dist/jquery.js"></script> <!-- Will be automatically injected with the command "gulp inject" -->
  <script src="/lib/bootstrap/dist/js/bootstrap.js"></script> <!-- Will be automatically injected with the command "gulp inject" -->
  <!-- endbower -->
  <!-- inject:css-->
  <link rel="stylesheet" href="/css/styles.css"> <!-- Will be automatically injected with the command "gulp inject" -->
  <!-- endinject-->
  <!--inject:js-->
  <script src="/js/default.js"></script> <!-- Will be automatically injected with the command "gulp inject" -->
  <!--endinject-->
  <!--[if lt IE 9]>
  <script src="//html5shim.googlecode.com/svn/trunk/html5.js"></script>
  <![endif]-->
</head>
```

### Auto-restart with Nodemon

We now add Nodemon to monitor our node.js app - Nodemon will automatically restart the server when a change is detected. To install it type _npm install --save-dev gulp-nodemon_. We now add Nodemon to our gulpfile.js:

__gulpfile.js__

```javascript
var gulp = require('gulp');
var nodemon = require('gulp-nodemon'); /* Add nodemon to automatically restart the server, when a change was detected */

var jsFiles = ['*.js', 'src/**/*.js'];

gulp.task('inject', function() {
    var wiredep = require('wiredep').stream;
    var inject = require('gulp-inject');

    var injectSrc = gulp.src(['./public/css/*.css',
                              './public/js/*.js'
                             ], {
                                read: false
                             });

    var injectOptions = {
        ignorePath: '/public'
    };

    var options = {
        bowerJson: require('./bower.json'),
        directory: './public/lib',
        ignorePath: '../../public'
    };

    return gulp.src('./src/views/*.html')
        .pipe(wiredep(options))
        .pipe(inject(injectSrc, injectOptions))
        .pipe(gulp.dest('./src/views'));
});

gulp.task('serve', ['inject'], function() { /* Create a 'serve' task to automatically execute the 'inject' task above on start-up */
    var options = { /* The array above lists the tasks to run before 'serve' - here you can add more tasks to be executed */
        script: 'app.js', /* 'serve' starts our app.js on 'PORT' and nodemon restarts it when 'jsFiles' are changed */
        delayTime: 1,
        env: {
            'PORT': 8080 /* Environment variables e.g. database connection strings */
        },
        watch: jsFiles
    };

    return nodemon(options)
        .on('restart', function(ev) {
            console.log('Restarting...');
        });
});
```

## 07 Add a Templating Engine - EJS

EJS combines data and a template to produce HTML. JavaScript between <% %> is executed. JavaScript between <%= %> adds strings to your HTML and <%- %> can contain HTML formatted content. To add our templating engine we first have to install it with npm install --save ejs.
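As a quick illustration of the three tag types (this snippet is not part of the project files, just a demo you could drop into any EJS view):

```html
<!-- Illustration only - the three EJS tag types -->
<% var color = 'green' %>                  <!-- scriptlet: executed, renders nothing -->
<p><%= color %></p>                        <!-- escaped output: renders <p>green</p> -->
<div><%- '<em>raw HTML</em>' %></div>      <!-- unescaped output: renders the em element -->
```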
Now we add the engine to our app.js file:

__app.js__

```javascript
var express = require('express');
var app = express();
var port = process.env.PORT || 3000; /* 'gulp serve' uses PORT 8080 - if no port is defined by the environment use port 3000 */

app.use(express.static('public'));
app.set('views', './src/views');
app.set('view engine', 'ejs'); /* Templating Engine is set to EJS */

app.get('/', function(req, res){
    res.render('index', {title: 'Rendered Title', list: ['a', 'b']}); /* This content will be displayed in the index.ejs file we'll create next */
});

app.get('/books', function(req, res){
    res.send('Hello World from the books route')
});

app.listen(port, function(err){
    console.log('running server on port ' + port);
});
```

Now we create a simple index.ejs file in our src/views directory:

__index.ejs__

```html
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <title><%= title %></title>
  </head>
  <body>
    <h1><%= title %></h1>
    <ul>
      <% for(var i=0; i<list.length; i++) { %>
        <li><%= list[i] %></li>
      <% } %>
    </ul>
  </body>
</html>
```

Open http://localhost:8080/ to check the result - EJS should fill out the title and create the unordered list with the items a and b. Now we will take the code from our template index.html and copy it to index.ejs. EJS will later be used to display a list view of books in our library app.

## 08 Adding a Page Navigation with Routing

We want to add two routes to our navigation bar - one for authors and one for books. In the final version of the library app, this will display all books either by their author or book title. We will create those routes in the app.js file and add the navigation to our navbar using EJS.

__app.js__

```javascript
var express = require('express');
var app = express();
var port = process.env.PORT || 3000;

app.use(express.static('public'));
app.set('views', './src/views');
app.set('view engine', 'ejs');

app.get('/', function(req, res){
    res.render('index', {
        title: 'Home',
        nav: [{Link: '/Books', Text: 'Books'}, /* We change the list from before to a nav element */
              {Link: '/Authors', Text: 'Authors'}]
    });
});

app.get('/books', function(req, res){
    res.send('Hello World from the books route')
});

app.listen(port, function(err){
    console.log('running server on port ' + port);
});
```

__index.ejs__

```html
<header>
  <nav class="navbar navbar-inverse navbar-fixed-top" role="banner">
    <div class="container-fluid">
      <div class="navbar-header">
        <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1" aria-expanded="false">
          <span class="sr-only">Toggle navigation</span>
          <span class="icon-bar"></span>
          <span class="icon-bar"></span>
          <span class="icon-bar"></span>
        </button>
        <a href="/" class="navbar-brand">
          <%= title %> <!-- Adding nav element from app.js -->
        </a>
      </div>
      <div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
        <ul class="nav navbar-nav navbar-right">
          <% for(var i=0; i<nav.length;i++){%> <!-- Adding nav element from app.js -->
          <li>
            <a href="<%=nav[i].Link%>"> <!-- Adding nav element from app.js -->
              <%= nav[i].Text %> <!-- Adding nav element from app.js -->
            </a>
          </li>
          <%}%>
        </ul>
      </div>
    </div>
  </nav>
</header>
```

## 09 Adding a Router for the Listview of our Book Page

### Adding a Route to Render

We want to group all routes for the Book pages under one Router - later we will simply export this router from a separate file to app.js.
__app.js__

```javascript
var express = require('express');
var app = express();
var port = process.env.PORT || 3000;
var bookRouter = express.Router(); /* Creating a Router for all Book Routes */

app.use(express.static('public'));
app.set('views', './src/views');
app.set('view engine', 'ejs');

bookRouter.route('/') /* When you go to /Books you will get the response 'Hello Books' */
    .get(function(req, res) {
        res.send('Hello Books')
    });

bookRouter.route('/Single') /* When you go to /Books/Single you will get the response 'Hello Single Books' */
    .get(function(req, res) {
        res.send('Hello Single Books')
    });

app.use('/Books', bookRouter); /* bookRouter will be used once you go to /Books */

app.get('/', function(req, res){
    res.render('index', {
        title: 'Home',
        nav: [{Link: '/Books', Text: 'Books'},
              {Link: '/Authors', Text: 'Authors'}]
    });
});

app.get('/books', function(req, res){
    res.send('Hello World from the books route')
});

app.listen(port, function(err){
    console.log('running server on port ' + port);
});
```

bookRouter now sends us a string 'Hello Books' or 'Hello Single Books' when we go to http://localhost:8080/Books or http://localhost:8080/Books/Single . We now want to render different views when we access those URLs.

__app.js__

```javascript
var express = require('express');
var app = express();
var port = process.env.PORT || 3000;
var bookRouter = express.Router();

app.use(express.static('public'));
app.set('views', './src/views'); /* The render function requires an EJS file here to render */
app.set('view engine', 'ejs');

bookRouter.route('/')
    .get(function(req, res) {
        res.render('bookListView', { /* We change res.send to res.render. Since we set views to './src/views', the router will search for a bookListView.ejs in this directory to render */
            title: 'Home', /* We have to add nav since it is displayed on every view - we will export it later */
            nav: [{Link: '/Books', Text: 'Books'},
                  {Link: '/Authors', Text: 'Authors'}]
        });
    });

bookRouter.route('/Single')
    .get(function(req, res) {
        res.send('Hello Single Books')
    });

app.use('/Books', bookRouter);

app.get('/', function(req, res){
    res.render('index', {
        title: 'Home',
        nav: [{Link: '/Books', Text: 'Books'},
              {Link: '/Authors', Text: 'Authors'}]
    });
});

app.get('/books', function(req, res){
    res.send('Hello World from the books route')
});

app.listen(port, function(err){
    console.log('running server on port ' + port);
});
```

You can copy the index.ejs file and rename the copy to bookListView.ejs - this file will now be rendered when you access http://localhost:8080/Books .

Adding some Books to the Book View

We now have a view that is rendered when we access the Books view. Now we want to use EJS to populate the view with some books. Later, those books will be added from MongoDB.
Now we just hardcode some books into app.js to prove the concept:

__app.js__

```javascript
var express = require('express');
var app = express();
var port = process.env.PORT || 3000;
var bookRouter = express.Router();

app.use(express.static('public'));
app.set('views', './src/views');
app.set('view engine', 'ejs');

var books = [{ /* Just some hardcoded books for now - later we will use MongoDB */
    title: 'Cryptonomicon',
    author: '<NAME>',
    read: true
}, {
    title: 'Leviathan Wakes',
    author: '<NAME>',
    read: false
}, {
    title: 'The Lord of the Rings',
    author: '<NAME>',
    read: true
}, {
    title: 'Norwegian Wood',
    author: '<NAME>',
    read: false
}, {
    title: 'Microserfs',
    author: '<NAME>',
    read: true
}, {
    title: 'Up Country',
    author: '<NAME>',
    read: true
}, {
    title: 'Night over Water',
    author: '<NAME>',
    read: true
}, {
    title: 'The Stand',
    author: '<NAME>',
    read: true
}];

bookRouter.route('/')
    .get(function(req, res) {
        res.render('bookListView', {
            title: 'Home',
            nav: [{Link: '/Books', Text: 'Books'},
                  {Link: '/Authors', Text: 'Authors'}],
            books: books /* passing in the book array from above - so it will be available for rendering */
        });
    });

bookRouter.route('/Single')
    .get(function(req, res) {
        res.send('Hello Single Books')
    });

app.use('/Books', bookRouter);

app.get('/', function(req, res){
    res.render('index', {
        title: 'Home',
        nav: [{Link: '/Books', Text: 'Books'},
              {Link: '/Authors', Text: 'Authors'}]
    });
});

app.get('/books', function(req, res){
    res.send('Hello World from the books route')
});

app.listen(port, function(err){
    console.log('running server on port ' + port);
});
```

Now we can modify our bookListView to add those books via EJS:

__bookListView.ejs__

```html
<section class="container" style="margin-bottom: 400px;">
    <div class="row">
        <% for(var i=0; i<books.length;i++){ %> <!-- Use <% ... %> here - with the EQUAL sign the code would not be executed -->
        <div class="col-xs-6 col-md-4 col-lg-3 center-block" style="margin-bottom: 10px;">
            <div class="panel panel-default">
                <div class="panel-heading">
                    <h4><%= books[i].title %></h4>
                </div>
                <div class="panel-body">
                    <div class="col-xs-12 col-sm-4 col-lg-6">
                        <a class="story-title" href="/Books/<%=books[i]._id%>"><img alt="" src="<%=books[i].cover%>" style="height:100px" class="img-thumbnail"></a>
                    </div>
                    <div class="col-xs-12 col-sm-8 col-lg-6">
                        <p><span class="label label-default"><strong><%= books[i].author %></strong></span></p>
                        <p><span style="font-family:courier,'new courier';" class="text-muted"><a href="/Books/<%= i %>" class="text-muted">Read More</a></span></p> <!-- The link to the detailed single book view will be /Books/[i] - we later change this to /Books/:id -->
                    </div>
                </div>
            </div>
        </div>
        <% } %> <!-- Use <% ... %> here - with the EQUAL sign the code would not be executed -->
    </div>
    <hr>
</section>
```

When you access http://localhost:8080/Books you will see the nav bar from before, as well as a list of our books.
### Cleaning up the App File with Routers

Remove the routes from the app.js file - we create a file bookRoutes.js under src/routes, cut the book routes from app.js and simply require bookRouter instead:

__bookRoutes.js__

```javascript
var express = require('express');
var bookRouter = express.Router();

var books = [
    {
        title: 'War and Peace',
        genre: 'Historical Fiction',
        author: '<NAME>',
        read: false
    },
    {
        title: 'Les Misérables',
        genre: 'Historical Fiction',
        author: '<NAME>',
        read: false
    },
    {
        title: 'The Time Machine',
        genre: 'Science Fiction',
        author: '<NAME>',
        read: false
    },
    {
        title: 'A Journey into the Center of the Earth',
        genre: 'Science Fiction',
        author: '<NAME>',
        read: false
    },
    {
        title: 'The Dark World',
        genre: 'Fantasy',
        author: '<NAME>',
        read: false
    },
    {
        title: 'The Wind in the Willows',
        genre: 'Fantasy',
        author: '<NAME>',
        read: false
    },
    {
        title: 'Life On The Mississippi',
        genre: 'History',
        author: '<NAME>',
        read: false
    },
    {
        title: 'Childhood',
        genre: 'Biography',
        author: '<NAME>',
        read: false
    }
];

bookRouter.route('/') /* route accessed via /Books - bookListView.ejs will be rendered and populated with title, nav and books */
    .get(function (req, res) {
        res.render('bookListView', {
            title: 'Books',
            nav: [{ Link: '/Books', Text: 'Books' },
                  { Link: '/Authors', Text: 'Authors' }],
            books: books
        });
    });

module.exports = bookRouter; /* the bookRouter has to be exported to be available for require in app.js */
```

__app.js__

```javascript
var express = require('express');
var app = express();
var port = process.env.PORT || 5000;
var bookRouter = require('./src/routes/bookRoutes'); /* We now require the book routes that we moved to bookRoutes.js */

app.use(express.static('public'));
app.set('views', './src/views');
app.set('view engine', 'ejs');

app.use('/Books', bookRouter); /* bookRouter is called here when you access /Books - routes are taken from bookRoutes.js */

app.get('/', function (req, res) {
    res.render('index', {
        title: 'Books',
        nav: [{ Link: '/Books', Text: 'Books' },
              { Link: '/Authors', Text: 'Authors' }]
    });
});

app.get('/books', function (req, res) {
    res.send('Hello Books');
});

app.listen(port, function (err) {
    console.log('running server on port ' + port);
});
```

## 10 Creating a Single Book by ID Route & View

Now we want to add another route to a detailed view of a single book. The route should be accessible by /Books/:id (ID of the book inside the hardcoded books object - later we will pull an ID from MongoDB). The view rendered will be bookView.ejs.
__bookRoutes.js__

```javascript
var express = require('express');
var bookRouter = express.Router();

var books = [
    {
        title: 'War and Peace',
        genre: 'Historical Fiction',
        author: '<NAME>',
        read: false
    },
    {
        title: 'Les Misérables',
        genre: 'Historical Fiction',
        author: '<NAME>',
        read: false
    },
    {
        title: 'The Time Machine',
        genre: 'Science Fiction',
        author: '<NAME>',
        read: false
    },
    {
        title: 'A Journey into the Center of the Earth',
        genre: 'Science Fiction',
        author: '<NAME>',
        read: false
    },
    {
        title: 'The Dark World',
        genre: 'Fantasy',
        author: '<NAME>',
        read: false
    },
    {
        title: 'The Wind in the Willows',
        genre: 'Fantasy',
        author: '<NAME>',
        read: false
    },
    {
        title: 'Life On The Mississippi',
        genre: 'History',
        author: '<NAME>',
        read: false
    },
    {
        title: 'Childhood',
        genre: 'Biography',
        author: '<NAME>',
        read: false
    }
];

bookRouter.route('/')
    .get(function (req, res) {
        res.render('bookListView', {
            title: 'Books',
            nav: [{ Link: '/Books', Text: 'Books' },
                  { Link: '/Authors', Text: 'Authors' }],
            books: books
        });
    });

bookRouter.route('/:id') /* We want to be able to access detailed info about a single book by adding the book ID - /Books/:id */
    .get(function (req, res) {
        var id = req.params.id; /* pass id parameter into URL - will be retrieved from books[id] */
        res.render('bookView', { /* We have to create another view for the single book - bookView.ejs */
            title: 'Books',
            nav: [{ Link: '/Books', Text: 'Books' },
                  { Link: '/Authors', Text: 'Authors' }],
            book: books[id]
        });
    });

module.exports = bookRouter;
```

Now we need to write the view to be rendered, bookView.ejs (the code below only contains the body part - the header is identical to bookListView.ejs):

__bookView.ejs__

```html
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <title>Library App</title>
  </head>
  <body>
    <section class="container" style="margin-bottom: 300px;">
      <div class="row">
        <div class="col-xs-12 center-block">
          <div class="panel panel-default">
            <div class="panel-heading">
              <h4><%= book.title %></h4>
            </div>
            <div class="panel-body">
              <div class="col-xs-12 col-sm-2 col-lg-1">
                <a class="story-title"><img alt="Book Cover" src="<%=book.book.image_url%>" class="img-responsive"></a>
              </div>
              <div class="col-xs-12 col-sm-10 col-lg-11">
                <h4><span class="label label-default"><strong><%= book.author %></strong></span></h4>
              </div>
            </div>
          </div>
        </div>
      </div>
    </section>
  </body>
</html>
```

## 11 Cleaning up our routes by creating a variable for the NAV element

We created a navbar in all our views and used EJS to inject some navigational elements in there. But we don't want to have to copy it into every route. We will create a nav element in app.js instead.
__app.js__

```javascript
var express = require('express');
var app = express();
var port = process.env.PORT || 5000;
var nav = [{ /* We create a NAV element in app.js - this is now available for all routes */
    Link: '/Books',
    Text: 'Book'
}, {
    Link: '/Authors',
    Text: 'Author'
}];
var bookRouter = require('./src/routes/bookRoutes')(nav); /* The NAV element is now automatically passed into bookRouter to be available on all bookRoutes */

app.use(express.static('public'));
app.set('views', './src/views');
app.set('view engine', 'ejs');

app.use('/Books', bookRouter);

app.get('/', function (req, res) {
    res.render('index', {
        title: 'Home',
        nav: nav /* We no longer have to type in the whole navigation - YEAH! */
    });
});

app.get('/books', function (req, res) {
    res.send('Hello Books');
});

app.listen(port, function (err) {
    console.log('running server on port ' + port);
});
```

Now we have to wrap our routes into a router function with NAV as a variable, to make it available to those routes:

__bookRoutes.js__

```javascript
var express = require('express');
var bookRouter = express.Router();

var router = function(nav){ /* The router is wrapped into a function with NAV as a variable */
    var books = [
        {
            title: 'War and Peace',
            genre: 'Historical Fiction',
            author: '<NAME>',
            read: false
        },
        {
            title: 'Les Misérables',
            genre: 'Historical Fiction',
            author: '<NAME>',
            read: false
        },
        {
            title: 'The Time Machine',
            genre: 'Science Fiction',
            author: '<NAME>',
            read: false
        },
        {
            title: 'A Journey into the Center of the Earth',
            genre: 'Science Fiction',
            author: '<NAME>',
            read: false
        },
        {
            title: 'The Dark World',
            genre: 'Fantasy',
            author: '<NAME>',
            read: false
        },
        {
            title: 'The Wind in the Willows',
            genre: 'Fantasy',
            author: '<NAME>',
            read: false
        },
        {
            title: 'Life On The Mississippi',
            genre: 'History',
            author: '<NAME>',
            read: false
        },
        {
            title: 'Childhood',
            genre: 'Biography',
            author: '<NAME>',
            read: false
        }
    ];

    bookRouter.route('/')
        .get(function (req, res) {
            res.render('bookListView', {
                title: 'Books',
                nav: nav, /* All routes wrapped into the router function can now use NAV as a variable */
                books: books
            });
        });

    bookRouter.route('/:id')
        .get(function (req, res) {
            var id = req.params.id;
            res.render('bookView', {
                title: 'Books',
                nav: nav, /* All routes wrapped into the router function can now use NAV as a variable */
                book: books[id]
            });
        });

    return bookRouter; /* bookRouter now has to be returned from our router function */
}

module.exports = router; /* We now have to export the router instead of bookRouter - the router function will be executed in app.js with the NAV element to create a router */
```

## 12 Adding MongoDB

### Download and Install MongoDB

The installer can be downloaded from [MongoDB.com](https://www.mongodb.com). Install the database, navigate to the install directory (e.g. _C:\Program Files\MongoDB\Server\3.2\bin_) with your command line and start the application with the command __mongod__. Make sure that you created a directory C:\data\db before running the mongod process!
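On Windows this could look like the following (a sketch - adjust the version number in the path to your installation):

```bash
# Hypothetical example - adjust the install path to your MongoDB version
cd "C:\Program Files\MongoDB\Server\3.2\bin"

# mongod stores its data in C:\data\db by default; --dbpath lets you pick another data directory
mongod --dbpath C:\data\db
```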
Useful Commands for MongoDB:

| Command | Function |
|---|---|
| __mongo__ | Start the mongo shell |
| __show dbs__ | Display all Databases |
| __mongo libraryApp__ | Open the libraryApp Database |
| __show collections__ | Show all Collections of the opened Database |
| __db.books.find();__ | Display all Objects inside the books Collection |
| __db.books.remove({})__ | Remove all Objects from the books Collection |

### Create adminRoutes to populate the Database

First install mongoDB into our project with _npm install --save mongodb_

Now we create a new file adminRoutes.js in the src/routes directory.

__adminRoutes.js__

```javascript
var express = require('express');
var adminRouter = express.Router();
var mongodb = require('mongodb').MongoClient; /* Pull in the mongoClient */

var books = [{ /* Copy books from bookRoutes.js */
    title: 'Cryptonomicon',
    isbn10: '0060512806',
    author: '<NAME>',
    bookId: '816',
    cover: 'http://ecx.images-amazon.com/images/I/414L%2BIbzcvL._SX317_BO1,204,203,200_.jpg',
    read: true
}, {
    title: 'Leviathan Wakes',
    isbn10: '0316129089',
    author: '<NAME>',
    bookId: '9533361',
    cover: 'http://ecx.images-amazon.com/images/I/51QvTzb2vYL._SX322_BO1,204,203,200_.jpg',
    read: false
}, {
    title: 'The Lord of the Rings',
    isbn10: '0395193958',
    author: '<NAME>',
    bookId: '569465',
    cover: 'http://ecx.images-amazon.com/images/I/51eq24cRtRL._SX331_BO1,204,203,200_.jpg',
    read: true
}, {
    title: 'Norwegian Wood',
    isbn10: '0375704027',
    author: '<NAME>',
    bookId: '11297',
    cover: 'http://ecx.images-amazon.com/images/I/512ZgaaHjIL._SX322_BO1,204,203,200_.jpg',
    read: false
}, {
    title: 'Microserfs',
    isbn10: '0006548598',
    author: '<NAME>',
    bookId: '2751',
    cover: 'http://ecx.images-amazon.com/images/I/512ZD5DVC4L._SX345_BO1,204,203,200_.jpg',
    read: true
}, {
    title: 'Up Country',
    isbn10: '0446611913',
    author: '<NAME>',
    bookId: '33820',
    cover: 'http://ecx.images-amazon.com/images/I/512Jrk-RopL._SX290_BO1,204,203,200_.jpg',
    read: true
}, {
    title: 'Night over Water',
    isbn10: '0451173139',
    author: '<NAME>',
    bookId: '967690',
    cover: 'http://ecx.images-amazon.com/images/I/51OON2-%2BI-L._SX297_BO1,204,203,200_.jpg',
    read: true
}, {
    title: 'The Stand',
    isbn10: '0307947300',
    author: '<NAME>',
    bookId: '13155183',
    cover: 'http://ecx.images-amazon.com/images/I/41IzCMjxPWL._SX320_BO1,204,203,200_.jpg',
    read: true
}];

var router = function (nav) {
    adminRouter.route('/addBooks') /* open http://localhost:8080/Admin/addBooks to add books to MongoDB */
        .get(function (req, res) {
            var url = 'mongodb://localhost:27017/libraryApp'; /* Connect to our local installation of MongoDB via the default port 27017 - create DB libraryApp on insert */
            mongodb.connect(url, function (err, db) {
                var collection = db.collection('books'); /* Connect to a Collection in libraryApp named books - is created on first insert */
                collection.insertMany(books, /* insertMany inserts all Objects from the books variable from above (otherwise insertOne) */
                    function (err, results) {
                        res.send(results); /* Display the Collection after Insert - Objects will be assigned an ID by MongoDB */
                        db.close(); /* db.close has to be inside the callback (async!) */
                    }
                );
            });
        });

    return adminRouter;
};

module.exports = router;
```

Add the /Admin route to app.js and use adminRouter for it:

__app.js__

```javascript
var express = require('express');
var app = express();
var port = process.env.PORT || 5000;
var nav = [{
    Link: '/Books',
    Text: 'Book'
}, {
    Link: '/Authors',
    Text: 'Author'
}];
var bookRouter = require('./src/routes/bookRoutes')(nav);
var adminRouter = require('./src/routes/adminRoutes')(nav); /* Add adminRoutes */
app.use(express.static('public'));
app.set('views', './src/views');
app.set('view engine', 'ejs');

app.use('/Books', bookRouter);
app.use('/Admin', adminRouter); /* Use adminRoutes for /Admin */

app.get('/', function (req, res) {
    res.render('index', {
        title: 'Hello from render',
        nav: [{ Link: '/Books', Text: 'Books' },
              { Link: '/Authors', Text: 'Authors' }]
    });
});

app.get('/books', function (req, res) {
    res.send('Hello Books');
});

app.listen(port, function (err) {
    console.log('running server on port ' + port);
});
```

Now make sure mongod is running and access http://localhost:8080/Admin/addBooks - you will get a JSON Object as the MongoDB response. All books will have an ID assigned by the Database, and the DB 'libraryApp' and Collection 'books' will be created. Use the mongo commands (list above) to check.

## 14 Use the MongoDB Response

### Select Many

Remove the hardcoded books variable and use the mongoDB response instead. Display all books from the books Collection (bookListView.ejs).

__bookRoutes.js__

```javascript
var express = require('express');
var bookRouter = express.Router();
var mongodb = require('mongodb').MongoClient;

var router = function (nav) {
    /* var books = [...]; has been deleted */
    bookRouter.route('/')
        .get(function (req, res) {
            var url = 'mongodb://localhost:27017/libraryApp';
            mongodb.connect(url, function (err, db) {
                var collection = db.collection('books'); /* Connect to mongoDB's libraryApp books Collection */
                collection.find({}).toArray( /* find all Objects in the books Collection and put them into an Array */
                    function (err, results) {
                        res.render('bookListView', { /* Copy the res.render from before into the function to render the result of our mongoDB query */
                            title: 'Books',
                            nav: nav,
                            books: results
                        });
                    }
                );
            });
        });

    return bookRouter;
};

module.exports = router;
```

### Select One

Now we want to have a book details page (bookView.ejs) that only displays one book from the books Collection:

__bookRoutes.js__

```javascript
var express = require('express');
var bookRouter = express.Router();
var mongodb = require('mongodb').MongoClient;
var objectId = require('mongodb').ObjectID; /* Each book is assigned an ID by mongoDB - we make this ID available for our bookListView.ejs */

var router = function (nav) {
    bookRouter.route('/')
        .get(function (req, res) {
            var url = 'mongodb://localhost:27017/libraryApp';
            mongodb.connect(url, function (err, db) {
                var collection = db.collection('books');
                collection.find({}).toArray(
                    function (err, results) {
                        res.render('bookListView', {
                            title: 'Books',
                            nav: nav,
                            books: results
                        });
                    }
                );
            });
        });

    bookRouter.route('/:id')
        .get(function (req, res) {
            var id = new objectId(req.params.id); /* We use the mongoDB ID (_id) for id -> URL is now /Books/:_id instead of /Books/:id */
            var url = 'mongodb://localhost:27017/libraryApp';
            mongodb.connect(url, function (err, db) {
                var collection = db.collection('books');
                collection.findOne({_id: id}, /* findOne returns the first book from the books collection with the same _id */
                    function (err, results) {
                        res.render('bookView', { /* result will be rendered in bookView.ejs */
                            title: 'Books',
                            nav: nav,
                            book: results
                        });
                    }
                );
            });
        });

    return bookRouter;
};

module.exports = router;
```

We also have to update the links in bookListView.ejs, so that each book links to its details page via the mongoDB ID (_id):

__bookListView.ejs__

```html
...
<!-- ################################################ Media ######################################################### -->
<section class="container" style="margin-bottom: 400px;">
    <div class="row">
        <% for(var i=0; i<books.length;i++){%>
        <div class="col-xs-6 col-md-4 col-lg-3 center-block" style="margin-bottom: 10px;">
            <div class="panel panel-default">
                <div class="panel-heading">
                    <h4><%=books[i].title%></h4>
                </div>
                <div class="panel-body">
                    <div class="col-xs-12 col-sm-4 col-lg-6">
                        <a class="story-title" href="/Books/<%=books[i]._id%>"><img alt="" src="<%=books[i].cover%>" style="height:100px" class="img-thumbnail"></a>
                    </div>
                    <div class="col-xs-12 col-sm-8 col-lg-6">
                        <p><span class="label label-default"><strong><%=books[i].author%></strong></span></p>
                        <p><span style="font-family:courier,'new courier';" class="text-muted"><a href="/Books/<%=books[i]._id%>" class="text-muted">Read More</a></span></p> <!-- Change URL from /Books/:i (<%= i %> with i = 0,1,2,3....8) to /Books/:_id -> _id will be used to findOne -->
                    </div>
                </div>
            </div>
        </div>
        <%}%>
    </div>
    <hr>
</section>
...
```

## 15 Creating a SignIn Form on Index.ejs

Just a simple input form:

```html
<!-- ################################################ Login ######################################################### -->
<div class="col-xs-4 col-xs-offset-1" style="margin-top: 30px;">
    <div class="container">
        <div class="row">
            <div class="col-sm-offset-1 col-sm-2 col-xs-12 text-center">
                <form name="signUpForm" action="/auth/signUp" method="post"> <!-- Creating a form to post SignUp to /auth/signUp -->
                    User Name: <input name="userName" id="userName"> <!-- Input userName for post -->
                    <br/>
                    <br/>
                    Password: <input name="<PASSWORD>" id="<PASSWORD>"> <!-- Input password for post -->
                    <br/>
                    <br/>
                    <input type="submit" value="Sign Up"> <!-- Submit post -->
                </form>
            </div>
        </div>
        <!-- /row -->
    </div>
    <!-- /container -->
</div>
<!-- /v-center -->
<!-- ################################################ /Login ######################################################### -->
```

## 16 Creating the Authentication Route

We need to add var bodyParser = require('body-parser'); to app.js. The body-parser middleware will be used in app.use(bodyParser.json()); and app.use(bodyParser.urlencoded()); to create a req.body object from JSON elements or URL-encoded parameters. Body-parser is installed with npm install --save body-parser.
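To illustrate what body-parser gives us, here is a minimal standalone sketch (not part of the library app - the `/echo` route and the 'jane' value are just hypothetical examples):

```javascript
// Minimal sketch - assumes express and body-parser are installed
var express = require('express');
var bodyParser = require('body-parser');
var app = express();

app.use(bodyParser.urlencoded({ extended: false })); // parse form posts into req.body

app.post('/echo', function (req, res) {
    // posting e.g. userName=jane&password=... yields req.body = { userName: 'jane', password: '...' }
    res.json(req.body);
});

app.listen(3000);
```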
__authRoute.js__

```javascript
var express = require('express');
var authRouter = express.Router(); /* Creating the Authentication Router */
var mongodb = require('mongodb').MongoClient;
var passport = require('passport');

var router = function () {
    authRouter.route('/signUp') /* Creating the SignUp route */
        .post(function (req, res) {
            console.log(req.body); /* We log the req.body Object created by bodyParser from the signUp post to /auth/signup */
        });

    return authRouter; /* return authRouter to be available for app.js */
};

module.exports = router;
```

We now add the authRoute to app.js:

__app.js__

```javascript
var express = require('express');
var bodyParser = require('body-parser'); /* Install bodyParser see above */
var app = express();
var port = process.env.PORT || 5000;
var nav = [{
    Link: '/Books',
    Text: 'Book'
}, {
    Link: '/Authors',
    Text: 'Author'
}];
var bookRouter = require('./src/routes/bookRoutes')(nav);
var adminRouter = require('./src/routes/adminRoutes')(nav);
var authRouter = require('./src/routes/authRoutes')(nav); /* Use the created authRouter for the Authentication routes */

app.use(express.static('public'));
app.use(bodyParser.json()); /* Use bodyParser to create req.body Object from JSON elements */
app.use(bodyParser.urlencoded()); /* Use bodyParser to create req.body Object from URL encoded JSON elements */

require('./src/config/passport')(app);

app.set('views', './src/views');
app.set('view engine', 'ejs');

app.use('/Books', bookRouter);
app.use('/Admin', adminRouter);
app.use('/Auth', authRouter); /* Use the created authRouter for the Authentication routes */

app.get('/', function (req, res) {
    res.render('index', {
        title: 'Hello from render',
        nav: [{ Link: '/Books', Text: 'Books' },
              { Link: '/Authors', Text: 'Authors' }]
    });
});

app.get('/books', function (req, res) {
    res.send('Hello Books');
});

app.listen(port, function (err) {
    console.log('running server on port ' + port);
});
```

## 17 Adding Passport.js Middleware

First we need to _npm install --save cookie-parser passport express-session_.

__app.js__

```javascript
var express = require('express');
var bodyParser = require('body-parser');
var cookieParser = require('cookie-parser'); /* To parse the session cookie used by passport */
var passport = require('passport'); /* user authentication */
var session = require('express-session'); /* for passport-session: creates a session for the logged in user.
Session stores the user information inside a cookie for the active session */ var app = express(); var port = process.env.PORT || 5000; var nav = [{ Link: '/Books', Text: 'Book' }, { Link: '/Authors', Text: 'Author' }]; var bookRouter = require('./src/routes/bookRoutes')(nav); var adminRouter = require('./src/routes/adminRoutes')(nav); var authRouter = require('./src/routes/authRoutes')(nav); app.use(express.static('public')); app.use(bodyParser.json()); app.use(bodyParser.urlencoded()); app.use(cookieParser()); /* use cookieParser to parse the session cookie */ app.use(session({secret: 'library'})); /* The session needs a secret - can be chosen freely */ require('./src/config/passport')(app); /* We separate the passport stuff src/config/passport.js - we pull in (app) to be able to app.use inside passport.js */ app.set('views', './src/views'); app.set('view engine', 'ejs'); app.use('/Books', bookRouter); app.use('/Admin', adminRouter); app.use('/Auth', authRouter); app.get('/', function (req, res) { res.render('index', { title: 'Hello from render', nav: [{ Link: '/Books', Text: 'Books' }, { Link: '/Authors', Text: 'Authors' }] }); }); app.get('/books', function (req, res) { res.send('Hello Books'); }); app.listen(port, function (err) { console.log('running server on port ' + port); }); ``` Separate the passport component to _src/config/passport.js_. We need to _npm install --save passport-local_ to use the local strategy of authentication (not OAuth). __passport.js__ ```javascript var passport = require('passport'); /* pull in passport */ module.exports = function (app) { app.use(passport.initialize()); app.use(passport.session()); passport.serializeUser(function (user, done) { /* Add User to Session */ done(null, user); /* Callback User from Database */ }); passport.deserializeUser(function (user, done) { /* Remove User from Session */ done(null, user); }); require('./strategies/local.strategy')(); /* We only use a local.strategy for authentication - not passport.google, passport.facebook, etc. - Third-party OAuth. We save the file in src/config/strategies/local.strategy.js */ }; ``` ## 18 Authentication with Local Strategy Now we create the local.strategy.js as required in passport.js: __local.strategy.js__ ```javascript var passport = require('passport'), LocalStrategy = require('passport-local').Strategy, mongodb = require('mongodb').MongoClient; module.exports = function () { passport.use(new LocalStrategy({ usernameField: 'userName', /* take userName from input form in index.ejs when posted to /auth/signUp (bodyParser) */ passwordField: '<PASSWORD>' /* take password from input form in index.ejs when posted to /auth/signUp (bodyParser) */ }, function (username, password, done) { /* Pass username/password - then callBack done */ var user = {username: username, password: <PASSWORD> }; done(null, user); /* Take user and return user - authentication with mongoDB comes next */ })); }; ``` req.login and redirect to Profile __authRoute.js__ ```javascript var express = require('express'); var authRouter = express.Router(); var mongodb = require('mongodb').MongoClient; var router = function () { authRouter.route('/signUp') .post(function (req, res) { console.log(req.body); req.login(req.body, function(){ /* We do not yet save the user to mongoDB - just redirect him to /auth/profile */ res.redirect('/auth/profile'); }); }); authRouter.route('/profile') /* we have to create the profile route */ .get(function(req, res) { /* When GET /profile... */ res.json(req.user); /* ... 
respond with the JSON Object user */ }); return authRouter; }; module.exports = router; ``` ## 19 Saving the User to MongoDB SignUp save User to MongoDB __authRoute.js__ ```javascript var express = require('express'); var authRouter = express.Router(); var mongodb = require('mongodb').MongoClient; var router = function () { authRouter.route('/signUp') .post(function (req, res) { console.log(req.body); var url = 'mongodb://localhost:27017/libraryApp'; mongodb.connect(url, function (err, db) { /* connect to local install of mongoDB */ var collection = db.collection('users'); /* open users collection that is created on first signUp */ var user = { /* Creation of a user object from req.body */ username: req.body.userName, password: <PASSWORD> }; collection.insert(user, /* the user is automatically inserted into the users collection (collection is automatically created) */ function (err, results) { req.login(results.ops[0], function () { /* user is no longer taken from req.body but from the results ops[0] limits the result to the {username, password, _id} JSON object */ res.redirect('/auth/profile'); }); }); }); }); authRouter.route('/profile') .get(function(req, res) { res.json(req.user); }); return authRouter; }; module.exports = router; ``` ## 20 User SignIn from mongoDB Creating the SignIn Form __index.ejs__ ```javascript <!-- ################################################ Login ######################################################### --> <div class="col-xs-4 col-xs-offset-1" style="margin-top: 30px;"> <div class="container"> <div class="row"> <div class="col-sm-offset-3 col-sm-2 col-xs-12 text-center"> <form name="signInForm" action="/auth/signIn" method="post"> <!-- SignIN --> User Name: <input name="userName" id="userName"> <br/> <br/> Password: <input name="<PASSWORD>" id="<PASSWORD>"> <br/> <br/> <input type="submit" value="Sign In"> </form> </div> <div class="clearfix visible-xs" style="margin-bottom: 20px;"></div> <div class="col-sm-offset-1 col-sm-2 col-xs-12 text-center"> <form name="signUpForm" action="/auth/signUp" method="post"> <!-- SignUp --> User Name: <input name="userName" id="userName"> <br/> <br/> Password: <input name="<PASSWORD>" id="<PASSWORD>"> <br/> <br/> <input type="submit" value="Sign Up"> </form> </div> </div> <!-- /row --> </div> <!-- /container --> </div> <!-- /v-center --> <!-- ################################################ /Login ######################################################### --> ``` SignIn save User to MongoDB __authRoute.js__ ```javascript var express = require('express'); var authRouter = express.Router(); var mongodb = require('mongodb').MongoClient; var passport = require('passport'); /* Pull in passport for signIn */ var router = function () { authRouter.route('/signUp') .post(function (req, res) { console.log(req.body); var url = 'mongodb://localhost:27017/libraryApp'; mongodb.connect(url, function (err, db) { var collection = db.collection('users'); var user = { username: req.body.userName, password: <PASSWORD> }; collection.insert(user, function (err, results) { req.login(results.ops[0], function () { res.redirect('/auth/profile'); }); }); }); }); authRouter.route('/signIn') .post(passport.authenticate('local', { /* user post is authenticated with passport local strategy */ failureRedirect: '/' /* If user did not sign up first - redirect back to home */ }), function (req, res) { res.redirect('/auth/profile'); /* If successfully authenticated go to profile page */ }); authRouter.route('/profile') .all(function (req, res, next) { if 
(!req.user) { res.redirect('/'); } next(); }) .get(function (req, res) { res.json(req.user); }); return authRouter; }; module.exports = router; ``` ## 21 Verifying User in DB __authRoute.js__ ...<file_sep>--- date: "2019-11-08" title: "Adding an OBS Stream to your Website" categories: - LINUX - Smarthome - IoT - Docker --- ![<NAME>, Cambodia](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Update the NGINX Configuration File](#update-the-nginx-configuration-file) - [RTMP Module Config](#rtmp-module-config) - [HTTP Server Config](#http-server-config) - [Complete nginx.conf](#complete-nginxconf) - [Test your Stream](#test-your-stream) - [From existing rtmp stream already in h264](#from-existing-rtmp-stream-already-in-h264) - [From local webcam](#from-local-webcam) - [Embed within website](#embed-within-website) <!-- /TOC --> > This tutorial starts where the last one ended. I set up OBS to re-stream my INSTAR IP camera's RTSP stream to the NGINX RTMP module and tested that I was able to access this RTMP stream with a VLC player. I now want to embed this video stream into a website ([Reference](https://docs.peer5.com/guides/setting-up-hls-live-streaming-server-using-nginx/)). ## Update the NGINX Configuration File ### RTMP Module Config Our current RTMP setup looks like this: ```conf rtmp { server { listen 1935; chunk_size 4096; application live { live on; record off; } } } ``` We will be using `stream` as our stream name, so our endpoint will be `rtmp://localhost/show/stream`, which will later be available as `http://localhost:8080/hls/stream.m3u8`. For a good HLS experience we recommend using 3-second fragments with a 60-second playlist: ```conf rtmp { server { listen 1935; # Listen on standard RTMP port chunk_size 4096; application show { live on; # Turn on HLS hls on; hls_path /mnt/hls/; hls_fragment 3; hls_playlist_length 60; # disable consuming the stream from nginx as rtmp deny play all; } } } ``` Note that the example points to `/mnt/hls/` as the target path for the hls playlist and video files. You can change this to a different directory but make sure that nginx has write permissions: ```bash mkdir /mnt/hls/ chown -R <nginx user>:<nginx user> /mnt/hls/ chmod 777 /mnt/hls ``` ### HTTP Server Config Since HLS consists of static files, a simple http server can be set up with two additions, correct MIME types and CORS headers.
```conf server { listen 8080; location /hls { # Disable cache add_header Cache-Control no-cache; # CORS setup add_header 'Access-Control-Allow-Origin' '*' always; add_header 'Access-Control-Expose-Headers' 'Content-Length'; # allow CORS preflight requests if ($request_method = 'OPTIONS') { add_header 'Access-Control-Allow-Origin' '*'; add_header 'Access-Control-Max-Age' 1728000; add_header 'Content-Type' 'text/plain charset=UTF-8'; add_header 'Content-Length' 0; return 204; } types { application/vnd.apple.mpegurl m3u8; video/mp2t ts; } root /mnt/; } } ``` ### Complete nginx.conf The default location for the nginx conf is `/usr/local/nginx/conf/nginx.conf` or `/etc/nginx/nginx.conf`: ```conf worker_processes auto; events { worker_connections 1024; } # RTMP configuration rtmp { server { listen 1935; # Listen on standard RTMP port chunk_size 4096; application show { live on; # Turn on HLS hls on; hls_path /mnt/hls/; hls_fragment 3; hls_playlist_length 60; # disable consuming the stream from nginx as rtmp deny play all; } } } http { sendfile off; tcp_nopush on; directio 512; default_type application/octet-stream; server { listen 8080; location / { # Disable cache add_header 'Cache-Control' 'no-cache'; # CORS setup add_header 'Access-Control-Allow-Origin' '*' always; add_header 'Access-Control-Expose-Headers' 'Content-Length'; # allow CORS preflight requests if ($request_method = 'OPTIONS') { add_header 'Access-Control-Allow-Origin' '*'; add_header 'Access-Control-Max-Age' 1728000; add_header 'Content-Type' 'text/plain charset=UTF-8'; add_header 'Content-Length' 0; return 204; } types { application/dash+xml mpd; application/vnd.apple.mpegurl m3u8; video/mp2t ts; } root /mnt/; } } } ``` Restart nginx with: ```bash /usr/local/nginx/sbin/nginx -s stop /usr/local/nginx/sbin/nginx -t /usr/local/nginx/sbin/nginx ``` ## Test your Stream ![NGINX RTMP Streaming Server](./NGINX_RTMP_Streaming_Server_01.png) ![NGINX RTMP Streaming Server](./NGINX_RTMP_Streaming_Server_02.png) ## From existing rtmp stream already in h264 If you have an existing rtmp stream in the correct codec, you can skip ffmpeg and tell nginx to pull the stream directly. In order to do so add a [pull directive](https://github.com/arut/nginx-rtmp-module/wiki/Directives#pull) under the application section in nginx.conf like so: ```conf application show { live on; pull rtmp://example.com:4567/sports/channel3 live=1; # to change the local stream name use this syntax: ... live=1 name=ch3; # other directives... # hls_... } ``` ## From local webcam To achieve the stream encoding and muxing we will use the almighty ffmpeg. To install ffmpeg using the [PPA](https://launchpad.net/~mc3man/+archive/ubuntu/bionic-prop) run these commands: ```bash add-apt-repository ppa:mc3man/bionic-prop apt-get update apt-get install ffmpeg ``` ```bash ffmpeg -i rtsp://192.168.2.117/12 -vcodec libx264 -vprofile baseline -acodec aac -strict -2 -f flv rtmp://localhost:1935/show/stream ``` The options used (note that `-re` and `-f video4linux2` only apply when capturing from a local webcam device instead of an RTSP source): * `-re` - consume the stream at the media's native bitrate (and not as fast as possible) * `-f` - use the video4linux2 plugin * `-i` - select the physical device to capture from * `-vcodec` - specify the video codec to output * `-vprofile` - use the x264 baseline profile * `-acodec` - use the aac audio codec * `-strict` - allow using the experimental aac codec * `-f` - specify the format to output * `rtmp://localhost/show/stream` - the rtmp endpoint to stream to. If the target port is not 1935 it should be included in the URI.
The last path component is the stream name - that means that multiple channels can be pushed using different names ```conf rtmp { server { listen 1935; # Listen on standard RTMP port chunk_size 4096; application show { # Once receive stream, transcode for adaptive streaming # This single ffmpeg command takes the input and transforms # the source into 4 different streams with different bitrate # and quality. P.S. The scaling done here respects the aspect # ratio of the input. exec ffmpeg -i rtmp://192.168.2.111/appname/streamname -vcodec libx264 -vprofile baseline -acodec aac -strict -2 -f flv rtmp://localhost/show/stream live on; # Turn on HLS hls on; hls_path /mnt/hls/; hls_fragment 3; hls_playlist_length 60; # disable consuming the stream from nginx as rtmp deny play all; } } } ``` Restart nginx with: ```bash /usr/local/nginx/sbin/nginx -s stop /usr/local/nginx/sbin/nginx -t /usr/local/nginx/sbin/nginx ``` ## Embed within website Now that we are pushing our stream into nginx, a manifest file in the format stream-name.m3u8 is created in the target folder along with the video fragments. For our example, the manifest is available at: http://localhost:8080/hls/stream.m3u8. For testing our new HLS live stream we will use [videojs5](http://videojs.com/). ```html <video id=autoplay width="720" height="360" class="video-js vjs-default-skin vjs-big-play-centered" controls> <source src="http://192.168.2.111:8080/hls/stream.m3u8" type="application/x-mpegURL"> </video> <link href="//vjs.zencdn.net/5.7.1/video-js.css" rel="stylesheet"> <script src="//vjs.zencdn.net/5.7.1/video.js"></script> <script src="//videojs.github.io/videojs-contrib-hls/node_modules/videojs-contrib-hls/dist/videojs.hls.min.js"></script> <script> var player = videojs('autoplay'); player.play(); </script> ``` [HTML5 Boilerplate](https://html5boilerplate.com) <file_sep>--- date: "2019-01-17" title: "Kubernetes and Microservices" categories: - LINUX - Docker - Kubernetes --- import GifContainer from "../../src/components/ImageContainer"; ![Sydney, Australia](./photo-34364880182_fe2d33582b_o.jpg) <!-- TOC --> - [Domain Driven Design and boundedContext](#domain-driven-design-and-boundedcontext) - [Deploying the Queue](#deploying-the-queue) - [Deploying the Position Simulator](#deploying-the-position-simulator) - [How to Debug a failed Deployment](#how-to-debug-a-failed-deployment) - [Deploying the Position Tracker](#deploying-the-position-tracker) - [Deploying the API Gateway](#deploying-the-api-gateway) - [Deploying the Angular Frontend](#deploying-the-angular-frontend) - [Persistence in a Kubernetes Cluster](#persistence-in-a-kubernetes-cluster) - [MongoDB Pod](#mongodb-pod) - [MongoDB Service](#mongodb-service) - [Volume Mounts](#volume-mounts) - [Using PersistentVolumeClaims](#using-persistentvolumeclaims) - [Cloud Deployment](#cloud-deployment) <!-- /TOC --> ## Domain Driven Design and boundedContext A Microservice architecture is an implementation of the [Domain Drive Design](https://martinfowler.com/tags/domain%20driven%20design.html) principle and [bounded Context](https://martinfowler.com/bliki/BoundedContext.html) is a central pattern in Domain-Driven Design. It is the focus of DDD's strategic design section which is all about dealing with large models and teams. DDD deals with large models by dividing them into different Bounded Contexts and being explicit about their interrelationships. 
The idea behind the principle is to break architectures into small, __cohesive components__ that only fulfill one job (__single responsibility principle__) for one other component (__loosely coupled__). E.g. instead of having one big Integration Database that is built with a single schema and serves all parts of your monolithic application, we will add a small, exclusive database to every component that needs data storage. We are going to continue to build the web application we [started to build earlier](https://mpolinowski.github.io/creating-a-kubernetes-cluster/) - with an ActiveMQ Message broker in the backend and an Angular web frontend that will show us the position of our car fleet on a map. The architecture will consist of 5 pods: ``` Position Simulator -> ActiveMQ -> Position Tracker <- API Gateway <- NGINX Reverse Proxy <-> Web Browser ``` The necessary Docker images can be found on [DockerHUB](https://hub.docker.com/u/richardchesterwood) - their corresponding source code can be found on [Github](https://github.com/DickChesterwood/k8s-fleetman). * The [Position Simulator](https://hub.docker.com/r/richardchesterwood/k8s-fleetman-position-simulator) will simulate the GPS signal from our cars, reporting their position every 10s. * The position data is then sent to our [ActiveMQ](https://hub.docker.com/r/richardchesterwood/k8s-fleetman-queue) service we already deployed earlier. * This Queue server / Message Broker then forwards the information it received to the [Position Tracker](https://hub.docker.com/r/richardchesterwood/k8s-fleetman-position-tracker) that is storing the information as well as doing some basic calculations on it, like estimating the average speed the car is traveling at. * To prevent the frontend code from directly contacting this backend part of our application, we will add an [API Gateway](https://hub.docker.com/r/richardchesterwood/k8s-fleetman-api-gateway) that will serve as an interface between front- and backend. This way changes in the backend will never directly affect the frontend, or vice versa - see [API Gateway Pattern](https://microservices.io/patterns/apigateway.html). * The final container will run our [Web Application Frontend](https://hub.docker.com/r/richardchesterwood/k8s-fleetman-webapp-angular) with the help of an NGINX reverse proxy. ### Deploying the Queue We want to start with a fresh cluster - if you already [followed the earlier steps](https://mpolinowski.github.io/creating-a-kubernetes-cluster/), just enter the directory that contains all your configuration files (services.yaml, pods.yaml, networking-tests.yaml) and force delete everything that was built from them on your cluster: ```bash kubectl delete -f . rm networking-tests.yaml mv pods.yaml workloads.yaml nano workloads.yaml ``` We are also deleting an unnecessary file from the previous step and renaming another - in case you are following along. We can now add the queue server to the `workloads.yaml` file: ```yaml apiVersion: apps/v1 kind: Deployment metadata: # Unique key of the ReplicaSet instance name: queue spec: selector: matchLabels: # the ReplicaSet manages all Pods # where the label = app: queue app: queue # only 1 Pod should exist atm - if it # crashes, a new pod will be spawned.
replicas: 1 template: metadata: labels: app: queue spec: containers: - name: queue image: richardchesterwood/k8s-fleetman-queue:release1 ``` Secondly, we need to apply a service that exposes our Queue container, which is done in the `services.yaml` file (if you still have the webapp service from the previous step - just leave it in for now): ```yaml apiVersion: v1 kind: Service metadata: # Unique key of the Service instance name: fleetman-queue spec: ports: - name: admin-console port: 8161 targetPort: 8161 # The nodePort is available from outside of the # cluster when is set to NodePort. It's value has # to be > 30000 nodePort: 30010 - name: endpoint port: 61616 targetPort: 61616 selector: # Define which pods are going to # be represented by this service # The service makes an network # endpoint for our app app: queue # Setting the Service type to ClusterIP makes the # service only available from inside the cluster # To expose a port use NodePort instead type: NodePort ``` The Queue service is going to expose the port __8161__ for the administration console (this should be removed once the app goes into production!) and makes it accessible over the port __30010__ from outside of the Kubernetes cluster. Additionally we need to expose the port __61616__ that ActiveMQ is using to broker messages. You can now start both the pod as well as the service with: ```bash kubectl apply -f workloads.yaml kubectl apply -f services.yaml kubectl get all ``` --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_01.png) --- The Admin panel should now be accessible over the IP address of your Kubernetes master server with the port __30010__: --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_02.png) --- ### Deploying the Position Simulator We can now add the Position Simulator to the `workloads.yaml` file - directly under the configuration of our queue server, divided by `---`: ```yaml --- apiVersion: apps/v1 kind: Deployment metadata: # Unique key of the ReplicaSet instance name: position-simulator spec: selector: matchLabels: app: position-simulator # only 1 Pod should exist atm - if it # crashes, a new pod will be spawned. replicas: 1 template: metadata: labels: app: position-simulator spec: containers: - name: position-simulator image: richardchesterwood/k8s-fleetman-position-simulator:release1 env: - name: SPRING_PROFILES_ACTIVE value: production-microservice ``` The configuration for deployment is similar to the ActiveMQ config. We only need to add a __Environment Variable__ that sets the service to _Production Settings_ - `SPRING_PROFILES_ACTIVE: production-microservice` (the service can be started with different profiles, depending if you are in a development or production environment). We can apply the new workloads configuration, check for the name of the new container (it will be called `position-simulator` + an deployment ID + an replicationSet ID) and check if it is build correctly: ```bash kubectl apply -f workloads.yaml kubectl get all kubectl describe pod position-simulator-68bfc8d6fb-8vxkt ``` --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_03.png) --- This service does not need to be accessed from outside of the Kubernetes cluster - so __we do not need to create a service__ for it. 
We can test the deployment by accessing the IP Address followed by the port __30010__, click on _Managing ActiveMQ Broker_, sign in with __admin, admin__ and click on __Queues__: --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_07.png) ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_08.png) --- You should see a rising number of pending messages, telling you that our __Position Simulator__ is successfully sending GPS coordinates to the message broker. ### How to Debug a failed Deployment As we have seen above the deployment work and the Position Simulator is up and running - but how would we debug a container that cannot be started for some reason? We can create this circumstance by adding a typo in the environment variable inside the config file we created above, e.g. `SPRING_PROFILES_ACTIVE: production-microservice-typo`: ```bash kubectl apply -f workloads.yaml kubectl get all kubectl describe pod position-simulator-77dcb74d75-dt27n ``` --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_04.png) --- We can now see that the deployment is failing and Kubernetes is trying to restart the container in a loop (__CrashLoopBackOff__). To check what is going wrong, we can call the Kubernetes logs for the failing container: ```bash kubectl logs position-simulator-77dcb74d75-dt27n ``` And the developer of the application should be able to spot the typo inside the selected profile for you: --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_05.png) --- Fixing the _typo_ and re-applying the configuration should show you that the "loop-crashing" container is now being discarded and replaced by a working version: ```bash kubectl apply -f workloads.yaml kubectl get all ``` --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_06.png) --- ### Deploying the Position Tracker Now we need to start up the position tracker who's job it is take out the message that are send by our position simulator and are currently piling up in our ActiveMQ Server. The position tracked does some calculation on those messages and exposes his results through an REST interface to the API gateway. We can now add the __Position Tracker__ to the `workloads.yaml` file - directly under the configuration of our position simulator, divided by `---`: ```yaml --- apiVersion: apps/v1 kind: Deployment metadata: # Unique key of the ReplicaSet instance name: position-tracker spec: selector: matchLabels: app: position-tracker # only 1 Pod should exist atm - if it # crashes, a new pod will be spawned. replicas: 1 template: metadata: labels: app: position-tracker spec: containers: - name: position-tracker image: richardchesterwood/k8s-fleetman-position-tracker:release1 env: - name: SPRING_PROFILES_ACTIVE value: production-microservice ``` We can now deploy the position tracker and then take a look at our message queue. We should be able to see that the tracker is working and messages are getting de-queued: ```bash kubectl apply -f workloads.yaml ``` --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_09.png) --- We can now add a service for the position tracker to expose the REST interface directly on port __8080__ (_optional - only for testing_). 
For this we need to add the following lines to our `services.yaml`: ```yaml --- apiVersion: v1 kind: Service metadata: # Unique key of the Service instance name: fleetman-position-tracker spec: ports: - name: rest-interface port: 8080 targetPort: 8080 # The nodePort is available from outside of the # cluster when is set to NodePort. It's value has # to be > 30000 nodePort: 30020 selector: app: position-tracker # Setting the Service type to ClusterIP makes the # service only available from inside the cluster # To expose a port use NodePort instead type: NodePort ``` Now apply the changes `kubectl apply -f services.yaml` and open the REST interface on your Master server IP address with the port __30020__: ```bash http://192.168.3.11:30020/vehicles/City%20Truck ``` You should be able to see the current location and speed of the vehicle with the designation __City Truck__: --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_10.png) --- Since it would be dangerous to expose the REST API directly to the internet, we will remove the NodePort and have the API be available only from inside our cluster on port 8080: ```yaml --- apiVersion: v1 kind: Service metadata: # Unique key of the Service instance name: fleetman-position-tracker spec: ports: - name: rest-interface port: 8080 targetPort: 8080 selector: app: position-tracker # Setting the Service type to ClusterIP makes the # service only available from inside the cluster # To expose a port use NodePort instead type: ClusterIP ``` ### Deploying the API Gateway We can now add the __API Gateway__ to the `workloads.yaml` file - directly under the configuration of our position tracker, divided by `---`: ```yaml --- apiVersion: apps/v1 kind: Deployment metadata: # Unique key of the ReplicaSet instance name: api-gateway spec: selector: matchLabels: app: api-gateway # only 1 Pod should exist atm - if it # crashes, a new pod will be spawned. replicas: 1 template: metadata: labels: app: api-gateway spec: containers: - name: api-gateway image: richardchesterwood/k8s-fleetman-api-gateway:release1 env: - name: SPRING_PROFILES_ACTIVE value: production-microservice ``` We can now deploy the gateway with `kubectl apply -f workloads.yaml`. And then expose the port __8080__ to the Kubernetes cluster in `services.yaml`: ```yaml --- apiVersion: v1 kind: Service metadata: # Unique key of the Service instance name: fleetman-api-gateway spec: ports: - name: rest-interface port: 8080 targetPort: 8080 selector: app: api-gateway # Setting the Service type to ClusterIP makes the # service only available from inside the cluster # To expose a port use NodePort instead type: ClusterIP ``` ### Deploying the Angular Frontend We can now add the __Web APP__ to the `workloads.yaml` file - directly under the configuration of our position tracker, divided by `---`: ```yaml --- apiVersion: apps/v1 kind: Deployment metadata: # Unique key of the ReplicaSet instance name: webapp spec: selector: matchLabels: app: webapp # only 1 Pod should exist atm - if it # crashes, a new pod will be spawned. replicas: 1 template: metadata: labels: app: webapp spec: containers: - name: webapp image: richardchesterwood/k8s-fleetman-webapp-angular:release1 env: - name: SPRING_PROFILES_ACTIVE value: production-microservice ``` We can now deploy the frontend with `kubectl apply -f workloads.yaml`. 
And then expose the port __80__ to the Kubernetes cluster in `services.yaml`, as well as adding the public port (NodePort) __30080__: ```yaml apiVersion: v1 kind: Service metadata: # Unique key of the Service instance name: fleetman-webapp spec: ports: # Accept traffic sent to port 80 - name: http port: 80 targetPort: 80 # The nodePort is available from outside of the # cluster when is set to NodePort. It's value has # to be > 30000 nodePort: 30080 selector: # Define which pods are going to # be represented by this service # The service makes an network # endpoint for our app app: webapp # Setting the Service type to ClusterIP makes the # service only available from inside the cluster # To expose a port use NodePort instead type: NodePort ``` Your complete Microservice deployment should now look something like this: --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_11.png) --- And you should be able to access the web interface on port __30080__ on your master server IP address: <GifContainer gifUrl="/assets/gif/kubernetes_microservices_12.gif" alt="A Kubernetes Cluster&Microservices" /> ## Persistence in a Kubernetes Cluster In Docker all data that is generated inside a container is lost when you restart it. So if we, for example, want to store the geo location of our car fleet and calculate a travel path from it, all of that is gone, when the container restarts. Docker offers persistent in external volumes to prevent this from happening. To add __vehicle tracking__ to our app, we need to update all of our images to the `:release2` in `workloads.yaml`: ```yaml apiVersion: apps/v1 kind: Deployment metadata: name: queue spec: selector: matchLabels: app: queue replicas: 1 template: metadata: labels: app: queue spec: containers: - name: queue image: richardchesterwood/k8s-fleetman-queue:release2 --- apiVersion: apps/v1 kind: Deployment metadata: name: position-simulator spec: selector: matchLabels: app: position-simulator replicas: 1 template: metadata: labels: app: position-simulator spec: containers: - name: position-simulator image: richardchesterwood/k8s-fleetman-position-simulator:release2 env: - name: SPRING_PROFILES_ACTIVE value: production-microservice --- apiVersion: apps/v1 kind: Deployment metadata: name: position-tracker spec: selector: matchLabels: app: position-tracker replicas: 1 template: metadata: labels: app: position-tracker spec: containers: - name: position-tracker image: richardchesterwood/k8s-fleetman-position-tracker:release2 env: - name: SPRING_PROFILES_ACTIVE value: production-microservice --- apiVersion: apps/v1 kind: Deployment metadata: name: api-gateway spec: selector: matchLabels: app: api-gateway replicas: 1 template: metadata: labels: app: api-gateway spec: containers: - name: api-gateway image: richardchesterwood/k8s-fleetman-api-gateway:release2 env: - name: SPRING_PROFILES_ACTIVE value: production-microservice --- apiVersion: apps/v1 kind: Deployment metadata: name: webapp spec: selector: matchLabels: app: webapp replicas: 1 template: metadata: labels: app: webapp spec: containers: - name: webapp image: richardchesterwood/k8s-fleetman-webapp-angular:release2 env: - name: SPRING_PROFILES_ACTIVE value: production-microservice ``` Restarting everything with `kubectl apply -f workloads.yaml` now shows us the v2 interface on port __30080__. 
Clicking on a vehicle name in the list on the left will jump you to the selected truck and highlight the path that vehicle has taken: --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_13.png) --- This data is stored in an internal data structure inside the __Position Tracker__ container and will be lost if you reload that container. You can get the Position Tracker pod name and delete it: ``` kubectl get all kubectl delete pod position-tracker-684d9d84cb-st8pc ``` Your deployment will take care of restarting the pod for you - if you reload the web interface you will see that the tracker data has been lost. To prevent this from happening, we now want to add a __MongoDB__ database to our cluster that stores the data the tracker produces in a persistent way. ### MongoDB Pod We have a new release of the app `:release3` that is built expecting there to be a [MongoDB Database](https://hub.docker.com/_/mongo) (_3.6.10-stretch_) on our cluster to store the tracking data in. We can create a new Kubernetes deployment for this Docker image and we are going to configure it in a file called `mongo-stack.yaml`: ```yaml apiVersion: apps/v1 kind: Deployment metadata: name: mongodb spec: selector: matchLabels: app: mongodb replicas: 1 template: metadata: labels: app: mongodb spec: containers: - name: mongodb image: mongo:3.6.10-stretch ``` Now update only the __Position Tracker__ image in `workloads.yaml` to v3 (same as before, see above) and apply all changes to the cluster: ``` kubectl apply -f mongo-stack.yaml kubectl apply -f workloads.yaml kubectl get all ``` ### MongoDB Service To enable our tracker to use our new database we need to add a Kubernetes service and expose the MongoDB port to the cluster. We can define this service inside the `mongo-stack.yaml` file, right under the pod config: ```yaml apiVersion: apps/v1 kind: Deployment metadata: name: mongodb spec: selector: matchLabels: app: mongodb replicas: 1 template: metadata: labels: app: mongodb spec: containers: - name: mongodb image: mongo:3.6.10-stretch --- apiVersion: v1 kind: Service metadata: name: fleetman-mongodb spec: ports: - name: mongo-default port: 27017 targetPort: 27017 selector: app: mongodb type: ClusterIP ``` It is critical to add the metadata name `fleetman-mongodb` as this is going to be the domain name that CoreDNS inside our cluster is using to resolve the pod IP address and our v3 __Position Tracker__ is [configured to search](https://github.com/DickChesterwood/k8s-fleetman/blob/release3/k8s-fleetman-position-tracker/src/main/resources/application-production-microservice.properties) for the MongoDB database on `mongodb.host=fleetman-mongodb.default.svc.cluster.local`! Also make sure that the service __Selector__ is set to match the __matchLabels__ in the pod config above, as this is used to connect the service. Now re-apply the Mongo stack configuration to start up the service. If you repeat the experiment from earlier and delete the __Position Tracker__, the collected data will persist. ``` kubectl apply -f mongo-stack.yaml kubectl get all kubectl delete pod position-tracker-684d9d84cb-st8pc ``` ### Volume Mounts Right now the data is still stored on the filesystem of the MongoDB container. We need to configure the MongoDB container to persist the data outside of the container itself, on our Kubernetes node filesystem in a [Persistent Volume](https://kubernetes.io/docs/concepts/storage/persistent-volumes/) to make it survive a complete reload of our cluster.
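Before adding the volume mount it is worth checking where the data currently lives. The sketch below pokes into the MongoDB container with the `mongo` shell that ships in the `mongo:3.6.10-stretch` image - the pod name and the database/collection names are illustrative, discover the real ones with `kubectl get pods` and `show dbs`:

```bash
# open an interactive mongo shell inside the MongoDB pod (substitute your pod name)
kubectl exec -it mongodb-65f7c8d9b4-x2k4m -- mongo

# inside the shell - list the databases and inspect the tracking data
> show dbs
> use fleetman            // illustrative database name - check `show dbs` for the real one
> db.getCollectionNames()
> db.positions.count()    // illustrative collection name
```

If you delete the MongoDB pod itself (not just the Position Tracker), this data is gone again - which is exactly the problem the volume mount below solves.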
To mount an external volume into the MongoDB container, we need to add a few lines to the pod configuration file. If you scroll to the bottom of the DockerHub page ([Where to Store Data?](https://hub.docker.com/_/mongo)) you can see that the default data storage path inside the MongoDB container is `/data/db`. We now have to link this path inside the container (`mountPath`) to a volume on our host server (or an EBS volume on AWS). You can find all the options for the receiving volume in the [Kubernetes documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.13/#volume-v1-core). We are going to use a [hostPath](https://kubernetes.io/docs/concepts/storage/volumes/#hostpath) with the type __DirectoryOrCreate__ - meaning that we don't have to go into the host and create the directory first (it will be created if not existing): ```yaml apiVersion: apps/v1 kind: Deployment metadata: name: mongodb spec: selector: matchLabels: app: mongodb replicas: 1 template: metadata: labels: app: mongodb spec: containers: - name: mongodb image: mongo:3.6.10-stretch volumeMounts: - name: mongo-persistent-storage mountPath: /data/db volumes: - name: mongo-persistent-storage hostPath: # directory location on host path: /mnt/kubernetes/mongodb # DirectoryOrCreate, Directory, FileOrCreate, File, etc. type: DirectoryOrCreate ``` Apply those changes to your cluster and use the `describe` command to check if the mount was successful: --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_14.png) --- From the event log at the bottom we can see that the container was deployed to `in-centos-minion2` (alternatively use `kubectl get pods -o wide` to list the nodes your pods are hosted on) - a quick check confirms that the volume `/mnt/kubernetes/mongodb` was created and MongoDB started to use it: --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_15.png) --- ### Using PersistentVolumeClaims A [persistentVolumeClaim](https://kubernetes.io/docs/concepts/storage/volumes/#persistentvolumeclaim) volume is used to mount a PersistentVolume into a Pod. PersistentVolumes are a way for users to “claim” durable storage (such as a GCE PersistentDisk or an iSCSI volume) without knowing the details of the particular cloud environment. This is just a minor change to our `mongo-stack.yaml` file: ```yaml apiVersion: apps/v1 kind: Deployment metadata: name: mongodb spec: selector: matchLabels: app: mongodb replicas: 1 template: metadata: labels: app: mongodb spec: containers: - name: mongodb image: mongo:3.6.10-stretch volumeMounts: - name: mongo-persistent-storage mountPath: /data/db volumes: - name: mongo-persistent-storage persistentVolumeClaim: claimName: mongo-pvc ``` The persistent volume is then configured in a separate configuration file we will call `storage.yaml` - this way, if we have to move our cluster to a new cloud provider we do not have to make any changes to the workload or service file: ```yaml # What do want? apiVersion: v1 kind: PersistentVolumeClaim metadata: # has to match the name you used as claimName! name: mongo-pvc spec: # linking the claim with the implementation below storageClassName: mylocalstorage accessModes: - ReadWriteOnce resources: requests: # Let Kubernetes find a node that offers at least the amount of storage storage: 1Gi --- # How do we want it implemented apiVersion: v1 kind: PersistentVolume metadata: name: local-storage spec: storageClassName: mylocalstorage capacity: # create a storage claim for this amount - e.g. 
create an EBS volume on AWS storage: 1Gi accessModes: - ReadWriteOnce hostPath: # directory location on host path: "/mnt/kubernetes/mongodb-tracking-data" # DirectoryOrCreate, Directory, FileOrCreate, File, etc. type: DirectoryOrCreate ``` The `PersistentVolumeClaim` and `PersistentVolume` are matched up by the `storageClassName`. The cloud administrator has to create persistent storages based on available hardware (or cloud storage partitions). The web developer then creates a claim for storage with a certain capacity - so Kubernetes can search for a fitting volume among the available ones. We choose the `storageClassName: mylocalstorage` - in production this would be something more useful. E.g. if your pods really need very fast storage, you can set a claim for a __storageClassName__ that refers to high performance _SSD_ storage. Noteworthy also is the [Access Mode](https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes) that can be: * ReadWriteOnce – the volume can be mounted as read-write by a single node * ReadOnlyMany – the volume can be mounted read-only by many nodes * ReadWriteMany – the volume can be mounted as read-write by many nodes In the CLI, the access modes are abbreviated to: * RWO - ReadWriteOnce * ROX - ReadOnlyMany * RWX - ReadWriteMany Once you are done with the configuration, apply it and check if the pv (persistent volume) was created and bound to the MongoDB pod: ```bash kubectl apply -f storage.yaml kubectl apply -f mongo-stack.yaml kubectl get pv kubectl get pvc ``` --- ![A Kubernetes Cluster & Microservices](./kubernetes_microservices_15.png) --- ## Cloud Deployment To expose the webapp deployment through a cloud load balancer you can use: `kubectl expose deployment webapp --type=LoadBalancer --name=exposed-webapp`. Examples (from the `kubectl expose` help): * Create a service for a replicated nginx, which serves on port 80 and connects to the containers on port 8000. kubectl expose rc nginx --port=80 --target-port=8000 * Create a service for a replication controller identified by type and name specified in "nginx-controller.yaml", which serves on port 80 and connects to the containers on port 8000. kubectl expose -f nginx-controller.yaml --port=80 --target-port=8000 * Create a service for a pod valid-pod, which serves on port 444 with the name "frontend" kubectl expose pod valid-pod --port=444 --name=frontend * Create a second service based on the above service, exposing the container port 8443 as port 443 with the name "nginx-https" kubectl expose service nginx --port=443 --target-port=8443 --name=nginx-https * Create a service for a replicated streaming application on port 4100 balancing UDP traffic and named 'video-stream'. kubectl expose rc streamer --port=4100 --protocol=udp --name=video-stream * Create a service for a replicated nginx using replica set, which serves on port 80 and connects to the containers on port 8000. kubectl expose rs nginx --port=80 --target-port=8000 * Create a service for an nginx deployment, which serves on port 80 and connects to the containers on port 8000.
kubectl expose deployment nginx --port=80 --target-port=8000 <file_sep>const output = document.querySelector("#output"); const url = "https://restcountries.eu/rest/v2/all"; let responseObject = {}; fetch(url).then(res => res.json()) .then(function (data) { responseObject = data; buildSelect(data); }) .catch(error => console.log(error)); function buildSelect(data) { let select = document.createElement('select'); data.forEach(function (item, index) { let option = document.createElement('option'); option.value = index; option.textContent = item.name; select.appendChild(option); }) select.addEventListener("change",outputData); document.querySelector('body').appendChild(select); } function outputData(e){ let country = responseObject[e.target.value]; console.log(country); output.innerHTML = '<h1>'+country.name+'</h1>'; output.innerHTML += '<p><strong>Native Name</strong>: '+country.nativeName+'</p>'; output.innerHTML += '<p><strong>Population</strong>: '+country.population+'</p>'; document.querySelector('img').src = country.flag; output.innerHTML += '<p><strong>Capital</strong>: '+country.capital+'</p>'; output.innerHTML += '<p><strong>Region</strong>: '+country.region+'</p>'; output.innerHTML += '<p><strong>Sub-Region</strong>: '+country.subregion+'</p>'; }<file_sep>--- date: "2019-02-08" title: "Shinobi Video Surveillance" categories: - IoT - Smarthome --- import GifContainer from "../../src/components/ImageContainer"; ![Furano, Japan](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Shinobi Installation](#shinobi-installation) - [Shinobi Configuration](#shinobi-configuration) - [Adding an INSTAR Full HD Camera](#adding-an-instar-full-hd-camera) - [Setting up Motion Detection for your INSTAR IP Camera](#setting-up-motion-detection-for-your-instar-ip-camera) - [Default Motion Detection Settings](#default-motion-detection-settings) - [Global Detector Settings Primary Engine : Pixel Array](#global-detector-settings-primary-engine--pixel-array) - [Motion Detection Primary Engine : Pixel Array](#motion-detection-primary-engine--pixel-array) - [Traditional Recording](#traditional-recording) <!-- /TOC --> [Shinobi](https://shinobi.video) is Open Source, written in Node.js, and real easy to use. It is the future of CCTV and NVR for developers and end-users alike. It is catered to by professionals and most importantly by the one who created it. <GifContainer gifUrl="/assets/gif/shinobi.gif" alt="Shinobi CCTV" /> ## Shinobi Installation <div class="dangerbox"> <h3>Amendment:</h3> <p>Please note that <a href="https://nodejs.org/en/download/package-manager/">Node.js 8 or 9</a> must be present on your system before installation.</p> <p>If Node.js 8/9 is missing, it will be installed by the installation script, but the installation of FFMPEG will fail and the program will not start at the end.</p> <p>If this happens to you, you can <a href="https://www.npmjs.com/package/ffmpeg">reinstall FFMPEG via npm</a>. Go to the installation directory of Shinobi <code>cd /home/Shinobi</code> and start the installation via <code>npm install ffmpeg</code>. After that you should be able to start Shinobi via PM2 <code>pm2 restart all</code>, <code>pm2 list</code>.</p> </div> The easiest way to install, directly on metal, is through git. The following operating systems are supported: * Ubuntu 17.10.1 * CentOS 7 * MacOS 10.7(+) Become root to use the installer and run Shinobi. Use one of the following to do so. 
Ubuntu 17.10.1: ```bash sudo su ``` CentOS 7: ```bash su ``` MacOS 10.7(+): ```bash su ``` Download and run the installer: ```bash bash <(curl -s https://gitlab.com/Shinobi-Systems/Shinobi-Installer/raw/master/shinobi-install.sh) ``` ![Shinobi CCTV](./shinobi_01.png) <div class="dangerbox"> <h3>Changing the WebUI Port:</h3> <p>By default the Shinobi WebUI uses the port <code>8080</code>. Due to a port conflict we were forced to change that port to <code>8888</code>. The following tutorial will keep referring to the default port - but all screenshots will show the changed port.</p> <p><strong>OPTIONAL</strong>: In case you need to change the port as well, this can be done in the software installation directory (default: <code>/home/Shinobi</code>) by editing both the <code>conf.json</code> and <code>conf.sample.json</code> file:</p> </div> ![Shinobi CCTV](./shinobi_00.png) ## Shinobi Configuration Create a Shinobi user by accessing `http://<IP Address>:8080/super` and login with: ```bash user: <EMAIL> password: <PASSWORD> ``` ![Shinobi CCTV](./shinobi_02.png) Click on __Add User__ and create a default user for your Shinobi installation: ![Shinobi CCTV](./shinobi_03.png) You can now switch to `http://<IP Address>:8080/` and login with the default user you just created: ![Shinobi CCTV](./shinobi_04.png) ## Adding an INSTAR Full HD Camera ![Shinobi CCTV](./shinobi_05.png) We can start using Shinobi by clicking on the __+__ Symbol in the top left and adding an ID and a readable Name for the camera we want to add in the __Identity__ section. It is important to choose the __Watch Only__ mode if you want to use the integrated Motion Detection: ![Shinobi CCTV](./shinobi_06.png) For the __Connection__ part we need to choose _h.264_ and the Video (RTSP) Path for our camera model (__Note__: that we are choosing the `11` path that represents the full resolution for INSTAR cameras. If you are experiencing issues - like a stuttering video stream or high CPU loads - you can switch to `12` or `13` instead to set a lower resolution): ```bash rtsp://<Camera IP Address>/11 ``` ![Shinobi CCTV](./shinobi_07.png) Now close the configuration menu to save your settings. Open the side menu by clicking on the hamburger icon in the top left and click on the camera you just added. The video stream should show up in a floating viewport on the right (might take a few seconds - if it takes too long, consider using a smaller RTSP stream as stated above): ![Shinobi CCTV](./shinobi_08.png) ## Setting up Motion Detection for your INSTAR IP Camera Now click on the __Wrench Button__ to get back to the configuration menu and switch the menu to __Advanced Mode__ at the bottom of the floating window: ![Shinobi CCTV](./shinobi_09.png) The menu will now offer a couple of additional configuration option - a couple of the relate to alarm features: ![Shinobi CCTV](./shinobi_10.png) * __Global Detector Settings__: It is best to leave everything at it's [default values](#default-motion-detection-settings) - you can come back to it later to add some performance fine tuning here. * __No Motion Detector__: An interesting feature is the inverted motion detection - do you want to make sure that your machine is working at all time? Set up Shinobi to notify you when it stopped. * __Audio Detector__: Our RTSP stream contains both the video as well as the audio stream from our camera. Use the audio to trigger a recording in Shinobi. 
* __Object Detection__: This function requires the [additional installation](https://shinobi.video/docs/object) of [OpenCV](https://opencv.org) and is out of the scope of this tutorial. And last but not least, we have the classic __Motion Detection__: ![Shinobi CCTV](./shinobi_11.png) __Correction__: the screenshot above sets the __Recording Timeout__ to 5min - it is much more sensible to choose a value of 0.2, resulting in 12s alarm videos. Here, again, we will leave most at the [default settings](#default-motion-detection-settings) and move on to the __Region Editor__ that allows us to define Motion Detection Areas that will be used to limit the area that will be watched by the detection algorithm: ![Shinobi CCTV](./shinobi_12.png) Click on the green __+ Button__ to add an area - the new area will be displayed in the top left corner of the camera video image. You can drag&drop each of the white anchor points to reposition them. Or click on the red line to add a new anchor (a right-click deletes a point). Click on __Close__ to save your settings. When you scroll further down the config menu you will reach the rest of the motion detection settings - again, we will leave most of them at their [default settings](#default-motion-detection-settings) for now (to be further fine-tuned later on): ![Shinobi CCTV](./shinobi_13.png) Close the configuration menu now and walk in front of your camera to trigger a few alarm recordings. Shinobi offers a couple of ways to view our alarm videos: * Event Log * Video Grid * Video List * Time Lapse Recordings * Power Viewer We are going to show you the __Power Viewer__ as seen in the screenshots below. Click on the button marked with the red arrow: ![Shinobi CCTV](./shinobi_14.png) The Power Viewer will show you a timeline of all alarm recording events - the higher the red bars, the more motion was detected inside the video. Clicking on a bar in the timeline opens the recorded video. ![Shinobi CCTV](./shinobi_15.png) ## Default Motion Detection Settings ### Global Detector Settings Primary Engine : Pixel Array <br/> | Detector Settings | Recommended Option | Description | | --- | --- | --- | | Enabled | Yes | Enable to reveal and use the other options. | | Send Frames | Yes | Push frames to the connected plugin to be analyzed. | | Allow Next Trigger | 2000 | The amount of milliseconds that needs to pass before allowing the next event registration. | | Save Events to SQL | Yes | Save Motion Events in SQL. This will allow display of events over video during the time events occurred in the Power Video Viewer. | | Detector Rate | 2 | How many frames per second to send to the motion detector. | | Feed-In Image Width | 640 | Width of the image being detected. Smaller sizes take less CPU. | | Feed-In Image Height | 480 | Height of the image being detected. Smaller sizes take less CPU. | | How to Record | Traditional | There are multiple methods to record. Read below on why that is. | | Trigger Record | Yes | When enabled it will give the command to start the recording process. | | Recording FPS Change on Start | | Leave blank for no change - when you are using a low frame rate in your live video, use this option to increase the frame rate every time an alarm recording is running. | | Recording Timeout | 0.2 | The amount of time, in minutes, that the video will record for.
| <br/><br/> ### Motion Detection Primary Engine : Pixel Array <br/> | Motion Detection Settings | Recommended Option | Description | | --- | --- | --- | | Use Built-In | Yes | Use <NAME>'s Motion Detector. This is built into Shinobi and requires no other configuration to activate. | | Indifference | 1 | When using Kevin's Detector Indifference means "minimum percentage changed in region". This option varies based on camera, start with 1 and work your way up. | | Max Indifference (leave blank until comfortable with minimum first) | | As the name suggests it is a max value for Indifference. | | Trigger Threshold | 1 | Minimum number of detections to fire a motion event. Detections. Default is 1. Don't adjust it until tuning begins. | | Color Threshold | 9 | The amount of difference allowed in a pixel before it is considered motion. Default is 9. | | Full Frame Detection | No | Enabling Full Frame Detection will create a region that covers the entire frame. Only enable this if you aren't using regions made in the Region Editor. | | Noise Filter | No | Remove false detections due to "noisy backgrounds" | | Noise Filter Range | 6 | | <br/><br/> ### Traditional Recording <br/> | Traditional Recording Settings | Recommended Option | Description | | --- | --- | --- | | Video Encoder | Auto | The video writer. Shinobi will try to choose the best option for you when selecting Auto. For Manual Selection :  RTSP cameras will generally take copy. MJPEG cameras will take libx264. | | Audio Encoder | No Audio | RTSP cameras will generally use aac. MJPEG cameras must be set to No Audio.  | | Segment Length | 2 | Set this to 2 for a buffer of ~5 seconds. |<file_sep>--- date: "2017-06-17" title: "Unreal Engine Coding Standards" categories: - C++ --- ![Unreal Engine Coding Standards](./photo-34221445950_a285c6eee4_o.jpg) > Learn C++ from scratch. How to make your first video game in Unreal engine. Gain confidence in programming. > This is a fork of the Part I of the first section of the [Unreal Course](https://github.com/UnrealCourse) teaching C++ coding standards for the Unreal Game Engine. 
> The Source Code can be found in [consoleApplication](https://github.com/mpolinowski/consoleApplication) > The following is the commented Course Journal: <!-- TOC --> - [Intro, Notes & Section 2 Assets](#intro-notes--section-2-assets) - [S02 Game Design Document (GDD)](#s02-game-design-document-gdd) - [How Solutions & Projects Relate](#how-solutions--projects-relate) - [C++ Function Syntax](#c-function-syntax) - [Using, #include and Namespaces](#using-include-and-namespaces) - [Magic Numbers and Constants](#magic-numbers-and-constants) - [Variables and cin for Input](#variables-and-cin-for-input) - [Using getline()](#using-getline) - [Simplifying With Functions](#simplifying-with-functions) - [Iterating With For & While Loops](#iterating-with-for--while-loops) - [Clarity is Worth Fighting For](#clarity-is-worth-fighting-for) - [Booleans and comparisons](#booleans-and-comparisons) - [Using do and while in C++](#using-do-and-while-in-c) - [Introducing Classes](#introducing-classes) - [Using Header Files as Contracts](#using-header-files-as-contracts) - [Including Our Own Header File](#including-our-own-header-file) - [Instantiating Your Class](#instantiating-your-class) - [Writing & Using Getter Methods](#writing--using-getter-methods) - [Introducing the Const Keyword](#introducing-the-const-keyword) - [Constructors For Initialisation](#constructors-for-initialisation) - [Pseudocode Programming](#pseudocode-programming) - [Using using for Type Aliases](#using-using-for-type-aliases) - [Using struct for Simple Types](#using-struct-for-simple-types) - [Using if Statements in C++](#using-if-statements-in-c) - [Debugging 101](#debugging-101) - [A Place for Everything](#a-place-for-everything) - [Introducing enumerations](#introducing-enumerations) - [Writing Error Checking Code](#writing-error-checking-code) - [Using switch Statements](#using-switch-statements) - [Warm Fuzzy Feelings](#warm-fuzzy-feelings) - [Handling Game Win Condition](#handling-game-win-condition) - [Win or Lose "Screen"](#win-or-lose-%22screen%22) - [Introducing Big O Notation](#introducing-big-o-notation) - [TMap and map Data Structures](#tmap-and-map-data-structures) - [Range-based for Loop](#range-based-for-loop) - [Design a Helper Function](#design-a-helper-function) - [Playtesting Your Game](#playtesting-your-game) - [Difficulty & Play Tuning](#difficulty--play-tuning) - [Polishing & Packaging](#polishing--packaging) - [Section 2 Wrap-Up](#section-2-wrap-up) <!-- /TOC --> ### Intro, Notes & Section 2 Assets + Welcome to the first actual coding video. + Why we’re doing this in the IDE only. + What you’ll be building, see resources. + You’ll learn types, loops, routines, classes. + We’ll follow Unreal’s coding style, and re-use. + Notes and resources are attached. ### S02 Game Design Document (GDD) + How much planning should we do? + Define the emotional problem the game solves\* + Chose concept, rules & requirements. + Start to think about the architecture. + _Copy_ as much as possible into the code! + Document now what may change later. **Useful Links** + \* [McConnell, Steve. _Code Complete._ Microsoft Press 2004. Chapter 3.3](https://www.amazon.com/gp/product/0735619670/) ### How Solutions & Projects Relate + How projects and solutions relate. + Setting up a new command line project. + An overview of the structure of our solution. + (Adding main.cpp to our project). ### C++ Function Syntax + The difference between an engine and a library. + How this relates to this console application. 
+ What is building / compiling code? + How the console knows where to find our code. + The syntax of a function in C++. + Write the minimal C++ program to remove errors. + Testing our application runs without error. ```cpp // Standard C++ library automatically included by Visual Studio #include "stdafx.h" int main() { return 0; } ``` Created a C++ function "main" in a file "main.cpp" that can be run with CTRL+F5 without errors and returns integer 0. ### Using, #include and Namespaces + **#** represents a “preprocessor directive”. + **#include** copies-and-pastes other code. + The idea of using library code. + Use <\> for standard libraries. + Use “ “ for files you have created yourself. + Notice the namespace icon in autocomplete. + Import **iostream** library and use **std** namespace + Clean up your code by removing **std::** that is no longer needed ```cpp #include "stdafx.h" #include <iostream> int main() { std::cout << "Welcome to Bulls and Cows" << std::endl; return 0; } ``` By defining the std namespace we can simplify our code: ```cpp #include "stdafx.h" #include <iostream> using namespace std; int main() { cout << "Welcome to Bulls and Cows" << endl; return 0; } ``` ### Magic Numbers and Constants + What a “magic number” is. + Why it’s a good idea to avoid them. + **constexpr** means “evaluated at compile time”. + Introduce coding standards\*. + Use a constant expression for the word length. ```cpp int main() { // introduce the game constexpr int WORD_LENGTH = 9; cout << "Welcome to Bulls and Cows" << endl; cout << "Can you guess the " << WORD_LENGTH; cout << " letter isogram I'm thinking of?\n"; cout << endl; return 0; } ``` There are 2 ways to break to a new line - "endl" and "\n". The latter does not flush the output buffer - otherwise identical. **Useful Links** + \* [Unreal Engine - Coding Standard](https://docs.unrealengine.com/latest/INT/Programming/Development/CodingStandard/index.html) ### Variables and cin for Input + Introducing pseudocode programming - add a comment to describe the function before you start programming + Why we need to **#include \<string\>** + Getting input using **cin** + cin breaks consuming input at space - you cannot input more than 1 word ```cpp // string library is needed for the ">>" operator #include <string> int main() { // introduce the game // ... // get a guess from player cout << "Enter your guess: "; string Guess = ""; cin >> Guess; // return guess to player cout << "Your guess was: " << Guess << endl; cout << endl; return 0; } ``` ### Using getline() + Solve the problem that you cannot enter a guess with more than one word + **getline()** reads through spaces and discards the input stream at the end of the line + Where to find C++ documentation => www.cplusplus.com ```cpp int main() { // introduce the game // ... // get a guess from player cout << "Enter your guess: "; string Guess = ""; getline (cin,Guess); // return guess to player cout << "Your guess was: " << Guess << endl; cout << endl; return 0; } ``` ### Simplifying With Functions + Programming is all about managing complexity. + We want to think about a few things at a time. + The idea of abstraction and encapsulation -> the scope of the constexpr WORD_LENGTH is now limited to the PrintIntro function. + Always use **return** at the end of your functions. + Wrap the intro code into a function to make our code more readable.
### Simplifying With Functions

+ Programming is all about managing complexity.
+ We want to think about a few things at a time.
+ The idea of abstraction and encapsulation -> the scope of the constexpr WORD_LENGTH is now limited to the PrintIntro function.
+ Always use **return** at the end of your functions.
+ Wrap the intro code into a function to make our code more readable.
+ The PrintIntro() can then be called from within main()

*PrintIntro* function:

```cpp
void PrintIntro()
{
	// introduce the game
	constexpr int WORD_LENGTH = 9;
	cout << "Welcome to Bulls and Cows" << endl;
	cout << "Can you guess the " << WORD_LENGTH;
	cout << " letter isogram I'm thinking of?\n";
	cout << endl;
	return;
}

//Entry point of application
int main()
{
	// introduce the game
	PrintIntro ();

	// get a guess from player
	// ...

	// return guess to player
	// ...
	return 0;
}
```

The collection of functions used by main() should be at the end of the document. We have to put the identifier for the function PrintIntro() at the top of the document. This way we can put the body PrintIntro(){} below main()

```cpp
void PrintIntro();

//Entry point of application
int main()
{
	// introduce the game
	PrintIntro ();

	// get a guess from player
	// ...

	// return guess to player
	// ...
	return 0;
}

void PrintIntro()
{
	// introduce the game
	constexpr int WORD_LENGTH = 9;
	cout << "Welcome to Bulls and Cows" << endl;
	cout << "Can you guess the " << WORD_LENGTH;
	cout << " letter isogram I'm thinking of?\n";
	cout << endl;
	return;
}
```

This makes it easy to spot main() inside the document. Descriptive identifiers for each function inside main() make our code readable / **self-documenting**

Repeat this process with all other functions inside main():

```cpp
void PrintIntro();
string GetGuessAndPrintBack();

//Entry point of application
int main()
{
	PrintIntro ();
	GetGuessAndPrintBack ();
	return 0;
}

// introduce the game
void PrintIntro() {...}

// get a guess from player and print back
string GetGuessAndPrintBack()
{
	cout << "Enter your guess: ";
	string Guess = "";
	getline(cin, Guess);

	//print guess back to player
	cout << "Your guess was: " << Guess << endl;
	return Guess;
}
```

### Iterating With For & While Loops

+ Use loops to prevent typing the same code repeatedly
+ When to use **for** vs **while** -> "When you know what you're in **FOR**" / "When you are looping for a **WHILE**" -> use a for-loop when you know the number of loops at compile time (a while sketch follows below).
+ The syntax of a for-loop: **for** (**Initialization**: count = 1; **Condition**: count <= limit; **Increase**: count = count +1) {**Statement**}
+ Think carefully about the first & last loop.
+ Write a **for** loop to repeat the game.

```cpp
int main()
{
	// introduce the game
	PrintIntro ();

	// get a guess from player and loop for number of turns
	constexpr int NUMBER_OF_TURNS = 5;
	for (int i = 0; i < NUMBER_OF_TURNS; i++)
	{
		GetGuessAndPrintBack();
		cout << endl;
	}
	return 0;
}
```

**Useful Links**

+ \* [www.cplusplus.com](http://www.cplusplus.com/doc/tutorial/control)
+ \* [msdn.microsoft.com](https://msdn.microsoft.com/en-us/library/b80153d8.aspx)
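For contrast, the same turn loop written with **while**. This is only a preview sketch of the shape the loop takes later in the course once win/lose conditions exist; BCGame and GetGuess() are introduced further below, so treat it as an illustration rather than working code at this point:

```cpp
// run until the player wins or runs out of tries - the number of
// iterations is only known at runtime, so while reads better than for
while (!BCGame.IsGameWon() && BCGame.GetCurrentTry() <= BCGame.GetMaxTries())
{
	std::string Guess = GetGuess();
	std::cout << "Your guess was: " << Guess << std::endl;
}
```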
### Clarity is Worth Fighting For

+ More about levels of abstraction.
+ A word on being clever.
+ Using Visual Studio’s “Extract Function” refactoring
+ What a header file (.h) is.
+ What’s refactoring, and why we do it.
+ Removing side-effects.
+ Where to find the course code.

[UnrealCourse](http://www.unrealcourse.com/) & [Github.com](https://github.com/UnrealCourse)

Encapsulate the for-loop in PlayGame() to clean up main():

```cpp
void PrintIntro();
void PlayGame();
string GetGuessAndPrintBack();

int main()
{
	PrintIntro ();
	PlayGame ();
	return 0; //Exit application
}

void PlayGame()
{
	// get a guess from player and loop for number of turns
	constexpr int NUMBER_OF_TURNS = 5;
	for (int i = 0; i < NUMBER_OF_TURNS; i++)
	{
		GetGuessAndPrintBack();
		cout << endl;
	}
}
```

All functions should only do one thing - removing PrintBack from GetGuess:

```cpp
void PlayGame()
{
	// get a guess from player and loop for number of turns
	constexpr int NUMBER_OF_TURNS = 5;
	for (int i = 0; i < NUMBER_OF_TURNS; i++)
	{
		string Guess = GetGuess();
		cout << "Your guess was: " << Guess << endl;
	}
}

string GetGuess()
{
	cout << "Enter your guess: ";
	string Guess = "";
	getline(cin, Guess);
	return Guess;
}
```

To rename all instances of a function identifier in VisualStudio, select it and press **CTRL+R** twice!

### Booleans and comparisons

+ What a boolean is, and how to use it.
+ Only use when completely clear what you mean.
+ Use **==** for comparison.
+ Use **&&** for logical AND.
+ Use **||** for logical OR.
+ Use **[n]** to access a string, starting at n=0.
+ Use **‘ ‘** for characters, and **“ “** for strings.

Add a true/false boolean for asking to restart the game after a completed run.

```cpp
void PrintIntro();
void PlayGame();
string GetGuess();
bool AskToPlayAgain();

int main()
{
	PrintIntro ();
	PlayGame ();
	AskToPlayAgain();
	return 0; //Exit application
}

bool AskToPlayAgain()
{
	cout << "Do you want to play again? y/n ";
	string Response = "";
	getline(cin, Response);
	return (Response[0] == 'y') || (Response[0] == 'Y');
}
```

Response[0] takes the first character from the Response string. Compare it to the characters y/Y and return true or false.

### Using do and while in C++

+ A **do while** loop is: do {code to repeat} while (condition);
+ A do/while body is always executed at least once and repeated until the condition is no longer met.
+ Making our game play multiple times.

```cpp
int main()
{
	bool bPlayAgain = false;
	do
	{
		PrintIntro ();
		PlayGame ();
		bPlayAgain = AskToPlayAgain();
	}
	while (bPlayAgain);
	return 0;
}
```

The boolean bPlayAgain is set to **false** at the beginning of the loop - AskToPlayAgain sets it to **true** if the player types "yes". The do/while loop is repeated until the while condition becomes false.

### Introducing Classes

+ Lookup the Turing machine.
+ A quick overview of the MVC pattern.
+ User defined types (classes).
+ About working at an interface level (black box).
+ An overview of class **FBullCowGame**

### Using Header Files as Contracts

+ Introducing .h header files in C++.
+ Why the added complexity is worth it.
+ Defining the interface to our class.
+ Writing our first draft of FBullCowGame.

```cpp
#pragma once
#include <string>

class FBullCowGame {
public:
	void Reset();
	int GetMaxTries();
	int GetCurrentTry();
	bool IsGameWon();
	bool CheckGuessValidity(std::string);

private:
	int MyCurrentTry;
	int MyMaxTries;
};
```

### Including Our Own Header File

+ NEVER use using namespace in a .h
+ In fact, why use it at all?
+ Create your .cpp files and **#include**
+ Don’t create chains of includes.

Remove **using namespace std;** from main.cpp - add std:: to all instances of cout, cin, string, endl, getline

Add the *.cpp file to the header - select void Reset(); right-click it, choose Quick-Action and Create Definition - this creates FBullCowGame.cpp.
Repeat this for all methods in the header file:

```cpp
#include "FBullCowGame.h"

void FBullCowGame::Reset()
{
	return;
}

int FBullCowGame::GetCurrentTry()
{
	return 0;
}

int FBullCowGame::GetMaxTries()
{
	return 0;
}

bool FBullCowGame::IsGameWon()
{
	return false;
}

bool FBullCowGame::CheckGuessValidity(std::string)
{
	return false;
}
```

### Instantiating Your Class

+ Relax, they’re just user defined types!
+ string FirstName; creates a string object
+ FBullCowGame BCGame; works the same way
+ These instances are initialised by “constructors”
+ Instantiating means “creating an instance of”
+ So we’re simply creating a FBullCowGame game instance "BCGame".

```cpp
#include "FBullCowGame.h"

int main() {...}

void PlayGame()
{
	FBullCowGame BCGame;
	constexpr int NUMBER_OF_TURNS = 5;
	for (int i = 0; i < NUMBER_OF_TURNS; i++)
	{
		std::string Guess = GetGuess();
		std::cout << "Your guess was: " << Guess << std::endl;
		std::cout << std::endl;
	}
}
```

### Writing & Using Getter Methods

+ Use GetMaxTries to GET number of turns / maxTries
+ Why we never access variables directly
+ How to call a method using the dot operator
+ Pros and cons of initializing at compile time
+ Using “Rebuild Project” to make VS behave!

Initialize MyMaxTries/MyCurrentTry in FBullCowGame.h at compile time (will later be moved into constructor):

**FBullCowGame.h**

```cpp
class FBullCowGame {
public:
	...
private:
	int MyCurrentTry = 1;
	int MyMaxTries = 5;
};
```

Use the GetMaxTries/GetCurrentTry methods to access MyMaxTries/MyCurrentTry:

**FBullCowGame.cpp**

```cpp
int FBullCowGame::GetMaxTries() { return MyMaxTries; }
int FBullCowGame::GetCurrentTry() { return MyCurrentTry; }
```

Move the game instantiation outside of the scope of PlayGame(), so the game instance becomes globally available to all following methods.

Use MyMaxTries in main.cpp instead of adding the "magic number" NUMBER_OF_TURNS. Add MyCurrentTry in GetGuess();

**main.cpp**

```cpp
FBullCowGame BCGame;

int main() {...}

void PlayGame()
{
	int MaxTries = BCGame.GetMaxTries();
	for (int i = 0; i < MaxTries; i++)
	{
		std::string Guess = GetGuess();
		std::cout << "Your guess was: " << Guess << std::endl;
		std::cout << std::endl;
	}
}

std::string GetGuess()
{
	int CurrentTry = BCGame.GetCurrentTry();
	std::cout << "Try " << CurrentTry << ". Enter your guess: ";
	std::string Guess = "";
	std::getline(std::cin, Guess);
	return Guess;
}
```

### Introducing the Const Keyword

+ **const**’s meaning depends on context
+ Generally means “I promise not to change this”
+ What this is depends on exactly where it appears
+ At the end of a member function, for example **int GetCurrentTry() const;** it prevents the function from modifying any member variables
+ This is a good safety feature.
By adding const at the end of a member function of a class, the function promises not to modify any member variables at **runtime** -> changing the value of MyMaxTries somewhere inside that member function will now result in a compiler error:

```cpp
int FBullCowGame::GetMaxTries() const
{
	MyMaxTries = 12; // error: cannot assign to a member in a const member function
	return MyMaxTries;
}
```

**FBullCowGame.h**

```cpp
int GetMaxTries() const;
int GetCurrentTry() const;
bool IsGameWon() const;
```

**FBullCowGame.cpp**

```cpp
int FBullCowGame::GetMaxTries() const { return MyMaxTries; }
int FBullCowGame::GetCurrentTry() const { return MyCurrentTry; }
bool FBullCowGame::IsGameWon() const { return false; }
```

### Constructors For Initialisation

+ Default constructor called when object created
+ Initialize in constructor when decided at runtime
+ Initialize in declaration if known at compile time
+ Constructor syntax simply: **ClassName()**;
+ Set the member variables in constructor

Add the constructor function to the header file and move the private variable initialization into the constructor in FBullCowGame.cpp - before, they were initialized at compile time; the constructor now initializes them at runtime. A default like private: int MyCurrentTry = 666; in the header file will now be overwritten by the constructor at runtime!

**FBullCowGame.h**

```cpp
class FBullCowGame {
public:
	FBullCowGame(); // constructor initializes state at BCGame start
	...
private:
	// see FBullCowGame constructor for initialization
	int MyCurrentTry;
	int MyMaxTries;
};
```

**FBullCowGame.cpp**

```cpp
FBullCowGame::FBullCowGame() // constructor initializes state at BCGame start
{
	MyCurrentTry = 1; // assign to the members, don't declare new local variables here
	MyMaxTries = 5;
}
```

MyCurrentTry and MyMaxTries are now no longer set at compile time - they can be reset with BCGame.Reset() at the end of a game to allow the player to play again:

**main.cpp**

```cpp
void PlayGame()
{
	BCGame.Reset();
	int MaxTries = BCGame.GetMaxTries();

	// loop for the number of turns asking guesses
	for (int i = 0; i < MaxTries; i++)
	{
		std::string Guess = GetGuess();
		std::cout << "Your guess was: " << Guess << std::endl;
		std::cout << std::endl;
	}
}
```

The constructor should also just call Reset(), which sets the runtime default values:

**FBullCowGame.cpp**

```cpp
FBullCowGame::FBullCowGame()
{
	Reset();
}

void FBullCowGame::Reset()
{
	constexpr int MAX_TRIES = 5;
	MyMaxTries = MAX_TRIES;
	MyCurrentTry = 1;
	return;
}
```

### Pseudocode Programming

+ More on Pseudocode Programming Practice (PPP)
+ Reviewing our code and architecture
+ Using **// TODO** as a comment prefix
+ Introducing Visual Studio’s Task List: View/Task List -> all your TODOs show up in that list
+ Planning our next wave of coding.

**main.cpp**

```cpp
void PlayGame()
{
	BCGame.Reset();
	int MaxTries = BCGame.GetMaxTries();

	// loop for the number of turns asking guesses
	// TODO change from for- to while-loop once we use try validation
	for (int i = 0; i < MaxTries; i++)
	{
		std::string Guess = GetGuess();

		// TODO make loop check validity
		// submit only valid guesses to the game
		// print number of bulls and cows

		std::cout << "Your guess was: " << Guess << std::endl;
		std::cout << std::endl;
	}
	// TODO Summarize game
}
```

### Using using for Type Aliases

+ We’re substituting types to be “Unreal ready”
+ The declaration is **using \<alias\> = \<type\>;**
+ For example **using int32 = int;**
+ Why Unreal uses **int32** rather than **int**
+ **FText** is for output, **FString** is “mutable”
+ Where to use each type of string
+ Map **FText** and **FString** to **std::string** (see the sketch below)
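A minimal sketch of those aliases, assuming the Unreal-style names used throughout this section (my illustration, not verbatim course code):

```cpp
#include <string>

// substitute standard types so the code can later be dropped into Unreal
using int32 = int;           // Unreal prefers explicitly sized integers
using FText = std::string;   // UI-facing text (input/output)
using FString = std::string; // "mutable" strings used by the game logic

FText Response = "";
int32 MaxTries = 5;
```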
### Using struct for Simple Types

+ **struct** is almost identical to **class**
+ Its member variables (data) are public by default
+ Ideal for simple value types like **BullCowCount**
+ Outline **BullCowCount SubmitGuess(FString)**

### Using if Statements in C++

+ Why we need conditionals (selection)
+ Use **if** when it reads better (e.g. few conditions)
+ Use **switch** for multiple, simple conditions
+ (for loads of statements consider a table lookup)
+ The syntax of an **if** statement
+ Using **if** to write count bulls and cows.

### Debugging 101

+ A very brief intro to Visual Studio’s debugger
+ Set a break-point by clicking in margin
+ Watch values by highlighting in debug mode
+ Use “Continue” to cycle back to breakpoint.

### A Place for Everything

+ Centralising the hidden word length
+ Making this a property of the game class
+ Writing a getter to access this value
+ Updating our intro to vary with word length.

### Introducing enumerations

+ An **enum**erated type consists of named values
+ Use instead of coded meaning
+ Makes the code more readable and meaningful
+ Only defined values can be used - more robust
+ A benefit of C++ 11’s strongly typed enums
+ Creating an **enum class** for error checking.

### Writing Error Checking Code

+ Use **else if** for the first time
+ Outline our **CheckGuessValidity()** method
+ Write working code for checking guess length
+ Use the debugger to test the return values.

### Using switch Statements

+ Use our error values to communicate with user
+ All our user interaction is via **GameManager.cpp**
+ We’ll use **FText** in this file, as it’s UI text
+ We can “switch” what we say based on the error
+ The syntax of a **switch** statement
+ Remember your **break** keywords!

### Warm Fuzzy Feelings

+ _Don’t_ get comfortable with compiler warnings
+ Refactor **GetValidGuess()** to remove warning
+ Rename **SubmitGuess()** to **SubmitValidGuess()**
+ Improve readability of **SubmitValidGuess()**
+ Get a warm fuzzy feeling!

### Handling Game Win Condition

+ Change our **PlayGame()** loop to a **while**
+ Implement our **IsGameWon()** function

### Win or Lose "Screen"

Write a method to print a game summary to the screen once the game is over.

### Introducing Big O Notation

+ Algorithm: the recipe for solving a problem
+ or: 45th US Vice President’s dance style
+ Introducing the complexity of algorithms
+ A quick introduction to “Big O” notation
+ Comparing three ways of checking for isograms (sketched below).
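To make the comparison concrete, here is a sketch of three possible isogram checks and their rough complexities. The map-based version anticipates the TMap section that follows; treat all three as illustrations rather than the course's exact code:

```cpp
#include <algorithm>
#include <map>
#include <string>

// O(n^2): compare every letter against every other letter
bool IsIsogramNested(const std::string& Word)
{
	for (size_t i = 0; i < Word.length(); i++) {
		for (size_t j = i + 1; j < Word.length(); j++) {
			if (Word[i] == Word[j]) { return false; }
		}
	}
	return true;
}

// O(n log n): sort a copy, then look for adjacent duplicates
bool IsIsogramSorted(std::string Word)
{
	std::sort(Word.begin(), Word.end());
	return std::adjacent_find(Word.begin(), Word.end()) == Word.end();
}

// O(n): remember every letter already seen in a map
bool IsIsogramMap(const std::string& Word)
{
	std::map<char, bool> LetterSeen;
	for (char Letter : Word) {
		if (LetterSeen[Letter]) { return false; }
		LetterSeen[Letter] = true;
	}
	return true;
}
```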
### TMap and map Data Structures

+ The importance of knowing your data types
+ Introducing the **std::map** data type
+ **#define TMap std::map** to keep it ‘Unreal’
+ How we’ll be using the map
+ **TMap\<char, bool\> LetterSeen;** to declare
+ Using **LetterSeen[Letter]** to access
+ Wiring-up and pseudocoding **IsIsogram()**

### Range-based for Loop

+ Introducing containers and iterators
+ Using a range-based for loop in Unreal\*
+ Gently introducing the auto keyword
+ Finishing our IsIsogram()

**Useful Links**

+ \* [Unreal Engine - Ranged Based For Loops](https://www.unrealengine.com/blog/ranged-based-for-loops)

### Design a Helper Function

+ Gain confidence with a multi-stage challenge
+ A word on implicit dependencies

### Playtesting Your Game

+ Having someone else play test your game is vital
+ Silently take notes, or record screen if possible
+ Immediately go away and fix obvious bugs
+ For improvements consider 2nd or 3rd opinion
+ Repeat until the bug / issue rate plateaus.

### Difficulty & Play Tuning

+ About the flow channel\*
+ **map** word length to max tries
+ Play test to determine correct difficulty.

**Useful Links**

+ \* Read more in [<NAME>. Designing Games - O’Reilly](https://www.amazon.com/dp/B00AWKX1FO/)

### Polishing & Packaging

+ First impressions count (think reviews)
+ Don’t ship a half-baked product, even if digital
+ Check through your code (polish)
+ Ship to your customers (package).

### Section 2 Wrap-Up

+ HUGE congratulations on your progress
+ Over 5 hours of pure C++ learning
+ Over 30 challenges you’ve completed
+ The journey has only just begun
+ Share your source code for others to play
+ Here are some suggested improvements
+ Next we take the game logic into Unreal :-)<file_sep>---
date: "2019-06-14"
title: "Downloading Docker Images for Offline Use"
categories:
  - LINUX
  - Docker
---

![<NAME>](./photo-456t66d_64567fh6dgjkhg4_d.jpg)

## Why would you want to do it?

I am planning to use a Windows or macOS Computer that is connected to a company VPN to download images from the Docker Hub. Those images need to be transferred to a local LINUX server (behind a firewall that makes downloading large files impossible, due to constant slow-downs and disconnects) that cannot be connected to the VPN.

## Docker Save

For this I want to use the [docker save](https://docs.docker.com/engine/reference/commandline/save/) function to download the [hello-world](https://hub.docker.com/_/hello-world) image:

```bash
docker pull hello-world
docker save -o hello-world_image.docker hello-world
```

![Offline Docker Images](./Offline_docker_Images_01.png)

Now transfer the file to the offline computer and load the image from the file:

```bash
sudo docker load -i hello-world_image.docker
sudo docker run hello-world
```

![Offline Docker Images](./Offline_docker_Images_02.png)

## How to use offline Docker Images on CentOS8/Podman

The `load` command is identical in Podman. But I keep having the problem that with the `podman run` command the system wants to download the latest version of the image first before running it (I think I should have pulled a specific version instead of using the `:latest`).

```bash
podman load -i hello-world_image.docker
podman run hello-world
podman images
podman container commit hello IMAGE:ID
podman run hello
```
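A sketch of that fix: pin an explicit tag when pulling and saving, so the offline host can resolve exactly the same reference locally. The tag used here is just a placeholder:

```bash
# pull and save a fixed tag instead of the implicit :latest
docker pull hello-world:linux
docker save -o hello-world_linux.docker hello-world:linux

# on the offline CentOS host the exact same reference is now available locally
podman load -i hello-world_linux.docker
podman run hello-world:linux
```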
<file_sep>import sys
import subprocess

# Ping each IP in the given list to see if it is available
# (note: the parameter name shadows the built-in "list";
#  "-n" is the Windows ping count flag - on Linux use "-c")
def ip_reach(list):
    for ip in list:
        ip = ip.rstrip("\n")
        ping_reply = subprocess.call("ping %s -n 2" % (ip,), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        if ping_reply == 0:
            print("\n* {} is reachable \n".format(ip))
            continue
        else:
            print('\n* {} is not reachable \n* exiting program\n'.format(ip))
            sys.exit()<file_sep>---
date: "2019-03-02"
title: "Red Hat OpenShift 3 Container Platform"
categories:
  - LINUX
  - Docker
  - OpenShift
---

![Harbin, China](./photo-8752545756_67fhcdfdj_o.jpg)

<!-- TOC -->

- [Installation](#installation)

<!-- /TOC -->

[OKD](https://www.okd.io) is a distribution of Kubernetes optimized for continuous application development and multi-tenant deployment. OKD adds developer and operations-centric tools on top of Kubernetes to enable rapid application development, easy deployment and scaling, and long-term lifecycle maintenance for small and large teams. OKD is the upstream Kubernetes distribution embedded in Red Hat OpenShift.

OKD embeds Kubernetes and extends it with security and other integrated concepts. OKD is also referred to as [Origin in github](https://github.com/openshift/origin) and in the [documentation](https://docs.okd.io/latest/welcome/index.html). An OKD release corresponds to the Kubernetes distribution - for example, OKD 1.10 includes Kubernetes 1.10.

## Installation

Let's start with the prerequisites:

```bash
yum update
yum install -y git docker net-tools
```

We can now use the [install script](https://github.com/gshipley/installcentos) by [<NAME>](https://github.com/gshipley/) to set up RedHat OKD 3.11 on your own server. This install method is targeted for a single node cluster that has a long life.

This repository is a set of scripts that will allow you to easily install the latest version (3.11) of OKD in a single node fashion. What that means is that all of the services required for OKD to function (master, node, etcd, etc.) will all be installed on a single host. The script supports a custom hostname which you can provide using the interactive mode.

We will start by cloning the repository:

```bash
git clone https://github.com/gshipley/installcentos.git
cd installcentos
```

Define mandatory variables for the installation process:

```bash
# Domain name to access the cluster
export DOMAIN=<public ip address>.nip.io

# User created after installation
export USERNAME=<current user name>

# Password for the user
export PASSWORD=<PASSWORD>
```

Define optional variables for the installation process:

```bash
# Instead of using loopback, setup DeviceMapper on this disk.
# !! All data on the disk will be wiped out !!
export DISK="/dev/sda"
```

And then execute the installation script:

```bash
./install-openshift.sh
```

![Openshift 3.11](./openshift_01.png)<file_sep>---
date: "2018-01-27"
title: "Link your Node Source Code into a Docker Container"
categories:
  - Javascript
  - Node
  - Docker
---

![<NAME>](./photo-15328436920_ac881c4b7f_o.png)

As an example, we are going to use [express-generator](https://expressjs.com/en/starter/generator.html) to scaffold a slightly more complex ([than before](https://github.com/mpolinowski/node-express-docker-starter)) [Node.js](https://nodejs.org) Web App and link this source code into a Docker Container Volume.
[Github Repository](https://github.com/mpolinowski/express-generator-app-docker) <!-- TOC --> - [Create a Node.js Web App](#Create-a-Nodejs-Web-App) - [Pointing the Container Volume to our Source Code](#Pointing-the-Container-Volume-to-our-Source-Code) <!-- /TOC --> ## Create a Node.js Web App We want to use express-generator to generate a basic Node Web App. We first need to install the generator globally on our machine: ```bash npm install -g express express-generator ``` We then run express-generator to scaffold an app for use, using the [EJS Templating Engine](http://ejs.co) - check out [their website](https://expressjs.com/en/starter/generator.html) for more options - and put the source code into a folder named _express-generator-app-docker_: ```bash express --view=ejs express-generator-app-docker ``` To install dependencies we need to enter the created directory and run _npm install_ : ```bash cd express-generator-app-docker & npm install ``` We can test our web app by running _npm start_ and accessing _http://localhos:3000_ with a web browser: ![Express App in Docker Container](./express-docker_01.png) ## Pointing the Container Volume to our Source Code In our [previous test](https://github.com/mpolinowski/node-express-docker-starter) we already pulled the latest Node.js Docker Images from the Docker Hub: ![Express App in Docker Container](./express-docker_02.png) To use this image, together with our Web App, we can run the following docker command: ```bash docker run -p 8080:3000 -v E:/express-generator-app-docker:/app -w "/app" node npm start ``` This command will run the node docker image, expose the internal port 3000 (coming from our express app) to port 8080. To point to our source code, we need to create a Volume __-v__ from the app directory __E:/express-generator-app-docker__ (_adjust the absolute path according to your system setup_) and link it to an __/app__ directory inside the container. To execute our code inside the container, we can add __npm start__ at the end of the command - _be aware_ that you have to set the working directory to the __/app__ directory by adding __-w "/app"__, to run the start command from there! ![Express App in Docker Container](./express-docker_03.png) The App is now running from inside the docker container on Port 8080: ![Express App in Docker Container](./express-docker_04.png) You can go into the sites source code and edit it - changes to the express app will show up in your browser when you reload the page. That means, we can now develop a Node Application on a system that doesn't have Node.js installed on it. The next step is actually put your source code into a container - which is, what we are going to try next!<file_sep>--- date: "2019-06-19" title: "User Login with Github for Discourse" categories: - LINUX - Discourse --- ![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) Go to the [Github Developer page](https://github.com/settings/developers), then look for OAuth Apps in the left menu. Select New OAuth App. Fill in the required fields: ![Using Github Login in Discourse](./Github_Auth_for_Discourse_01.png) Be sure to set the Callback URL to use the path `/auth/github/callback` at your site domain. e.g., `https://forum.instar.com/auth/github/callback`. After creating your application, you should see the Client ID and Client Secret. 
Enter those in the Discourse fields:

```
github_client_id
github_client_secret
```

![Using Github Login in Discourse](./Github_Auth_for_Discourse_02.png)<file_sep>---
date: "2017-09-06"
title: "Proxima Centauri II"
categories:
  - LINUX
---

![<NAME>](./photo-76834w36_4vkhjk89776s_78.png)

<!-- TOC -->

- [CCProxy](#ccproxy)

<!-- /TOC -->

Using your Windows 10 PC as a proxy for your LINUX laptop or Raspberry Pi's - e.g. when your computer is connected to a separate network that you cannot get access to from your other devices.

## CCProxy

Download and install [CCProxy](https://www.youngzsoft.net/ccproxy/proxy-server-download.htm) for Windows:

![CCProxy](./CCProxy_01.png)

![CCProxy](./CCProxy_02.png)

Configure your LINUX laptop to use the proxy:

![CCProxy](./CCProxy_03.png)<file_sep>---
date: "2020-06-15"
title: "Salty DevOps"
categories:
  - LINUX
---

![Shenzhen, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Execution Modules](#execution-modules)
  - [The TEST Module](#the-test-module)
  - [The PKG Module](#the-pkg-module)
  - [The USER Module](#the-user-module)
  - [The SERVICE Module](#the-service-module)
  - [The STATUS Module](#the-status-module)
  - [The CMD Module](#the-cmd-module)
  - [The GRAINS Module](#the-grains-module)
- [Internal Execution Modules](#internal-execution-modules)
  - [The CP Module](#the-cp-module)
  - [The MATCH Module](#the-match-module)
- [Minion Invocation](#minion-invocation)

<!-- /TOC -->

## Execution Modules

The [Salt Module Index](https://docs.saltstack.com/en/latest/salt-modindex.html) lists [all available execution modules](https://docs.saltstack.com/en/latest/ref/modules/all/index.html). To check all loaded modules or functions on a Minion:

```bash
sudo salt ubuntuAsus sys.list_modules | less
sudo salt ubuntuAsus sys.list_functions | less
```

### The TEST Module

One module that we already used to see if our Minions were up was the [Test Module](https://docs.saltstack.com/en/latest/ref/modules/all/salt.modules.test.html#salt.modules.test.ping). Salt abstracts the OS layer for you. For example, the __PKG__ command will automatically choose the correct way to install a software on your minions based on the operating system they are using.

Another way to see the module documentation is using the `sys.doc` command:

```bash
sudo salt ubuntuAsus sys.doc test.ping

test.ping:

    Used to make sure the minion is up and responding. Not an ICMP ping.

    Returns ``True``.

    CLI Example:

        salt '*' test.ping
```

If you need an overview over all available test functions:

```bash
sudo salt ubuntuAsus sys.doc test | less
```

Check the installed version of Salt and its dependencies:

```bash
sudo salt ubuntuAsus test.version
sudo salt ubuntuAsus test.versions_report
```

### The PKG Module

```bash
sudo salt ubuntuAsus sys.doc pkg.list_pkgs

pkg.list_pkgs:

    List the packages currently installed in a dict::

        {'<package_name>': '<version>'}

    removed
        If ``True``, then only packages which have been removed (but not
        purged) will be returned.

    purge_desired
        If ``True``, then only packages which have been marked to be purged,
        but can't be purged due to their status as dependencies for other
        installed packages, will be returned. Note that these packages will
        appear in installed

        Changed in version 2014.1.1
            Packages in this state now correctly show up in the output of
            this function.
    CLI Example:

        salt '*' pkg.list_pkgs
        salt '*' pkg.list_pkgs versions_as_list=True
```

List all installed packages on a minion:

```bash
sudo salt ubuntuAsus pkg.list_pkgs | less
sudo salt -L ubuntuAsus,ubuntuBrix pkg.list_pkgs | grep docker
```

Verify a dependency is installed on all minions and install it where it is still missing:

```bash
sudo salt '*' pkg.list_pkgs --out=txt | grep wget | cut -c -10
sudo salt ubuntuAsus pkg.install wget
```

### The USER Module

```bash
sudo salt ubuntuAsus sys.doc user | less
```

List all users on a Minion:

```bash
sudo salt ubuntuAsus user.list_users
```

Get info for a specific user:

```bash
sudo salt ubuntuAsus user.info ubuntu

ubuntuAsus:
    ----------
    fullname:
        ubuntu
    gid:
        1000
    groups:
        - adm
        - cdrom
        - dip
        - lxd
        - plugdev
        - sudo
        - ubuntu
    home:
        /home/ubuntu
    homephone:
    name:
        ubuntu
    other:
    passwd:
        x
    roomnumber:
    shell:
        /usr/bin/zsh
    uid:
        1000
    workphone:
```

### The SERVICE Module

```bash
sudo salt ubuntuAsus sys.doc service | less
```

Get all running services on a Minion:

```bash
sudo salt ubuntuAsus service.get_running
```

Restart a service:

```bash
sudo salt ubuntuAsus service.restart salt-minion # takes a while to restart
sudo salt ubuntuAsus service.status salt-minion
```

### The STATUS Module

```bash
sudo salt ubuntuAsus sys.doc status | less
```

Check logged in users and disk usage:

```bash
sudo salt ubuntuAsus status.w
sudo salt ubuntuAsus status.diskusage
```

### The CMD Module

```bash
sudo salt ubuntuAsus sys.doc cmd | less
```

List the content of a directory and read files on Minions:

```bash
sudo salt ubuntuAsus cmd.run 'ls -lh /home/ubuntu'
sudo salt ubuntuAsus cmd.run 'cat /etc/salt/minion.d/local.conf'
```

### The GRAINS Module

```bash
sudo salt ubuntuAsus sys.doc grains | less
```

Get or set the value of a Grain:

```bash
sudo salt ubuntuAsus grains.get os
sudo salt ubuntuAsus grains.get systemd:version
sudo salt ubuntuAsus grains.set 'apps:port' 8888
sudo salt ubuntuAsus grains.get apps
sudo salt ubuntuAsus grains.setval apps "{'port':'7777'}"
sudo salt ubuntuAsus grains.get apps
salt '*' grains.filter_by '{Debian: Debheads rule, RedHat: I love my hat}'
```

## Internal Execution Modules

### The CP Module

Copying files between Master and Minion:

```bash
sudo salt ubuntuAsus sys.doc cp | less
```

### The MATCH Module

Matching Minions against targeting expressions:

```bash
sudo salt ubuntuAsus sys.doc match | less
```

Return True if the minion matches the given `grain_pcre` target:

```bash
sudo salt ubuntuAsus match.grain_pcre 'os:Ubuntu.*'
```

## Minion Invocation

The `salt-call` function can be used to trigger the Salt API from the Minion server:

```bash
sudo salt-call test.ping

local:
    True
```

This can be used to debug a Salt module directly on your Minion:

```bash
sudo salt-call network.netstat -l debug
[DEBUG ] Reading configuration from /etc/salt/minion
[DEBUG ] Including configuration from '/etc/salt/minion.d/_schedule.conf'
[DEBUG ] Reading configuration from /etc/salt/minion.d/_schedule.conf
[DEBUG ] Including configuration from '/etc/salt/minion.d/local.conf'
[DEBUG ] Reading configuration from /etc/salt/minion.d/local.conf
[DEBUG ] Configuration file path: /etc/salt/minion
[WARNING ] Insecure logging configuration detected! Sensitive data may be logged.
[DEBUG ] Grains refresh requested. Refreshing grains.
[DEBUG ] Reading configuration from /etc/salt/minion [DEBUG ] Including configuration from '/etc/salt/minion.d/_schedule.conf' [DEBUG ] Reading configuration from /etc/salt/minion.d/_schedule.conf [DEBUG ] Including configuration from '/etc/salt/minion.d/local.conf' [DEBUG ] Reading configuration from /etc/salt/minion.d/local.conf [DEBUG ] Elapsed time getting FQDNs: 0.017540931701660156 seconds [DEBUG ] Loading static grains from /etc/salt/grains [DEBUG ] LazyLoaded zfs.is_supported [DEBUG ] Connecting to master. Attempt 1 of 1 [DEBUG ] Master URI: tcp://192.168.2.110:4506 [DEBUG ] Initializing new AsyncAuth for ('/etc/salt/pki/minion', 'ubuntuAsus', 'tcp://192.168.2.110:4506') [DEBUG ] Generated random reconnect delay between '1000ms' and '11000ms' (10666) [DEBUG ] Setting zmq_reconnect_ivl to '10666ms' [DEBUG ] Setting zmq_reconnect_ivl_max to '11000ms' [DEBUG ] Initializing new AsyncZeroMQReqChannel for ('/etc/salt/pki/minion', 'ubuntuAsus', 'tcp://192.168.2.110:4506', 'clear') [DEBUG ] Connecting the Minion to the Master URI (for the return server): tcp://192.168.2.110:4506 [DEBUG ] Trying to connect to: tcp://192.168.2.110:4506 [DEBUG ] salt.crypt.get_rsa_pub_key: Loading public key [DEBUG ] Decrypting the current master AES key [DEBUG ] salt.crypt.get_rsa_key: Loading private key [DEBUG ] salt.crypt._get_key_with_evict: Loading private key [DEBUG ] Loaded minion key: /etc/salt/pki/minion/minion.pem [DEBUG ] salt.crypt.get_rsa_pub_key: Loading public key [DEBUG ] Closing AsyncZeroMQReqChannel instance [DEBUG ] Connecting the Minion to the Master publish port, using the URI: tcp://192.168.2.110:4505 [DEBUG ] salt.crypt.get_rsa_key: Loading private key [DEBUG ] Loaded minion key: /etc/salt/pki/minion/minion.pem [DEBUG ] Determining pillar cache [DEBUG ] Initializing new AsyncZeroMQReqChannel for ('/etc/salt/pki/minion', 'ubuntuAsus', 'tcp://192.168.2.110:4506', 'aes') [DEBUG ] Initializing new AsyncAuth for ('/etc/salt/pki/minion', 'ubuntuAsus', 'tcp://192.168.2.110:4506') [DEBUG ] Connecting the Minion to the Master URI (for the return server): tcp://192.168.2.110:4506 [DEBUG ] Trying to connect to: tcp://192.168.2.110:4506 [DEBUG ] salt.crypt.get_rsa_key: Loading private key [DEBUG ] Loaded minion key: /etc/salt/pki/minion/minion.pem [DEBUG ] Closing AsyncZeroMQReqChannel instance [DEBUG ] LazyLoaded jinja.render [DEBUG ] LazyLoaded yaml.render [DEBUG ] LazyLoaded platform.is_windows [DEBUG ] LazyLoaded network.netstat [DEBUG ] LazyLoaded direct_call.execute [DEBUG ] LazyLoaded path.which [DEBUG ] LazyLoaded cmd.run [INFO ] Executing command 'netstat -tulpnea' in directory '/root' [DEBUG ] stdout: Active Internet connections (servers and established) Proto Recv-Q Send-Q Local Address Foreign Address State User Inode PID/Program name tcp 0 0 127.0.0.1:1338 0.0.0.0:* LISTEN 0 39195 780/containerd tcp 0 0 127.0.0.1:10248 0.0.0.0:* LISTEN 0 41215 790/kubelet tcp 0 0 0.0.0.0:25000 0.0.0.0:* LISTEN 0 39187 1668/python3 tcp 0 0 127.0.0.1:10249 0.0.0.0:* LISTEN 0 40430 791/kube-proxy tcp 0 0 127.0.0.1:10251 0.0.0.0:* LISTEN 0 39190 800/kube-scheduler tcp 0 0 127.0.0.1:10252 0.0.0.0:* LISTEN 0 37783 787/kube-controller tcp 0 0 127.0.0.1:2380 0.0.0.0:* LISTEN 0 37379 788/etcd tcp 0 0 127.0.0.1:10256 0.0.0.0:* LISTEN 0 40428 791/kube-proxy [DEBUG ] Initializing new AsyncZeroMQReqChannel for ('/etc/salt/pki/minion', 'ubuntuAsus', 'tcp://192.168.2.110:4506', 'aes') [DEBUG ] Initializing new AsyncAuth for ('/etc/salt/pki/minion', 'ubuntuAsus', 'tcp://192.168.2.110:4506') [DEBUG ] Connecting the 
Minion to the Master URI (for the return server): tcp://192.168.2.110:4506 [DEBUG ] Trying to connect to: tcp://192.168.2.110:4506 [DEBUG ] Closing AsyncZeroMQReqChannel instance [DEBUG ] LazyLoaded nested.output ```<file_sep>--- date: "2017-10-03" title: "Search Engine Setup and Configuration" categories: - LINUX - Elasticsearch --- ![Search Engine Setup and Configuration](./photo-34139903180_fd0c397abc_o.jpg) <!-- TOC --> - [01 Search Engine Setup and Configuration](#01-search-engine-setup-and-configuration) - [Installing Elasticsearch 6.x on CentOS](#installing-elasticsearch-6x-on-centos) - [Import the Elasticsearch PGP Key](#import-the-elasticsearch-pgp-key) - [Installing from the RPM repository](#installing-from-the-rpm-repository) - [Running Elasticsearch with _systemd_](#running-elasticsearch-with-_systemd_) - [Checking that Elasticsearch is running](#checking-that-elasticsearch-is-running) - [Configuring Elasticsearch](#configuring-elasticsearch) - [Installing Kibana 6.x on CentOS](#installing-kibana-6x-on-centos) - [Running Kibana with _systemd_](#running-kibana-with-_systemd_) - [Install X-Pack](#install-x-pack) - [Elasticsearch Security](#elasticsearch-security) - [Kibana Security](#kibana-security) - [Enabling Anonymous Access](#enabling-anonymous-access) <!-- /TOC --> ## 01 Search Engine Setup and Configuration ### Installing Elasticsearch 6.x on CentOS Elasticsearch is a distributed, JSON-based search and analytics engine designed for horizontal scalability, maximum reliability, and easy management. #### Import the Elasticsearch PGP Key ``` rpm --import https://artifacts.elastic.co/GPG-KEY-elasticsearch ``` ### Installing from the RPM repository Create a file called elasticsearch.repo in the _/etc/yum.repos.d/_ directory and add the following lines: ``` [elasticsearch-6.x] name=Elasticsearch repository for 6.x packages baseurl=https://artifacts.elastic.co/packages/6.x/yum gpgcheck=1 gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch enabled=1 autorefresh=1 type=rpm-md ``` And your repository is ready for use. You can now install Elasticsearch with one of the following commands: ``` sudo yum install elasticsearch ``` #### Running Elasticsearch with _systemd_ To configure Elasticsearch to start automatically when the system boots up, run the following commands: ``` sudo /bin/systemctl daemon-reload sudo /bin/systemctl enable elasticsearch.service ``` Apparently there is no way to quietly reload the Elasticsearch service after changing the config file - you will be required to stop and restart instead: ``` sudo systemctl stop elasticsearch.service sudo systemctl start elasticsearch.service ``` These commands provide no feedback as to whether Elasticsearch was started successfully or not. Instead, this information will be written in the log files located in /var/log/elasticsearch/. #### Checking that Elasticsearch is running You can test that your Elasticsearch node is running by sending an HTTP request to port 9200 on localhost: ``` curl -XGET 'localhost:9200/?pretty' ``` ``` http://localhost:9200/_cat/indices?v&pretty ``` #### Configuring Elasticsearch Elasticsearch loads its configuration from the _/etc/elasticsearch/elasticsearch.yml_ file by default. Examples: * __cluster.name:__ e.g. _instar-wiki_ * __node.name__ e.g. 
_c21_
* __node.attr.rack:__ e.g _r44_
* __path.data:__ _/path/to/data_
* __path.logs:__ _/path/to/logs_
* __network.host:__ _localhost_ [see config](https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-network.html#network-interface-values) __*__
* __http.port:__ _9200_
* __http.cors:__ _enabled:_ true , _allow-origin:_ /https?:\/\/localhost(:[0-9]+)?/, _allow-origin:_ /https?:\/\/localhost(:[0-9][0-9][0-9][0-9])?/

__*__ _e.g. network.host: 127.0.0.1, 192.168.1.200, 172.16.31.10_

The RPM places config files, logs, and the data directory in the appropriate locations for an RPM-based system:

| Type | Description | Default Location | Setting |
|---|---|---|---|
| home | Elasticsearch home directory or $ES_HOME | _/usr/share/elasticsearch_ | |
| bin | Binary scripts including elasticsearch to start a node and elasticsearch-plugin to install plugins | _/usr/share/elasticsearch/bin_ | |
| conf | Configuration files including elasticsearch.yml | _/etc/elasticsearch_ | ES_PATH_CONF |
| conf | Environment variables including heap size, file descriptors. | _/etc/sysconfig/elasticsearch_ | |
| data | The location of the data files of each index / shard allocated on the node. Can hold multiple locations. | _/var/lib/elasticsearch_ | path.data |
| logs | Log files location. | _/var/log/elasticsearch_ | path.logs |
| plugins | Plugin files location. Each plugin will be contained in a subdirectory. | _/usr/share/elasticsearch/plugins_ | |

### Installing Kibana 6.x on CentOS

Kibana gives shape to your data and is the extensible user interface for configuring and managing all aspects of the Elastic Stack.

Create a file called kibana.repo in the _/etc/yum.repos.d/_ directory and add the following lines:

```
[kibana-6.x]
name=Kibana repository for 6.x packages
baseurl=https://artifacts.elastic.co/packages/6.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md
```

And your repository is ready for use. You can now install Kibana with the following command:

```
sudo yum install kibana
```

#### Running Kibana with _systemd_

To configure Kibana to start automatically when the system boots up, run the following commands:

```
sudo /bin/systemctl daemon-reload
sudo /bin/systemctl enable kibana.service
```

Kibana can be started and stopped as follows:

```
sudo systemctl stop kibana.service
sudo systemctl start kibana.service
```

These commands provide no feedback as to whether Kibana was started successfully or not. Instead, this information will be written in the log files located in _/var/log/kibana/_.

Kibana loads its configuration from the _/etc/kibana/kibana.yml_ file by default. Examples:

* __elasticsearch.url:__ Default: _http://localhost:9200_ The URL of the Elasticsearch instance to use for all your queries.
* __server.port:__ Server port for the Kibana web UI - _default 5601_
* __server.host:__ Specifies the address to which the Kibana server will bind. IP addresses and host names are both valid values. The default is _localhost_, which usually means remote machines will not be able to connect. To allow connections from remote users, set this parameter to a non-loopback address.
* __console.enabled:__ Default: true Set to false to disable Console.
* __elasticsearch.username:__ see below
* __elasticsearch.password:__ If your Elasticsearch is protected with basic authentication, these settings provide the username and password that the Kibana server uses to perform maintenance on the Kibana index at startup. Your Kibana users still need to authenticate with Elasticsearch, which is proxied through the Kibana server. (see X-Pack below)
* __server.ssl.enabled:__ Default: "false" Enables SSL for outgoing requests from the Kibana server to the browser. When set to true, server.ssl.certificate and server.ssl.key are required
* __server.ssl.certificate:__ see below
* __server.ssl.key:__ Paths to the PEM-format SSL certificate and SSL key files, respectively.
* __server.ssl.certificateAuthorities:__ List of paths to PEM encoded certificate files that should be trusted.
* __server.ssl.cipherSuites:__ Default: _ECDHE-RSA-AES128-GCM-SHA256, ECDHE-ECDSA-AES128-GCM-SHA256, ECDHE-RSA-AES256-GCM-SHA384, ECDHE-ECDSA-AES256-GCM-SHA384, DHE-RSA-AES128-GCM-SHA256, ECDHE-RSA-AES128-SHA256, DHE-RSA-AES128-SHA256, ECDHE-RSA-AES256-SHA384, DHE-RSA-AES256-SHA384, ECDHE-RSA-AES256-SHA256, DHE-RSA-AES256-SHA256, HIGH,!aNULL, !eNULL, !EXPORT, !DES, !RC4, !MD5, !PSK, !SRP, !CAMELLIA_. Details on the format, and the valid options, are available via the [OpenSSL cipher list format documentation](https://www.openssl.org/docs/man1.0.2/apps/ciphers.html#CIPHER-LIST-FORMAT)
* __server.ssl.keyPassphrase:__ The passphrase that will be used to decrypt the private key. This value is optional as the key may not be encrypted.
* __server.ssl.redirectHttpFromPort:__ Kibana will bind to this port and redirect all http requests to https over the port configured as server.port.
* __server.ssl.supportedProtocols:__ _Default_: TLSv1, TLSv1.1, TLSv1.2 Supported protocols with versions. Valid protocols: TLSv1, TLSv1.1, TLSv1.2
* __status.allowAnonymous:__ Default: false If authentication is enabled, setting this to true allows unauthenticated users to access the Kibana server status API and status page.

| Type | Description | Default Location |
|---|---|---|
| home | Kibana home directory or $KIBANA_HOME | _/usr/share/kibana_ |
| bin | Binary scripts including kibana to start the Kibana server and kibana-plugin to install plugins | _/usr/share/kibana/bin_ |
| config | Configuration files including kibana.yml | _/etc/kibana_ |
| data | The location of the data files written to disk by Kibana and its plugins | _/var/lib/kibana_ |
| optimize | Transpiled source code. Certain administrative actions (e.g. plugin install) result in the source code being retranspiled on the fly. | _/usr/share/kibana/optimize_ |
| plugins | Plugin files location. Each plugin will be contained in a subdirectory. | _/usr/share/kibana/plugins_ |

### Install X-Pack

X-Pack is a single extension that integrates handy features — security, alerting, monitoring, reporting, graph exploration, and machine learning — you can trust across the Elastic Stack.

#### Elasticsearch Security

We need to add user authentication to our Elasticsearch / Kibana setup. We will do this by installing X-Pack.
To get started with installing the Elasticsearch plugin, go to _/etc/elasticsearch/_ and run the following command:

```
bin/elasticsearch-plugin install x-pack
```

Now restart Elasticsearch:

```
sudo systemctl stop elasticsearch.service
sudo systemctl start elasticsearch.service
```

You can either use the auto function to generate user passwords for Elasticsearch, Kibana (and the not yet installed Logstash):

```
bin/x-pack/setup-passwords auto
```

or swap the _auto_ flag with _interactive_ to use your own user logins. The auto output will look something like this:

```
Changed password for user kibana
PASSWORD kibana = *&$*(<PASSWORD>

Changed password for user logstash_system
PASSWORD logstash_system = <PASSWORD>

Changed password for user elastic
PASSWORD elastic = jgfisg)#*%&(@*#)
```

__Now every interaction with Elasticsearch or Kibana will require you to authenticate with _username: elastic_ and _password: <PASSWORD>)#*%&(@*#)___

#### Kibana Security

Now we repeat these steps with Kibana. First navigate to _/etc/kibana/_ and run the following command:

```
bin/kibana-plugin install x-pack
```

And we have to add the login that Kibana has to use to access Elasticsearch (auto generated above) to the _kibana.yml_ file in _/etc/kibana/_:

```
elasticsearch.username: "kibana"
elasticsearch.password: "<PASSWORD>"
```

Now restart Kibana:

```
sudo systemctl stop kibana.service
sudo systemctl start kibana.service
```

Now navigate your browser to _http://localhost:5601/_ and log in with the "elastic" user we generated above.

### Enabling Anonymous Access

Incoming requests are considered to be anonymous if no authentication token can be extracted from the incoming request. By default, anonymous requests are rejected and an authentication error is returned (status code 401).

To allow anonymous users to send search queries (READ access to specified indices), we need to add the following lines to the _elasticsearch.yml_ file in _/etc/elasticsearch/_:

```
xpack.security.authc:
  anonymous:
    username: anonymous_user
    roles: wiki_reader
    authz_exception: true
```

Now we have to switch to the Kibana webUI on _http://localhost:5601/_ and create the _role:_ *wiki_reader* to allow read access to the wiki indices. First switch to the __Management__ tab and click on user:

![Add a Elasticsearch User with Read Access](./kibana_01.png)

Then click on __Add a User__ and add a user with the __watcher_user__ role:

![Add a Elasticsearch User with Read Access](./kibana_02.png)

Switch back to the __Management__ tab and click on role:

![Add a Elasticsearch User with Read Access](./kibana_03.png)

Click on __Create Role__ and add the name **wiki_reader** that we chose for the role of the anonymous user inside the elasticsearch.yml file, assign the **monitor_watcher** privilege and choose the indices that you want the anonymous user to have __READ__ access to:

![Add a Elasticsearch User with Read Access](./kibana_04.png)

Your configuration will be active after restarting Elasticsearch. Now you can use webservices to read from your ES database.
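For example, assuming an index named `wiki_en` (a hypothetical name), an unauthenticated search should now succeed while write requests still ask for credentials:

```bash
# anonymous READ access - no credentials required
curl -XGET 'localhost:9200/wiki_en/_search?q=test&pretty'

# WRITE access still requires authentication, e.g. with the elastic user
curl -u elastic -XPUT 'localhost:9200/wiki_en/_doc/1?pretty' \
     -H 'Content-Type: application/json' -d '{"title": "test"}'
```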
But only the __elastic__ user has the privilege to __WRITE__ and to work in Kibana.<file_sep>---
date: "2017-07-24"
title: "React TODO List"
categories:
  - Javascript
  - React
---

![Shenzhen, China](./photo-33796026903_2023fedff3_o.jpg)

Source Code [Github](https://github.com/mpolinowski/obligatory-react-todo-list-2017)

Source [Medium](https://medium.com/codingthesmartway-com-blog/the-2017-react-development-starter-guide-f717e4e13de7)

Every web-dev should have one or two of them on Github ~

<!-- TOC -->

- [01 create-react-app](#01-create-react-app)
- [02 Set Initial State](#02-set-initial-state)
- [03 JSX, Font-Awesome and Bootstrap](#03-jsx-font-awesome-and-bootstrap)
- [04 Add Remove TODO function](#04-add-remove-todo-function)
- [05 Add a Add TODO function](#05-add-a-add-todo-function)

<!-- /TOC -->

![React TODO List](./todolist_02.png)

## 01 create-react-app

In the following tutorial we’ll use create-react-app to bootstrap our application. It’s an opinionated zero-configuration starter kit for React introduced by Facebook in 2016. We can install create-react-app by using npm:

```bash
npm install -g create-react-app
```

Having completed the installation successfully we're able to use create-react-app to initiate a new React project:

```bash
create-react-app obligatory-react-todo-list-2017
```

This creates a new initial React project in the folder obligatory-react-todo-list-2017. Dependencies are installed automatically. Change into the folder and start the app with **npm start** on *localhost:3000*.

## 02 Set Initial State

Now open the file ./src/App.js inside your code editor and add some Todo's right below the import statements (delete the <App /> component that was created below):

```js
// add initial data model array
var todos = [
	{
		todoTitle: 'Do some coding',
		todoResponsible: 'Me',
		todoDescription: 'Todo description',
		todoPriority: 'medium'
	},
	{
		todoTitle: 'Drink Coffee',
		todoResponsible: 'Me',
		todoDescription: 'Todo description',
		todoPriority: 'high'
	},
	{
		todoTitle: 'Do some more coding',
		todoResponsible: 'Me',
		todoDescription: 'Todo description',
		todoPriority: 'low'
	}
]
```

Now add the todos array to the state of the app component. This is done by introducing a class constructor where we can set the initial component state like you can see in the following:

```js
class App extends Component {
	// set initial component state to todos array
	constructor(props) {
		super(props);
		this.state = { todos };
	}
	[...]
}
```

## 03 JSX, Font-Awesome and Bootstrap

We want to use Bootstrap CSS for our rendered app, which we include via CDN links (see [getbootstrap.com](http://getbootstrap.com/)) inside the public/index.html page.

```html
<head>
	<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous">
</head>
<body>
	<script src="https://code.jquery.com/jquery-3.2.1.slim.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
	<script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.11.0/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
	<script src="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
</body>
```

Now we can use bootstrap classNames directly inside the render statement of our <App /> component in src/app.js.

For Font Awesome, we download the zip archive from [fontawesome.io](http://fontawesome.io) and copy only the \*.css and web-font files to src/fonts.
All web-font files are referenced inside font-awesome.css - the relative path has to be changed from ../fonts/[filename] to ./[filename]! Font-Awesome can now be added to our JSX code inside the render method - but remember to change class to className! ```html <i className="fa fa-user-circle-o" aria-hidden="true"></i> ``` Our render function now looks like this, giving us a beautiful Bootstrap UI with some Font-Awesome goodness: ```js render() { return ( <div className="container"> <nav className="navbar fixed-top navbar-dark bg-dark"> <img src={logo} className="App-logo" alt="logo" /> <h4 className="navbar-brand"> Todo Count: <span className="badge badge-pill badge-primary">{this.state.todos.length}</span> </h4> </nav> <div className="row mt-5"> <div className="col"> <ul className="list-group"> { this.state.todos.map((todo, index) => <li className="list-group-item" key={index}> <h4 className="list-group-item-heading">{todo.todoTitle} <small><span className="badge badge-secondary">{todo.todoPriority}</span></small></h4> <p><i className="fa fa-user-circle-o" aria-hidden="true"></i> {todo.todoResponsible}</p> <p className="text-justify">{todo.todoDescription}</p> <button className="btn btn-danger btn-sm float-right" onClick={this.handleRemoveTodo.bind(this, index)}><span><i className="fa fa-trash-o" aria-hidden="true"></i></span>&nbsp;&nbsp; Delete</button> </li> )} </ul> </div> </div> </div> ); } ``` The app should automatically reload inside of your browser and display the basic bootstrap layout of our app, using the data from the todos-array: ![React TODO List](./todolist_01.png) ## 04 Add Remove TODO function Now we want to add a Delete function to the Delete button we added above. We do this, by adding an onClick event handler to the button: ```html <button className="btn btn-danger btn-sm float-right" onClick={this.handleRemoveTodo.bind(this, index)}> <span> <i className="fa fa-trash-o" aria-hidden="true"></i> </span>&nbsp;&nbsp; Delete</button> ``` Then we have to define the handleRemoveTodo function inside src/App.js above the render method of <App />: ```js handleRemoveTodo(index) { this.setState({ todos: this.state.todos.filter(function(e, i) { return i !== index; }) }) } ``` ## 05 Add a Add TODO function For now we just want to add a method to add TODOs to our list - so we create a new function below <App /> called <TodoInput /> ```js class TodoInput extends Component { constructor(props) { super(props); this.state = { todoTitle: '', todoResponsible: '', todoDescription: '', todoPriority: 'lowest' } } render() { return ( <div className="col"> <br/><br/><br/> <h4>Add New Todo</h4><br/> <form onSubmit={this.handleSubmit}> <div className="form-group"> <input name="todoTitle" type="text" className="form-control" id="inputTodoTitle" value={this.state.todoTitle} onChange={this.handleInputChange} aria-describedby="Todo Title" placeholder="Enter Title"></input> </div> <div className="form-group"> <label htmlFor="inputTodoPriority" className="control-label text-muted"><small>Priority</small></label> <select name="todoPriority" type="text" className="form-control" id="inputTodoPriority" value={this.state.todoPriority} onChange={this.handleInputChange} aria-describedby="Todo Priority"> <option>lowest</option> <option>low</option> <option>medium</option> <option>high</option> <option>emergency</option> </select><br/> </div> <div className="form-group"> <label htmlFor="inputTodoDescription" className="control-label text-muted"><small>Description</small></label> <textarea 
name="todoDescription" type="text" className="form-control" id="inputTodoDescription" value={this.state.todoDescription} onChange={this.handleInputChange} aria-describedby="Todo Description"></textarea> </div> <div className="form-group"> <label htmlFor="inputTodoResponsible" className="control-label text-muted"><small>Responsible</small></label> <select name="todoResponsible" type="text" className="form-control" id="inputTodoResponsible" value={this.state.todoResponsible} onChange={this.handleInputChange} aria-describedby="Todo Responsible"> <option>someone else</option> <option><NAME></option> <option>Micro Aggressions</option> <option><NAME></option> <option>Climate Change</option> </select><br/> </div> <div className="form-group"> <button type="submit" className="btn btn-primary float-right">Add Todo</button> </div> </form> </div> ) } } ``` Now we have to define handleInputChange and handleSubmit above the render method: ```js handleInputChange(event) { const target = event.target; const value = target.value; const name = target.name; this.setState({ [name]: value }) } handleSubmit(event) { event.preventDefault(); this.props.onAddTodo(this.state); this.setState({ todoTitle: '', todoResponsible: '', todoDescription: '', todoPriority: 'lowest' }) } ``` And bind **this** to those functions inside the constructor - so we get access to the state of **todos**: ```js constructor(props) { super(props); this.state = { todoTitle: '', todoResponsible: '', todoDescription: '', todoPriority: 'lowest' } this.handleInputChange = this.handleInputChange.bind(this); this.handleSubmit = this.handleSubmit.bind(this); } ``` Now we just need to pass down the state of todo via props in <App />: ```js <TodoInput onAddTodo={this.handleAddTodo}/> ``` Define the handleAddTodo method above the render call: ```js handleAddTodo(todo) { this.setState({todos: [...this.state.todos, todo]}); ``` And bind **this** inside the constructor: ```js constructor(props) { super(props); this.state = { todos }; this.handleAddTodo = this.handleAddTodo.bind(this); } ```<file_sep>--- date: "2019-09-14" title: "Magento 2 Manual Theme Installation" categories: - LINUX - Magento --- ![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Download and Unzip your Theme](#download-and-unzip-your-theme) - [Theme Installation](#theme-installation) - [Importing Demo SQL Data](#importing-demo-sql-data) - [Lot's of Problems on the way...](#lots-of-problems-on-the-way) - [How to clear out a manually installed theme ?](#how-to-clear-out-a-manually-installed-theme) - [Error Collection](#error-collection) <!-- /TOC --> ## Download and Unzip your Theme After unzipping your theme, you will find a folder named __themeXXXX_. Upload all folders from this directory into your Magento root directory - e.g. `/var/www/html/magento`. In my case the template only had a folder `data`: ![Magento Theme Installation](./Magento_Themes_Installation_01.png) Next switch to `sources/sample_data` and copy the folder `pub` to the Magento root directory: ![Magento Theme Installation](./Magento_Themes_Installation_02.png) Make sure that all files that you uploaded will be accessible by the default user, e.g. `www-data`: ```bash chown -R www-data:www-data . ``` ## Theme Installation 1. 
1. Run Upgrade:

```bash
php bin/magento setup:upgrade
php bin/magento setup:di:compile
php bin/magento setup:static-content:deploy -f
```

> If you are not running the Magento scripts with the `www-data` user, make sure to change the owner of every folder and file inside `var` and `pub` after every recompile:

```
chown -R www-data:www-data ./var
chown -R www-data:www-data ./pub
```

![Magento Theme Installation](./Magento_Themes_Installation_06.png)

> NOTE: Manual static content deployment is not required in "default" and "developer" modes. In "default" and "developer" modes static contents are being deployed automatically on demand. If you still want to deploy in these modes, use the -f option: `bin/magento setup:static-content:deploy -f`.

2. Set up the new theme: On the Admin sidebar, go to __Content__ > __Design__ > __Configuration__ and select your current theme:

![Magento Theme Installation](./Magento_Themes_Installation_04.png)

3. Click on __Edit__ to select your theme:

![Magento Theme Installation](./Magento_Themes_Installation_05.png)

## Importing Demo SQL Data

To use the sample SQL data that comes with your template, you have to start with a blank database. First create a backup of your database to be able to roll it back later:

```bash
mysqldump -u username -p database_name > file.sql
```

Delete all tables from the database - or delete the db and recreate it. Then import the sample data from your template. Now download a fresh version of Magento and run the init script - make sure that `--db-name` is set to the database you just prepared above:

```bash
bin/magento setup:install \
--base-url=http://mysite.dev \
--db-host=localhost \
--db-name=magento \
--db-user=magento \
--db-password=<PASSWORD> \
--admin-firstname=admin \
--admin-lastname=admin \
--admin-email=<EMAIL> \
--admin-user=admin \
--admin-password=<PASSWORD> \
--language=en_US \
--currency=USD \
--timezone=America/Chicago \
--use-rewrites=1
```

Now go through the regular setup in the Magento Admin panel. The theme should already be selected and ready to be configured. If you run into the following issue when trying to load the frontend:

```bash
1 exception(s):
Exception #0 (Magento\Framework\Exception\LocalizedException): Unable to find a physical ancestor for a theme 'Magetigue'.
```

![Magento2](./Magento_Themes_Installation_10.png)

Make sure that `type` for the theme is set to `0`:

```sql
SELECT * FROM theme;
UPDATE theme SET type = 0 WHERE theme_id = 4;
```

## Lots of Problems on the way...

The original theme that I was using did not work... it all just went downhill... error message by error message. I found that the theme supports PHP up to v7.0.4 __but not__ v7.1. As I am using PHP v7.2 this probably is the issue here. The free theme I downloaded above was __labeled Magento 2.3 compatible__ and __worked right away__.

### How to clear out a manually installed theme?

1. File Operations: Delete the content from `app/design/frontend/<Vendor>`. You might also find files in `var/view_preprocessed` & `pub/static/frontend`.

2. DB Operations: Go to your theme table and delete the entry for your created theme.

```sql
mysql -u magento -p
SHOW databases;
USE magento;
SHOW tables;
DESCRIBE theme;
SELECT * FROM theme;
DELETE FROM theme WHERE theme_id=4;
EXIT;
```

![Magento Theme Installation](./Magento_Themes_Installation_07.png)

The template was stored under `theme_id=4` in the theme table inside the Magento database.

3. Flush cache

```bash
php bin/magento cache:flush
```

4.
Recompile ```bash rm -rf generated/* var/* cache/* php bin/magento setup:di:compile php bin/magento setup:static-content:deploy -f ``` 5. CleanUP I am still getting an error message when trying to access the storefront: ![Magento Theme Installation](./Magento_Themes_Installation_08.png) If the theme has a configuration in Magento Admin, then it is important to delete all references to the theme in table `eav_attribute`: ```sql SELECT * FROM eav_attribute; DELETE FROM eav_attribute WHERE attribute_id=151; ``` ![Magento Theme Installation](./Magento_Themes_Installation_09.png) A quick re-compile, deploy and flush later everything was up and running... ```bash rm -rf generated/* var/* cache/* php bin/magento setup:di:compile php bin/magento setup:static-content:deploy -f php bin/magento cache:flush chown -R www-data:www-data . ``` ### Error Collection _If you see these error messages_: ``` ReflectionException: Class Magento\Framework\App\Http\Interceptor does not exist in /var/www/html/magento/vendor/magento/framework/Code/Reader/ClassReader.php:19 Stack trace: #0 /var/www/html/magento/vendor/magento/framework/Code/Reader/ClassReader.php(19): ReflectionClass->__construct('Magento\\Framewo...') #1 /var/www/html/magento/vendor/magento/framework/ObjectManager/Definition/Runtime.php(49): Magento\Framework\Code\Reader\ClassReader->getConstructor('Magento\\Framewo...') #2 /var/www/html/magento/vendor/magento/framework/ObjectManager/Factory/Dynamic/Developer.php(48): Magento\Framework\ObjectManager\Definition\Runtime->getParameters('Magento\\Framewo...') #3 /var/www/html/magento/vendor/magento/framework/ObjectManager/ObjectManager.php(56): Magento\Framework\ObjectManager\Factory\Dynamic\Developer->create('Magento\\Framewo...', Array) #4 /var/www/html/magento/vendor/magento/framework/App/Bootstrap.php(235): Magento\Framework\ObjectManager\ObjectManager->create('Magento\\Framewo...', Array) #5 /var/www/html/magento/pub/index.php(39): Magento\Framework\App\Bootstrap->createApplication('Magento\\Framewo...') #6 {main} ``` ![Magento Theme Installation](./Magento_Themes_Installation_03.png) Clear all directories and files under Magento’s var subdirectory and install the Magento software again. As the Magento file system owner or as a user with root privileges, enter the following commands: ```bash cd <your Magento install directory>/var rm -rf cache/* di/* generation/* page_cache/* ``` _And more problems_: ``` 1 exception(s): Exception #0 (Magento\Framework\Config\Dom\ValidationException): Element 'block', attribute 'type': The attribute 'type' is not allowed. Line: 1001 Element 'block', attribute 'type': The attribute 'type' is not allowed. Line: 1011 Element 'block', attribute 'type': The attribute 'type' is not allowed. Line: 1020 Element 'block', attribute 'type': The attribute 'type' is not allowed. Line: 1027 Element 'block', attribute 'type': The attribute 'type' is not allowed. Line: 1034 Element 'block', attribute 'type': The attribute 'type' is not allowed. Line: 1042 ``` This is [can be caused](https://magento.stackexchange.com/questions/199151/magento-2-2-0-upgrade-error-attribute-type-is-not-allowed) by themes that use `<block type="..."` instead of `<block class="..."`. I found type declarations in the footer part of the theme and deleted them. 
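Rather than hunting for these declarations by hand, a recursive grep over the theme directory can list every remaining occurrence - a small sketch; the exact path depends on your vendor and theme names:

```bash
# List every layout file that still uses the removed "type" attribute
grep -rn '<block type=' app/design/frontend/
```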
Also check all custom blog layout files in your theme (app/code/design/ThemeVendor/themename/Magefan_Blog/layout), they may have a code e.g.: ``` class="\Magento ``` you need to change it to ``` class="Magento ``` Then flush Magento cache (Magento Admin Panel __System__ > __Cache Management__ > __Flush Magento Cache__). This solved the problem above but had me ending up with new errors: ``` 1 exception(s): Exception #0 (Exception): Notice: Undefined index: src in /var/www/html/magento/vendor/magento/framework/View/Page/Config/Generator/Head.php on line 126 Exception #0 (Exception): Notice: Undefined index: src in /var/www/html/magento/vendor/magento/framework/View/Page/Config/Generator/Head.php on line 126 ``` This error refers to: ```php protected function processAssets(Structure $pageStructure) { foreach ($pageStructure->getAssets() as $name => $data) { if (isset($data['src_type']) && in_array($data['src_type'], $this->remoteAssetTypes)) { if ($data['src_type'] === self::SRC_TYPE_CONTROLLER) { $data['src'] = $this->url->getUrl($data['src']); } $this->pageConfig->addRemotePageAsset( $data['src'], isset($data['content_type']) ? $data['content_type'] : self::VIRTUAL_CONTENT_TYPE_LINK, $this->getAssetProperties($data), $name ); } else { $this->pageConfig->addPageAsset($name, $this->getAssetProperties($data)); } } return $this; } ``` And a problem with the compilation: ```bash php bin/magento setup:di:compile Compilation was started. Interception cache generation... 6/7 [========================>---] 85% 35 secs 365.0 MiBErrors during compilation: TemplateMonster\Megamenu\Plugin\Block\Topmenu Incompatible argument type: Required type: \Magento\Catalog\Model\ResourceModel\Category\StateDependentCollectionFactory. Actual type: \Magento\Catalog\Model\ResourceModel\Category\CollectionFactory; File: /var/www/html/magento/app/code/TemplateMonster/Megamenu/Plugin/Block/Topmenu.php Total Errors Count: 1 In Log.php line 92: Error during compilation setup:di:compile ``` <file_sep>--- date: "2019-09-22" title: "Installing Oh-My-Zsh on CentOS8" categories: - LINUX --- ![Central, Hong Kong](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Install ZSH](#install-zsh) - [Make it as Default Shell](#make-it-as-default-shell) - [If chsh not found](#if-chsh-not-found) - [Verify](#verify) - [Install Oh-My-Zsh Framework](#install-oh-my-zsh-framework) - [Change Oh-My-Zsh Themes](#change-oh-my-zsh-themes) - [Install external Themes](#install-external-themes) <!-- /TOC --> In order for [Oh-My-Zsh](https://ohmyz.sh) to work on the system ,`zsh` need to be installed. 
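You can quickly check whether it is already present - a small sketch:

```bash
# Show the installed zsh version (fails if zsh is missing)
zsh --version
# List all login shells registered on the system
cat /etc/shells
```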
## Install ZSH

```bash
sudo yum update && sudo yum -y install zsh
```

## Make it as Default Shell

```bash
chsh -s $(which zsh) root
```

### If chsh not found

```bash
chsh -s /bin/zsh root
-bash: chsh: command not found
```

```bash
yum install util-linux-user
```

### Verify

Log out from the root user and log in again to verify the shell:

```bash
[root@CentOS8 ~]# su
[root@CentOS8]~# echo $SHELL
/bin/zsh
```

## Install Oh-My-Zsh Framework

Once Z Shell is installed and confirmed, install the framework from Git:

```bash
yum install wget git
wget https://github.com/robbyrussell/oh-my-zsh/raw/master/tools/install.sh -O - | zsh
```

Oh-My-Zsh will be installed in the home directory `~/.oh-my-zsh`. Copy the zshrc.zsh-template config file to the home directory as .zshrc and apply the config:

```bash
cp ~/.oh-my-zsh/templates/zshrc.zsh-template ~/.zshrc
source ~/.zshrc
```

### Change Oh-My-Zsh Themes

The Oh-My-Zsh framework [provides a large number of themes](https://github.com/ohmyzsh/ohmyzsh/wiki/Themes). You can change the default theme by editing the config file `.zshrc` - the theme files themselves can be found under `~/.oh-my-zsh/themes/`.

Set ZSH_THEME to the name of the theme in your ~/.zshrc and run `su` to activate it:

```bash
# If you come from bash you might have to change your $PATH.
# export PATH=$HOME/bin:/usr/local/bin:$PATH

# Path to your oh-my-zsh installation.
export ZSH=$HOME/.oh-my-zsh

# Set name of the theme to load --- if set to "random", it will
# load a random theme each time oh-my-zsh is loaded, in which case,
# to know which specific one was loaded, run: echo $RANDOM_THEME
# See https://github.com/ohmyzsh/ohmyzsh/wiki/Themes
ZSH_THEME="agnoster"
...
```

### Install external Themes

see [external Themes](https://github.com/ohmyzsh/ohmyzsh/wiki/External-themes).

__Example__: [PowerLevel10k](https://github.com/romkatv/powerlevel10k#oh-my-zsh)

```bash
git clone --depth=1 https://github.com/romkatv/powerlevel10k.git $ZSH_CUSTOM/themes/powerlevel10k
```

Set `ZSH_THEME="powerlevel10k/powerlevel10k"` in `~/.zshrc`.

```bash
su
```
<file_sep>---
date: "2019-09-11"
title: "Magento 2 Installation with NGINX on Debian Buster"
categories:
- NGINX
- LINUX
- Magento
---

![<NAME>, Cambodia](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Install PHP 7.3](#install-php-73)
- [Set the Timezone](#set-the-timezone)
- [Increase PHP memory limit](#increase-php-memory-limit)
- [Disable asp_tags](#disable-asptags)
- [Enable opcache.save_comments](#enable-opcachesavecomments)
- [Install NGINX](#install-nginx)
- [NGINX installation:](#nginx-installation)
- [Install and configure php-fpm](#install-and-configure-php-fpm)
- [Configure PHP](#configure-php)
- [Install MySQL](#install-mysql)
- [Install MySQL on Debian 10](#install-mysql-on-debian-10)
- [Connect MySQL](#connect-mysql)
- [max_allowed_packet](#maxallowedpacket)
- [Configuring the Magento database](#configuring-the-magento-database)
- [Verify the database by logging in with the Magento user](#verify-the-database-by-logging-in-with-the-magento-user)
- [Install Magento](#install-magento)
- [Install composer](#install-composer)
- [Set file permissions](#set-file-permissions)
- [Install Magento](#install-magento-1)
- [Switch to Developer Mode](#switch-to-developer-mode)
- [Configure NGINX](#configure-nginx)

<!-- /TOC -->

## Install PHP 7.3

Run the commands below to upgrade the current packages to the latest version:

```bash
sudo apt update
sudo apt upgrade
```

Now set up the PPA on Debian 10. Then import the package signing key.
After that configure PPA for the PHP packages on your system: ```bash sudo apt install ca-certificates apt-transport-https wget -q https://packages.sury.org/php/apt.gpg -O- | sudo apt-key add - echo "deb https://packages.sury.org/php/ buster main" | sudo tee /etc/apt/sources.list.d/php.list ``` Now run the following commands to install PHP 7.3: ```bash sudo apt update sudo apt install php7.3 ``` Install the necessary PHP modules: <!-- ```bash sudo apt install php7.2-cli php7.2-common php7.2-curl php7.2-gd php7.2-json php7.2-mbstring php7.2-mysql php7.2-xml php7.2-bcmath php7.2-gd php7.2-intl php7.2-opcache php7.2-soap php7.2-imagick ``` --> ```bash sudo apt install php7.3-cli php7.3-common php7.3-curl php7.3-gd php7.3-json php7.3-mbstring php7.3-mysql php7.3-xml php7.3-bcmath php7.3-gd php7.3-intl php7.3-opcache php7.3-soap php7.3-imagick ``` <!-- ```bash sudo apt install php7.0-cli php7.0-common php7.0-curl php7.0-gd php7.0-json php7.0-mbstring php7.0-mysql php7.0-xml php7.0-bcmath php7.0-gd php7.0-intl php7.0-opcache php7.0-soap php-imagick php7.0-mcrypt php-mhash libapache2-mod-php7.0 ``` --> Now we need to configure PHP v7.3 - to find out which `php.ini` file is the one that is loaded run `php -i`: ```bash php -i phpinfo() PHP Version => 7.3.26-1+0~20191218.33+debian10~1.gbpb5a34b System => Linux Magento2 4.19.0-6-amd64 #1 SMP Debian 4.19.67-2+deb10u2 (2019-11-11) x86_64 Build Date => Dec 18 2019 15:01:47 Server API => Command Line Interface Virtual Directory Support => disabled Configuration File (php.ini) Path => /etc/php/7.3/cli Loaded Configuration File => /etc/php/7.3/cli/php.ini Scan this dir for additional .ini files => /etc/php/7.3/cli/conf.d Additional .ini files parsed => /etc/php/7.3/cli/conf.d/10-mysqlnd.ini, ``` > Note: on my system there was a second configuration file in `/etc/php/7.3/apache2/php.ini`. In a later step I am going to install NGINX that is going to add more files in `/etc/php/7.3/fpm/php.ini` and `/etc/php/7.3/cli/php.ini`. The Magento documentation recommend to do all changes to all `php.ini` files on your system. We can see that the loaded configuration file is `/etc/php/7.3/cli/php.ini`: ```bash nano /etc/php/7.3/cli/php.ini ``` ### Set the Timezone Edit the `php.ini` configuration file and update the [timezone](https://www.php.net/manual/en/timezones.php) value in `date.timezone` setting tag: ```yaml date.timezone = "Europe/Berlin" ``` ### Increase PHP memory limit Simply increase the default value to the recommended value: * Compiling code or deploying static assets: `756M` * Installing and updating Magento components from Magento Marketplace: `2G` * Testing: `~3-4G` ```yaml memory_limit = 2G ``` ### Disable asp_tags ASP style tags have been removed from PHP in version 7. If you still use a template that uses them, they need to be [deactivated on the theme side](https://stackoverflow.com/questions/14694116/how-to-disable-asp-style-php-tags-within-a-specific-file) ### Enable opcache.save_comments Enable [opcache.save_comments](https://www.php.net/manual/en/opcache.configuration.php#ini.opcache.save_comments) and it is recommended to enable the [PHP OpCache](http://php.net/manual/en/intro.opcache.php) for performance reasons. 
```yaml [opcache] ; Determines if Zend OPCache is enabled opcache.save_comments=1 opcache.enable=1 ``` ## Install NGINX ### NGINX installation: ```bash apt -y install nginx ``` ### Install and configure php-fpm The `php-fpm` extension is needed for NGINX in addition to the modules we already installed earlier: <!-- ```bash apt -y install php7.2-fpm php7.2-cli ``` --> ```bash apt -y install php7.3-fpm php7.3-cli ``` ### Configure PHP Open the following two, new `php.ini` files in an editor: <!-- ```bash nano /etc/php/7.2/fpm/php.ini nano /etc/php/7.2/cli/php.ini ``` --> ```bash nano /etc/php/7.3/fpm/php.ini nano /etc/php/7.3/cli/php.ini ``` And edit both files to match the following lines: ```bash memory_limit = 2G max_execution_time = 1800 zlib.output_compression = On ``` Now restart all services: ```bash systemctl restart php7.3-fpm service nginx restart ``` ## Install MySQL MySQL team provides official MySQL PPA for Debian Linux. You can download and install the package on your Debian system, which will add PPA file to your system. Run below command to enable PPA: ```bash wget http://repo.mysql.com/mysql-apt-config_0.8.13-1_all.deb sudo dpkg -i mysql-apt-config_0.8.13-1_all.deb ``` During the installation of MySQL apt config package, It will prompt to select MySQL version to install. Select the MySQL 5.7 option to install on your system: ![MySQL Installation for Magento 2](./magento_nginx_01.png) ### Install MySQL on Debian 10 Run the following commands to install MySQL on a Debian machine. ```bash sudo apt update && apt upgrade sudo apt install mysql-server ``` Input a secure password and same to confirm password window. This will be MySQL root user password required to log in to MySQL server: ![MySQL Installation for Magento 2](./magento_nginx_02.png) ### Connect MySQL Now connect to the MySQL database using the command line: ```bash mysql -u root -p Enter password: Welcome to the MySQL monitor. Commands end with ; or \g. Your MySQL connection id is 2 Server version: 5.7.28 MySQL Community Server (GPL) Copyright (c) 2000, 2019, Oracle and/or its affiliates. All rights reserved. Oracle is a registered trademark of Oracle Corporation and/or its affiliates. Other names may be trademarks of their respective owners. Type 'help;' or '\h' for help. Type '\c' to clear the current input statement. mysql> ``` ### max_allowed_packet If you expect to import large numbers of products into Magento, you can increase the value for max_allowed_packet that is larger than the default, 16MB. To increase the value, open `/etc/mysql/mysql.cnf` in a text editor and locate the value for max_allowed_packet. Save your changes to `mysql.cnf`, close the text editor, and restart MySQL (`service mysql restart`). ### Configuring the Magento database Enter the following commands in the order shown to create a database instance named magento with username magento (make sure that you are on the MySQL CLI `mysql -u root -p`): ```bash create database magento; create user magento IDENTIFIED BY 'magento'; GRANT ALL ON magento.* TO magento@localhost IDENTIFIED BY '<PASSWORD>'; flush privileges; ``` Enter exit to quit the command prompt. ### Verify the database by logging in with the Magento user ```bash mysql -u magento -p ``` If you can login with the password `<PASSWORD>` and have access to the created database named `magento` everything is looking fine: ```bash mysql -u magento -p Enter password: Welcome to the MySQL monitor. Commands end with ; or \g. 
Your MySQL connection id is 3 Server version: 5.7.28 MySQL Community Server (GPL) Copyright (c) 2000, 2019, Oracle and/or its affiliates. All rights reserved. Oracle is a registered trademark of Oracle Corporation and/or its affiliates. Other names may be trademarks of their respective owners. Type 'help;' or '\h' for help. Type '\c' to clear the current input statement. mysql> show databases; +--------------------+ | Database | +--------------------+ | information_schema | | magento | +--------------------+ 2 rows in set (0.00 sec) mysql> ``` ## Install Magento ### Install composer ``` sudo apt-get install composer ``` ```bash sudo useradd -r -s /bin/false magento su - magento ``` If your web server docroot directory is `/var/www/html` run the following command to have composer install magento to `/var/www/html/magento` ```bash composer create-project --repository=https://repo.magento.com/ magento/project-community-edition /var/www/html/magento ``` When prompted, enter your Magento authentication keys. Public and private keys are created and configured in your [Magento Marketplace](https://marketplace.magento.com/customer/accessKeys/). The public key will be the username and the private key will be the password. __ERROR Message__: The zip extension and unzip command are both missing, skipping. Your command-line PHP is using multiple ini files. Run `php --ini` to show them. Install `apt install zip unzip php7.3-zip` - but I can only see the cli `php.ini` when I run `php -i`? ### Set file permissions You must set read-write permissions for the web server group before you install the Magento software. This is necessary so that the Setup Wizard and command line can write files to the Magento file system. ```bash cd /var/www/html/magento find var generated vendor pub/static pub/media app/etc -type f -exec chmod g+w {} + find var generated vendor pub/static pub/media app/etc -type d -exec chmod g+ws {} + chown -R :www-data . chmod u+x bin/magento ``` ### Install Magento This example assumes that the Magento install directory is named magento2ee, the db-host is on the same machine (localhost), and that the db-name, db-user, and db-password are all magento: ```bash bin/magento setup:install \ --base-url=http://mysite.dev \ --db-host=localhost \ --db-name=magento \ --db-user=magento \ --db-password=<PASSWORD> \ --admin-firstname=admin \ --admin-lastname=admin \ --admin-email=<EMAIL> \ --admin-user=admin \ --admin-password=<PASSWORD> \ --language=en_US \ --currency=USD \ --timezone=America/Chicago \ --use-rewrites=1 ``` > __Don't forget__ to write down the Admin panel URL that will be displayed at the end of the installation process! It will looks something like this `/af345azfd`. Once the Installation is through, you will be able access your web shop via the base URL that you assigned above, e.g. http://mysite.dev (make sure that it is an URL that resolves to your development server, obviously). The Admin panel can then be reached by adding the admin URL to the end of this domain: `http://mysite.dev/af345azfd`. ### Switch to Developer Mode ```bash cd /var/www/html/magento/bin ./magento deploy:mode:set developer ``` ### Configure NGINX 1. Create a new virtual host for your Magento site: ```bash nano /etc/nginx/sites-available/magento.conf ``` 2. 
Add the following configuration: ```bash upstream fastcgi_backend { server unix:/run/php/php7.3-fpm.sock; } server { listen 88; server_name www.magento-dev.com; set $MAGE_ROOT /var/www/html/magento; include /var/www/html/magento/nginx.conf.sample; } ``` Replace www.magento-dev.com with your domain name. Activate the newly created virtual host by creating a symlink to it in the `/etc/nginx/sites-enabled` directory: ```bash ln -s /etc/nginx/sites-available/magento.conf /etc/nginx/sites-enabled ``` 4. Save and exit the editor and verify that the syntax is correct: ```bash nginx -t nginx: the configuration file /etc/nginx/nginx.conf syntax is ok nginx: configuration file /etc/nginx/nginx.conf test is successful ``` Restart nginx: ```bash systemctl reload nginx ``` You can now access the webshop through the URL you set as [base-url in the Magento installation script](#install-magento-1): ![MySQL Installation for Magento 2](./magento_nginx_03.png)<file_sep># Append system config settings, so the ElasticSearch Docker container will work, # and so Nginx can handle more connections. [BACKLGSZ] if ! grep -q 'Talkyard' /etc/sysctl.conf; then log_message 'Amending the /etc/sysctl.conf config...' cat <<-EOF >> /etc/sysctl.conf ################################################################### # Talkyard settings # vm.swappiness=1 # turn off swap, default = 60 net.core.somaxconn=8192 # Up the max backlog queue size (num connections per port), default = 128. Sync with conf/web/server-listen-http(s).conf. vm.max_map_count=262144 # ElasticSearch wants this, default = 65530 # See: https://www.elastic.co/guide/en/elasticsearch/reference/current/vm-max-map-count.html EOF log_message 'Reloading the system config...' sysctl --system fi # Make Redis happier: # Redis doesn't want Transparent Huge Pages (THP) enabled, because that creates # latency and memory usage issues with Redis. Disable THP now directly, and also # after restart: (as recommended by Redis) echo 'Disabling Transparent Huge Pages (for Redis)...' echo never > /sys/kernel/mm/transparent_hugepage/enabled if ! grep -q 'transparent_hugepage/enabled' /etc/rc.local; then echo 'Disabling Transparent Huge Pages after reboot, in /etc/rc.local...' # Insert ('i') before the last line ('$') in rc.local, which always? is # 'exit 0' in a new Ubuntu installation. sed -i -e '$i # For Talkyard and the Redis Docker container:\necho never > /sys/kernel/mm/transparent_hugepage/enabled\n' /etc/rc.local fi # Simplify troubleshooting: if ! grep -q 'HISTTIMEFORMAT' ~/.bashrc; then log_message 'Adding history settings to .bashrc...' 
cat <<-EOF >> ~/.bashrc ################################################################### export HISTCONTROL=ignoredups export HISTCONTROL=ignoreboth export HISTSIZE=10100 export HISTFILESIZE=10100 export HISTTIMEFORMAT='%F %T %z ' EOF fi<file_sep>--- date: "2019-06-15" title: "Installing Discourse on Centos 8" categories: - LINUX - Docker - Discourse --- ![Mongkok, Hongkong](./photo-456t66d_64567fh6dgjkhg4_d.jpg) <!-- TOC --> - [Configure FirewallD](#configure-firewalld) - [Download Discourse from Github](#download-discourse-from-github) - [Configure Discourse](#configure-discourse) - [Set Mail Credentials](#set-mail-credentials) - [Set Domain](#set-domain) - [Optional: Tune Memory Settings](#optional-tune-memory-settings) - [Bootstrap Discourse](#bootstrap-discourse) - [Access Discourse](#access-discourse) - [Sign Up and Create Admin Account](#sign-up-and-create-admin-account) - [Upgrade](#upgrade) - [Install Plugins](#install-plugins) - [Missing Certificate](#missing-certificate) - [Forced Manual Renew](#forced-manual-renew) <!-- /TOC --> ## Configure FirewallD ```bash firewall-cmd --zone=public --permanent --add-service=http firewall-cmd --zone=public --permanent --add-service=https firewall-cmd --zone=public --permanent --add-service=pop3s firewall-cmd --zone=public --permanent --add-service=smtp firewall-cmd --reload firewall-cmd --zone=public --list-services ``` Not sure if those are necessary (needs testing): firewall-cmd --permanent --zone=trusted --add-interface=docker0 firewall-cmd --permanent --zone=trusted --add-port=25/tcp ## Download Discourse from Github Create a `/opt/discourse` folder, where all the Discourse-related files will reside: ```bash sudo mkdir /opt/discourse ``` Clone the [official Discourse Docker Image](https://github.com/discourse/discourse_docker) into this /opt/discourse folder: ```bash sudo git clone https://github.com/discourse/discourse_docker.git /opt/discourse ``` ## Configure Discourse In this section we will configure your initial Discourse settings. Switch to the `/opt/discourse` directory: ```bash cd /opt/discourse ``` Copy the `samples/standalone.yml` file into the containers folder as `app.yml`: ```bash sudo cp samples/standalone.yml containers/app.yml ``` Edit the Discourse configuration in the `app.yml` file: ```bash sudo nano containers/app.yml ``` ### Set Mail Credentials Enter your SMTP credentials in the lines for `DISCOURSE_SMTP_ADDRESS`, `DISCOURSE_SMTP_PORT`, `DISCOURSE_SMTP_USER_NAME`, and `DISCOURSE_SMTP_PASSWORD`. (Be sure you remove the comment # character from the beginnings of these lines as necessary.) ```yaml env: LANG: en_US.UTF-8 ## TODO: How many concurrent web requests are supported? ## With 2GB we recommend 3-4 workers, with 1GB only 2 #UNICORN_WORKERS: 3 ## ## TODO: List of comma delimited emails that will be made admin and developer ## on initial signup example '<EMAIL>,<EMAIL>' DISCOURSE_DEVELOPER_EMAILS: '<EMAIL>' ## ## TODO: The mailserver this Discourse instance will use DISCOURSE_SMTP_ADDRESS: smtp.mail.com # (mandatory) DISCOURSE_SMTP_PORT: 587 # (optional) DISCOURSE_SMTP_USER_NAME: <EMAIL> # (optional) DISCOURSE_SMTP_PASSWORD: <PASSWORD> # (optional) ``` The SMTP settings are required to send mail from your Discourse instance; for example, to send registration emails, password reset emails, reply notifications, etc. Having trouble setting up mail credentials? See the [Discourse Email Troubleshooting guide](https://meta.discourse.org/t/troubleshooting-email-on-a-new-discourse-install/16326). 
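As a first sanity check, you can verify that the SMTP host and port from `app.yml` are reachable at all before bootstrapping - a small sketch using the placeholder values from above:

```bash
# Open a STARTTLS session against the configured SMTP server -
# a banner and certificate output indicate the server is reachable
openssl s_client -starttls smtp -connect smtp.mail.com:587
```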
Setting up mail credentials is required, or else you will not be able to bootstrap your Discourse instance. The credentials must be correct, or else you will not be able to register users (including the admin user) for the forum. ### Set Domain Set `DISCOURSE_HOSTNAME` to discourse.example.com. This means you want your Discourse forum to be available at http://discourse.example.com/. You can use an IP address here instead if you don’t have a domain pointing to your server yet. Only one domain (or IP) can be listed here. ```bash DISCOURSE_HOSTNAME: 'discourse.example.com' ``` ### Optional: Tune Memory Settings Also in the env section of the configuration file, set `db_shared_buffers` to 128MB and `UNICORN_WORKERS` to 2 so you have more memory room. ```yaml db_shared_buffers: "128MB" UNICORN_WORKERS: 2 ``` Save the `app.yml` file, and exit the text editor. ## Bootstrap Discourse Now use the bootstrap process to build Discourse and initialize it with all the settings you configured in the previous section. This also starts the Docker container. ```bash sudo /opt/discourse/launcher bootstrap app ``` At this point I got an error message `/usr/bin/env: ‘bash\r’: No such file or directory` which suggests that the file `launcher` has Windows-style __\r\n__ line endings instead of the __\n-only__ line endings bash expects. To remove the \r chars run the following command: ```bash sed $'s/\r$//' /opt/discourse/launcher > /opt/discourse/launcher-unix chmod u+x /opt/discourse/launcher-unix ``` The `chmod` command makes the script executable. Then rerun the command above `sudo /opt/discourse/launcher-unix bootstrap app`. At this point I ran into the problem that Docker was unable to download the discourse base image. And I had to [download and save it on a different machine](/downloading-docker-images-for-offline-use): ```bash docker pull discourse/base:2.0.20191013-2320 docker save -o discourse.docker discourse/base:2.0.20191013-2320 ``` The image file can then be transfered to the CentOS 8 machine and be loaded with the following command `sudo docker load -i discourse.docker` This command will take about 8 minutes to run while it configures your Discourse environment. (Early in this process you will be asked to generate a SSH key; press Y to accept.) After the bootstrap process completes, start Discourse: ```bash sudo /opt/discourse/launcher start app ``` ## Access Discourse Visit the domain or IP address (that you set for the Discourse hostname previously) in your web browser to view the default Discourse web page. ![Discourse on CentOS 8](./Discourse_01.png) If you receive a 502 Bad Gateway error, try waiting a minute or two and then refreshing so Discourse can finish starting. ## Sign Up and Create Admin Account Use the Sign Up button at the top right of the page to register a new Discourse account. You should use the email address you provided in the `DISCOURSE_DEVELOPER_EMAILS` setting previously. Once you confirm your account, that account will automatically be granted admin privileges. Once you sign up and log in, you should see the Staff topics and the [Admin Quick Start Guide](https://github.com/discourse/discourse/blob/master/docs/ADMIN-QUICK-START-GUIDE.md). It contains the next steps for further configuring and customizing your Discourse installation. You can access the admin dashboard by visting `/admin`. 
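If something goes wrong around sign-up, the container logs are the first place to look - a sketch; the Rails log path below is the usual default for a standalone install and may differ on your system:

```bash
# Tail the live container output
sudo /opt/discourse/launcher logs app

# Or follow the Rails production log directly on the host
tail -f /opt/discourse/shared/standalone/log/rails/production.log
```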
If you don’t get any email from signing up, and are unable to register a new admin account, please see the [Discourse email troubleshooting checklist](https://meta.discourse.org/t/troubleshooting-email-on-a-new-discourse-install/16326). If you are still unable to register a new admin account via email, see the [Create Admin Account from Console](https://meta.discourse.org/t/create-admin-account-from-console/17274) walkthrough, but please note that you will have a broken site until you get normal SMTP email working. ## Upgrade To upgrade Discourse to the latest version, visit `/admin/upgrade` and follow the instructions. ## Install Plugins In this tutorial, we’ll install [Discourse Spoiler Alert](https://meta.discourse.org/t/discourse-spoiler-alert/12650) and [Discourse Sitemap](https://meta.discourse.org/t/discourse-sitemap-plugin/40348) plugin. To install a [plugin in Discourse](https://meta.discourse.org/tags/plugins), it needs to be placed in `/var/www/discourse/plugins`. However, this is inside the container - and changes to the container are wiped when it is rebuilt! So, the container definition is what needs to be edited. 1. On your Centos system enter the Discourse directory: `cd /opt/discourse`. 2. Edit the app configuration file: `nano containers/app.yml`. 3. Access the Github page ([discourse-sitemap](https://github.com/discourse/discourse-sitemap), [discourse-spoiler-alert](https://github.com/discourse/discourse-spoiler-alert)) of each plugin you want to install, click on the green __Clone or download__ button and copy the web URL. 4. Now paste the URLs into the plugin section of the `app.yml` file: ```yaml ## Plugins go here ## see https://meta.discourse.org/t/19157 for details hooks: after_code: - exec: cd: $home/plugins cmd: - git clone https://github.com/discourse/docker_manager.git - git clone https://github.com/discourse/discourse-spoiler-alert.git - git clone https://github.com/discourse/discourse-sitemap.git ``` 5. And rebuild the container: `./launcher rebuild app` What to do when you receive this error message: ```bash [root@localhost discourse]# ./launcher rebuild app Ensuring launcher is up to date Fetching origin remote: Enumerating objects: 3, done. remote: Counting objects: 100% (3/3), done. remote: Compressing objects: 100% (3/3), done. remote: Total 3 (delta 0), reused 2 (delta 0), pack-reused 0 Unpacking objects: 100% (3/3), done. From https://github.com/discourse/discourse_docker b637998..20e812e master -> origin/master Updating Launcher Updating b637998..20e812e error: Your local changes to the following files would be overwritten by merge: discourse-setup Please commit your changes or stash them before you merge. Aborting failed to update ``` Stash your changes and rerun the rebuild command: ```bash cd /opt/discourse git stash git pull ./launcher rebuild app ``` Once the rebuild process is done open Discourse again inside your browser and head to `/admin/plugins` to activate your new plugins: ![Discourse on CentOS 8](./Discourse_02.png) All Plugins are update through `/admin/upgrade`: ![Discourse on CentOS 8](./Discourse_03.png) The sitemap plugin publishes two files: 1. `/sitemap.xml` : the regular Google Sitemap format 2. 
`/news.xml` : a special sitemap for Google News containing all new topics from the last three days

To test the spoiler plugin, start a new thread and post:

```
[spoiler]This is a spoiler[/spoiler]
```

## Missing Certificate

Error message with `/opt/discourse/launcher logs app`:

```bash
[emerg] cannot load certificate "/shared/ssl/forum.instar.com.cer": PEM_read_bio_X509_AUX() failed (SSL: error:0909006C:PEM routines:get_name:no start line:Expecting: TRUSTED CERTIFICATE)
```

Remove the old certificate and rebuild:

```bash
rm -rf /opt/discourse/shared/standalone/ssl
rm -rf /opt/discourse/shared/standalone/letsencrypt
./launcher rebuild app
```

### Forced Manual Renew

If this does not solve the issue, try a manual renewal:

```bash
cd /opt/discourse
./launcher enter app
/shared/letsencrypt/acme.sh --issue -d forum.instar.com --force --debug
/shared/letsencrypt/acme.sh --renew -d forum.instar.com --force --debug
```

This will show you the issue - e.g. a rate limit:

```bash
Create new order error. Le_OrderFinalize not found. {
  "type": "urn:ietf:params:acme:error:rateLimited",
  "detail": "Error creating new order :: too many certificates already issued for exact set of domains: forum.instar.com: see https://letsencrypt.org/docs/rate-limits/",
  "status": 429
}
```

Retry the renewal after a while or assign a different domain to your forum:

```bash
/opt/discourse/launcher enter app
service nginx stop
/usr/sbin/nginx -c /etc/nginx/letsencrypt.conf
LE_WORKING_DIR=/shared/letsencrypt DEBUG=1 /shared/letsencrypt/acme.sh --issue -d example.com -k 4096 -w /var/www/discourse/public
LE_WORKING_DIR=/shared/letsencrypt /shared/letsencrypt/acme.sh --installcert -d example.com --fullchainpath /shared/ssl/forum.instar.com.cer --keypath /shared/ssl/forum.instar.com.key --reloadcmd "sv reload nginx"
/usr/sbin/nginx -c /etc/nginx/letsencrypt.conf -s stop
exit
```
<file_sep>---
date: "2018-01-01"
title: "Securing Elasticsearch with ReadOnlyREST"
categories:
- LINUX
- Elasticsearch
---

![<NAME>](./photo-15514459555_50b13064fa_o.png)

<!-- TOC -->

- [Securing Elasticsearch with ReadonlyREST](#securing-elasticsearch-with-readonlyrest)
- [Install Elasticsearch](#install-elasticsearch)
- [Install Kibana](#install-kibana)
- [Secure Elasticsearch](#secure-elasticsearch)
- [Disable X-Pack security module](#disable-x-pack-security-module)
- [Upgrading the plugin](#upgrading-the-plugin)
- [Securing Kibana](#securing-kibana)

<!-- /TOC -->

## Securing Elasticsearch with ReadonlyREST

Neither [Elasticsearch](https://www.elastic.co/downloads/elasticsearch) nor [Kibana](https://www.elastic.co/downloads/kibana) offers user authentication. In earlier [Projects](https://github.com/mpolinowski/express-static) we circumvented this issue by blocking all access - only allowing our website and Kibana to access the database via localhost. But now we need an anonymous user account that is only allowed to request and read search results - while writing to the database is forbidden.

Elastic offers their own solution for this called [X-Pack](https://www.elastic.co/downloads/x-pack) (on how to set it up - [read more](https://mpolinowski.github.io/nginx-node-elasticsearch/)) - which is a premium extension to the ELK stack, and nobody seems to know how much it would cost to buy it. But as the wise man from the vintage sports car dealership knows - if you have to ask for the price, you cannot afford it anyway. So are there free solutions out there? Yes!
Searching for alternatives led me to two solutions that are mentioned often - there are more if you keep searching:

1. [ReadOnlyREST](https://github.com/sscarduzio/elasticsearch-readonlyrest-plugin)
2. [SearchGuard](https://github.com/floragunncom/search-guard)

Today we are going to set up the first of them. The first thing I noticed is that those plugins are written for the exact version number of Elasticsearch. The newest version of ReadOnlyREST supports Elasticsearch version 6.2.3 - I am using 6.2.4, which unfortunately means that I have to downgrade my ES version... and since there is no downgrade option with ES, I have to shut off the service and go in manually to delete every folder that ES has generated on my CentOS server (really? That is the only option that I could find online... but it is really a mess...).

### Install Elasticsearch

__I. Download and install the public signing key__

```bash
rpm --import https://packages.elastic.co/GPG-KEY-elasticsearch
```

__II. Add the following in your /etc/yum.repos.d/ directory in a file with a .repo suffix, for example elasticsearch.repo__

```yaml
[elasticsearch-6.x]
name=Elasticsearch repository for 6.x packages
baseurl=https://artifacts.elastic.co/packages/6.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md
```

__III. Install a specific version of Elasticsearch__

ReadOnlyREST requires us to install a specific version (6.2.3) of Elasticsearch. Let's check what versions are available to install (CentOS/yum):

```bash
yum --showduplicates list elasticsearch | expand

Installed Packages
elasticsearch.noarch    6.2.4-1    @elasticsearch-6.x
Available Packages
elasticsearch.noarch    6.0.0-1    elasticsearch-6.x
elasticsearch.noarch    6.0.0-1    kibana-6.x
elasticsearch.noarch    6.0.1-1    elasticsearch-6.x
elasticsearch.noarch    6.0.1-1    kibana-6.x
elasticsearch.noarch    6.1.0-1    elasticsearch-6.x
elasticsearch.noarch    6.1.0-1    kibana-6.x
elasticsearch.noarch    6.1.1-1    elasticsearch-6.x
elasticsearch.noarch    6.1.1-1    kibana-6.x
elasticsearch.noarch    6.1.2-1    elasticsearch-6.x
elasticsearch.noarch    6.1.2-1    kibana-6.x
elasticsearch.noarch    6.1.3-1    elasticsearch-6.x
elasticsearch.noarch    6.1.3-1    kibana-6.x
elasticsearch.noarch    6.1.4-1    elasticsearch-6.x
elasticsearch.noarch    6.1.4-1    kibana-6.x
elasticsearch.noarch    6.2.0-1    elasticsearch-6.x
elasticsearch.noarch    6.2.0-1    kibana-6.x
elasticsearch.noarch    6.2.1-1    elasticsearch-6.x
elasticsearch.noarch    6.2.1-1    kibana-6.x
elasticsearch.noarch    6.2.2-1    elasticsearch-6.x
elasticsearch.noarch    6.2.2-1    kibana-6.x
elasticsearch.noarch    6.2.3-1    elasticsearch-6.x
elasticsearch.noarch    6.2.3-1    kibana-6.x
elasticsearch.noarch    6.2.4-1    elasticsearch-6.x
elasticsearch.noarch    6.2.4-1    kibana-6.x
```

To install version 6.2.3 of Elasticsearch type:

```bash
yum install elasticsearch-6.2.3-1
```

Here I ran into issues due to the messy uninstall of the earlier (newer) version of Elasticsearch - __if someone knows a cleaner way to do this, please tell :)__ Yum still had the older version in its DB, leading to a "package already installed. Checking for update. Nothing to do" error.
This can be fixed by: ``` rpm -e --justdb --nodeps elasticsearch rpm -e --justdb --nodeps kibana ``` Now re-run the install command above: ```bash yum install elasticsearch-6.2.3-1 Dependencies Resolved ==================================================================================================== Package Arch Version Repository Size ==================================================================================================== Installing: elasticsearch noarch 6.2.3-1 elasticsearch-6.x 28 M Transaction Summary ==================================================================================================== Install 1 Package Total download size: 28 M Installed size: 31 M Is this ok [y/d/N]:y ``` __IV. Restrict access to your Elasticsearch instance__ To configure Elasticsearch open the following file inside your text editor: _/etc/elasticsearch/elasticsearch.yml_. We want to limit access to localhost and a public domain that we are going to configure in NGINX. This can be done with the variable __network.host__: ```yaml # ---------------------------------- Network ----------------------------------- # # Set the bind address to a specific IP (IPv4 or IPv6): # network.host: 127.0.0.1, my.domain.com # # Set a custom port for HTTP: # http.port: 9200 ``` The HTTP port 9200 is the default port and should be changed - but we are only going to use it on localhost. NGINX will take care of it on the outside - so we will just leave it at it's default value. The webserver will also add a security layer to our app - which means, we will need to enable [CORS header](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) for the transaction. Add the following lines below the Network Block: ```yaml # --------------------------------- CORS ---------------------------------- # # #http.cors: # enabled: true # allow-origin: /https?:\/\/my.domain.com(:[0-9]+)?/ http.cors: enabled: true allow-origin: /https?:\/\/my.domain.com(:[0-9][0-9][0-9][0-9])?/ ``` Both examples above allow Cross-Origin Resource Sharing for your domain on every available port - but for some reasons the first regular expression stopped to work in Elasticsearch 6.2.x. You just need one of them. __V. Set up the Elasticsearch Service__ To configure Elasticsearch to start automatically when the system boots up, run the following commands: ``` sudo /bin/systemctl daemon-reload sudo /bin/systemctl enable elasticsearch.service ``` Elasticsearch can be started and stopped as follows: ``` sudo systemctl start elasticsearch.service sudo systemctl stop elasticsearch.service ``` ### Install Kibana Since we installed a specific version (6.2.3) of Elasticsearch we now need to install the same version of the admin panel Kibana. 
First, create and edit a new yum repository file for Kibana in _/etc/yum.repos.d/kibana.repo_:

```yaml
[kibana-6.x]
name=Kibana repository for 6.x packages
baseurl=https://artifacts.elastic.co/packages/6.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md
```

Then install the correct version as listed earlier:

```bash
yum install kibana-6.2.3-1
```

Now set the Elasticsearch connection URL for Kibana in _/etc/kibana/kibana.yml_:

```yaml
elasticsearch.url: "http://localhost:9200"
```

To configure Kibana to start automatically when the system boots up, run the following commands:

```bash
sudo /bin/systemctl daemon-reload
sudo /bin/systemctl enable kibana.service
```

Kibana can be started and stopped as follows:

```
sudo systemctl start kibana.service
sudo systemctl stop kibana.service
```

### Secure Elasticsearch

Now we can install ReadOnlyREST to secure the database. First [download](https://readonlyrest.com/download.html) the correct package for the installed version of Elasticsearch and place it inside the _/tmp_ directory.

Then set up the configuration file in _/etc/elasticsearch/readonlyrest.yml_ to allow all access from localhost (required by Kibana) and restrict outside access to specific indices to read only:

```yaml
readonlyrest:
    #optional
    response_if_req_forbidden: Sorry, your request is forbidden.

    access_control_rules:

    - name: Accept all requests from localhost
      hosts: [127.0.0.1]

    - name: Just certain indices, and read only
      actions: ["indices:data/read/*"]
      indices: ["all_my_public_indices_start_with*"] # index aliases are taken in account!
```

Then install the plugin into the Elasticsearch plugin directory:

```bash
cd /usr/share/elasticsearch/bin

./elasticsearch-plugin install file:///tmp/readonlyrest-1.16.18_es6.2.3.zip

-> Downloading file:///tmp/readonlyrest-1.16.18_es6.2.3.zip
[=================================================] 100%
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
@     WARNING: plugin requires additional permissions     @
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
* java.io.FilePermission << ALL FILES >> read
* java.lang.RuntimePermission accessDeclaredMembers
* java.lang.RuntimePermission getClassLoader
* java.lang.reflect.ReflectPermission suppressAccessChecks
* java.net.SocketPermission * connect,accept,resolve
* java.security.SecurityPermission getProperty.ssl.KeyManagerFactory.algorithm
* java.util.PropertyPermission * read,write
See http://docs.oracle.com/javase/8/docs/technotes/guides/security/permissions.html
for descriptions of what these permissions allow and the associated risks.

Continue with installation? [y/N]y
-> Installed readonlyrest
```

### Disable X-Pack security module

__(applies to ES 6.4.0 or greater)__

ReadonlyREST and the X-Pack security module can't run together, so the latter needs to be disabled. Edit _elasticsearch.yml_ and append `xpack.security.enabled: false`:

```bash
$ES_HOME/conf/elasticsearch.yml
```

### Upgrading the plugin

To upgrade ReadonlyREST for Elasticsearch:

1. Stop Elasticsearch. Either kill the process manually, or use:

```
service elasticsearch stop
```

depending on your environment.

2. Uninstall ReadonlyREST

```
bin/elasticsearch-plugin remove readonlyrest
```

3. Install the new version of ReadonlyREST into Elasticsearch.

```
bin/elasticsearch-plugin install file://<download_dir>/readonlyrest-<ROR_VERSION>_es<ES_VERSION>.zip
```

e.g. bin/elasticsearch-plugin install file:///tmp/readonlyrest-1.16.15_es6.1.1.zip

4. Restart Elasticsearch.

```
service elasticsearch start
```

### Securing Kibana

Remember to [secure Kibana with NGINX](/nginx-node-security/), since it is not protected by the free version of ReadOnlyREST!
<file_sep>---
date: "2019-09-22"
title: "CentOS 8 Network Configuration"
categories:
- LINUX
---

![<NAME>](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

Check your network settings with `ip a` and `ip r`:

```bash
ip a

2: enp2s0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc fq_codel state UP group default qlen 1000
    link/ether b0:6e:bf:1d:d3:2a brd ff:ff:ff:ff:ff:ff
    inet 192.168.2.111/24 brd 192.168.2.255 scope global noprefixroute enp2s0
       valid_lft forever preferred_lft forever
    inet6 fe80::2a48:7e58:daf5:872d/64 scope link noprefixroute
       valid_lft forever preferred_lft forever

ip r

default via 192.168.2.5 dev enp2s0 proto static metric 100
192.168.2.0/24 dev enp2s0 proto kernel scope link src 192.168.2.111 metric 100
```

The current ethernet interface is `enp2s0` and the gateway that is used is `192.168.2.5`. Alternatively, you can also run the following to find your active network interface:

```bash
nmcli connection show

NAME     UUID                                  TYPE      DEVICE
enp2s0   280ed14d-7c8b-4586-853d-420df9f65412  ethernet  enp2s0
```

To edit the interface run:

```bash
nmtui-edit enp2s0
```

Make your changes and then restart your network interface:

```bash
ifdown enp2s0 && ifup enp2s0
```

Verify the changes you made, e.g. setting a different gateway:

```bash
ip r

default via 192.168.2.1 dev enp2s0 proto static metric 100
192.168.2.0/24 dev enp2s0 proto kernel scope link src 192.168.2.111 metric 100
```
<file_sep>---
date: "2020-06-18"
title: "Salt Execution Order"
categories:
- LINUX
---

![Guangzhou, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Complex State Trees](#complex-state-trees)
- [Execution Order](#execution-order)
- [Requisites](#requisites)
- [Watch](#watch)

<!-- /TOC -->

## Complex State Trees

You can combine `*.sls` files by using import statements or by the use of __Top Files__. We can use this data for example in our Apache landing page (see the previous tutorial):

__welcome.sls__

```yaml
# Adding a blank front page
include:
  - apache

{% set name = salt.pillar.get('name') %}

check_pillar_values:
  test.check_pillar:
    - present:
      - name
    - failhard: True

welcome_page:
  file.managed:
    - name: /var/www/html/index.html
    - contents: |
        <!doctype html>
        <body>
        <h1>{{ name }}.</h1>
        </body>
```

The include statement on top will add the Apache installation - we can now execute the `welcome.sls` directly and get the complete Apache setup:

```bash
sudo salt ubuntuAsus state.sls apache.welcome
```

You can accomplish the same by creating a `/srv/salt/top.sls` file:

```yaml
base:
  '*':
    - apache
    - apache.welcome
```

The Apache setup can now be executed by running:

```bash
sudo salt '*' state.highstate
```

The `state.highstate` command will set up the whole infrastructure as defined inside your top file. If you create multiple top files for different setups, use the [state.top](https://docs.saltstack.com/en/master/ref/modules/all/salt.modules.state.html) command and specify the file you want to execute:

```bash
sudo salt '*' state.top prod_top.sls
```

### Execution Order

Salt's YAML renderer picks up every instruction inside an `*.sls` file and assigns an order key to each of them. This makes sure that they are executed in the same order they are written down inside your file.
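You can inspect the order value assigned to each declaration by rendering the compiled state data with the `state.show_sls` execution module - for example against the `apache` state from the previous tutorial:

```bash
# Render apache/init.sls on the minion and print the compiled
# low-level data, including the auto-assigned order keys
sudo salt ubuntuAsus state.show_sls apache
```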
If you need to make sure that one of the instruction is __ALWAYS__ either executed __FIRST__ or __LAST__, you can specify this inside your file: __init.sls__ ```yaml # Install vanilla Apache on Debian/RedHat {% from "apache/map.sls" import apache with context %} install_apache: pkg.installed: - name: {{ apache.pkg }} - order: last enable_apache: service.running: - name: {{ apache.srv }} # Will be enabled automatically on Debian but has to be enabled manually on RedHat - enable: True - order: first ``` This order stops working reliably once you have include or require statements inside your file. ### Requisites Requisites bring explicit ordering to your file execution: __init.sls__ ```yaml # Install vanilla Apache on Debian/RedHat {% from "apache/map.sls" import apache with context %} install_apache: pkg.installed: - name: {{ apache.pkg }} enable_apache: service.running: - name: {{ apache.srv }} # Will be enabled automatically on Debian but has to be enabled manually on RedHat - enable: True - require: - pkg: install_apache ``` The `require` statement makes sure that Apache is installed before it will attempt to enable the Apache service. ### Watch The watch module reacts to a specified instruction being executed and then triggers another function. A practical use case is to restart the Apache service once the Apache configuration was modified: __init.sls__ ```yaml # Install vanilla Apache on Debian/RedHat {% from "apache/map.sls" import apache with context %} install_apache: pkg.installed: - name: {{ apache.pkg }} enable_apache: service.running: - name: {{ apache.srv }} # Will be enabled automatically on Debian but has to be enabled manually on RedHat - enable: True - watch: - file: danger_config danger_config: file.managed: - name /bar/foo - contents: foo ``` We can also extend the watch service to another SLS file: __mods.sls__ ```yaml include: - apache extend: start_apache: service: - watch: - file: danger_config {% for conf in ['status', 'info'] %} mod_{{ conf }}: file.managed: - name: /etc/apache2/conf-available/mod_{{ conf }}.conf - contents: | <Location "/{{ conf }}"> SetHandler server-{{ conf }} </Location> {% if salt.grains.get('os_family') == 'Debian' %} cmd.run: - name: a2enmod {{ conf }} && a2enconf mod_{{ conf }} - creates: /etc/apache2/conf-enabled/mod_{{ conf }}.conf {% endif %} {% endfor %} ``` The `mods.sls` configures Apache - by including the `init.sls` file we can now execute mods and be certain that Apache will be installed first before the configuration is attempted. We can now define the watch task here instead of the init file. We can also use the `watch_in` statement: __mods.sls__ ```yaml include: - apache {% for conf in ['status', 'info'] %} mod_{{ conf }}: file.managed: - name: /etc/apache2/conf-available/mod_{{ conf }}.conf - contents: | <Location "/{{ conf }}"> SetHandler server-{{ conf }} </Location> - watch_in: - service: enable_apache {% if salt.grains.get('os_family') == 'Debian' %} cmd.run: - name: a2enmod {{ conf }} && a2enconf mod_{{ conf }} - creates: /etc/apache2/conf-enabled/mod_{{ conf }}.conf {% endif %} {% endfor %} ``` If `mod_status` or `mod_info` is changed -> restart `enable_apache`. 
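The same inversion exists for `require`: a `require_in` statement declares the dependency from the other side. A small sketch reusing the IDs from the `init.sls` above:

```yaml
install_apache:
  pkg.installed:
    - name: {{ apache.pkg }}
    # equivalent to a "require: install_apache" inside enable_apache
    - require_in:
      - service: enable_apache
```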
You can test the execution order by doing a dry-run: ```bash salt '*' state.sls apache.mods test=true ubuntuAsus: ---------- ID: install_apache Function: pkg.installed Name: apache2 Result: True Comment: All specified packages are already installed Started: 18:26:02.852277 Duration: 38.55 ms Changes: ---------- ID: mod_status Function: file.managed Name: /etc/apache2/conf-available/mod_status.conf Result: None Comment: The file /etc/apache2/conf-available/mod_status.conf is set to be changed Note: No changes made, actual changes may be different due to other states. Started: 18:26:02.899635 Duration: 1.456 ms Changes: ---------- newfile: /etc/apache2/conf-available/mod_status.conf ---------- ID: mod_info Function: file.managed Name: /etc/apache2/conf-available/mod_info.conf Result: None Comment: The file /etc/apache2/conf-available/mod_info.conf is set to be changed Note: No changes made, actual changes may be different due to other states. Started: 18:26:02.901184 Duration: 1.077 ms Changes: ---------- newfile: /etc/apache2/conf-available/mod_info.conf ---------- ID: enable_apache Function: service.running Name: apache2 Result: None Comment: Service is set to be restarted Started: 18:26:02.940131 Duration: 19.022 ms Changes: ---------- ID: mod_status Function: cmd.run Name: a2enmod status && a2enconf mod_status Result: None Comment: Command "a2enmod status && a2enconf mod_status" would have been executed Started: 18:26:02.961747 Duration: 378.228 ms Changes: ---------- ID: mod_info Function: cmd.run Name: a2enmod info && a2enconf mod_info Result: None Comment: Command "a2enmod info && a2enconf mod_info" would have been executed Started: 18:26:03.340098 Duration: 4.92 ms Changes: Summary for ubuntuAsus ------------ Succeeded: 6 (unchanged=5, changed=2) Failed: 0 ------------ Total states run: 6 Total run time: 443.253 ms ```<file_sep>--- date: "2017-07-04" title: "Random Password Generator" categories: - Javascript - React --- import GifContainer from "../../src/components/ImageContainer"; ![Tana Island, Vanuatu](./photo-34445476982_dccd0eb8e7_o.png) > This is a create-react-app demo based on the article [Create React App with Express in Production](https://daveceddia.com/create-react-app-express-production/) by <NAME>. We want to use the React App generator to create static pages from our React Pass-Gen App, routed through a Node/Express Server. > > The original article went on to deploy the app to Heroku - please refer to the original article about that. What we do differently here, is adding a way to comfortably edit (hot reloading) our app in an development environment using the package [Concurrently](https://github.com/kimmobrunfeldt/concurrently). This allows us to run both, the Express and React App, inside one terminal and automatically switch to serving a static representation of our React App inside Express, once we switch to a production environment. [Github](https://github.com/mpolinowski/random-pass-gen) <!-- TOC --> - [Create the Express App](#create-the-express-app) - [Create the React App](#create-the-react-app) - [Concurrently](#concurrently) - [Create Static Version of our React App](#create-static-version-of-our-react-app) <!-- /TOC --> ## Create the Express App Create a directory and cd in to it with your Terminal or Git Bash. 
You can initialize your app with npm:

```bash
npm init
```

Now we need to add two packages through npm: our [express router](https://expressjs.com) and a [password generator](https://www.npmjs.com/package/password-generator), and we will use Express to provide a single API endpoint, **/api/passwords**. We use the *--save* flag to add those dependencies to our package.json file.

```bash
npm install --save express password-generator
```

Create a file called index.js, which will be the Express app, and type this in:

```js
const express = require('express');
const path = require('path');
const generatePassword = require('password-generator');

const app = express();

// Serve static files from the React app
// Use this to just simulate a production environment - it always! serves the static files /client/build
// ------------------------------------------------
// app.use(express.static(path.join(__dirname, 'client/build')));
// ------------------------------------------------
// this statement will use the live react app in development,
// but will expect you to have a static version of the app in /client/build once you switch to production
// remove if above case is uncommented!
if (process.env.NODE_ENV === "production") {
  app.use(express.static("client/build"));
}

// Put all API endpoints under '/api'
app.get('/api/passwords', (req, res) => {
  const count = 5;

  // Generate some passwords
  const passwords = Array.from(Array(count).keys()).map(i =>
    generatePassword(12, false)
  )

  // Return them as json
  res.json(passwords);

  console.log(`Sent ${count} passwords`);
});

// The "catchall" handler: for any request that doesn't
// match one above, send back React's index.html file.
app.get('*', (req, res) => {
  res.sendFile(path.join(__dirname+'/client/build/index.html'));
});

const port = process.env.PORT || 5000;
app.listen(port);

console.log(`Password generator listening on ${port}`);
```

We’re also going to need a “start” script in package.json, to be able to start our app with the **npm start** command. Open package.json and add a "start" entry to the existing scripts section (note that there must only be one "scripts" key in the file). The full file should look something like this:

```json
{
  "name": "random-pass-gen",
  "version": "1.0.0",
  "description": "create-react-app demo",
  "main": "index.js",
  "scripts": {
    "start": "node index.js",
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "<NAME>",
  "license": "ISC",
  "dependencies": {
    "express": "^4.15.4",
    "password-generator": "^2.1.0"
  }
}
```

We can test our app with **npm start** and opening the following URL inside our Browser (or CURL): *http://localhost:5000/api/passwords*. The start command starts our Node/Express Server on our localhost with the port defined inside index.js (5000). Calling the API endpoint /api/passwords gives us access to the password generator.

![React Password Generator](./password-generator-working.png)

## Create the React App

We’re going to use [Create React App](https://github.com/facebookincubator/create-react-app) to generate a project. We will run this React App in our dev environment inside the subfolder */client*. But we will create a static version of our app inside *client/build* (as already defined inside **index.js** of our Express App) once we move to a production environment. This generation of a static version is part of the functionality offered by create-react-app.
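So the folder layout we are heading for looks like this (a sketch - only the parts mentioned in this article):

```
random-pass-gen/
├── index.js          # Express server (serves /api + the static build in production)
├── package.json
└── client/           # React app created in the next step
    ├── src/
    └── build/        # static output of `npm run build` (production only)
```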
If you don’t have Create React App installed yet, do that first:

```bash
npm install -g create-react-app
```

Generate the React app inside the Express app directory:

```bash
create-react-app client
```

To test the app, cd into the */client* directory and run **npm start**. This will run the create-react-app starter app with some boilerplate code on port 3000:

![React Password Generator](./boilerplate-page.png)

Create React App will proxy API requests from the React app to the Express app if we add a “proxy” key in package.json like this:

```json
"proxy": "http://localhost:5000"
```

This goes in client/package.json, not in the Express app’s package.json!

Now we want to replace the boilerplate app with our own - the code is located inside */client/src/App.js*. We will replace it with the following code:

```jsx
import React, { Component } from 'react';
import './App.css';

class App extends Component {
  // Initialize state
  state = { passwords: [] }

  // Fetch passwords after first mount
  componentDidMount() {
    this.getPasswords();
  }

  getPasswords = () => {
    // Get the passwords and store them in state
    fetch('/api/passwords')
      .then(res => res.json())
      .then(passwords => this.setState({ passwords }));
  }

  render() {
    const { passwords } = this.state;

    return (
      <div className="App">
        {/* Render the passwords if we have them */}
        {passwords.length ? (
          <div>
            <h1>5 Passwords.</h1>
            <ul className="passwords">
              {/*
                Generally it's bad to use "index" as a key.
                It's ok for this example because there will always
                be the same number of passwords, and they never
                change positions in the array.
              */}
              {passwords.map((password, index) =>
                <li key={index}>
                  {password}
                </li>
              )}
            </ul>
            <button className="more" onClick={this.getPasswords}>
              Get More
            </button>
          </div>
        ) : (
          // Render a helpful message otherwise
          <div>
            <h1>No passwords :(</h1>
            <button className="more" onClick={this.getPasswords}>
              Try Again?
            </button>
          </div>
        )}
      </div>
    );
  }
}

export default App;
```

And while we are at it, just add some styling in */client/src/App.css* to make our app look pretty:

```css
.App {
  text-align: center;
  font-family: "Courier New", monospace;
  width: 100%;
}

h1 {
  font-weight: normal;
  font-size: 42px;
}

.passwords {
  list-style: none;
  padding: 0;
  font-size: 32px;
  margin-bottom: 2em;
}

.more {
  font-size: 32px;
  font-family: "Courier New", monospace;
  border: 2px solid #000;
  background-color: #fff;
  padding: 10px 25px;
}

.more:hover {
  background-color: #FDD836;
}

.more:active {
  background-color: #FFEFA9;
}
```

When you restart your app now (inside the client directory), you will see our Password Generator in all its glory on *localhost:3000*.

## Concurrently

Our React app (on Port 3000) is already proxying to our Express App (on Port 5000). But we would have to use two terminals to run both apps at once - which is something you can try now, if you like. Our React app is already up. Now open a second terminal inside the top-level directory and **npm start** the express app. When you open your browser on Port 5000, you will see our app! This setup is nice, because it allows you to edit your app and have it hot-reloading inside your browser! But we want to have this dev-environment run with a single command. As well as have our app decide whether to serve directly from the React App or look for a static build instead, based on the NODE_ENV (Production or Development). This is where [Concurrently](https://github.com/kimmobrunfeldt/concurrently) comes to the rescue.
We already explained how this works [here](https://github.com/mpolinowski/caloric-burn#concurrently) - so we don't have to go into too much detail. We first have to install the package in the top-level directory:

```bash
npm install --save-dev concurrently
```

Then add npm start scripts to the top-level package.json file:

```json
"scripts": {
  "start-dev": "concurrently \"npm run server\" \"npm run client\"",
  "server": "node index.js",
  "client": "node start-client.js",
  "start": "node index.js"
},
```

And add the **start-client.js** file to start our React app from the top-level:

```js
const args = [ 'start' ];
const opts = { stdio: 'inherit', cwd: 'client', shell: true };
require('child_process').spawn('npm', args, opts);
```

Now run **npm run start-dev** in the top level and you will see that both the Express App and React App will be started:

```bash
$ npm run start-dev

> [email protected] start-dev E:\random-pass-gen
> concurrently "npm run server" "npm run client"

...
Password generator listening on 5000
...
Starting the development server...
You can now view client in the browser.
http://localhost:3000/
```

## Create Static Version of our React App

Make sure that you are inside the */client* directory and execute:

```bash
npm run build
```

which is a command that was defined by create-react-app (**react-scripts**) inside */client/package.json*. It will create an optimized static version of our app inside */client/build*. This is the folder that we already told our Express App (see index.js in the top-level directory) would contain our static content.

To test if the static content is served correctly by our Express App, just set your NODE_ENV to production - or just comment out the following inside */index.js*:

```js
if (process.env.NODE_ENV === "production") {
  app.use(express.static("client/build"));
}
```

and use this statement instead:

```js
app.use(express.static(path.join(__dirname, 'client/build')));
```

So by opening our Express App on port 5000 we will have our Express App serving our React App as static content from the /build folder:

```bash
npm run server
```

You can access it from your browser:

```bash
http://localhost:5000
```

<GifContainer gifUrl="/assets/gif/password-generator.gif" alt="React Password Generator" /><file_sep>---
date: "2019-09-28"
title: "Adding Homekit to your Node-RED Container in Podman"
categories:
  - LINUX
  - Smarthome
  - IoT
  - Docker
---

![<NAME>, Cambodia](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Bonjour](#bonjour)
- [FFMPEG](#ffmpeg)
    - [Commit your Changes to the Container Image](#commit-your-changes-to-the-container-image)
- [Adding Homekit Nodes](#adding-homekit-nodes)
    - [hk switch](#hk-switch)
    - [hk outlet](#hk-outlet)
    - [hk thermostat](#hk-thermostat)
    - [hk alarm](#hk-alarm)
    - [hk motion](#hk-motion)
    - [hk contact](#hk-contact)
    - [hk lock](#hk-lock)
    - [hk garage door](#hk-garage-door)
    - [hk camera](#hk-camera)
- [Adding your Homekit Devices to Home Assistant](#adding-your-homekit-devices-to-home-assistant)

<!-- /TOC -->

## Bonjour

First we need to add libraries for [Apple Bonjour mDNSResponder](https://pkgs.alpinelinux.org/package/edge/main/x86/avahi-compat-libdns_sd) support to our [Node-RED Container](/home-assistant-open-hab-node-red-io-broker-motion-eye-containerized#node-red):

```
podman exec -ti nodered /bin/bash
apk add avahi-compat-libdns_sd
```

_On a Debian based container image use_ `apt-get install libavahi-compat-libdnssd-dev` _instead_

## FFMPEG

The camera node requires FFMPEG to be installed:

```
apk add --no-cache ffmpeg
ffmpeg
-version ``` ### Commit your Changes to the Container Image Find out the ID of your working Node-RED container: ```bash podman ps -a CONTAINER ID IMAGE CREATED STATUS NAMES 5a800b30d707 localhost/nodered/node-red:latest 2 hours ago Up 2 hours ago nodered ``` And commit the changes to your Node-RED image: ```bash podman commit 5a800b30d707 localhost/nodered/node-red:latest Getting image source signatures Copying blob 5216338b40a7 skipped: already exists Copying blob 71bfbf950c11 skipped: already exists Copying blob be137f464809 skipped: already exists Copying blob f67c97fd4436 skipped: already exists Copying blob 35460e116648 skipped: already exists Copying blob 3fd3eff2f520 skipped: already exists Copying blob 9647323e45c3 skipped: already exists Copying blob 630dbc93193b skipped: already exists Copying blob d4e3c4f2011c skipped: already exists Copying blob 15b62db68044 skipped: already exists Copying blob 70f17cb313f5 skipped: already exists Copying blob 4004cf0f3090 done Copying config b98bf74922 done Writing manifest to image destination Storing signatures b98bf74922ffaaeb1a24fba5ec27cc94dc8a82dbdea5e5b530daea0efa0b7136 ``` ## Adding Homekit Nodes Install the Homekit nodes into Node-RED from the palette menu - I am choosing [node-red-contrib-homekit-preconfigured](http://flows.nodered.org/node/node-red-contrib-homekit-preconfigured) that comes with a couple of easy to use nodes for smarthome devices. ![Node-RED with Homekit](./Node-RED_with_Homekit_01.png) To GET an overview over all available characteristics contained within a node, simply inject the string `GetCharacteristics` and check the debug panel: ![Node-RED with Homekit](./Node-RED_with_Homekit_02.png) The switch node only offers the characteristic `On` which you can switch from `0` to `1` by injecting the following JSON object: ![Node-RED with Homekit](./Node-RED_with_Homekit_03.png) ### hk switch ```json [{"id":"dab26010.30d35","type":"hk-switch","z":"e6aaa2fa.a9d3a","pincode":"794-77-253","username":"28:E7:B0:75:80:0C","name":"TestSwitch","port":"51093","interceptget":"No","originalId":"dab26010.30d35","x":230,"y":60,"wires":[["a25b193c.e530b8"]]},{"id":"a25b193c.e530b8","type":"debug","z":"e6aaa2fa.a9d3a","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":386,"y":60,"wires":[]},{"id":"de29bd4d.a3c6b","type":"inject","z":"e6aaa2fa.a9d3a","name":"On","topic":"","payload":"{\"On\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":85,"y":61,"wires":[["dab26010.30d35"]]},{"id":"879ccff8.550e4","type":"inject","z":"e6aaa2fa.a9d3a","name":"Off","topic":"","payload":"{\"On\":0}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":84,"y":101,"wires":[["dab26010.30d35"]]},{"id":"5a0ba7f0.b25be8","type":"inject","z":"e6aaa2fa.a9d3a","name":"GET","topic":"","payload":"GetCharacteristics","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":85,"y":23,"wires":[["dab26010.30d35"]]}] ``` ### hk outlet ```json 
[{"id":"5bfbcc57.ae0584","type":"inject","z":"e6aaa2fa.a9d3a","name":"GET","topic":"","payload":"GetCharacteristics","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":84,"y":160,"wires":[["53cd054d.71517c"]]},{"id":"ae543ca6.70421","type":"debug","z":"e6aaa2fa.a9d3a","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":407,"y":240,"wires":[]},{"id":"53cd054d.71517c","type":"hk-outlet","z":"e6aaa2fa.a9d3a","pincode":"559-11-933","username":"3E:37:41:D5:BF:22","name":"Power Outlet","port":"56821","interceptget":"Yes","originalId":"53cd054d.71517c","x":239,"y":240,"wires":[["ae543ca6.70421"]]},{"id":"54092f53.ccc79","type":"inject","z":"e6aaa2fa.a9d3a","name":"Off","topic":"","payload":"{\"On\":0}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":84,"y":239,"wires":[["53cd054d.71517c"]]},{"id":"88f4911e.d87d5","type":"inject","z":"e6aaa2fa.a9d3a","name":"On","topic":"","payload":"{\"On\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":85,"y":199,"wires":[["53cd054d.71517c"]]},{"id":"c308fa64.b349e8","type":"inject","z":"e6aaa2fa.a9d3a","name":"Not In Use","topic":"","payload":"{\"OutletInUse\":0}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":91,"y":320,"wires":[["53cd054d.71517c"]]},{"id":"aae5ea4e.139478","type":"inject","z":"e6aaa2fa.a9d3a","name":"In Use","topic":"","payload":"{\"OutletInUse\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":82,"y":280,"wires":[["53cd054d.71517c"]]}] ``` ### hk thermostat ```json [{"id":"7e793a50.1a4ed4","type":"inject","z":"e6aaa2fa.a9d3a","name":"Current/Target Temp","topic":"","payload":"{\"CurrentTemperature\":21,\"TargetTemperature\":24,\"TemperatureDisplayUnits\":\"°C\"}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":124,"y":413,"wires":[["970b8ade.44fb88"]]},{"id":"970b8ade.44fb88","type":"hk-thermostat","z":"e6aaa2fa.a9d3a","pincode":"577-50-506","username":"90:D1:75:A3:59:89","name":"Thermostat","port":"52378","interceptget":"Yes","supportsCooling":"No","originalId":"970b8ade.44fb88","x":327,"y":375,"wires":[["5c7fd16a.bdaaf"]]},{"id":"5c7fd16a.bdaaf","type":"debug","z":"e6aaa2fa.a9d3a","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":487,"y":375,"wires":[]},{"id":"fe72146a.0de2c8","type":"inject","z":"e6aaa2fa.a9d3a","name":"GET","topic":"","payload":"GetCharacteristics","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":84,"y":375,"wires":[["970b8ade.44fb88"]]},{"id":"e97830e2.6813a","type":"inject","z":"e6aaa2fa.a9d3a","name":"Current/TargetState","topic":"","payload":"{\"CurrentHeatingCoolingState\":0,\"TargetHeatingCoolingState\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":124,"y":451,"wires":[["970b8ade.44fb88"]]}] ``` ### hk alarm ```json [{"id":"d1c360eb.a33f5","type":"hk-alarm","z":"e6aaa2fa.a9d3a","pincode":"771-22-989","username":"B4:B1:DB:16:E6:88","name":"Intruder 
Alarm","port":"49288","interceptget":"Yes","originalId":"d1c360eb.a33f5","x":324,"y":512,"wires":[["d6de3190.19907"]]},{"id":"c0e56e96.9af34","type":"inject","z":"e6aaa2fa.a9d3a","name":"GET","topic":"","payload":"GetCharacteristics","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":87,"y":512,"wires":[["d1c360eb.a33f5"]]},{"id":"d6de3190.19907","type":"debug","z":"e6aaa2fa.a9d3a","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":492,"y":512,"wires":[]},{"id":"a6a0c561.9081d8","type":"inject","z":"e6aaa2fa.a9d3a","name":"Current/Target State","topic":"","payload":"{\"SecuritySystemTargetState\":1,\"SecuritySystemCurrentState\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":126,"y":551,"wires":[["d1c360eb.a33f5"]]},{"id":"3f1d4147.2185ee","type":"inject","z":"e6aaa2fa.a9d3a","name":"Tampered/Fault State","topic":"","payload":"{\"StatusFault\":1,\"StatusTampered\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":135,"y":590,"wires":[["d1c360eb.a33f5"]]}] ``` ### hk motion ```json [{"id":"bcdcd6c0.8530c8","type":"inject","z":"e6aaa2fa.a9d3a","name":"GET","topic":"","payload":"GetCharacteristics","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":85,"y":647,"wires":[["11606b68.e07905"]]},{"id":"1e440055.ba776","type":"debug","z":"e6aaa2fa.a9d3a","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":487,"y":647,"wires":[]},{"id":"11606b68.e07905","type":"hk-motion","z":"e6aaa2fa.a9d3a","pincode":"199-28-428","username":"C7:DA:E5:C2:49:71","name":"Motion Sensor","port":"57989","interceptget":"Yes","originalId":"11606b68.e07905","x":321,"y":647,"wires":[["1e440055.ba776"]]},{"id":"c7b07991.6079e8","type":"inject","z":"e6aaa2fa.a9d3a","name":"MotionDetected","topic":"","payload":"{\"MotionDetected\":true}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":115,"y":684,"wires":[["11606b68.e07905"]]},{"id":"c7c10f08.c9f6d","type":"inject","z":"e6aaa2fa.a9d3a","name":"Active/Charging","topic":"","payload":"{\"StatusActive\":0, \"ChargingState\": 1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":116,"y":723,"wires":[["11606b68.e07905"]]},{"id":"d6204201.c36fe","type":"inject","z":"e6aaa2fa.a9d3a","name":"Level/LowBatt","topic":"","payload":"{\"BatteryLevel\":99, \"StatusLowBattery\": 0}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":107,"y":762,"wires":[["11606b68.e07905"]]}] ``` ### hk contact ```json 
[{"id":"bf0a64d3.f53088","type":"inject","z":"e6aaa2fa.a9d3a","name":"GET","topic":"","payload":"GetCharacteristics","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":91,"y":828,"wires":[["61ceb5eb.e3ea3c"]]},{"id":"800d69f1.ee8418","type":"debug","z":"e6aaa2fa.a9d3a","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":505,"y":828,"wires":[]},{"id":"14614a57.b85056","type":"inject","z":"e6aaa2fa.a9d3a","name":"ContactSensorState","topic":"","payload":"{\"ContactSensorState\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":131,"y":870,"wires":[["61ceb5eb.e3ea3c"]]},{"id":"61ceb5eb.e3ea3c","type":"hk-contact","z":"e6aaa2fa.a9d3a","pincode":"174-31-966","username":"1B:2B:CD:87:2B:85","name":"Backdoor","port":"47675","interceptget":"Yes","originalId":"61ceb5eb.e3ea3c","x":332,"y":828,"wires":[["800d69f1.ee8418"]]},{"id":"6a853bbd.3c79e4","type":"inject","z":"e6aaa2fa.a9d3a","name":"Tampered/Fault State","topic":"","payload":"{\"StatusFault\":1, \"StatusTampered\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":140,"y":909,"wires":[["61ceb5eb.e3ea3c"]]},{"id":"ef12f77f.7f8ee8","type":"inject","z":"e6aaa2fa.a9d3a","name":"Tampered/Fault State","topic":"","payload":"{\"BatteryLevel\":99, \"ChargingState\":0, \"StatusLowBattery\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":140,"y":948,"wires":[["61ceb5eb.e3ea3c"]]}] ``` ### hk lock ```json [{"id":"853f8be1.9b5d98","type":"inject","z":"e6aaa2fa.a9d3a","name":"GET","topic":"","payload":"GetCharacteristics","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":90,"y":1018,"wires":[["f2c56a8d.3f6638"]]},{"id":"74c13b20.ec2da4","type":"debug","z":"e6aaa2fa.a9d3a","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":419,"y":1018,"wires":[]},{"id":"c09d1e63.070f6","type":"inject","z":"e6aaa2fa.a9d3a","name":"Target/CurrentState","topic":"","payload":"{\"LockTargetState\":0, \"LockCurrentState\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":130,"y":1060,"wires":[["f2c56a8d.3f6638"]]},{"id":"f2c56a8d.3f6638","type":"hk-lock","z":"e6aaa2fa.a9d3a","pincode":"277-44-193","username":"33:66:1F:D4:67:3D","name":"Door Lock","port":"52873","interceptget":"Yes","originalId":"f2c56a8d.3f6638","x":250,"y":1018,"wires":[["74c13b20.ec2da4"]]}] ``` ### hk garage door ```json [{"id":"43593ce2.bf48f4","type":"inject","z":"e6aaa2fa.a9d3a","name":"GET","topic":"","payload":"GetCharacteristics","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":90,"y":1140,"wires":[["a543a389.09561"]]},{"id":"7d018199.be1cd","type":"debug","z":"e6aaa2fa.a9d3a","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":492,"y":1140,"wires":[]},{"id":"1c155e45.7fe2b2","type":"inject","z":"e6aaa2fa.a9d3a","name":"Current/TargetState","topic":"","payload":"{\"CurrentDoorState\":1, \"TargetDoorState\":0}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":130,"y":1220,"wires":[["a543a389.09561"]]},{"id":"a543a389.09561","type":"hk-garage-door","z":"e6aaa2fa.a9d3a","pincode":"287-67-232","username":"47:4D:3C:E5:92:9D","name":"Garage 
Door","port":"44037","interceptget":"Yes","originalId":"a543a389.09561","x":331,"y":1140,"wires":[["7d018199.be1cd"]]},{"id":"8a04f667.1aa958","type":"inject","z":"e6aaa2fa.a9d3a","name":"LockCurrent/TargetState","topic":"","payload":"{\"LockCurrentState\":1, \"LockTargetState\":1}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":150,"y":1260,"wires":[["a543a389.09561"]]},{"id":"34bb2003.96a8b","type":"inject","z":"e6aaa2fa.a9d3a","name":"Obstruction","topic":"","payload":"{\"ObstructionDetected\": true}","payloadType":"json","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":110,"y":1180,"wires":[["a543a389.09561"]]}]
```

### hk camera

The use of the camera node will require FFMPEG to be installed in its default location (for *nix that is the bin directory; for Windows users, ensure the FFMPEG directory is set up in the 'PATH' environment variable).

Configure the accessory details below.

* __Name__ : The name as it appears in Home app/Homekit based applications.
* __Pincode__ : The required code to enter when enrolling the accessory in Homekit.
* __Username__ : An identifier for your device, it must follow the MAC address format (00:00:00:00:00:00).
* __Port__ : The network port your accessory will listen on.
* __Max Streams__ : The max number of live streams that can occur.
* __Max Width/Height__ : The maximum size of the video stream (refer to your IP camera settings).
* __Max FPS__ : Max framerate of the live stream (refer to your IP camera settings).
* __Source Stream__ : The network stream used to capture live and still footage; still footage is derived from a 1 second frame.
* __Encoder__ : The ffmpeg video encoder to use when streaming footage.

Port, Pincode & Username should be unique across your devices.

![Node-RED with Homekit](./Node-RED_with_Homekit_11.png)

## Adding your Homekit Devices to Home Assistant

In Home Assistant head over to the __Configuration__ / __Integrations__ tab. All your devices should already be listed there:

![Node-RED with Homekit](./Node-RED_with_Homekit_12.png)

Click on configure and add the corresponding Pin Code (as assigned in Node-RED).
<file_sep>---
date: "2017-12-09"
title: "Next.js Server Side Rendering"
categories:
  - Javascript
  - React
  - Next
---

![Tanna Island, Vanuatu](./photo-34221454260_1d42dbe06f_o.png)

[Github](https://github.com/mpolinowski/next-start)

# Next.js

<!-- TOC -->

- [Next.js](#nextjs)
    - [On my way to figure out to find the best way to deploy a React App](#on-my-way-to-figure-out-to-find-the-best-way-to-deploy-a-react-app)
    - [01 Basic Setup](#01-basic-setup)
    - [02 Styling](#02-styling)
    - [Example](#example)
    - [03 Bootstrap 4](#03-bootstrap-4)
    - [04 Advanced Routing](#04-advanced-routing)

<!-- /TOC -->

## On my way to figure out to find the best way to deploy a React App

01. [create-react-app](https://github.com/mpolinowski/react-router-4)
02. [GatsbyJS](https://github.com/mpolinowski/gatsby-wiki)
03. Next.js

Today I want to try out [Next.js](https://zeit.co/docs/examples/next) ([Github](https://github.com/zeit/next.js)). More specifically the new [Next4](https://zeit.co/blog/next4) release that includes [React16](https://reactjs.org/blog/2017/09/26/react-v16.0.html) and [React Router 4](https://zeit.co/blog/next4) - as I already gathered some [positive experiences](https://github.com/mpolinowski/react-router-4) with those.
There is also an update to [Styled JSX 2](https://github.com/zeit/styled-jsx/releases/tag/2.0.1), that now allows us to __prop__-up our styles, to avoid having to overwrite defaults inline inside components - here the documented example: ```js export default ({ color }) => ( <div> Hello there <span><NAME></span> <style jsx>{` /* this style only applies to the span within lexical scope */ span { color: ${color}; } `}</style> </div> ) ``` There are many different [examples available](https://github.com/zeit/next.js/tree/master/examples) to start building your App. All righty, lets go then! --- ![](./next_01.png) The final Application __TOC__ - [Next.js](#nextjs) - [On my way to figure out to find the best way to deploy a React App](#on-my-way-to-figure-out-to-find-the-best-way-to-deploy-a-react-app) - [01 Basic Setup](#01-basic-setup) - [02 Styling](#02-styling) - [Example](#example) - [03 Bootstrap 4](#03-bootstrap-4) - [04 Advanced Routing](#04-advanced-routing) ## 01 Basic Setup First create a directory for our app and _npm init -y_ it - then install Next.js and React: ``` npm install next@latest react@latest react-dom@latest --save ``` We can add the following npm scripts to the __package.json__ file to make things easier: ```json "scripts": { "dev": "next", "build": "next build", "start": "next start" } ``` After that, the file-system is the main API. Every .js file becomes a route that gets automatically processed and rendered. Populate _./pages/index.js_ inside your project: ```js export default () => <div>Welcome to next.js!</div> ``` and then just run npm run dev and go to http://localhost:3000. To use another port, you can run npm run dev -- -p "your port here". So far, we get: * Automatic transpilation and bundling (with webpack and babel) * Hot code reloading * Server rendering and indexing of ./pages * Static file serving. ./static/ is mapped to /static/ * Automatic code splitting - Every import you declare gets bundled and served with each page. That means pages never load unnecessary code! ## 02 Styling Next.js 4 bundles styled-jsx 2 to provide support for isolated scoped CSS - the [documentation can be found here](https://www.npmjs.com/package/styled-jsx). A few examples are: __isolated scoped CSS__ ```js export default () => ( <div> <p>only this paragraph will get the style :)</p> { /* you can include <Component />s here that include other <p>s that don't get unexpected styles! */ } <style jsx>{` p { color: red; } `}</style> </div> ) ``` Styles can be defined in separate JavaScript modules by tagging with css any template literal that contain CSS. css must be imported from styled-jsx/css: __Keeping CSS in separate files__ ```js /* styles.js */ import css from 'styled-jsx/css' export const button = css`button { color: purple; }` export default css`div { color: teal; }` ``` and imported as regular strings: ```js import styles, { button } from './styles' export default () => ( <div> <button>styled-jsx</button> <style jsx>{styles}</style> <style jsx>{button}</style> </div> ) ``` in this example all "potential" \<div\>'s inside the components JSX - as well as the enclosing root \<div\> is targeted by {styles}. 
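Behind the scenes styled-jsx makes this scoping work by rewriting both the JSX elements and the selectors with a generated class name. Conceptually the compiled result looks roughly like this - the `jsx-123` hash is made up for illustration, real hashes are derived from the style contents:

```js
// conceptual output only - not the literal styled-jsx compilation
export default () => (
  <div className="jsx-123">
    <button className="jsx-123">styled-jsx</button>
    {/* the selectors are rewritten to only match elements carrying the class */}
    <style>{`div.jsx-123 { color: teal; } button.jsx-123 { color: purple; }`}</style>
  </div>
)
```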
If you just want to target the root: __Targeting The Root__ ```js export default () => ( <div className="root"> <style jsx>{` .root { color: green; } `}</style> </div> ) ``` To skip scoping entirely, you can make the global-ness of your styles explicit by adding global: __Global styles__ ```js export default () => ( <div> <style jsx global>{` body { background: red } `}</style> </div> ) ``` You can also use modifiers for your CSS classes to create components that can be called in different "versions", e.g. \<Button\>Hi\</Button\> or \<Button large\>Big\</Button\>. __className toggling__ ```js const Button = (props) => ( <button className={ 'large' in props && 'large' }> { props.children } <style jsx>{` button { padding: 20px; background: #eee; color: #999 } .large { padding: 50px } `}</style> </button> ) ``` You can override the CSS you configure via inline-styles: __inline style__ ```js const Button = ({ padding, children }) => ( <button style={{ padding }}> { children } <style jsx>{` button { padding: 20px; background: #eee; color: #999 } `}</style> </button> ) ``` In this example, the padding defaults to the one set in \<style\> (20), but the user can pass a custom one via \Button padding={30}\. It is possible to use constants like so: __Constants__ ```js import { colors, spacing } from '../theme' import { invertColor } from '../theme/utils' const Button = ({ children }) => ( <button> { children } <style jsx>{` button { padding: ${ spacing.medium }; background: ${ colors.primary }; color: ${ invertColor(colors.primary) }; } `}</style> </button> ) ``` or to pass them down by Props: __Dynamic styles__ ```js const Button = (props) => ( <button> { props.children } <style jsx>{` button { padding: ${ 'large' in props ? '50' : '20' }px; background: ${props.theme.background}; color: #999; display: inline-block; font-size: 1em; } `}</style> </button> ) ``` ## Example _./pages/index.js_ ```js import styles, { hipsterum, imagefloat } from './styles/hipsterum' export default () => <div> <h1>Welcome to next.js!</h1> <h3>The awesome World of Server-side Rendering</h3> <h5>lets see what this is all about</h5> <img src="/static/test.png" className="imagefloat"/> <p className="hipsterum"> [ipsum...] </p> <style jsx>{` h1, h5 { color: white; } @media (max-width: 600px) { h1, h5 { color: black; } } `}</style> <style global jsx>{` body { background: black; } @media (max-width: 600px) { body { background-image: url("/static/test.png"); } } `}</style> <style jsx>{styles}</style> <style jsx>{imagefloat}</style> <style jsx>{hipsterum}</style> </div> ``` _./pages/styles/hipsterum.js_ ```js import css from 'styled-jsx/css' export const hipsterum = css`.hipsterum { color: blue; text-align: justify; }` export const imagefloat = css`.imagefloat { float:right; margin-left:10px; }` export default css`h3 { color: red; }` ``` This leads you to an style abomination that will make you cringe :) but uses all the important features of Style-JSX: * \@media queries for responsive designs * usage of static assets for your designs from the _./static_ folder * inline css and separated css styles in their own files * separation of styles into their own functions to allow you to import only necessary styles ## 03 Bootstrap 4 Trying to figure out the best way to add Bootstrap to the Mix - for now I will just add the CDN links to the layout Component in _./components/layout_. 
This works just like the template component with React-Helmet in create-react-app: ```js import Link from 'next/link' import Head from 'next/head' import NavBar from './navbar' export default ({ children, title = 'This is the default title' }) => ( <div> <Head> <title>{ title }</title> <meta charSet='utf-8' /> <meta name='viewport' content='initial-scale=1.0, width=device-width' /> <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta.2/css/bootstrap.min.css" integrity="<KEY>" crossOrigin="anonymous" /> </Head> <header> <NavBar /> </header> <div className="container"> <div className="row"> <br/><br/><br/><br/> { children } </div> </div> <footer> {'I`m a Footer Component'} <script src="https://code.jquery.com/jquery-3.2.1.slim.min.js" integrity="<KEY>" crossOrigin="anonymous" /> <script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.12.3/umd/popper.min.js" integrity="<KEY>" crossOrigin="anonymous" /> <script src="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta.2/js/bootstrap.min.js" integrity="<KEY>" crossOrigin="anonymous" /> </footer> </div> ) ``` As a first bootstrap component, I added the __Bootstrap 4 Navbar__ we [created earlier](https://github.com/mpolinowski/react-router-4/blob/master/src/index.js) and imported it into the header area above: ```js import Link from 'next/link' const NavBar = () => ( <div> <nav className="navbar navbar-expand-md navbar-dark bg-dark fixed-top mb"> <Link href="/"><a className="nav-item nav-link"><img src="/static/logo.svg" alt="INSTAR Wiki" /></a></Link> <button className="navbar-toggler" type="button" data-toggle="collapse" data-target="#TopNavbar" aria-controls="navbarsExampleDefault" aria-expanded="false" aria-label="Toggle navigation"> <span className="navbar-toggler-icon"></span> </button> <div className="collapse navbar-collapse" id="TopNavbar"> <ul className="navbar-nav mr-auto"> <li className="nav-item"> <Link href="/page-1"><a className="nav-item nav-link">Page 1</a></Link> </li> <li className="nav-item"> <Link href="/page-2"><a className="nav-item nav-link">Page 2</a></Link> </li> <li className="nav-item"> <Link href="/nested-routes"><a className="nav-item nav-link">Nested Routes</a></Link> </li> </ul> </div> </nav> </div> ) export default NavBar ``` ## 04 Advanced Routing As we already see, Next.js builds routes for each component it finds inside the _./pages_ directory. So our index component shows up at the root URL, without us having to do anything - sweet. But what if we need nested routes for our components - say _page01_ is a child of _chapter01_ and we want it to appear when we type in _localhost:3000/chapter01/page01_ ? There are two ways that I found so far: * next-routes: a neat little [npm module](https://www.npmjs.com/package/next-routes) from also featured as an [Example @Zeit](https://github.com/zeit/next.js/tree/master/examples/with-next-routes). * Using an Express.js webserver as featured [@Zeit](https://github.com/zeit/next.js/tree/master/examples/custom-server-express) and [@Medium](https://medium.com/@diamondgfx/nextjs-lessons-learned-part-2-f1781237cf5c) ~~Lets try out __next-routes__ for this example:~~ ~~npm install next-routes --save~~ [...] __Ok, this basically wrecked the whole application__ I will copy the code to _./next-routes-wtf_ and - maybe - try it again later... The result is very inconsistent. You can click on a link and the page loads just fine. 
If you click on the same link again, or just reload the page, or copy its URL into another browser, you are very likely to end up seeing the 404 page.

**Update** It might just have been the way I linked components - the solution that is coming up below showed a similar behaviour when you forget the "as=" attribute in a link tag.

Ok - so let's try Express.js now, since I wanted to use it for deployment anyhow. [<NAME>](https://medium.com/@diamondgfx/nextjs-lessons-learned-part-2-f1781237cf5c) says that he ran into the same problems I had with _next-routes_, when using the [official documentation](https://github.com/zeit/next.js/tree/master/examples) for the _custom server.js_ setup. So I will stay away from it for now and try his version.

First install [Express.js](http://expressjs.com) from npm:

```
npm install --save express
```

then create _./server.js_:

```js
const express = require('express');
const { parse } = require('url');
const next = require('next');
const dev = process.env.NODE_ENV !== 'production';
const app = next({ dev });
const handle = app.getRequestHandler();

app.prepare().then(() => {
  const server = express();

  // CUSTOM ROUTES GO HERE
  server.get('/Products/:slug', (req, res) => {
    const mergedQuery = Object.assign({}, req.query, req.params);

    return app.render(req, res, '/Products', mergedQuery);
  });

  // THIS IS THE DEFAULT ROUTE, DON'T EDIT THIS
  server.get('*', (req, res) => {
    return handle(req, res);
  });

  const port = process.env.PORT || 3000;
  server.listen(port, err => {
    if (err) throw err;
    console.log(`> Ready on port ${port}...`);
  });
});
```

This will give you a param that gets sent to your page component (here: _Products.js_) inside of your pages/ directory and gives you the custom routing that you want!

For the client-side linking, assuming we have the route setup above (_/Products/:slug_), your links to specific slugs need to be structured using next/link’s Link component via the following:

```js
<Link href={`/base?slug=${slug}`} as={`/base/${slug}`} prefetch>
   ...
</Link>
```

e.g.

```js
<Link href={`/Products?slug=${'Outdoor_Cameras'}`} as={`/Products/${'Outdoor_Cameras'}`} prefetch>
```

__as__ is what the user will see in their browser, but __href__ is what next.js will interpret to figure out how things need to get routed.

_Both of these steps are required to make the link behavior and routing behavior behave the same no matter where the page is rendered from!_

Now create a Product component in _./pages/Products.js_:

```js
import React from 'react'
import Layout from '../components/layout'

const posts = [
  { slug: 'Indoor_Cameras', title: 'Indoor Cameras' },
  { slug: 'Outdoor_Cameras', title: 'Outdoor Cameras' }
]

export default class extends React.Component {
  static async getInitialProps ({ query, res }) {
    const post = posts.find(post => post.slug === query.slug)

    if (!post && res) {
      res.statusCode = 404
    }

    return { post }
  }

  render () {
    const { post } = this.props

    if (!post) return <Layout><h1>Products</h1></Layout>

    return <Layout><h1>{post.title}</h1></Layout>
  }
}
```

This will load the corresponding post when you add the right slug for it - _/Products/Outdoor_Cameras_ or _/Products/Indoor_Cameras_ - and defaults to whatever you add here: _if (!post) return \<Layout\>\<h1\>Products\</h1\>\</Layout\>_ in case no match is found.
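To link to these pages from anywhere in the app - e.g. from the navbar - both attributes have to be set for each slug. A small sketch reusing the two slugs defined in _Products.js_:

```js
import Link from 'next/link'

const ProductLinks = () => (
  <ul>
    {['Indoor_Cameras', 'Outdoor_Cameras'].map(slug => (
      <li key={slug}>
        {/* href feeds the page + query to Next.js, as renders the pretty URL */}
        <Link href={`/Products?slug=${slug}`} as={`/Products/${slug}`} prefetch>
          <a>{slug.replace('_', ' ')}</a>
        </Link>
      </li>
    ))}
  </ul>
)

export default ProductLinks
```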
Finally, you’ll need to modify your package.json file to include everything so that next.js knows how to run the server.js file:

```json
"scripts": {
  "build": "next build",
  "start": "node server.js"
}
```

Now you can continue to run your dev server with npm run dev and you can build/start your production server as well!<file_sep>---
date: "2019-08-03"
title: "Debugging MQTT Sensor Networks"
categories:
  - MQTT
  - Windows
  - IoT
---

![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Debugging MQTT Networks](#debugging-mqtt-networks)
- [Windows and macOS Software](#windows-and-macos-software)
- [MQTT Explorer](#mqtt-explorer)
- [Connect to your Cameras Broker](#connect-to-your-cameras-broker)
- [Controlling your Camera](#controlling-your-camera)
- [MQTT.fx](#mqttfx)

<!-- /TOC -->

### Windows and macOS Software

There are plenty of free tools available that you can use to test an MQTT network and to keep an eye on all messages that are processed through it. This is extremely useful when connecting new clients. In the following we are going to take a look at two of these tools that are both available for Windows, macOS and Linux:

* [MQTT Explorer by <NAME>](http://mqtt-explorer.com)
* [MQTT.fx](https://mqttfx.jensd.de)

## MQTT Explorer

![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_01.png)

Once you have [downloaded](http://mqtt-explorer.com), installed and started the app, click on the __Plus__ icon in the top left to add your MQTT broker - in our case, our INSTAR Full HD camera.

### Connect to your Cameras Broker

![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_02.png)

Type in your cameras local IP address as __Host__, e.g. `192.168.2.165`, with the __Port__ `8883` (with __Encryption (tls)__ enabled) or `1883` (_without encryption_). The last line is your MQTT broker login that you have set in your cameras WebUI:

![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_03.png)

If you have chosen to use the __TLS Encryption__ you now have to click on __Advanced__ back inside the MQTT Explorer broker setup:

![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_04.png)

In the screenshot above you can see that we already have a wildcard (`#`) subscription by default. That means that once we connect to our MQTT Broker we will receive updates for every MQTT Topic that has been registered with the Broker. In the following window click on __Client Certificate__ and select the __INSTAR Trusted Client Certificate__ that you can download from your cameras WebUI:

![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_05.png)

Once you uploaded the certificate you are able to connect to your MQTT broker. Click on __Back__ twice and then __Connect__:

![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_06.png)

Once connected you are able to see all MQTT Topics that are registered with the server. The important ones are `instar/all` and `instar/000389888811`, where the number __000389888811__ represents the MAC address of our camera and will be different on every camera you connect to.

![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_07a.png)

You can find your cameras MAC address by opening the web user interface and going to __System__ / __Overview__:

![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_07b.png)

All topics under `instar/all` will affect all INSTAR Full HD cameras that are connected to your MQTT broker - if a topic is updated here all cameras will be switched accordingly.
All topics under `instar/000389888811` only affect the camera with the MAC address corresponding with the number __000389888811__. If you want to send a command to the camera that is running the active MQTT broker, you can either use the MAC address, or simply use the `instar/local` prefix instead. ### Controlling your Camera To control your camera select a topic you want to update: ![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_08.png) In this case we choose the topic `alarm/area1/enable` to activate the motion detection area 1 on our camera. Note that the topics that are listed on the left are the __STATUS topics__ that display the current status on our camera. In our example the topic is `instar/000389888811/status/alarm/area1/enable` and it's current value is `{"val":"0"}`. To update this status topic and activate our detection area we have to send an update to the __COMMAND Topic__. This is identical to the status topic minus the word _status_: `instar/000389888811/alarm/area1/enable`. Copy this command into the topic field and add the payload `{"val":"1"}` and hit __PUBLISH__: ![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_09.png) You can see that the __STATUS topic__ was updated to the value 1. When you open your cameras webUI you will also find that the detection area 1 is now active! Congratulations, you have successfully taken control over your camera using the MQTT interface. ## MQTT.fx Once you [downloaded](https://mqttfx.jensd.de), installed and started the app click on the __Cog Wheel__ icon in the top center to add your MQTT broker - in our case, our INSTAR Full HD camera. ![INSTAR MQTT Broker and MQTT.fx](./MQTT_fx_01.png) Choose the __MQTT Broker__ as __Profile Type__ and type in your cameras local IP address as __Broker Address__, e.g. `192.168.2.165`, with the __Broker Port__ `8883` (with __Encryption (SSL/TLS)__ enabled) or `1883` (_without encryption_). ![INSTAR MQTT Broker and MQTT.fx](./MQTT_fx_02.png) Under __User Credentials__ type in your MQTT broker login that you have set in your cameras WebUI: ![INSTAR MQTT Broker and MQTT.fx](./MQTT_fx_03.png) If you have chosen to use the __TLS Encryption__ via __Broker Port__ `8883` you now have to switch to the __SSL/TLS__ tab: ![INSTAR MQTT Broker and MQTT.fx](./MQTT_fx_04.png) __Enable SSL/TLS__ and click on __CA Certificate file__ and select the __INSTAR Trusted Client Certificate__ that you can download from your cameras WebUI: ![INSTAR MQTT Broker and the MQTT Explorer](./MQTT_Explorer_03.png) Click on __Ok__ and back on the main Window select the broker you just set up and click on __Connect__: ![INSTAR MQTT Broker and MQTT.fx](./MQTT_fx_05.png) By typing `#` into the input field and clicking on __Subscribe__ inside the __Subscribe__ tab you get a wildcard subscription for all MQTT topics that are registered on your MQTT broker. You can also use the __Publish__ tab to update a MQTT topic and switching the corresponding function on your camera. For example the topic `instar/000389888811/alarm/area1/enable`, where the number __000389888811__ represents the MAC address of our camera and will be different on every camera you connect to, can have the payload `{"val":"1"}` or `{"val":"0"}` to either switch the alarm detection area 1 __on__ or __off__. 
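If you prefer the command line, the same update can also be published with the Mosquitto client tools - a sketch assuming they are installed, using the unencrypted port `1883`; replace the IP, the `username`/`password` login and the MAC address with your own values:

```bash
# watch all status topics in a second terminal to see the update arrive
mosquitto_sub -h 192.168.2.165 -p 1883 -u username -P password -t 'instar/000389888811/status/#' -v

# publish to the COMMAND topic to enable motion detection area 1
mosquitto_pub -h 192.168.2.165 -p 1883 -u username -P password -t 'instar/000389888811/alarm/area1/enable' -m '{"val":"1"}'
```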
![INSTAR MQTT Broker and MQTT.fx](./MQTT_fx_06.png)

Switching back to the __Subscribe__ tab shows you that your MQTT broker received your topic update for `instar/000389888811/alarm/area1/enable`, your camera received the command to switch the alarm detection area on and in turn updated the topic `instar/000389888811/status/alarm/area1/enable` with the new status the camera is now in:

![INSTAR MQTT Broker and MQTT.fx](./MQTT_fx_07.png)<file_sep>---
date: "2018-10-28"
title: "Getting Started with Node-RED (Windows 10 Edition)"
categories:
  - Windows
  - Node-RED
---

![Harbin, China](./photo-34605589525_8f576ddb84_o.jpg)

This guide will help you get Node-RED installed and running in just a few minutes.

<!-- TOC -->

- [Installation](#installation)
- [Adding Nodes](#adding-nodes)
- [Creating a flow](#creating-a-flow)
    - [Add an Inject node](#add-an-inject-node)
    - [Add a Debug node](#add-a-debug-node)
    - [Wire the two together](#wire-the-two-together)
    - [Deploy](#deploy)
    - [Add a Function node](#add-a-function-node)
    - [Source Export / Import](#source-export--import)
- [Creating a flow that does an HTTP Request](#creating-a-flow-that-does-an-http-request)
    - [Add an Inject node](#add-an-inject-node-1)
    - [Add an HttpRequest node](#add-an-httprequest-node)
    - [Add a CSV node](#add-a-csv-node)
    - [Add a Debug node](#add-a-debug-node-1)
    - [Wire them all together](#wire-them-all-together)
    - [Add a Switch node](#add-a-switch-node)
    - [Add a Change node](#add-a-change-node)
    - [Add a Debug node](#add-a-debug-node-2)
    - [Deploy](#deploy-1)
    - [Export](#export)

<!-- /TOC -->

## Installation

The easiest way to install Node-RED is to use the node package manager, npm, that comes with Node.js. Installing as a global module adds the command node-red to your system path:

```bash
npm install -g --unsafe-perm node-red
```

We should now go to the root directory inside our Windows User folder and define a public directory (in the screenshot below I created a folder `./static` inside the root for that purpose) and create an Admin Login - this can be done in `./settings.js`:

![Node-RED](./node-red_01.png)

The Admin password is securely hashed using the bcrypt algorithm. To generate a suitable password hash, you can use the node-red-admin command-line tool:

```bash
npm install -g node-red-admin
node-red-admin hash-pw
```

The tool will prompt you for the password you wish to use and then print out the hash that can be copied into the settings file.

![Node-RED](./node-red_02.png)

You can now use the `node-red` command to start the application:

![Node-RED](./node-red_03.png)

Node-RED can now be accessed via `http://1172.16.17.32:1880/` - login with the created Admin account:

![Node-RED](./node-red_04.png)

![Node-RED](./node-red_05.png)

## Adding Nodes

Node-RED comes with a core set of useful nodes, but there is a growing number of additional nodes available for install from both the Node-RED project as well as the wider community. You can search for available nodes in the [Node-RED library](http://flows.nodered.org/).

You can install nodes directly using the editor. To do this select Manage Palette from the menu (top right), and then select the install tab in the palette. You can now search for new nodes to install, update, and enable and disable existing nodes.

![Node-RED](./node-red_06.png)

## Creating a flow

### Add an Inject node

> The __Inject__ node allows you to inject messages into a flow, either by clicking the button on the node, or setting a time interval between injects. Drag one onto the workspace from the palette.
### Add a Debug node > The __Debug__ node causes any message to be displayed in the Debug sidebar. By default, it just displays the payload of the message, but it is possible to display the entire message object. ### Wire the two together > Connect the Inject and Debug nodes together by dragging between the output port of one to the input port of the other. ### Deploy > At this point, the nodes only exist in the editor and must be deployed to the server. Click the __Deploy button__. With the Debug sidebar tab selected, click the Inject button. You should see numbers appear in the sidebar. By default, the Inject node uses the number of milliseconds since January 1st, 1970 as its payload. ![Node-RED](./node-red_07.png) ### Add a Function node > The Function node allows you to pass each message though a JavaScript function. > > Wire the Function node in between the Inject and Debug nodes. You may need to delete the existing wire (select it and hit delete on the keyboard). > > Double-click on the Function node to bring up the edit dialog. Copy the follow code into the function field: ```js var date = new Date(msg.payload); // Change the payload to be a formatted Date string msg.payload = date.toString(); // Return the message so it can be sent on return msg; ``` > Click Ok to close the edit dialog and then click the deploy button. > > Now when you click the Inject button, the messages in the sidebar will be more readable time stamps. ![Node-RED](./node-red_08.png) ![Node-RED](./node-red_09.png) ### Source Export / Import > The flow created in this example is represented by the following json. It can be imported straight into the editor by pasting the json into the Import dialog (Ctrl-I or via the dropdown menu): ```json [[{"id":"aafc0f94.6fc66","type":"tab","label":"Flow 1","disabled":false,"info":""},{"id":"43533492.9531bc","type":"inject","z":"aafc0f94.6fc66","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":89,"y":67,"wires":[[]]},{"id":"6cee6953.f69478","type":"inject","z":"aafc0f94.6fc66","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":92,"y":169,"wires":[["f2173234.8ac2"]]},{"id":"99de335e.f26fc","type":"debug","z":"aafc0f94.6fc66","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":334,"y":376,"wires":[]},{"id":"f2173234.8ac2","type":"function","z":"aafc0f94.6fc66","name":"time format","func":"// Create a Date object from the payload\nvar date = new Date(msg.payload);\n// Change the payload to be a formatted Date string\nmsg.payload = date.toString();\n// Return the message so it can be sent on\nreturn msg;","outputs":1,"noerr":0,"x":212,"y":273,"wires":[["99de335e.f26fc"]]}]] ``` ![Node-RED](./node-red_10.png) ## Creating a flow that does an HTTP Request > This example is slightly more complex and starts to bring in data from external sources to do something useful locally. > > * It will go out to an external web site > * grab some information > * read and convert that into a useful form ### Add an Inject node > For this example, the Inject node will be configured to trigger the flow at a regular interval. > > Drag an Inject node onto the workspace from the palette. > > Double click the node to bring up the edit dialog. Set the repeat interval to every 5 minutes. > > Click Ok to close the dialog. 
![Node-RED](./node-red_11.png) ### Add an HttpRequest node > The HttpRequest node can be used to retrieve a web-page when triggered. > > After adding one to the workspace, edit it to set the URL property to `https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/significant_week.csv`: ![Node-RED](./node-red_12.png) ### Add a CSV node > Add a CSV node and edit the properties, and tick the `Input - [x] First row contains column names`: ![Node-RED](./node-red_13.png) ### Add a Debug node > Add a Debug node to the output. ### Wire them all together > * Wire the Inject node output to the HttpRequest node input. > * Wire the HttpRequest node output to the CSV node input. > * Wire the CSV node output to the Debug node input. ### Add a Switch node > * Wire a Switch node to the output of the CSV node. > * Configure the property to be `msg.payload.mag` > * Configure the test to be >= and the value to be `7` ![Node-RED](./node-red_14.png) ### Add a Change node > * Wire a Change node to the output of the Switch node. > * Configure the node to Set, `msg.payload` to be `PANIC!`. ![Node-RED](./node-red_15.png) ### Add a Debug node > * Wire a Debug node to the output of the Change node ![Node-RED](./node-red_16.png) ### Deploy > Click the Deploy button. > > With the Debug sidebar tab selected (Ctrl-Space, or via the dropdown menu, then click the Debug tab), click the Inject button. You should see a list of entries with some contents that look like: ![Node-RED](./node-red_17.png) > You can now click on the little arrow to the left of each property to expand them and examine the contents > > If there were any quakes with a magnitude greater than 7 you will also see some output like: ``` msg.payload : string(6) PANIC! ``` > You can use the green buttons to the right of each debug node to turn on and off that particular debug node - for example deactivate the first debug node to only be notified if there was an earthquake with mag 7 or higher. ### Export ```json [{"id":"79a02957.897548","type":"tab","label":"Flow 3","disabled":false,"info":""},{"id":"843d22f1.0f3db","type":"inject","z":"79a02957.897548","name":"Check every 5min","topic":"","payload":"","payloadType":"date","repeat":"300","crontab":"","once":false,"onceDelay":0.1,"x":110,"y":69,"wires":[["9bfe934.885317"]]},{"id":"9bfe934.885317","type":"http request","z":"79a02957.897548","name":"sig. 
earthquake","method":"GET","ret":"txt","url":"https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/significant_week.csv","tls":"","x":154,"y":169,"wires":[["3553ec48.07f1d4"]]},{"id":"3553ec48.07f1d4","type":"csv","z":"79a02957.897548","name":"","sep":",","hdrin":true,"hdrout":"","multi":"one","ret":"\\n","temp":"","skip":"0","x":185,"y":275,"wires":[["3fe68794.f74ce8","38ac2b9d.22ded4"]]},{"id":"3fe68794.f74ce8","type":"debug","z":"79a02957.897548","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":257,"y":371,"wires":[]},{"id":"38ac2b9d.22ded4","type":"switch","z":"79a02957.897548","name":"","property":"payload.mag","propertyType":"msg","rules":[{"t":"gte","v":"7","vt":"str"}],"checkall":"true","repair":false,"outputs":1,"x":351,"y":217,"wires":[["a59b3ed8.5c641"]]},{"id":"a59b3ed8.5c641","type":"change","z":"79a02957.897548","name":"","rules":[{"t":"set","p":"payload","pt":"msg","to":"PANIC!","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":495,"y":143,"wires":[["1ac95f41.27a1c1"]]},{"id":"1ac95f41.27a1c1","type":"debug","z":"79a02957.897548","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":629,"y":45,"wires":[]}] ```<file_sep>--- date: "2017-12-17" title: "Python Network Logger" categories: - Python --- ![Harbin, China](./photo-33vfghg3253_sd5767gw324d90_o.png) <!-- TOC --> - [Python SSH Logger](#python-ssh-logger) <!-- /TOC --> # Python SSH Logger We want to build a small python program that allows us to log server states over the network by SSH. We start with creating 3 environment files that hold the server ip address, the SSH user login and the commands that we have to send to the server to get the information: 1. [serverip.env](./serverip.env) 2. [userlogin.env](./userlogin.env) 3. [commands.env](./commands.env) * [ip_file_valid.py](https://github.com/mpolinowski/python-ssh-logger/blob/master/ip_file_valid.py) : Ask for `serverip.env` file location and read IP address * [ip_addr_valid.py](https://github.com/mpolinowski/python-ssh-logger/blob/master/ip_addr_valid.py) : Check if Server IP is a valid IP and does not belong to a restricted range. * [ip_reach.py](https://github.com/mpolinowski/python-ssh-logger/blob/master/ip_reach.py) : Check if IP address can be pinged, * [ssh_connect.py](https://github.com/mpolinowski/python-ssh-logger/blob/master/ssh_connect.py) : Verify `userlogin.env` and `commands.env` and use __Paramiko__ to connect to your server and send commands via SSHv2. 
* [create_threads.py](https://github.com/mpolinowski/python-ssh-logger/blob/master/create_threads.py) : Create parallel threads for each SSH connection (only useful if you contact more than one server at once)

The command I want to use is `top -n 1`, which gives me an overview of the server load:

![Python Network Logger](./python-network-logger_01.png)

To extract the CPU load from the server reply, I am going to use the following regular expression - note that we cannot prepend our RegEx with `r` to get the RAW string, but have to use `b` to handle the response that Python calls __byte-like__:

```python
# Searching for the CPU utilization value within the output of "top -n 1"
cpu = re.search(b"(%Cpu\(s\): ) (.+?)(us)", server_response)
# cpu = server_response
# Extracting the second group, which matches the actual value of the CPU
# utilization, and decoding it from the binary data type to UTF-8
utilization = cpu.group(2).decode("utf-8")
# utilization = cpu.decode("utf-8")
# Printing the CPU utilization value to the screen
# print(utilization)
# Opening the CPU utilization text file and appending the results
with open("E:\\python-ssh-logger\\cpu-load.txt", "a") as f:
    # f.write("{},{}\n".format(str(datetime.datetime.now()), utilization))
    f.write(utilization + "\n")
```

A good way to develop a fitting regular expression for your task is to test it on [regex101.com](https://regex101.com/):

![Python Network Logger](./python-network-logger_02.png)

Once the txt file with the CPU utilization is created, we can use [matplotlib](https://matplotlib.org) to plot the results in [graph.py](https://github.com/mpolinowski/python-ssh-logger/blob/master/graph.py) (you might have to install the library first: `python -m pip install matplotlib`).

Running both programs in parallel shows us the real-time CPU utilization of our server:

![Python Network Logger](./python-network-logger_03.png)<file_sep>---
date: "2017-12-05"
title: "React Search Interface"
categories:
- Javascript
- React
- Elasticsearch
---

![Tanna Island, Vanuatu](./photo-34445485832_9f5f2a9aea_o.png)

<!-- TOC -->

- [elasticsearch-react-example](#elasticsearch-react-example)
- [Prerequisites](#prerequisites)
- [To run the example:](#to-run-the-example)
- [Original createClass Syntax](#original-createclass-syntax)
- [Update to an Elasticsearch 5.x index](#update-to-an-elasticsearch-5x-index)
- [ES6 Class Syntax](#es6-class-syntax)

<!-- /TOC -->

## elasticsearch-react-example

An example project showing how to use Elasticsearch with React - based on [elasticsearch-react-example](https://github.com/scotchfield/elasticsearch-react-example) by [scotchfield](https://github.com/scotchfield)

## Prerequisites

To run this example, you will need to configure Elasticsearch to accept requests from the browser using [CORS](http://en.wikipedia.org/wiki/Cross-origin_resource_sharing). To enable CORS, add the following to Elasticsearch's config file. Usually, this file is located near the elasticsearch executable at `config/elasticsearch.yml`. [source](https://github.com/spalger/elasticsearch-angular-example)

```yaml
http.cors:
  enabled: true
  allow-origin: /https?:\/\/localhost(:[0-9]+)?/
```

## To run the example:

1. Clone this repo locally (or just download and unzip it)

```bash
git clone https://github.com/mpolinowski/elasticsearch-react-example.git
```

2. Move into the project

```bash
cd elasticsearch-react-example
```

3. Run npm install

```bash
npm install
```

4. Run webpack (or webpack-dev-server) to build the index.js source file.
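A minimal sketch for step 4 - assuming webpack and webpack-dev-server are available as dev dependencies of the example (the exact npm script names may differ in the repository):

```bash
# one-off build of the bundle
npx webpack

# or serve the app with live reloading during development
npx webpack-dev-server --open
```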
---

## Original createClass Syntax

```js
import React from 'react'
import { render } from 'react-dom'
import elasticsearch from 'elasticsearch'

let client = new elasticsearch.Client({
  host: 'localhost:9200',
  log: 'trace'
})

const App = React.createClass({
  getInitialState() {
    return {
      results: []
    }
  },

  handleChange(event) {
    const search_query = event.target.value

    client.search({
      q: search_query
    }).then(function (body) {
      this.setState({ results: body.hits.hits })
    }.bind(this), function (error) {
      console.trace(error.message);
    });
  },

  render() {
    return (
      <div className="container">
        <input type="text" onChange={this.handleChange} />
        <SearchResults results={this.state.results} />
      </div>
    )
  }
})

const SearchResults = React.createClass({
  propTypes: {
    results: React.PropTypes.array
  },

  getDefaultProps() {
    return {
      results: []
    }
  },

  render() {
    return (
      <div className="search_results">
        <hr />
        <ul>
          {this.props.results.map((result) => {
            return <li key={result._id}>{result._source.name}</li>
          })}
        </ul>
      </div>
    )
  }
})

render(<App />, document.getElementById('main'))
```

## Update to an Elasticsearch 5.x index

```js
import React from 'react'
import { render } from 'react-dom'
import elasticsearch from 'elasticsearch'

const connectionString = 'localhost:9200';
const _index = 'wiki2_de_2017_09_09';
const _type = 'article';

// create the search client for the connection string defined above
let client = new elasticsearch.Client({
  host: connectionString,
  log: 'trace'
})

const App = React.createClass({
  getInitialState() {
    return {
      results: []
    }
  },

  handleChange(event) {
    const search_query = event.target.value

    client.search({
      index: _index,
      type: _type,
      q: search_query,
      body: {
        query: {
          multi_match: {
            query: search_query,
            fields: ['title^100', 'tags^100', 'abstract^20', 'description^10', 'chapter^5', 'title2^10', 'description2^10'],
            fuzziness: 1,
          },
        },
      },
    }).then(function (body) {
      this.setState({ results: body.hits.hits })
    }.bind(this), function (error) {
      console.trace(error.message);
    });
  },

  render() {
    return (
      <div className="container">
        <input type="text" onChange={this.handleChange} />
        <SearchResults results={this.state.results} />
      </div>
    )
  }
})

const SearchResults = React.createClass({
  propTypes: {
    results: React.PropTypes.array
  },

  getDefaultProps() {
    return {
      results: []
    }
  },

  render() {
    return (
      <div className="search_results">
        <hr />
        <ul>
          {this.props.results.map((result) => {
            return <li key={result._id}>
              <h3>{result._source.title}</h3><br />
              <a href={`${result._source.link}`}><img src={result._source.image} alt={result._source.abstract} /><br /></a>
              <p>{result._source.abstract}</p>
            </li>
          })}
        </ul>
      </div>
    )
  }
})

render(<App />, document.getElementById('main'))
```

## ES6 Class Syntax

```js
import React, { Component } from "react";
import { render } from "react-dom";
import elasticsearch from "elasticsearch";

const connectionString = 'localhost:9200';
const _index = 'wiki2_de_2017_09_09';
const _type = 'article';

let client = new elasticsearch.Client({
  host: connectionString,
  log: "trace"
});

class App extends Component {
  constructor(props) {
    super(props)
    this.state = {
      results: []
    };
    this.handleChange = this.handleChange.bind(this)
  }

  handleChange(event) {
    const search_query = event.target.value;

    client.search({
      index: _index,
      type: _type,
      body: {
        query: {
          multi_match: {
            query: search_query,
            fields: ['title^100', 'tags^100', 'abstract^20', 'description^10', 'chapter^5', 'title2^10', 'description2^10'],
            fuzziness: 1,
          },
        },
      },
    }).then(function (body) {
      this.setState({ results: body.hits.hits });
    }.bind(this), function (error) {
      console.trace(error.message);
    });
  }

  render() {
    return (
      <div className="container">
        <input type="text" onChange={this.handleChange} />
        <SearchResults results={this.state.results} />
      </div>
    );
  }
}

const SearchResults = ({ results }) => (
  <div className="search_results">
    <hr />
    <table>
      <thead>
        <tr>
          <th>Title</th>
        </tr>
      </thead>
      <tbody>
        {results.map((result, i) =>
          <ResultRow key={i} title={result._source.title2} />
        )}
      </tbody>
    </table>
  </div>
)

const ResultRow = ({ title }) => (
  <tr>
    <td>
      {title}
    </td>
  </tr>
)

render(<App />, document.getElementById("main"));
```

Further reading:

* https://www.newmediacampaigns.com/blog/refactoring-react-components-to-es6-classes
* https://babeljs.io/blog/2015/06/07/react-on-es6-plus
* https://medium.com/dailyjs/we-jumped-the-gun-moving-react-components-to-es2015-class-syntax-2b2bb6f35cb3
<file_sep>---
date: "2018-11-29"
title: "Node-RED and MQTT"
categories:
- IoT
- Node-RED
- MQTT
---

![Mongkok, <NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Debian Installation](#debian-installation)
- [Creating a Node Administrator User](#creating-a-node-administrator-user)
- [Installing Node-RED](#installing-node-red)
  - [Run Node-RED as a Service](#run-node-red-as-a-service)
  - [Create the SSL Certificate](#create-the-ssl-certificate)
  - [Configuring Node-RED](#configuring-node-red)
  - [The Node-RED Admin Panel](#the-node-red-admin-panel)
- [Installing Mosquitto](#installing-mosquitto)
  - [Configuring Mosquitto](#configuring-mosquitto)
  - [Testing Mosquitto from an external Machine](#testing-mosquitto-from-an-external-machine)
  - [Adding SSL Encryption](#adding-ssl-encryption)
- [Using Node-RED to communicate with your Camera](#using-node-red-to-communicate-with-your-camera)
  - [Alarm SET Flow](#alarm-set-flow)
  - [Alarm Recording](#alarm-recording)
    - [FTP Snapshot](#ftp-snapshot)

<!-- /TOC -->

## Debian Installation

We chose to install Node-RED through Node.js under __Debian 9 Linux__ on our mini PC - you can check the [Node.js Wiki](https://nodejs.org/en/download/package-manager/) for more Linux flavours. To create the installation USB stick we [downloaded the minimal amd64 image](https://www.debian.org/CD/netinst/) and used the tool __Etcher__ to prepare the USB stick. Once the USB stick is ready, deactivate secure boot in your mini PC's BIOS and boot from the stick.

We are not going to need a desktop environment. But __make sure that you install the SSH service__ - as we are going to use the SSH service to set up the Debian server.
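Before moving on, it can be worth confirming from the console that SSH is actually up, and finding the address to connect to - a quick check, assuming the standard Debian `ssh` package was selected during installation:

```bash
systemctl status ssh
ip a
```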
## Creating a Node Administrator User

Let's create a user to administer the Node.js processes:

```bash
adduser nodeadmin
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x01.png)

---

And make sure that the user is allowed to use the `sudo` command and switch users:

```bash
apt-get install sudo
addgroup nodeadmin sudo
su - nodeadmin
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x02.png)

---

## Installing Node-RED

We first need to install the [Node.js](https://nodejs.org/en/) runtime and will use the Node Package Manager (npm) to install [Node-RED](https://nodered.org) in the second step:

```bash
curl -sL https://deb.nodesource.com/setup_11.x | bash -
sudo apt-get install -y nodejs
```

Type in the install command and use your `nodeadmin` password to be allowed to execute it with `sudo`:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x03.png)

---

_(in case of the screenshot above, Node.js was already installed from an earlier setup)_

Now, with the Node runtime installed, we can install Node-RED using the Node Package Manager `npm`:

```bash
sudo npm install -g --unsafe-perm node-red
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x04.png)

---

### Run Node-RED as a Service

[PM2](https://pm2.io/doc/en/runtime/guide/process-management/) is a process manager for Node.js. It makes it easy to run applications on boot and ensure they are restarted if necessary. Just like Node-RED, we can install PM2 from npm:

```bash
sudo npm install -g pm2
pm2 start /usr/bin/node-red
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x05.png)

![Node-RED and INSTAR IP Cameras](./Node-RED_x06.png)

---

The second command takes the Node-RED process and runs it inside the Node.js runtime. PM2 is able to generate and configure a start-up script suitable for the platform it is being run on. Run these commands and follow the instructions it provides:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x07.png)

---

```bash
pm2 save
pm2 startup
sudo env PATH=$PATH:/usr/bin /usr/lib/node_modules/pm2/bin/pm2 startup systemd -u nodeadmin --hp /home/nodeadmin
```

The default port for the Node-RED admin panel is `1880` - let's make sure that FirewallD allows this traffic:

```bash
sudo firewall-cmd --permanent --zone=public --add-port=1880/tcp
```

### Create the SSL Certificate

We can create a self-signed key and certificate pair with OpenSSL in a single command:

```bash
sudo openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout /home/nodeadmin/.node-red/certs/node-red-selfsigned.key -out /home/nodeadmin/.node-red/certs/node-red-selfsigned.crt
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x08.png)

---

__Note__ that the server name is set to the local IP address of our Debian server (you have to change it according to your setup). If you want to access your Node-RED installation over a domain, you have to use that domain here instead. These options will create both a key file and a certificate. We will be asked a few questions about our server in order to embed the information correctly in the certificate.

Those certificates will be used in the next step to set up HTTPS access to the Node-RED admin panel.

### Configuring Node-RED

You will be able to find the Node-RED configuration file under `/home/nodeadmin/.node-red/`.
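If you want to confirm that the settings file and the certificate pair are where we expect them, a quick look - a sketch using standard OpenSSL tooling on the paths from above:

```bash
ls -l /home/nodeadmin/.node-red/
ls -l /home/nodeadmin/.node-red/certs/
openssl x509 -in /home/nodeadmin/.node-red/certs/node-red-selfsigned.crt -noout -subject -dates
```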
Let's open the file to add some additional configuration:

```bash
nano /home/nodeadmin/.node-red/settings.js
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x09.png)

---

With all the comments removed the file should boil down to the following:

```js
var fs = require("fs");

module.exports = {
  uiPort: process.env.PORT || 1880,
  mqttReconnectTime: 15000,
  serialReconnectTime: 15000,
  debugMaxLength: 1000,
  httpAdminRoot: '/admin',
  httpStatic: '/home/nodeadmin/.node-red/static/',
  ui: { path: "reddash" },
  adminAuth: {
    type: "credentials",
    users: [{
      username: "admin",
      password: "<PASSWORD>",
      permissions: "*"
    }],
    default: {
      permissions: "read"
    }
  },
  https: {
    key: fs.readFileSync('/home/nodeadmin/.node-red/certs/node-red-selfsigned.key'),
    cert: fs.readFileSync('/home/nodeadmin/.node-red/certs/node-red-selfsigned.crt')
  },
  requireHttps: true,
  functionGlobalContext: {},
  logging: {
    console: {
      level: "info",
      metrics: false,
      audit: false
    }
  },
  editorTheme: {
    projects: {
      enabled: false
    }
  },
}
```

Notable changes:

* Uncommented the import of `fs` (needed to add the SSL certificate)
* httpAdminRoot is set to `/admin` - that means that the Node-RED admin panel is available under `https://<IP Address>:1880/admin/` instead of `https://<IP Address>:1880/`
* We created a folder `/home/nodeadmin/.node-red/static/` to host static content for the Node-RED flow.
* The [node-red-dashboard](https://flows.nodered.org/node/node-red-dashboard) is an optional plugin for Node-RED that allows us to create - well, a dashboard... The line `ui: { path: "reddash" }` sets the URL for this dashboard to `https://<IP Address>:1880/reddash/`.
* For the `adminAuth` we create an Admin Login and set the default permission to `read` - which means that everyone can open the dashboard and see our Node-RED flows - but only the admin has the right to change them (you can remove the default permission, if not necessary).
* For `https` we linked in the certificate we created in the step before. Don't forget to set `requireHttps: true` to enforce an encrypted connection.

### The Node-RED Admin Panel

If you followed the recommendations above, you are now able to access Node-RED over `https://<IP Address>:1880/admin/` - in my case this is `https://192.168.2.111:1880/admin/`. Click on the user icon in the top left, log in with the admin login you generated in the previous step, click on the menu icon (to the right of the user login) and open the __Manage Palette__ menu:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x10.png)

---

In the following tutorials we are going to use a couple of functions that don't come with Node-RED directly. But we can install them from the [Node-RED Flow Library](https://flows.nodered.org) - feel free to browse it and see if there is something that you can use. You can install those from the palette menu - I already did that and highlighted them in my list of installed nodes:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x11.png)

---

Switch to the __Install__ tab, search for the following entries and click the install button:

* node-red-contrib-alexa-local
* node-red-contrib-bigtimer
* node-red-contrib-mjpgcamera
* node-red-contrib-string
* node-red-dashboard

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x12.png)

---

## Installing Mosquitto

MQTT is a machine-to-machine messaging protocol, designed to provide lightweight publish/subscribe communication to "Internet of Things" devices. It is commonly used for geo-tracking fleets of vehicles, home automation, environmental sensor networks, and utility-scale data collection.
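A quick aside on how MQTT topics work (general MQTT behaviour, not specific to any broker): topics are hierarchical strings, and subscribers can use the wildcards `+` (matches exactly one level) and `#` (matches all remaining levels):

```
office/camera1/alarm    matches  office/+/alarm  and  office/#
office/camera1/motion   matches  office/#   but not   office/+/alarm
```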
Mosquitto is a popular MQTT broker that has great community support and is easy to install and configure. We are going to use it to exchange states between subflows in Node-RED. MQTT will also come in handy once we start using other home automation systems like [OpenHAB](https://www.openhab.org), [Home Assistant](https://www.home-assistant.io) or [ioBroker](http://iobroker.net). We will be able to let those programs control our INSTAR IP cameras using the MQTT protocol.

First, download their repository signing key and install it:

```
wget http://repo.mosquitto.org/debian/mosquitto-repo.gpg.key
sudo apt-key add mosquitto-repo.gpg.key
```

Now tell apt-get where to find the software by adding the repository URL to a file in `/etc/apt/sources.list.d/`:

```
sudo nano /etc/apt/sources.list.d/mosquitto.list
```

This will open up a new, blank file. Paste the following line into the file.

```
deb http://repo.mosquitto.org/debian stretch main
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x13.png)

---

Save and close out of the editor, then update with apt-get to pull in the new package information. Then install the mosquitto package and its client software:

```
sudo apt-get update
sudo apt-get install mosquitto mosquitto-clients
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x14.png)

---

To test the Mosquitto service, log in to your server a second time, so you have two terminals side-by-side. In the new terminal, use mosquitto_sub to subscribe to the test topic:

```bash
mosquitto_sub -h localhost -t test
```

The `-h` flag specifies the hostname of the MQTT server, and `-t` is the topic name. You'll see no output after hitting ENTER because mosquitto_sub is waiting for messages to arrive. Switch back to your other terminal and publish a message:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x15.png)

---

```bash
mosquitto_pub -h localhost -t test -m "hello world"
```

The options for mosquitto_pub are the same as mosquitto_sub, though this time we use the additional `-m` option to specify our message. Hit ENTER, and you should see hello world pop up in the other terminal. You've sent your first MQTT message!

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x16.png)

---

### Configuring Mosquitto

Mosquitto includes a utility called `mosquitto_passwd` to generate a special password file. This command will prompt you to enter a password for the specified username (we choose __debian__ in the example), and place the results in `/etc/mosquitto/passwd`:

```bash
mosquitto_passwd -c /etc/mosquitto/passwd debian
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x17.png)

---

Now create a new configuration file for Mosquitto and tell it to use this password file to require logins for all connections:

```bash
nano /etc/mosquitto/conf.d/default.conf
```

This should open an empty file. Paste in the following:

```bash
allow_anonymous false
password_file /etc/mosquitto/passwd
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x18.png)

---

`allow_anonymous false` disables all non-authenticated connections, and the `password_file` line tells Mosquitto where to look for user and password information. Save and exit the file. Now you need to reload Mosquitto and test your changes.

```bash
service mosquitto reload
```

### Testing Mosquitto from an external Machine

Since the devices that you want to connect will probably be contacting your MQTT server from an external IP address, let's test this and make sure everything is working.
First, let's open the MQTT port in our FirewallD configuration:

```
sudo firewall-cmd --permanent --zone=public --add-port=1883/tcp
sudo firewall-cmd --reload
sudo firewall-cmd --list-all
```

We use MQTT.fx to debug our MQTT server. Let's install it on another machine (Linux, Windows or macOS) and click on the Cog Wheel icon left to the Connect and Disconnect button to add our newly setup MQTT server:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x19.png)

---

Replace the IP address ("192.168.2.111") with the address of your Linux machine that runs the MQTT service and add the login that you created for the Mosquitto server. Save the settings and connect to the server - the green light on the right should show that the connection was successfully established.

Change to the __Subscribe__ tab, type in `test` and hit __Subscribe__:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x20.png)

---

Switch to the __Publish__ tab, type in test - as the topic you want to publish a message under - and add a message as payload in the text field below, e.g. `{"hello":"world"}`. Then hit publish:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x21.png)

---

Switch back to the __Subscribe__ tab to verify that MQTT.fx successfully published the message and that our Mosquitto server notified clients with subscriptions to the topic about the change - in this case only our MQTT.fx instance:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x22.png)

---

### Adding SSL Encryption

We are only going to use the MQTT protocol on our secure local network and decided not to encrypt the messages sent by our IoT devices. If you plan to use the MQTT communication over the internet there is no way around wrapping those messages in an SSL/TLS encryption layer. There are two (external) tutorials that will help you set this up. The first tutorial shows you how to [work with a self-signed certificate](http://www.steves-internet-guide.com/mosquitto-tls/) and the second one helps you to set up [Certbot and use Let's Encrypt](https://www.digitalocean.com/community/tutorials/how-to-install-and-secure-the-mosquitto-mqtt-messaging-broker-on-ubuntu-16-04) to create the certificate and auto-renew it.

## Using Node-RED to communicate with your Camera

The Node-RED admin panel is a visual tool to string function nodes into functional flows. All the flows used in this tutorial can be downloaded below and imported into the admin panel using the __Import Dialogue__.

#### Alarm SET Flow

Please copy the [Set Alarm Flow](https://wiki.instar.com/Node-RED_Flows/nodered_flow_set-alarm-office.json) and open the import window on the Node-RED panel:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x23.png)

---

Paste the content into the input text field and hit the __Import__ button:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x24.png)

---

The Node-RED flow should be imported into a new tab, called __SET Alarm Office__. Hit the red __Deploy__ button to save and load the flow:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x25.png)

---

We [configured Node-RED](#configuring-node-red) to display the admin panel on `https://<IP of your Server>:1880/admin`. You can switch to `https://<IP of your Server>:1880/reddash` to see the corresponding __Node-RED Dashboard__.
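For reference, the flow's HTTP-request nodes send plain CGI calls to the camera - a sketch of such a request (using the motion-area command from the CGI documentation; the IP address is a placeholder, and the authentication parameters depend on your camera model and login):

```bash
curl "http://<camera-ip>/param.cgi?cmd=setmdattr&-name=1&-enable=1"
```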
Just type in your __INSTAR Full HD camera's__ IP address and hit submit:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x26.png)

---

This flow can, of course, also be used with any other IP camera model - you just have to exchange the CGI commands used with the ones that you need from our documentation for [Full HD](https://wiki.instar.com/1080p_Series_CGI_List/), [HD](https://wiki.instar.com/720p_Series_CGI_List/) and [VGA](https://wiki.instar.com/Advanced_User/CGI_Commands/VGA_Series_CGI_List/) camera models:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x27.png)

---

#### Alarm Recording

You can download the [Alarm Recording Flow](https://wiki.instar.com/Node-RED_Flows/nodered_flow_alarm_recording_outdoor.json) here.

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x37.png)

---

The flow consists of two parts: the first enables you to manually upload a snapshot via FTP. And the second uses the PIR motion detector of your camera (of course, only applicable to cameras with PIR sensors) to detect motion, activate the nightvision LEDs and start a 1min recording on the internal SD card of your camera every time an alarm is triggered. The use case is - you want to disable the IR nightvision LEDs during the night, which saves you electricity, makes your camera less intrusive and prevents the spider web problem (insects love the warm and illuminated front of your camera).

##### FTP Snapshot

To use this flow, we will need to configure an FTP server on Debian 9 Stretch Linux allowing our INSTAR IP cameras to upload snapshots and videos when a motion alarm is triggered. The following tutorial will explain how to install and configure the FTP server using the `vsFTPd` daemon.

```bash
sudo apt-get install vsftpd ftp
```

By default, the vsFTPd server comes configured to allow system users to access their home directories with read-only access. We will have to make changes to the vsFTPd config file `/etc/vsftpd.conf`:

```bash
sudo nano /etc/vsftpd.conf
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x28.png)

---

The result should look something like this:

```bash
listen=YES
listen_ipv6=NO
local_enable=YES
write_enable=YES
dirmessage_enable=YES
use_localtime=YES
xferlog_enable=YES
connect_from_port_20=YES
secure_chroot_dir=/var/run/vsftpd/empty
pam_service_name=vsftpd
rsa_cert_file=/etc/ssl/certs/nginx-selfsigned.crt
rsa_private_key_file=/etc/ssl/private/nginx-selfsigned.key
ssl_enable=YES
# implicit_ssl=YES
listen_port=21
allow_anon_ssl=NO
force_local_data_ssl=NO
force_local_logins_ssl=NO
ssl_tlsv1=YES
ssl_sslv2=NO
ssl_sslv3=NO
userlist_file=/etc/vsftpd.userlist
userlist_enable=YES
userlist_deny=NO
```

We are using the [SSL Certificate](#create-the-ssl-certificate) that we generated for our NGINX server (of course you can also create a separate certificate for vsFTPd following the instructions from the link above) and tell vsFTPd to enforce SSL encryption. Note that we __cannot use implicit SSL__ - as our Full HD cameras only use explicit encryption. __You can skip this part for HD and VGA cameras__, as they do not support FTPS. Just set `ssl_enable=NO` to deactivate the encryption.

The above config file will only grant __read-only__ access to any system user listed within the `/etc/passwd` file. To add __write access__ for all local system users, uncomment or add the following stanza: `write_enable=YES`. The new configuration file now contains:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x29.png)

---

By default, our FTP server allows access to any system user defined within the `/etc/passwd` file.
In order to allow only specific users to be able to log in we can include the following lines in our configuration file:

```bash
userlist_file=/etc/vsftpd.userlist
userlist_enable=YES
userlist_deny=NO
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x30.png)

---

The above will enable a predefined user list where any user listed within `/etc/vsftpd.userlist` will have access to the FTP server. Let's first create a new user for the FTP service called `ftpuser` by typing the following command and adding a UNIX password:

```bash
sudo adduser ftpuser
```

Let's create a new `/etc/vsftpd.userlist` user list and add our `ftpuser` (note that a plain `sudo echo ... > file` would fail here, since the redirection is done by your unprivileged shell - piping through `sudo tee` avoids that):

```bash
echo ftpuser | sudo tee /etc/vsftpd.userlist
```

Next, restart your vsFTPd:

```bash
sudo systemctl restart vsftpd
```

You can check if the FTP service is running on port 21 with the tool `netstat` - but you might have to install `net-tools` first:

```bash
sudo apt-get install net-tools
netstat -npl
```

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x31.png)

---

Now open ports 20/21 in your firewall:

```bash
sudo firewall-cmd --permanent --zone=public --add-port=20-21/tcp
sudo firewall-cmd --reload
sudo firewall-cmd --list-all
```

You can test access to your FTP server using your default web browser - just prepend `ftp://` to your Linux server's IP address, e.g. `ftp://192.168.2.111`, and log in with the `ftpuser`:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x32.png)

---

Note that the directory that is used for the `ftpuser` is its __home__ directory in `/home/ftpuser/`. You can use a tool like the [Filezilla Client](https://filezilla-project.org/download.php?type=client) to log in to your FTP server and upload a file to check if the write access is working (the advantage of using Filezilla over just trying to send an image directly from the FTP service of your INSTAR Full HD camera is that Filezilla gives a more thorough error report in case something goes wrong):

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x33.png)

---

Now it's time to try to upload a test image from our INSTAR Full HD camera to the FTP server. Let's create a folder `/home/ftpuser/outdoorcam` on our Linux server, add the FTP information to our camera configuration and press __Apply__:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x34.png)

---

__Note__ that we added `TLS encryption` just as we defined it in `/etc/vsftpd.conf`:

```
ssl_tlsv1=YES
ssl_sslv2=NO
ssl_sslv3=NO
```

__Note 2__: we need to use the __PORT Mode__ since we are operating the FTP server behind a firewall!

We now just have one issue left - the files will be uploaded to the `/home/ftpuser` directory and will be owned by the `ftpuser`. Since we are going to start the Node-RED process with a different user - `nodeadmin` - we have to make sure that the files can also be handled by Node-RED. Type `sudo nano /etc/vsftpd.conf` and add the following line:

```
local_umask=0002
```

You can also make the uploaded files executable - if that is something you need... __potentially dangerous__ - by changing this line to `file_open_mode=0777`.
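As a quick sanity check of the umask arithmetic: with `local_umask=0002`, new files are created with mode `666 & ~0002 = 664` (directories `777 & ~0002 = 775`), i.e. group-writable. After a test upload you can verify this against the `outdoorcam` folder from above:

```bash
ls -l /home/ftpuser/outdoorcam
# a freshly uploaded snapshot should show -rw-rw-r-- (664)
```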
---

![Node-RED and INSTAR IP Cameras](./Node-RED_x35.png)

---

To make the `/home/ftpuser` directory accessible for our user, type:

```bash
sudo chmod -R 777 /home/ftpuser
```

This way we are able to set the directory to be the Static Directory of our Node-RED Dashboard and display uploaded images:

---

![Node-RED and INSTAR IP Cameras](./Node-RED_x36.png)

---<file_sep>---
date: "2019-01-13"
title: "Centos Administration"
categories:
- LINUX
---

![Abashiri, Japan](./photo-19196703263_69f9f0df5f_o.png)

<!-- TOC -->

- [Changing your SSH login](#changing-your-ssh-login)
- [Running FirewallD](#running-firewalld)
- [Changing the SSH Port](#changing-the-ssh-port)
- [Changing the Hostname](#changing-the-hostname)
- [Kernel Update Troubleshooting](#kernel-update-troubleshooting)
- [Working with IP tables](#working-with-ip-tables)
  - [How do I view blocked ports rules?](#how-do-i-view-blocked-ports-rules)

<!-- /TOC -->

## Changing your SSH login

SSH uses your Linux user password. Simply use the passwd command after logging in. It will prompt you to change it. Or name a different user to change theirs:

```bash
passwd
passwd <PASSWORD>
```

## Running FirewallD

```bash
yum install -y firewalld
systemctl enable firewalld
systemctl start firewalld
systemctl status firewalld
firewall-cmd --permanent --zone=public --add-service=http
firewall-cmd --permanent --zone=public --add-port=1880/tcp
firewall-cmd --reload
firewall-cmd --list-all
```

## Changing the SSH Port

```bash
firewall-cmd --permanent --zone=public --add-port=4444/tcp
firewall-cmd --reload
semanage port -a -t ssh_port_t -p tcp 4444
nano /etc/ssh/sshd_config
```

```yaml
# If you want to change the port on a SELinux system, you have to tell
# SELinux about this change.
# semanage port -a -t ssh_port_t -p tcp #PORTNUMBER
#
Port 4444
#AddressFamily any
#ListenAddress 0.0.0.0
#ListenAddress ::
```

```bash
service sshd restart
```

## Changing the Hostname

```bash
hostnamectl set-hostname your-new-hostname
hostnamectl
systemctl reboot
```

## Kernel Update Troubleshooting

How to fix: `At least xMB more space needed on the /boot filesystem`. List all installed kernel packages with:

```bash
yum list kernel
```

The kernel in-use will be underlined and cannot be removed. Now to remove unused kernels, install the yum-utils package and use the package-cleanup util:

```bash
yum install yum-utils
package-cleanup --oldkernels --count=2
```

To make this permanent, edit `/etc/yum.conf` and change the following line:

```bash
installonly_limit=2
```

## Working with IP tables

### How do I view blocked ports rules?

_Use the iptables command:_

```bash
# /sbin/iptables -L -n -v
# /sbin/iptables -L -n -v | grep port
# /sbin/iptables -L -n -v | grep -i DROP
# /sbin/iptables -L OUTPUT -n -v
# /sbin/iptables -L INPUT -n -v
```

### Generally Useful Rules

This section includes a variety of iptables commands that will create rules that are generally useful on most servers.

#### Allow Loopback Connections

The loopback interface, also referred to as lo, is what a computer uses to forward network connections to itself. For example, if you run ping localhost or ping 127.0.0.1, your server will ping itself using the loopback. The loopback interface is also used if you configure your application server to connect to a database server with a "localhost" address. As such, you will want to be sure that your firewall is allowing these connections.
To accept all traffic on your loopback interface, run these commands:

```bash
sudo iptables -A INPUT -i lo -j ACCEPT
sudo iptables -A OUTPUT -o lo -j ACCEPT
```

#### Allow Established and Related Incoming Connections

As network traffic generally needs to be two-way - incoming and outgoing - to work properly, it is typical to create a firewall rule that allows established and related incoming traffic, so that the server will allow return traffic to outgoing connections initiated by the server itself. This command will allow that:

```bash
sudo iptables -A INPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT
```

#### Allow Established Outgoing Connections

You may want to allow outgoing traffic of all established connections, which are typically the response to legitimate incoming connections. This command will allow that:

```bash
sudo iptables -A OUTPUT -m conntrack --ctstate ESTABLISHED -j ACCEPT
```

#### Internal to External

Assuming eth0 is your external network, and eth1 is your internal network, this will allow your internal network to access the external:

```bash
sudo iptables -A FORWARD -i eth1 -o eth0 -j ACCEPT
```

#### Drop Invalid Packets

Some network traffic packets get marked as invalid. Sometimes it can be useful to log this type of packet but often it is fine to drop them. Do so with this command:

```bash
sudo iptables -A INPUT -m conntrack --ctstate INVALID -j DROP
```

#### Block an IP Address

To block network connections that originate from a specific IP address, 15.15.15.51 for example, run this command:

```bash
sudo iptables -A INPUT -s 15.15.15.51 -j DROP
```

In this example, `-s 15.15.15.51` specifies a source IP address of "15.15.15.51". The source IP address can be specified in any firewall rule, including an allow rule. If you want to reject the connection instead, which will respond to the connection request with a "connection refused" error, replace "DROP" with "REJECT" like this:

```bash
sudo iptables -A INPUT -s 15.15.15.51 -j REJECT
```

#### Block Connections to a Network Interface

To block connections from a specific IP address, e.g. 15.15.15.51, to a specific network interface, e.g. eth0, use this command:

```bash
iptables -A INPUT -i eth0 -s 15.15.15.51 -j DROP
```

This is the same as the previous example, with the addition of `-i eth0`. The network interface can be specified in any firewall rule, and is a great way to limit the rule to a particular network.

### Service: SSH

If you're using a cloud server, you will probably want to allow incoming SSH connections (port 22) so you can connect to and manage your server. This section covers how to configure your firewall with various SSH-related rules.

#### Allow All Incoming SSH

To allow all incoming SSH connections run these commands:

```bash
sudo iptables -A INPUT -p tcp --dport 22 -m conntrack --ctstate NEW,ESTABLISHED -j ACCEPT
sudo iptables -A OUTPUT -p tcp --sport 22 -m conntrack --ctstate ESTABLISHED -j ACCEPT
```

The second command, which allows the outgoing traffic of established SSH connections, is only necessary if the OUTPUT policy is not set to ACCEPT.

#### Allow Incoming SSH from Specific IP address or subnet

To allow incoming SSH connections from a specific IP address or subnet, specify the source. For example, if you want to allow the entire 15.15.15.0/24 subnet, run these commands:

```bash
sudo iptables -A INPUT -p tcp -s 15.15.15.0/24 --dport 22 -m conntrack --ctstate NEW,ESTABLISHED -j ACCEPT
sudo iptables -A OUTPUT -p tcp --sport 22 -m conntrack --ctstate ESTABLISHED -j ACCEPT
```

The second command, which allows the outgoing traffic of established SSH connections, is only necessary if the OUTPUT policy is not set to ACCEPT.

#### Allow Outgoing SSH

If your firewall OUTPUT policy is not set to ACCEPT, and you want to allow outgoing SSH connections - your server initiating an SSH connection to another server - you can run these commands:

```bash
sudo iptables -A OUTPUT -p tcp --dport 22 -m conntrack --ctstate NEW,ESTABLISHED -j ACCEPT
sudo iptables -A INPUT -p tcp --sport 22 -m conntrack --ctstate ESTABLISHED -j ACCEPT
```

#### Allow Incoming Rsync from Specific IP Address or Subnet

Rsync, which runs on port 873, can be used to transfer files from one computer to another. To allow incoming rsync connections from a specific IP address or subnet, specify the source IP address and the destination port. For example, if you want to allow the entire 192.168.3.11/24 subnet to be able to rsync to your server, run these commands:

```bash
sudo iptables -A INPUT -p tcp -s 192.168.3.11/24 --dport 873 -m conntrack --ctstate NEW,ESTABLISHED -j ACCEPT
sudo iptables -A OUTPUT -p tcp --sport 873 -m conntrack --ctstate ESTABLISHED -j ACCEPT
```

The second command, which allows the outgoing traffic of established rsync connections, is only necessary if the OUTPUT policy is not set to ACCEPT.

### Service: Web Server

Web servers, such as Apache and Nginx, typically listen for requests on port 80 and 443 for HTTP and HTTPS connections, respectively. If your default policy for incoming traffic is set to drop or deny, you will want to create rules that will allow your server to respond to those requests.

#### Allow All Incoming HTTP

To allow all incoming HTTP (port 80) connections run these commands:

```bash
sudo iptables -A INPUT -p tcp --dport 80 -m conntrack --ctstate NEW,ESTABLISHED -j ACCEPT
sudo iptables -A OUTPUT -p tcp --sport 80 -m conntrack --ctstate ESTABLISHED -j ACCEPT
```

The second command, which allows the outgoing traffic of established HTTP connections, is only necessary if the OUTPUT policy is not set to ACCEPT.

#### Allow All Incoming HTTPS

To allow all incoming HTTPS (port 443) connections run these commands:

```bash
sudo iptables -A INPUT -p tcp --dport 443 -m conntrack --ctstate NEW,ESTABLISHED -j ACCEPT
sudo iptables -A OUTPUT -p tcp --sport 443 -m conntrack --ctstate ESTABLISHED -j ACCEPT
```

The second command, which allows the outgoing traffic of established HTTPS connections, is only necessary if the OUTPUT policy is not set to ACCEPT.

#### Allow All Incoming HTTP and HTTPS

If you want to allow both HTTP and HTTPS traffic, you can use the multiport module to create a rule that allows both ports. To allow all incoming HTTP and HTTPS (ports 80 and 443) connections run these commands:

```bash
sudo iptables -A INPUT -p tcp -m multiport --dports 80,443 -m conntrack --ctstate NEW,ESTABLISHED -j ACCEPT
sudo iptables -A OUTPUT -p tcp -m multiport --sports 80,443 -m conntrack --ctstate ESTABLISHED -j ACCEPT
```

The second command, which allows the outgoing traffic of established HTTP and HTTPS connections, is only necessary if the OUTPUT policy is not set to ACCEPT.

Run the following. It'll insert the rule at the top of your iptables and will allow all traffic unless subsequently handled by another rule.

```bash
iptables -I INPUT -j ACCEPT
```

You can also flush your entire iptables setup with the following:

```bash
iptables -F
iptables -X
iptables -t nat -F
iptables -t nat -X
iptables -t mangle -F
iptables -t mangle -X
iptables -P INPUT ACCEPT
iptables -P FORWARD ACCEPT
iptables -P OUTPUT ACCEPT
```

If you flush it, you might want to run something like:

```bash
iptables -A INPUT -i lo -j ACCEPT -m comment --comment "Allow all loopback traffic"
iptables -A INPUT ! -i lo -d 127.0.0.0/8 -j REJECT -m comment --comment "Drop all traffic to 127 that doesn't use lo"
iptables -A OUTPUT -j ACCEPT -m comment --comment "Accept all outgoing"
iptables -A INPUT -j ACCEPT -m comment --comment "Accept all incoming"
iptables -A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT -m comment --comment "Allow all incoming on established connections"
iptables -A INPUT -j REJECT -m comment --comment "Reject all incoming"
iptables -A FORWARD -j REJECT -m comment --comment "Reject all forwarded"
```
If you want to be a bit safer with your traffic, don't use the accept all incoming rule, or remove it with `iptables -D INPUT -j ACCEPT -m comment --comment "Accept all incoming"`, and add more specific rules like:

```bash
iptables -I INPUT -p tcp --dport 80 -j ACCEPT -m comment --comment "Allow HTTP"
iptables -I INPUT -p tcp --dport 443 -j ACCEPT -m comment --comment "Allow HTTPS"
iptables -I INPUT -p tcp -m state --state NEW --dport 22 -j ACCEPT -m comment --comment "Allow SSH"
iptables -I INPUT -p tcp --dport 8071:8079 -j ACCEPT -m comment --comment "Allow torrents"
```

NOTE: They need to be above the 2 reject rules at the bottom, so use `-I` to insert them at the top. Or if you're anal like me, use `iptables -nL --line-numbers` to get the line numbers, then use `iptables -I INPUT ...` to insert a rule at a specific line number.

Finally, save your work with:

```bash
iptables-save > /etc/network/iptables.rules #Or wherever your iptables.rules file is
```
<file_sep>---
date: "2019-01-12"
title: "Ubuntu Network Configuration"
categories:
- LINUX
---

![Battambang, Cambodia](./photo-19196703263_69f9f0df5f_o.jpg)

<!-- TOC -->

- [Check your network settings](#check-your-network-settings)
- [Configure Netplan](#configure-netplan)
  - [Test and Apply your Settings](#test-and-apply-your-settings)

<!-- /TOC -->

## Check your network settings

```bash
ip a

2: enp3s0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc fq_codel state UP group default qlen 1000
    link/ether 74:d4:35:c7:c2:20 brd ff:ff:ff:ff:ff:ff
    inet 192.168.2.53/24 brd 192.168.2.255 scope global enp3s0
       valid_lft forever preferred_lft forever
```

The current IP address for the Ethernet interface is `192.168.2.53`. And the gateway is `192.168.2.1` as seen below:

```bash
ip route

default via 192.168.2.1 dev enp3s0 proto static
```

## Configure Netplan

```bash
ls /etc/netplan/

00-installer-config.yaml

sudo nano /etc/netplan/00-installer-config.yaml
```

Delete / overwrite the configuration inside this file, e.g.:

```bash
# This is the network config written by 'subiquity'
network:
  ethernets:
    enp3s0:
      addresses:
      - 192.168.2.110/24
      gateway4: 192.168.2.5
      nameservers:
        addresses:
        - 192.168.2.5
        - 8.8.4.4
        search:
        - workgroup
  version: 2
```

### Test and Apply your Settings

```bash
sudo netplan try

Warning: Stopping systemd-networkd.service, but it can still be activated by:
  systemd-networkd.socket

Do you want to keep these settings?
Press ENTER before the timeout to accept the new configuration

Changes will revert in 107 seconds
Configuration accepted.
```

If there is no issue, it will return the configuration accepted message. If the configuration file fails the test, it will be reverted to a previous working configuration.
```bash
sudo netplan apply
```

Verify your changes:

```bash
ip a

2: enp3s0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc fq_codel state UP group default qlen 1000
    link/ether 74:d4:35:c7:c2:20 brd ff:ff:ff:ff:ff:ff
    inet 192.168.2.110/24 brd 192.168.2.255 scope global enp3s0
       valid_lft forever preferred_lft forever
    inet6 fe80::76d4:35ff:fec7:c220/64 scope link
       valid_lft forever preferred_lft forever

ip route

default via 192.168.2.5 dev enp3s0 proto static
```<file_sep>// DOM references for the demo page
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const baseUrl = "https://randomuser.me/api/"

// fetch a random user as JSON when the button is clicked
btn.addEventListener("click", getInput)

function getInput() {
    // build a GET request with explicit options
    let params = new Request(baseUrl, {
        method: "GET",
        mode: "cors",
        headers: new Headers(),
        cache: "default"
    })

    fetch(params).then(function (response) {
        return response.json();
    }).then(function (data) {
        console.log(data);
    })
    .catch(function(error) {
        console.log(error);
    })
}

// variant: read the response body as plain text
function getInput1() {
    fetch(baseUrl).then(function (response) {
        return response.text();
    }).then(function (data) {
        console.log(data);
    })
}

// variant: fetch an image as a blob and display it on the page
function getInput2() {
    let url = "photo-34475542491_9069464269_o-cover.jpg";

    fetch(url).then(function (response) {
        return response.blob();
    }).then(function (data) {
        let pathImage = URL.createObjectURL(data);
        document.querySelector("img").src = pathImage;
    })
}
<file_sep>---
date: "2020-05-03"
title: "Neo4j Cypher Cheat Sheet"
categories:
- Databases
---

![<NAME>, <NAME>](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Cypher Fundamentals](#cypher-fundamentals)
- [Browser editor](#browser-editor)
  - [CLI](#cli)
- [Match](#match)
  - [Match node](#match-node)
  - [Match nodes and relationships](#match-nodes-and-relationships)
  - [Match labels](#match-labels)
  - [Match multiple labels](#match-multiple-labels)
  - [Match same properties](#match-same-properties)
  - [Match friends of friends with same hobbies](#match-friends-of-friends-with-same-hobbies)
  - [Match by ID](#match-by-id)
- [Create](#create)
  - [Create node](#create-node)
  - [Create nodes and relationships](#create-nodes-and-relationships)
  - [Create relationship between 2 unrelated nodes](#create-relationship-between-2-unrelated-nodes)
  - [Create node with multiple labels](#create-node-with-multiple-labels)
- [Update](#update)
  - [Update node properties (add new or modify)](#update-node-properties-add-new-or-modify)
  - [Replace all node properties for the new ones](#replace-all-node-properties-for-the-new-ones)
  - [Add new node properties without deleting old ones](#add-new-node-properties-without-deleting-old-ones)
  - [Add new node property if property not already set](#add-new-node-property-if-property-not-already-set)
  - [Rename a property in all nodes](#rename-a-property-in-all-nodes)
  - [Add label to existing node](#add-label-to-existing-node)
  - [Creates the node if not exists and updates (or creates) a property](#creates-the-node-if-not-exists-and-updates-or-creates-a-property)
- [Delete](#delete)
  - [Delete nodes](#delete-nodes)
  - [Deletes a property in a specific node](#deletes-a-property-in-a-specific-node)
  - [Delete a label from all nodes](#delete-a-label-from-all-nodes)
  - [Delete a label from nodes with specific labels](#delete-a-label-from-nodes-with-specific-labels)
  - [Delete multiple labels from nodes](#delete-multiple-labels-from-nodes)
  - [Delete entire database](#delete-entire-database)
- [Other clauses](#other-clauses)
  - [Show execution plan](#show-execution-plan)
  - [Count](#count)
  - [Limit](#limit)
  - [Create unique property constraint](#create-unique-property-constraint)
  - [Drop unique property constraint](#drop-unique-property-constraint)
- [Useful Cypher Queries for Neo4J](#useful-cypher-queries-for-neo4j)

<!-- /TOC -->

_Just a bunch of cyphering I found online - all in one place for easy consumption_

## Cypher Fundamentals

Store any kind of data using the following graph concepts:

* **Node**: Graph data records
* **Relationship**: Connect nodes (has direction and a type)
* **Property**: Stores data in key-value pair in nodes and relationships
* **Label**: Groups nodes and relationships (optional)

---

## Browser editor

### CLI

Examples: `:help` `:clear`

---

## Match

### Match node

```bash
MATCH (ee:Person) WHERE ee.name = "Romeo" RETURN ee;
```

* **MATCH** clause to specify a pattern of nodes and relationships
* **(ee:Person)** a single node pattern with label 'Person' which will assign matches to the variable `ee`
* **WHERE** clause to constrain the results
* **ee.name = "Romeo"** compares name property to the value "Romeo"
* **RETURN** clause used to request particular results

### Match nodes and relationships

```bash
MATCH (ee:Person)-[:KNOWS]-(friends)
WHERE ee.name = "Romeo"
RETURN ee, friends
```

* **MATCH** clause to describe the pattern from known Nodes to found Nodes
* **(ee)** starts the pattern with a Person (qualified by WHERE)
* **-[:KNOWS]-** matches "KNOWS" relationships (in either direction)
* **(friends)** will be bound to Romeo's friends

### Match labels

```bash
MATCH (n:Person)
RETURN n
```

or

```bash
MATCH (n)
WHERE n:Person
RETURN n
```

### Match multiple labels

`:Car` **OR** `:Person` labels

```bash
MATCH (n)
WHERE n:Person OR n:Car
RETURN n
```

`:Car` **AND** `:Person` labels

```bash
MATCH (n)
WHERE n:Person:Car
RETURN n
```

### Match same properties

```bash
MATCH (a:Person)
WHERE a.from = "Korea"
RETURN a
```

Returns every node (and their relationships) where there's a property `from` with "Korea" value

### Match friends of friends with same hobbies

Johan is learning surfing, and wants to know any friend of his friends who already knows surfing

```bash
MATCH (js:Person)-[:KNOWS]-()-[:KNOWS]-(surfer)
WHERE js.name = "Johan" AND surfer.hobby = "surfing"
RETURN DISTINCT surfer
```

* **()** empty parenthesis to ignore these nodes
* **DISTINCT** because more than one path will match the pattern
* **surfer** will contain Allison, a friend of a friend who surfs

### Match by ID

Every node and relationship has an internal autonumeric ID, which can be queried using the `<`, `<=`, `=`, `>=`, `>` and `IN` operators:

**Search node by ID**

```bash
MATCH (n)
WHERE id(n) = 0
RETURN n
```

**Search multiple nodes by ID**

```bash
MATCH (n)
WHERE id(n) IN [1, 2, 3]
RETURN n
```

**Search relationship by ID**

```bash
MATCH ()-[n]-()
WHERE id(n) = 0
RETURN n
```

---

## Create

### Create node

```bash
CREATE (ee:Person { name: "Romeo", from: "Korea", klout: 99 })
```

* **CREATE** clause to create data
* **()** parenthesis to indicate a node
* **ee:Person** a variable `ee` and label `Person` for the new node
* **{}** brackets to add properties (key-value pairs) to the node
### Create nodes and relationships

```bash
MATCH (ee:Person) WHERE ee.name = "Romeo"
CREATE (js:Person { name: "Johan", from: "Korea", learn: "surfing" }),
(ir:Person { name: "Ian", from: "England", title: "author" }),
(rvb:Person { name: "Rik", from: "Belgium", pet: "Orval" }),
(ally:Person { name: "Allison", from: "California", hobby: "surfing" }),
(ee)-[:KNOWS {since: 2001}]->(js), (ee)-[:KNOWS {rating: 5}]->(ir),
(js)-[:KNOWS]->(ir), (js)-[:KNOWS]->(rvb),
(ir)-[:KNOWS]->(js), (ir)-[:KNOWS]->(ally),
(rvb)-[:KNOWS]->(ally)
```

* **MATCH** clause to get "Romeo" in `ee` variable
* **CREATE** clause to create multiple nodes (comma separated) with their labels and properties. Also creates directed relationships `(a)-[:Label {key: value}]->(b)`

### Create relationship between 2 unrelated nodes

```bash
MATCH (n), (m)
WHERE n.name = "Allison" AND m.name = "Romeo"
CREATE (n)-[:KNOWS]->(m)
```

Alternative with `MERGE`, which ensures that the relationship is created only **once**

```bash
MATCH (n:User {name: "Allison"}), (m:User {name: "Romeo"})
MERGE (n)-[:KNOWS]->(m)
```

### Create node with multiple labels

```bash
CREATE (n:Actor:Director)
```

---

## Update

### Update node properties (add new or modify)

Add new `.owns` property or modify (if exists)

```bash
MATCH (n)
WHERE n.name = "Rik"
SET n.owns = "Audi"
```

### Replace all node properties for the new ones

**Danger**: It will delete all previous properties and create `.plays` and `.age` properties

```bash
MATCH (n)
WHERE n.name = "Rik"
SET n = {plays: "Piano", age: 23}
```

### Add new node properties without deleting old ones

**Danger**: If `.plays` or `.age` properties are already set, it will overwrite them

```bash
MATCH (n)
WHERE n.name = "Rik"
SET n += {plays: "Piano", age: 23}
```

### Add new node property if property not already set

```bash
MATCH (n)
WHERE n.plays = "Guitar" AND NOT (EXISTS (n.likes))
SET n.likes = "Movies"
```

### Rename a property in all nodes

```bash
MATCH (n)
WHERE NOT (EXISTS (n.instrument))
SET n.instrument = n.plays
REMOVE n.plays
```

Alternative

```bash
MATCH (n)
WHERE n.instrument is null
SET n.instrument = n.plays
REMOVE n.plays
```

### Add label to existing node

Adds the `:Food` label to nodes id 7 and id 8

```bash
MATCH (n)
WHERE id(n) IN [7, 8]
SET n:Food
```

### Creates the node if not exists and updates (or creates) a property

```bash
MERGE (n:Person {name: "Rik"})
SET n.owns = "Audi"
```

---

## Delete

### Delete nodes

To **delete a node** (p.e. id 5), first we need to **delete its relationships**. Then, the node can be deleted

```bash
MATCH (n)-[r]-()
WHERE id(n) = 5
DELETE r, n
```

To **delete multiple nodes** (must have their relationships previously deleted)

```bash
MATCH (n)
WHERE id(n) IN [1, 2, 3]
DELETE n
```

### Deletes a property in a specific node

```bash
MATCH (n)
WHERE n:Person AND n.name = "Rik" AND n.plays is NOT null
REMOVE n.plays
```

Alternative

```bash
MATCH (n)
WHERE n:Person AND n.name = "Rik" AND EXISTS (n.plays)
REMOVE n.plays
```

### Delete a label from all nodes

Deletes the `:Person` label from **all** nodes

```bash
MATCH (n)
REMOVE n:Person
```

### Delete a label from nodes with specific labels

Deletes the `:Person` label from nodes with `:Food` and `:Person` labels

```bash
MATCH (n)
WHERE n:Food:Person
REMOVE n:Person
```

### Delete multiple labels from nodes

Deletes the `:Food` and `:Person` labels from nodes which have **both** labels

```bash
MATCH (n)
WHERE n:Food:Person
REMOVE n:Food:Person
```

**Danger**: Deletes the `:Food` and `:Person` labels from nodes which have `:Food` or `:Person` or `:Food:Person` labels

```bash
MATCH (n)
REMOVE n:Food:Person
```

### Delete entire database

```bash
MATCH (n)
OPTIONAL MATCH (n)-[r]-()
DELETE n, r
```

---

## Other clauses

### Show execution plan

Use `PROFILE` or `EXPLAIN` before the query.

`PROFILE`: Shows the execution plan, query information and **db hits**.
Example: `Cypher version: CYPHER 3.0, planner: COST, runtime: INTERPRETED. 84 total db hits in 32 ms.`

`EXPLAIN`: Shows the execution plan and query information.

Example: `Cypher version: CYPHER 3.0, planner: COST, runtime: INTERPRETED.`

### Count

Count all nodes

```bash
MATCH (n)
RETURN count(n)
```

Count all relationships

```bash
MATCH ()-->()
RETURN count(*);
```

### Limit

Returns up to 2 nodes (and their relationships) where there's a property `from` with "Korea" value

```bash
MATCH (a:Person)
WHERE a.from = "Korea"
RETURN a
LIMIT 2
```

### Create unique property constraint

Make `.name` property unique on nodes with `:Person` label

```bash
CREATE CONSTRAINT ON (n:Person)
ASSERT n.name IS UNIQUE
```

### Drop unique property constraint

Remove the unique constraint on the `.name` property for nodes with `:Person` label

```bash
DROP CONSTRAINT ON (n:Person)
ASSERT n.name IS UNIQUE
```

## Useful Cypher Queries for Neo4J

(Note that the following snippets use the legacy `start n=node(*)` syntax from older Neo4j versions.)

Find the unique labels that appear in the database:

```bash
match n return distinct labels(n)
```

Find the unique relationships that appear in the database:

```bash
match n-[r]-() return distinct type(r)
```

Combine the previous two queries to return the unique combinations relationships and labels in the database:

```bash
match n-[r]-() return distinct labels(n), type(r)
```

Find nodes that don't have any relationships:

```bash
start n=node(*) match n-[r?]-() where r is null return n
```

Find all nodes that have a specific property:

```bash
start n=node(*) match n where has(n.someProperty) return n
```

Find all nodes that have a specific relationship (regardless of the direction of the relationship):

```bash
start n=node(*) match n-[:SOME_RELATIONSHIP]-() return distinct n
```

Show the nodes and a count of the number of relationships that they have:

```bash
start n=node(*) match n-[r]-() return n, count(r) as rel_count order by rel_count desc
```

Get a count of all nodes in your graph:

```bash
start n=node(*) match n return count(n)
```

To delete all nodes in a database (first you have to delete all relationships)

```bash
start n=node(*) match n-[r]-() delete r
start n=node(*) match n delete n
```

A simple query to get nodes of a certain category that match a certain property

```bash
match (n:Person) where n.name="Tim" return n
```
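One more snippet that often belongs in such a cheat sheet: plain schema indexes, closely related to the unique constraints above - a sketch using the Neo4j 3.x syntax that matches the constraint examples:

```bash
CREATE INDEX ON :Person(name)

DROP INDEX ON :Person(name)
```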
<file_sep>--- date: "2019-08-10" title: "MQTT Networks with Athom Homey" categories: - MQTT - Smarthome - IoT --- ![Central, Hong Kong](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Adding the Homey MQTT Client](#adding-the-homey-mqtt-client) - [Sending a MQTT Command to your Camera](#sending-a-mqtt-command-to-your-camera) <!-- /TOC --> In this tutorial we want to connect an INSTAR Full HD camera through its MQTT Interface with the [Athom Homey Smarthome](https://www.athom.com/en/). ## Adding the Homey MQTT Client Homey is modular - that means that it is only set up with a limited amount of functionality out of the box. You can use the Homey App Store to load all kinds of apps onto your gateway to personalize its functionality. For the following we are going to need an MQTT client to be able to connect to the MQTT server on our camera. --- ![INSTAR MQTT Broker and Athom Homey](./INSTAR_MQTT_with_Athom_Homey_01.png) --- 1. Start by launching your Homey App for [iOS](https://apps.apple.com/us/app/homey/id1435800024) or [Android](https://play.google.com/store/apps/details?id=app.homey&hl=en_US) and switch to the __More__ App. 2. Open the Apps category. 3. And click on the __+__ icon in the top right to add a new app. --- ![INSTAR MQTT Broker and Athom Homey](./INSTAR_MQTT_with_Athom_Homey_02.png) --- 4. Type `mqtt` into the search field and select the __MQTT Client__. I am also installing the MQTT Hub - _this is optional_. The Hub App publishes Homey state variables through MQTT which comes in handy if you also use another smarthome solution that you want to interconnect with your Homey. 5. Once installed, click on the __MQTT Client__ inside the Apps category. 6. Click on __Configure App__ to connect the client to the INSTAR MQTT Server. --- ![INSTAR MQTT Broker and Athom Homey](./INSTAR_MQTT_with_Athom_Homey_03.png) --- 7. Start by entering the IP address of your camera (that is running your MQTT broker) - e.g. `192.168.2.116`. We are going to use the Port `1883` that allows us to connect to the MQTT broker without the SSL encryption (an encrypted connection can be established via the port `8883` but requires a custom SSL certificate which I could not find out how to upload). The __username__ and __password__ for the MQTT broker are the ones you set in the MQTT Menu. 8. If you installed the MQTT Hub earlier - _which is optional_ - enter it and click on __Configure app__. 9. Here you can set which information Homey should publish to our MQTT broker for use with other smarthome components. ## Sending a MQTT Command to your Camera --- ![INSTAR MQTT Broker and Athom Homey](./INSTAR_MQTT_with_Athom_Homey_04.png) --- 1. Now switch to the __Devices__ tab and check if you are able to add virtual devices. If you can't find them, they are still hidden as _experimental features_. 2. Switch to the __More__ tab and enter the __Settings__ menu. Click on __Experiments__. 3. Activate __Virtual Devices__. --- ![INSTAR MQTT Broker and Athom Homey](./INSTAR_MQTT_with_Athom_Homey_05.png) --- 4. Back in __Devices__ you should now be able to add a __Virtual Button__. 5. Click on __Install__ to add the button to your devices. 6. Click __Next__ to finish the installation. --- ![INSTAR MQTT Broker and Athom Homey](./INSTAR_MQTT_with_Athom_Homey_06.png) --- 7. You can _click and hold_ the virtual button to rename it. 8. Click on the cog wheel icon to enter the settings menu. 9. 
Add a name for your virtual button and add it to a zone. --- ![INSTAR MQTT Broker and Athom Homey](./INSTAR_MQTT_with_Athom_Homey_07.png) --- 10. Now we need to add some logic to make this button work. This can be done in the __Flows__ tab. 11. Click on the __+__ icon in the top right to add a new flow. 12. In the __Create Flow__ menu click on __Add card__ in the _When condition_ to assign an event that should trigger your flow. --- ![INSTAR MQTT Broker and Athom Homey](./INSTAR_MQTT_with_Athom_Homey_08.png) --- 13. Search for your virtual button under __Devices__ and select it. 14. The button only has this one trigger - _When button has been pressed_. Select it and confirm. 15. Now swipe up the __Then__ section of our flow to define what Homey should do when the flow was triggered. --- ![INSTAR MQTT Broker and Athom Homey](./INSTAR_MQTT_with_Athom_Homey_09.png) --- 16. Now click on __Add card__ to add the __Then__ condition for our flow. 17. Scroll down to the __Apps__ section and select the __MQTT Client__. 18. And choose to __Publish a message__ to the INSTAR MQTT server. --- ![INSTAR MQTT Broker and Athom Homey](./INSTAR_MQTT_with_Athom_Homey_10.png) --- 19. Now you can select a MQTT Topic from the INSTAR MQTT API that you want to update every time the button is pressed. Here I choose `alarm/pushalarm` with a value of `{"val":"1"}` to trigger an alarm on my camera. Note that if you have more than 1 camera connected to the MQTT server, you either have to add the MAC address in front of the MQTT topic to address a specific camera, or add `all` - `all/alarm/pushalarm` - to address all connected cameras. By adding the prefix `local`, you only address the camera that is running the MQTT broker. 20. After you confirm your edit, click __Test__ to verify that everything is working - your camera's System Log should show an __Audio Alarm__ (Note that the Audio Alarm has a __cool-down of 60s__ - so even if you hit the virtual button several times, you only get 1 alarm per minute). Now you can click on save to exit the setup window. 21. You now have a working button in the __Devices__ window that can trigger an alarm on your camera. Try to add another button with the MQTT topic `features/ptz/preset` and a value of `{"val":"0"}` - `{"val":"7"}` to have your camera pan&tilt between its preset positions.<file_sep>--- date: "2019-08-02" title: "Building a MQTT Interface" categories: - MQTT - IoT --- ![Shanghai, China](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) ## The Interface All camera functions of INSTAR Full HD cameras can be controlled through the HTTP/S CGI interface. But in the Smarthome and Internet-of-Things (IoT) realm there is another protocol that is widely in use and that offers a few advantages over the classic HTTP - this protocol is called MQTT (Message Queuing Telemetry Transport). Adding an MQTT interface to our Full HD cameras makes adding those cameras to an existing Smarthome - almost - too easy :) ![Building an MQTT Interface](./MQTT_Broker_01.png) In MQTT you control your camera by publishing updates to MQTT Topics. The value you want to update such a topic to is formatted in JSON and added to the Message Payload. So instead of sending a CGI command like `param.cgi?cmd=setmdattr&-name=1&-enable=$val` to either activate (val=1) or deactivate (val=0) Motion Detection Area 1 you publish this update to the topic `alarm/area1/enable` formatted like `{"val":"1"}` or `{"val":"0"}`.
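As a quick sketch of what such an update looks like from a shell - assuming the generic `mosquitto_pub` client, with placeholder broker address and credentials (the full topic scheme, including the `instar/local` prefix used here, is explained in the next section):

```bash
# Hypothetical example - enable motion detection area 1 via MQTT
mosquitto_pub -h 192.168.2.116 -p 1883 -u admin -P password \
  -t instar/local/alarm/area1/enable -m '{"val":"1"}'
```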
### Addressing the right Camera To make sure that this update is received by the correct device on your MQTT network, we first have to prefix the topic with `instar` - this makes sure that your camera will try to match the update to an internal function - like updating the state of a detection area. Secondly, we have to state what camera we want to address by either saying `all` to update all INSTAR cameras on your network or picking a specific one by its MAC address, e.g. `000389888811`. You can find the MAC address of your camera under System/Overview - simply remove the colons, e.g. __00:03:89:88:88:11__. ![Building an MQTT Interface](./MQTT_Broker_02.png) If you want to address the camera that is running your MQTT broker, you can also use the prefix `local` instead. The complete topic will then look like this: * `instar/all/alarm/area1/enable` - to address all INSTAR Full HD cameras on your MQTT network (all have to be connected to the same MQTT broker). * `instar/000389888811/alarm/area1/enable` - to address a camera with the (LAN) MAC address __00:03:89:88:88:11__. * `instar/local/alarm/area1/enable` - to address the camera that runs the MQTT broker on your network. You can find the allowed payloads for each MQTT topic in the table below. The second table below displays the corresponding CGI command for each topic (if you are already familiar with the HTTP API and are looking for a specific command). ### Status, Command or Raw Topics Once you update a __COMMAND__ Topic like `instar/000389888811/alarm/area1/enable` you will see that you receive an update on the __STATUS__ Topic `instar/000389888811/status/alarm/area1/enable` once your camera received the command and updated its settings. ![Building an MQTT Interface](./MQTT_Broker_03.png) If your application doesn't support sending command payloads in the JSON format, you can use the RAW Topic by adding `raw` to the end of the topic and use the value as a string directly: * `instar/000389888811/alarm/area1/enable/raw` - the message payload here is simply either `1` or `0` instead of `{"val":"1"}` or `{"val":"0"}`. ## Update your first MQTT Topic To update a topic you can use desktop software like the MQTT Explorer or MQTT.fx for Windows, macOS or LINUX. Or one of the many available Smartphone Apps. These allow you to try out the MQTT API without the need of additional Hardware (Smarthome Gateways) and quickly debug your MQTT network.<file_sep>--- date: "2019-06-09" title: "Getting Started with Windows Server 2019" categories: - Windows --- ![Central, Hong Kong](./photo-45fdsfd6d_64567fh6drethg4_d.jpg) <!-- TOC --> - [Windows Admin Center](#windows-admin-center) - [Setting up User Accounts](#setting-up-user-accounts) - [Setting up an IIS Webserver](#setting-up-an-iis-webserver) - [Adding your own static website](#adding-your-own-static-website) <!-- /TOC --> ## Windows Admin Center [Windows Admin Center](https://docs.microsoft.com/en-us/windows-server/manage/windows-admin-center/understand/windows-admin-center) is a locally deployed, browser-based app for managing servers, clusters, hyper-converged infrastructure, and Windows 10 PCs. It comes at no additional cost beyond Windows and is ready to use in production. You can download it from [here](https://www.microsoft.com/en-us/software-download/windowsinsiderpreviewserver). Run the installation and start the program.
This will open the main Admin Center Dashboard inside your default web browser: ![Windows Server 2019](./Windows_Server_2019_01.png) Now click on __Add__ in the top right and choose __Add Windows Server__: ![Windows Server 2019](./Windows_Server_2019_02.png) Switch to your Windows Server 2019 desktop and open the __Server Manager__ to find out the __Server Name__ of the server you want to add: ![Windows Server 2019](./Windows_Server_2019_03.png) You can now go back to the Admin Center and type in the server name you want to search for. Once it is found on the local network, add your login credentials and click on __Add__: ![Windows Server 2019](./Windows_Server_2019_04.png) Your server should now show up under __All connections__: ![Windows Server 2019](./Windows_Server_2019_05.png) Click on it to open the __Server Management Dashboard__: ![Windows Server 2019](./Windows_Server_2019_06.png) ### Setting up User Accounts To add users search for `Local users & groups` under __Tools__ and click on __Add__ to create a new user account: ![Windows Server 2019](./Windows_Server_2019_07.png) Then click on __Manage Membership__ to add your new user to all the necessary __Groups__: ![Windows Server 2019](./Windows_Server_2019_08.png) ## Setting up an IIS Webserver To be able to host static content from your Windows Server we need to add the __IIS Webserver__. Additionally we can add an FTP Service to be able to upload content we want to host. For this search for `Roles & features` under Tools and add the __Webserver (IIS)__ role to your server: ![Windows Server 2019](./Windows_Server_2019_09.png) After the installation is finished, open the IP address of your Windows Server inside your default web browser - e.g. `http://192.168.2.63`. You should be able to see the default IIS start page: ![Windows Server 2019](./Windows_Server_2019_10.png) ### Adding your own static website We now have a web server that is serving the default IIS startpage. Let's replace this default content with some more meaningful content. We already installed the __FTP Service__; we now have to create an FTP Site that has access to the directory where IIS stores its default content. First open the Server Manager and access the __Internet Information Services (IIS) Manager__: ![Windows Server 2019](./Windows_Server_2019_11.png) We can see the default IIS website under `Sites`. Right-click `Sites` to add your __FTP Site__: ![Windows Server 2019](./Windows_Server_2019_12.png) Give your FTP site a name and point it to the default directory where IIS stores its web content - `C:\inetpub\wwwroot`: ![Windows Server 2019](./Windows_Server_2019_13.png) Leave the binding at its default settings, let the FTP site activate automatically and deactivate SSL (we only want to access the server via FTP over our local network): ![Windows Server 2019](./Windows_Server_2019_14.png) For the Authentication choose __basic__ and either allow all users to access this service or create a special `ftpuser` for it: ![Windows Server 2019](./Windows_Server_2019_15.png) Make sure that the __Windows Firewall__ does not block the FTP service: ![Windows Server 2019](./Windows_Server_2019_16.png) You might also have to add a rule that explicitly opens port 21 for incoming traffic - though, be careful with this setting in case you want to expose your server to the internet. It is fine for a local server: ![Windows Server 2019](./Windows_Server_2019_17.png) Here I encountered a little issue.
When I tried to access the FTP Server with an FTP client I received an error message `530 User cannot log in, home directory inaccessible`: ![Windows Server 2019](./Windows_Server_2019_18.png) Back in the __Internet Information Services (IIS) Manager__ click on your FTP site and double-click __FTP Authorization Rules__: ![Windows Server 2019](./Windows_Server_2019_19.png) Click on __Add Rule__ in the top right and specify your `ftpuser` to have __Read/Write__ access: ![Windows Server 2019](./Windows_Server_2019_20.png) Then restart the FTP Service from the __Services Manager__: ![Windows Server 2019](./Windows_Server_2019_21.png) Now it works as expected: ![Windows Server 2019](./Windows_Server_2019_22.png) I was now able to read the content of the FTP root directory but I was still lacking the right to write or delete from this directory. So back on the server right-click the directory, select properties and security. Here you can add the FTP User and give him full control over the directory: ![Windows Server 2019](./Windows_Server_2019_23.png) Now upload your static website content to the FTP site directory and open web servers IP address inside your browser. You should be able to see your website. But another issue that I found was that all my images, saved in the `.webp` image format, were blocked by the webserver. The problem here was the missing __MIME Type__. Just open the __Internet Information Services (IIS) Manager__ again and click on the `Default Web Site` and double-click __MIME Types__: ![Windows Server 2019](./Windows_Server_2019_24.png) Here you can add the `image/webp` type manually: ![Windows Server 2019](./Windows_Server_2019_25.png) And the website was up and running: ![Windows Server 2019](./Windows_Server_2019_26.png) <file_sep>--- date: "2018-11-01" title: "Home Automation and IP Cameras" categories: - Windows - OpenHAB --- ![Shenzhen, China](./photo-34606002985_b41c7bdcd5_o.jpg) The following instructions will guide you through the process of setting up OpenHAB 2 to automate an IP camera via the MQTT protocol. <!-- TOC --> - [openHAB 2 on Windows](#openhab-2-on-windows) - [Prerequisites](#prerequisites) - [Installation](#installation) - [nodeRED on Windows](#nodered-on-windows) - [Installing Node.js](#installing-nodejs) - [Installing nodeRED](#installing-nodered) - [Run nodeRED](#run-nodered) - [Mosquitto on Windows](#mosquitto-on-windows) - [Installing Mosquitto](#installing-mosquitto) - [Testing the MQTT Server with MQTT.fx](#testing-the-mqtt-server-with-mqttfx) - [OpenHAB2 Configuration](#openhab2-configuration) - [Connect the MQTT Server to openHab2](#connect-the-mqtt-server-to-openhab2) - [Add Things](#add-things) - [Access your SmartHome Remotely](#access-your-smarthome-remotely) - [Configuring nodeRed](#configuring-nodered) - [Create a Static directory](#create-a-static-directory) <!-- /TOC --> ## openHAB 2 on Windows The following instructions will guide you through the process of setting up [openHAB 2](https://www.openhab.org/docs/). ### Prerequisites First we need to [Download](https://java.com/en/) and install the JAVA Development Kit (JDK or openJDK). Then add the JAVA_HOME System Environment Variable in Windows in the Control Panel: `System and Security ➡️ System ➡️ Advanced System Settings ➡️ Environment Variables ➡️ System Variables` --- ![Set JAVA_HOME](./openhab2_01.png) --- Open the Windows Control Panel, go to the __Advanced System Settings__ and open the __Environment Variables__. 
--- ![Set JAVA_HOME](./openhab2_02.png) --- Select to set a new System variable __JAVA_HOME__ and point to your JAVA installation path (that _might differ_ from the one shown below): --- ![Set JAVA_HOME](./openhab2_03.png) --- ### Installation [Download](https://www.openhab.org/download/) the latest Windows Stable or Snapshot ZIP archive file for manual installation and unzip the file in your chosen directory (e.g. `C:\openHAB2`). To start the server launch the runtime by executing the script `C:\openhab230\start.bat`: --- ![openhab2](./openhab2_04.png) --- Point your browser to `http://localhost:8080`. You should be looking at the openHAB [package selection](https://docs.openhab.org/configuration/packages.html) page: --- ![openhab2](./openhab2_05.png) --- ## nodeRED on Windows Setting up [nodeRED](https://nodered.org) in a Microsoft Windows environment. The instructions are specific to Windows 10 but should also work for Windows 7 and Windows Server from 2008R2. ### Installing Node.js Download the latest version of Node.js from the official [Node.js home](https://nodejs.org/en/) page. Run the downloaded MSI file. Installing Node.js requires local administrator rights. Once installed, open a command prompt and run the following command to ensure Node.js and npm are installed correctly: `node -v && npm -v` --- ![nodeRED](./nodeRED_01.png) --- ### Installing nodeRED Installing nodeRED as a global module adds the command node-red to your system path. Execute the following at the command prompt: `npm install -g --unsafe-perm node-red` ### Run nodeRED Once installed, the simple way to run Node-RED is to use the node-red command in a command prompt: If you have installed Node-RED as a global npm package, you can use the node-red command: `node-red` --- ![nodeRED](./nodeRED_02.png) --- Point your browser to `http://localhost:1880/`. You should be looking at the nodeRED interface: --- ![nodeRED](./nodeRED_03.png) --- For more information check out our [Node-RED Guide](/Advanced_User/Node-RED_and_MQTT/) <br/><br/> ## Mosquitto on Windows [Eclipse Mosquitto](https://github.com/eclipse/mosquitto) is an open source (EPL/EDL licensed) message broker that implements the MQTT protocol versions 3.1 and 3.1.1. Mosquitto is lightweight and is suitable for use on all devices from low power single board computers to full servers. The MQTT protocol provides a lightweight method of carrying out messaging using a publish/subscribe model. This makes it suitable for Internet of Things messaging such as with low power sensors or mobile devices such as phones, embedded computers or microcontrollers. ### Installing Mosquitto First we need to [Download](https://mosquitto.org/download/) the latest binary and execute it: --- ![Mosquitto MQTT](./Mosquitto_01.png) --- The Installer will ask to download and install the latest version of [Win32OpenSSL](http://slproweb.com/products/Win32OpenSSL.html) and copy all `.dll`'s to the Mosquitto install directory: --- ![Mosquitto MQTT](./Mosquitto_02.png) --- ![Mosquitto MQTT](./Mosquitto_03.png) --- And the same with [this pthread.dll](ftp://sources.redhat.com/pub/pthreads-win32/dll-latest/dll/x86/): --- ![Mosquitto MQTT](./Mosquitto_04.png) --- ### Testing the MQTT Server with MQTT.fx [Download MQTT.fx](https://mqttfx.jensd.de/index.php/download) as a MQTT Client for MQTT testing/debugging. 
Install and start the program and connect it to our local MQTT server: --- ![Mosquitto MQTT](./Mosquitto_05.png) --- ![Mosquitto MQTT](./Mosquitto_06.png) --- ![Mosquitto MQTT](./Mosquitto_07.png) --- We can test the MQTT Service by __subscribing__: --- ![Mosquitto MQTT](./Mosquitto_08.png) --- Going back to the __Publish__ tab and sending a message: --- ![Mosquitto MQTT](./Mosquitto_09.png) --- The message should show up in the __Subscribe__ tab: --- ![Mosquitto MQTT](./Mosquitto_10.png) --- ## OpenHAB2 Configuration The configuration files for openHAB can be found in the __./conf__ directory of the installation dir. First we want to configure the MQTT service to connect to our MQTT Server. ### Connect the MQTT Server to openHab2 Go back to `http://localhost:8080` and select the PaperUI, go to Add-ons and search for MQTT-bindings inside the __Bindings__ tab - in my case this was already installed, if not, hit install: --- ![Mosquitto MQTT](./Mosquitto_11.png) --- Now go to the openHAB2 install dir - if you followed this guide, this will be `C:\openhab230\conf\services` and open the __mqtt.cfg__: --- ![Mosquitto MQTT](./Mosquitto_12.png) --- Add the MQTT URL as follows: `mosquitto.url=tcp://localhost:1883` (The broker name - in our case we choose _mosquitto_ - can be chosen freely). Make sure that you set all MQTT Server variables according to your MQTT Server configuration in Node-RED - e.g.: ``` mosquitto.url=tcp://localhost:1883 mosquitto.clientId=openhab2 mosquitto.user=mosquitto mosquitto.pwd=<PASSWORD> mosquitto.qos=1 mosquitto.retain=false mosquitto.async=true ``` You can double-click a MQTT-Node in your Node-RED flows to check and edit your configuration: --- ![Mosquitto MQTT](./nodeRED_05.png) --- --- ![Mosquitto MQTT](./nodeRED_06.png) --- --- ![Mosquitto MQTT](./nodeRED_07.png) --- ### Add Things Go to `C:\openhab230\conf\things` to create a configuration file named __office.things__ and add the following declaration _//This is the Things file_. Eventually we will populate this file with devices that require a thing declaration, keep in mind that not all devices need it. To be able to access things - such as a light switch - we need to add them to an items file __office.items__ in `C:\openhab230\conf\items`. For demo purposes we can add a __Switch Item__: ``` // This is the Items File //Demo items Switch MyFirstSwitch "Demo Switch" ``` In this case __Switch__ will be the type of item we add, __MyFirstSwitch__ is its name and __Demo Switch__ will be used as its label. Now we have to add the switch to our user interface, by adding an __office.sitemap__ file in `C:\openhab230\conf\sitemaps`. The sitemap file defines how you interact with the devices in the user interface: ``` sitemap office label="INSTAR - SmartOffice" { Frame label="Demo" { Switch item=MyFirstSwitch } } ``` To activate the sitemap go to __Configuration__ and __Services__, choose __UI__ and select to configure the __Basic UI__: --- ![openHAB2](./openhab2_06.png) --- Type in the name of your sitemap - we used __office__ - and click __Save__. You can then repeat this step with the __Classic UI__: --- ![openHAB2](./openhab2_07.png) --- You can now open the __Basic UI__ in a new tab `http://localhost:8080/basicui/app` and see the switch we just created: --- ![openHAB2](./openhab2_08.png) --- To add functionality to our switch, we need to add an __office.rules__ file in `C:\openhab230\conf\rules`. This is the file that does all the automation.
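As a minimal sketch of what such a rules file could contain - this hypothetical rule just writes a log entry whenever the demo switch we created above changes its state:

```
// This is the Rules File
rule "Demo Switch changed"
when
    Item MyFirstSwitch changed
then
    logInfo("office", "MyFirstSwitch changed to " + MyFirstSwitch.state)
end
```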
## Access your SmartHome Remotely --- ![openHAB2](./openhab2_09.png) --- ![openHAB2](./openhab2_10.png) --- You can use the [OpenHAB Cloud](https://myopenhab.org/users) to access your Smarthome over the internet. Just use your Email Address and Password + __UUID__ and __Secret__ of your OpenHAB installation - the latter can be found under `./userdata/uuid` and `./userdata/openhabcloud/secret` --- ![openHAB2](./openhab2_11.png) --- If you cannot find those files, make sure that the openHAB Cloud Connector is installed by going to `http://localhost:8080/paperui`, choose __Addons__ and __Misc__. Search for _Cloud_ to find the Addon and install it: --- ![openHAB2](./openhab2_13.png) --- Go back to [MyOpenHAB](https://myopenhab.org/), make sure that you are logged in, and you will see an __Online__ notification on the top right - if not, stop and restart the OpenHAB service from your console and refresh the webpage. --- ![openHAB2](./openhab2_12.png) --- You will see the familiar UI when you navigate to https://home.myopenhab.org/start/index. You can use this login on the web, on [Android](https://play.google.com/store/apps/details?id=org.openhab.habdroid), [iOS](https://itunes.apple.com/us/app/openhab/id492054521?mt=8) and [Windows Phone / Metro](https://www.microsoft.com/en-us/p/openhab/9nmq39ctwxgt). ## Configuring nodeRed ### Create a Static directory To work with __static files__ directly in __nodeRed__, open the __settings.js__ in `\Users\<Username>\.node-red`. Add the following line to the top of the document: ```js var path = require("path"); ``` Add a folder named __static__ to `.node-red` and change the default staticHTTP path to the following: ```js //httpStatic: '/home/nol/node-red-static/', httpStatic: path.join(__dirname,'static'), ``` --- ![nodeRed](./nodeRED_04.png) --- Restart nodeRed and test if everything is working by adding an image file to the newly created __static__ folder (e.g. test.png) and opening it via the nodeRed URL on port _1880_ - the image should be displayed in your browser window: ``` http://127.0.0.1:1880/test.png ```<file_sep>--- date: "2019-04-01" title: "Introduction to Keras" categories: - Machine Learning - Python --- ![Shanghai, China](./photo-456tdsfggd_67gfh6dgdf4_d.jpg) [Keras](https://keras.io) is a high-level neural networks API, written in Python and capable of running on top of TensorFlow, CNTK, or Theano. It was developed with a focus on enabling fast experimentation. ## Installation of Keras You can install Keras directly through `pip3 install keras` (make sure you have [Python v3](https://www.python.org/downloads/) installed). I have [Anaconda](https://docs.anaconda.com/anaconda/install/windows/) set up on my computer and am going to use it for this instead. If you already have Anaconda installed, make sure that it is up to date (make sure that you start the [Anaconda Prompt](https://docs.anaconda.com/anaconda/user-guide/getting-started/#write-a-python-program-using-anaconda-prompt-or-terminal) with Admin privileges): ```bash conda update conda conda update anaconda ``` Anaconda allows us to create a virtual environment in which we can run our TensorFlow and Keras program. To create the environment with the name `py3-TF2` run the following command in the Anaconda Prompt - hit `y` when prompted: ```bash conda create --name py3-TF2 python=3 conda activate py3-TF2 ``` Before installing Keras, please install one of its backend engines: [TensorFlow](https://mpolinowski.github.io/introduction-to-tensorflow-2-beta), Theano, or CNTK.
We recommend the TensorFlow backend. We can now continue installing Keras inside this virtual environment: ```bash pip install keras ``` To be able to use the virtual environment in [Jupyter Notebook](https://docs.anaconda.com/anaconda/user-guide/getting-started/#run-python-in-a-jupyter-notebook) we need to install `ipykernel` and `nb_conda_kernels` inside it: ```bash pip install ipykernel conda install nb_conda_kernels ``` <!-- ![Image Classification with Keras](./keras_01.png) --><file_sep>--- date: "2018-01-26" title: "Run your Node.js app from a Docker Container" categories: - Javascript - Node - Docker --- ![Abashiri, Hokkaido, Japan](./photo-11626710613_fd6faa15ae_o.png) [Github Repository](https://github.com/mpolinowski/node-express-docker-starter) - [01 Create your Node App](#01-create-your-node-app) - [02 Dockerizing your Node.js application](#02-dockerizing-your-nodejs-application) - [Building your image](#building-your-image) - [Running the Container](#running-the-container) - [Running the Container with Kitematic](#running-the-container-with-kitematic) ## 01 Create your Node App Keeping it simple - create a folder and use [Node.js](https://nodejs.org) to [npm](https://www.npmjs.com) install [Express.js](https://expressjs.com) ```bash npm init npm install express ``` The __init__ will create a package.json file - we can add a start script to it to start our Express Webserver: ```json "scripts": { "start": "node ./index.js" } ``` Then configure your webserver by adding the __index.js__ file to the root directory: ```js //Load express module with `require` directive var express = require('express') var app = express() //Define request response in root URL (/) app.get('/', function (req, res) { res.send('Hello World!') }) //Launch listening server on port 3000 app.listen(3000, function () { console.log('app listening on port http://localhost:3000!') }) ``` You can test the webserver by running the npm script from your console: ```bash npm start ``` And access _http://localhost:3000_ in your preferred web browser - you should be greeted by: ![Docker & Node.js](./docker_01.png) ## 02 Dockerizing your Node.js application If you haven't already, first [install Docker](https://docs.docker.com/install/) on your computer. The Docker container is launched on the basis of a Docker image, a template with the application details. The Docker image is created with instructions written in the Dockerfile. Let's add a __Dockerfile__ to the directory with our application: ```docker FROM node:latest # Create app directory WORKDIR /usr/src/app # Install app dependencies # A wildcard is used to ensure both package.json AND package-lock.json are copied # where available (npm@5+) COPY package*.json ./ RUN npm install # If you are building your code for production # RUN npm install --only=production # Bundle app source COPY . . EXPOSE 3000 CMD [ "npm", "start" ] ``` * The first thing we need to do is define what image we want to build from.
Here we will use the latest version of node available from the [Docker Hub](https://hub.docker.com/): __FROM node:latest__ * Next we create a directory to hold the application code inside the image - this will be the working directory for your application: __WORKDIR /usr/src/app__ * This image comes with Node.js and NPM already installed so the next thing we need to do is to install your app dependencies using the npm binary: __COPY package*.json ./__, __RUN npm install__ Note that, rather than copying the entire working directory, we are only copying the package.json file. * To bundle your app's source code inside the Docker image, use the COPY instruction: __COPY . .__ * Your app binds to port 3000 so you'll use the EXPOSE instruction to have it mapped by the docker daemon: __EXPOSE 3000__ * Last but not least, define the command to run your app using CMD which defines your runtime. Here we will use the basic npm start which will run `node ./index.js` to start your server: __CMD [ "npm", "start" ]__ Create a __.dockerignore__ file in the same directory as your Dockerfile with the following content: ``` node_modules npm-debug.log ``` This will prevent your local modules and debug logs from being copied onto your Docker image and possibly overwriting modules installed within your image. ### Building your image Go to the directory that has your Dockerfile and run the following command to build the Docker image. The -t flag lets you tag your image so it's easier to find later using the docker images command: ```bash docker build -t hello-world . ``` ![Docker & Node.js](./docker_02.png) You can now list the Docker image: ```bash docker images ``` ![Docker & Node.js](./docker_03.png) ### Running the Container Running your image with -d runs the container in detached mode, leaving the container running in the background. The -p flag redirects a public port to a private port inside the container. Run the image you previously built: ```bash docker run -p 8080:3000 hello-world ``` ![Docker & Node.js](./docker_04.png) The Container is now running and listening on Port 3000 - to access the container we have to use the exposed Port 8080 on localhost or the local IP address of the machine we are running it on. ### Running the Container with Kitematic Alternatively, we can use the graphical user interface [Kitematic](https://kitematic.com). First install the [Docker Toolbox](https://docker.com/toolbox) for Windows and macOS and start Kitematic. Choose __My Images__ and __Create__ on our hello-world container: ![Docker & Node.js](./docker_05.png) Our container is now running: ![Docker & Node.js](./docker_06.png) Be aware that the __Port 3000__ that we defined is only the internal Docker Port. You can check (and change) the exposed port under __Settings__ in the __Hostname/Ports__ tab: ![Docker & Node.js](./docker_07.png) As seen above, our container is exposed on Port 32772
and can be accessed on __http://localhost:32772__ as well as on the local network IP (in this case the 192.168.1.112): ![Docker & Node.js](./docker_08.png) --- __Further Readings__: https://medium.com/statuscode/dockerising-a-node-js-and-mongodb-app-d22047e2806f<file_sep>--- date: "2019-01-22" title: "Kubernetes NGINX Ingress" categories: - LINUX - Docker - Kubernetes - NGINX --- ![Malekula, Vanuatu](./photo-34607488365_9f40aafb01_o.jpg) <!-- TOC --> - [How to Use Nginx Ingress Controller](#how-to-use-nginx-ingress-controller) - [Creating the resources for Nginx Ingress](#creating-the-resources-for-nginx-ingress) - [Creating the Load Balancing Service](#creating-the-load-balancing-service) - [Create two Web Apps to Respond to the incoming Traffic](#create-two-web-apps-to-respond-to-the-incoming-traffic) - [Creating the Ingress](#creating-the-ingress) - [Testing the Ingress](#testing-the-ingress) - [Adding a Node.js / Express.js Web App](#adding-a-nodejs--expressjs-web-app) - [Preparing the NGINX Ingress](#preparing-the-nginx-ingress) - [It could be so easy, but...](#it-could-be-so-easy-but) - [Adding TLS](#adding-tls) <!-- /TOC --> An [Ingress](https://kubernetes.github.io/ingress-nginx/deploy/#generic-deployment) is an application that allows you to access your Kubernetes services from outside the Kubernetes cluster. This lets you consolidate your routing rules into a single resource, e.g.: * mydomain.com/api/web/ leads to an api service for your web application * mydomain.com/api/mobile/ leads to an api-v2 service for mobile access The __Ingress__ enables you to make your services available __without__ having to use [LoadBalancers](https://kubernetes.io/docs/tasks/access-application-cluster/create-external-load-balancer/) (only available on Cloud solutions like AWS, GCE, Azure...) or exposing each service on the Node ([NodePort](https://kubernetes.io/docs/concepts/services-networking/service/#nodeport)). Making this the ideal solution for on-premise hosting of a Kubernetes Cluster.
Popular Ingress Controllers include: * [Nginx](https://github.com/kubernetes/ingress-nginx/blob/master/README.md) * [Contour](https://github.com/heptio/contour) * [HAProxy](https://www.haproxy.com/blog/haproxy_ingress_controller_for_kubernetes/) * [Traefik](https://github.com/containous/traefik) ![NGINX Ingress for your Kubernetes Cluster](./kubernetes-ingress_01.png) ## How to Use Nginx Ingress Controller ### Creating the resources for Nginx Ingress ```bash kubectl apply -f https://raw.githubusercontent.com/kubernetes/ingress-nginx/master/deploy/mandatory.yaml ``` Or create the __mandatory.yaml__ file locally and create the prerequisites with `kubectl apply -f mandatory.yaml`: ```yaml apiVersion: v1 kind: Namespace metadata: name: ingress-nginx labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx --- kind: ConfigMap apiVersion: v1 metadata: name: nginx-configuration namespace: ingress-nginx labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx --- kind: ConfigMap apiVersion: v1 metadata: name: tcp-services namespace: ingress-nginx labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx --- kind: ConfigMap apiVersion: v1 metadata: name: udp-services namespace: ingress-nginx labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx --- apiVersion: v1 kind: ServiceAccount metadata: name: nginx-ingress-serviceaccount namespace: ingress-nginx labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx --- apiVersion: rbac.authorization.k8s.io/v1beta1 kind: ClusterRole metadata: name: nginx-ingress-clusterrole labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx rules: - apiGroups: - "" resources: - configmaps - endpoints - nodes - pods - secrets verbs: - list - watch - apiGroups: - "" resources: - nodes verbs: - get - apiGroups: - "" resources: - services verbs: - get - list - watch - apiGroups: - "extensions" resources: - ingresses verbs: - get - list - watch - apiGroups: - "" resources: - events verbs: - create - patch - apiGroups: - "extensions" resources: - ingresses/status verbs: - update --- apiVersion: rbac.authorization.k8s.io/v1beta1 kind: Role metadata: name: nginx-ingress-role namespace: ingress-nginx labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx rules: - apiGroups: - "" resources: - configmaps - pods - secrets - namespaces verbs: - get - apiGroups: - "" resources: - configmaps resourceNames: # Defaults to "<election-id>-<ingress-class>" # Here: "<ingress-controller-leader>-<nginx>" # This has to be adapted if you change either parameter # when launching the nginx-ingress-controller. 
- "ingress-controller-leader-nginx" verbs: - get - update - apiGroups: - "" resources: - configmaps verbs: - create - apiGroups: - "" resources: - endpoints verbs: - get --- apiVersion: rbac.authorization.k8s.io/v1beta1 kind: RoleBinding metadata: name: nginx-ingress-role-nisa-binding namespace: ingress-nginx labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx roleRef: apiGroup: rbac.authorization.k8s.io kind: Role name: nginx-ingress-role subjects: - kind: ServiceAccount name: nginx-ingress-serviceaccount namespace: ingress-nginx --- apiVersion: rbac.authorization.k8s.io/v1beta1 kind: ClusterRoleBinding metadata: name: nginx-ingress-clusterrole-nisa-binding labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: nginx-ingress-clusterrole subjects: - kind: ServiceAccount name: nginx-ingress-serviceaccount namespace: ingress-nginx --- apiVersion: apps/v1 kind: Deployment metadata: name: nginx-ingress-controller namespace: ingress-nginx labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx spec: replicas: 1 selector: matchLabels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx template: metadata: labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx annotations: prometheus.io/port: "10254" prometheus.io/scrape: "true" spec: serviceAccountName: nginx-ingress-serviceaccount containers: - name: nginx-ingress-controller image: quay.io/kubernetes-ingress-controller/nginx-ingress-controller:0.23.0 args: - /nginx-ingress-controller - --configmap=$(POD_NAMESPACE)/nginx-configuration - --tcp-services-configmap=$(POD_NAMESPACE)/tcp-services - --udp-services-configmap=$(POD_NAMESPACE)/udp-services - --publish-service=$(POD_NAMESPACE)/ingress-nginx - --annotations-prefix=nginx.ingress.kubernetes.io securityContext: allowPrivilegeEscalation: true capabilities: drop: - ALL add: - NET_BIND_SERVICE # www-data -> 33 runAsUser: 33 env: - name: POD_NAME valueFrom: fieldRef: fieldPath: metadata.name - name: POD_NAMESPACE valueFrom: fieldRef: fieldPath: metadata.namespace ports: - name: http containerPort: 80 - name: https containerPort: 443 livenessProbe: failureThreshold: 3 httpGet: path: /healthz port: 10254 scheme: HTTP initialDelaySeconds: 10 periodSeconds: 10 successThreshold: 1 timeoutSeconds: 10 readinessProbe: failureThreshold: 3 httpGet: path: /healthz port: 10254 scheme: HTTP periodSeconds: 10 successThreshold: 1 timeoutSeconds: 10 --- ``` ![NGINX Ingress for your Kubernetes Cluster](./kubernetes-ingress_02.png) ### Creating the Load Balancing Service ```bash https://raw.githubusercontent.com/kubernetes/ingress-nginx/master/deploy/provider/baremetal/service-nodeport.yaml ``` Or again, create the __nodeport-ingress-service.yaml__ file locally and create the Ingress service with `kubectl apply -f nodeport-ingress-service.yaml`: ```yaml apiVersion: v1 kind: Service metadata: name: ingress-nginx namespace: ingress-nginx labels: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx spec: type: NodePort ports: - name: http port: 80 targetPort: 80 protocol: TCP - name: https port: 443 targetPort: 443 protocol: TCP selector: app.kubernetes.io/name: ingress-nginx app.kubernetes.io/part-of: ingress-nginx externalIPs: - <add the IP address of your Kubernetes Master here> --- ``` > I am using the Kubernetes cluster on an on-premise cloud service that does not 
provide a LoadBalancer. The [Bare-Metal](https://kubernetes.github.io/ingress-nginx/deploy/baremetal/) solution is something I am working towards, but didn't implement yet. So this is an old-school DIY cluster and for some reason the [official documentation](https://kubernetes.github.io/ingress-nginx/deploy/#docker-for-mac) led me to a situation where I did not get an IP address on my Ingress service. [The solution](https://qiita.com/soumi/items/c6358e5e859004c2961c) that I found was to add my external IP (this is the WAN IP of my Kubernetes Master - e.g. `externalIPs: 192.168.3.11` - if you install another server in front of your cluster on the local area network, you should also be able to use the LAN address of your Master here instead - avoiding exposing your cluster directly to the internet) to the service configuration above. Afterwards I was able to access the apps behind this service through this IP address - see below. ![NGINX Ingress for your Kubernetes Cluster](./kubernetes-ingress_03.png) This has set up the Nginx Ingress Controller - you can check that it is running with the following command: ```bash kubectl get pods --namespace ingress-nginx ``` ![NGINX Ingress for your Kubernetes Cluster](./kubernetes-ingress_04.png) ### Create two Web Apps to Respond to the incoming Traffic Now, we can create Ingress resources in our Kubernetes cluster and route external requests to our services. For this we need two services that represent the Ingress routes for our requests. To test this, we can use two web applications that just echo a string when they receive an HTTP GET command: __web-echo.yaml__ ```yaml kind: Pod apiVersion: v1 metadata: name: web-app labels: app: web spec: containers: - name: web-app image: hashicorp/http-echo args: - "-text=I am the Web API" --- kind: Service apiVersion: v1 metadata: name: web-service spec: selector: app: web ports: - port: 5678 ``` __mobile-echo.yaml__ ```yaml kind: Pod apiVersion: v1 metadata: name: mobile-app labels: app: mobile spec: containers: - name: mobile-app image: hashicorp/http-echo args: - "-text=I am the Mobile API" --- kind: Service apiVersion: v1 metadata: name: mobile-service spec: selector: app: mobile ports: - port: 5678 ``` Create those two YAML files and create those apps in Kubernetes: ```bash kubectl apply -f web-echo.yaml kubectl apply -f mobile-echo.yaml ``` ![NGINX Ingress for your Kubernetes Cluster](./kubernetes-ingress_05.png) ### Creating the Ingress Now we need to declare an Ingress to route requests to `/web` to the first service, and requests to `/mobile` to the second: __nginx-ingress.yaml__ ```yaml apiVersion: extensions/v1beta1 kind: Ingress metadata: name: nginx-ingress annotations: ingress.kubernetes.io/rewrite-target: / spec: rules: - http: paths: - path: /web backend: serviceName: web-service servicePort: 5678 - path: /mobile backend: serviceName: mobile-service servicePort: 5678 ``` Create the Ingress using the Kubernetes command: ```bash kubectl create -f nginx-ingress.yaml ``` ![NGINX Ingress for your Kubernetes Cluster](./kubernetes-ingress_06.png) ## Testing the Ingress You should now be able to see that the service was created with `kubectl get svc --namespace=ingress-nginx` and access your two apps via the WAN IP of your Kubernetes Master (see remark above about the `externalIP`): ![NGINX Ingress for your Kubernetes Cluster](./kubernetes-ingress_07.png) ## Adding a Node.js / Express.js Web App We [earlier created](https://mpolinowski.github.io/express-generator-dockerrized/) a
[Node.js](https://nodejs.org/en/) Web App that uses [Express.js](https://expressjs.com/en/starter/static-files.html) to host web content and wrapped it into a Docker container. I want to try to add this docker image and use the web app behind the NGINX Ingress. For this app we used the [Express Generator](https://expressjs.com/en/starter/generator.html) to scaffold a simple website that consists of 2 pages - one hosted on the app root `/`, the other one under `/users` - [Github Repository](https://github.com/mpolinowski/docker-node-express). This was set up inside the __app.js__ file the following way: ```js var indexRouter = require('./routes/index'); var usersRouter = require('./routes/users'); ... app.use('/', indexRouter); app.use('/users', usersRouter); ``` We are importing a router that is responsible for a specific route. The router itself - e.g. in case of the `/users` route - just replies with a string once it receives a GET request: ```js var express = require('express'); var router = express.Router(); /* GET users listing. */ router.get('/', function(req, res, next) { res.send('User Login'); }); module.exports = router; ``` ### Preparing the NGINX Ingress We can now add the app to our Ingress configuration as follows: __nginx-ingress.yaml__ ```yaml apiVersion: extensions/v1beta1 kind: Ingress metadata: name: nginx-ingress annotations: ingress.kubernetes.io/rewrite-target: / spec: rules: - http: paths: - path: /web backend: serviceName: web-service servicePort: 5678 - path: /mobile backend: serviceName: mobile-service servicePort: 5678 - path: /test backend: serviceName: test-service servicePort: 3000 ``` We just added a third route to the Ingress on `/test` and assigned the service port that our Express app is running on - the port for the app is defined in `.\bin\www`: ```js var port = normalizePort(process.env.PORT || '3000'); ``` Great, now we can push the app to Docker Hub - e.g. to __mpolinowski/docker-node-express__ - and create the following Pod and Service configuration for it: ```yaml kind: Pod apiVersion: v1 metadata: name: web-app labels: app: web spec: containers: - name: web-app image: mpolinowski/docker-node-express ports: - containerPort: 3000 --- kind: Service apiVersion: v1 metadata: name: web-service spec: selector: app: web ports: - port: 3000 ``` And run it and access our app on the Master server's WAN IP with the URL `/test`... ### It could be so easy, but... You will get a 404 from NGINX - the Ingress is working (good!), but there is nothing hosted at that URL. What went wrong here is that the __Path Prefix__ is messing with our app routing. `/test` should lead us to the root of our web app - at least that is what I expected (this is how the router in Express.js works). But Ingress just recognizes that `/test` belongs to our web app and then routes `/test` to it. Since we only have the `/` and `/users` route defined, this leads us to a 404 from NGINX.
To fix this, we have to go back to the __app.js__ of our web app and add the __Path Prefix__ to every route: ```js app.use('/test', indexRouter); app.use('/test/users', usersRouter); ``` Now rebuilding the Docker image, re-uploading it to Docker Hub and restarting the image in Kubernetes gives us the result we needed: ![NGINX Ingress for your Kubernetes Cluster](./kubernetes-ingress_08.png) We added the NGINX Ingress to our Kubernetes cluster and used NGINX to proxy three web apps that can now be reached over the internet under the routes `<Cluster WAN IP>/web`, `<Cluster WAN IP>/mobile`, `<Cluster WAN IP>/test` ## Adding TLS <!-- HTTPS / Certbot: https://www.digitalocean.com/community/tutorials/how-to-set-up-an-nginx-ingress-with-cert-manager-on-digitalocean-kubernetes#step-4-—-installing-and-configuring-cert-manager --> You can create a certificate for your domain using [Certbot](https://certbot.eff.org). This will create two files that we need to add as a __secret__ to Kubernetes - `privkey.pem` and `fullchain.pem`. Both files can be found under _/etc/letsencrypt/live/my.domain.com_ and be added with the following command: ```bash kubectl create secret tls my-domain-secret --key ./privkey.pem --cert ./fullchain.pem ``` And change the Ingress configuration to use the certificate: __nginx-ingress.yaml__ ```yaml apiVersion: extensions/v1beta1 kind: Ingress metadata: name: nginx-ingress annotations: ingress.kubernetes.io/rewrite-target: / spec: tls: - hosts: - my.domain.com secretName: my-domain-secret rules: - host: my.domain.com http: paths: - path: /web backend: serviceName: web-service servicePort: 5678 - path: /mobile backend: serviceName: mobile-service servicePort: 5678 - path: /test backend: serviceName: test-service servicePort: 3000 ``` <file_sep>--- date: "2019-06-20" title: "User Login with Twitter for Discourse" categories: - LINUX - Discourse --- ![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Create a Twitter Developer Account](#create-a-twitter-developer-account) - [Add an Authentication App](#add-an-authentication-app) <!-- /TOC --> ## Create a Twitter Developer Account Go to [Twitter Developer](https://developer.twitter.com/en/apps) and sign in with your Twitter account and apply for a developer account: ![Using Twitter Login in Discourse](./Twitter_Auth_for_Discourse_01.png) ## Add an Authentication App Click the Create An App button: ![Using Twitter Login in Discourse](./Twitter_Auth_for_Discourse_02.png) Enter a name and description for your forum and enter the address in the Website Url field: ![Using Twitter Login in Discourse](./Twitter_Auth_for_Discourse_03.png) Check the box to Enable Sign in with Twitter. Enter `https://forum.instar.com/auth/twitter/callback` in the Callback URLs field. Enter the URLs for your terms of service and privacy policy pages.
If you have an organization, also fill in the fields related to it: ![Using Twitter Login in Discourse](./Twitter_Auth_for_Discourse_04.png) Go to the Permissions tab, choose `Read Only`, and `Request email addresses from users`, then click on __Save__: ![Using Twitter Login in Discourse](./Twitter_Auth_for_Discourse_05.png) Click on the Keys and tokens tab: ![Using Twitter Login in Discourse](./Twitter_Auth_for_Discourse_06.png) Copy the API key and generate the API secret and paste them in the twitter_consumer_key and twitter_consumer_secret settings in the Login section of your Discourse site (/admin/site_settings/category/login): ![Using Twitter Login in Discourse](./Twitter_Auth_for_Discourse_07.png)<file_sep>--- date: "2018-11-21" title: "Red Hat Certified Specialist in Containerized Application Development" categories: - LINUX - Docker - Kubernetes --- ![Shanghai, China](./photo-11629527996_e8a3d50fde_o.jpg) This exam and the exam objectives provided here are based on the Red Hat® Enterprise Linux® 7.0 version of the exam. An IT professional who is a Red Hat Certified Specialist in Containerized Application Development is capable of performing the following tasks: * Implement new container images by extending existing images * Manage images including pulling and pushing images from a private registry * Manually run and link containers in a development environment [Red Hat Website](https://www.redhat.com/en/services/training/ex300-red-hat-certified-engineer-rhce-exam) Technologies used: * __Kubernetes__: Kubernetes is a manager for clusters of containerized applications. It automates container deployment and scaling of containers across clusters of hosts. A Kubernetes cluster is made up of __Nodes__ (server instances to which pods are deployed), __Pods__ (containers are coupled into pods in which they can share resources and reach each other via localhost) and __Containers__ (containerized applications/services executed by the docker runtime). * __Docker__: Docker is the container runtime that we are going to use to run our containerized applications on. * __etcd__: etcd is a distributed key value store that provides a reliable way to store data across a cluster of machines (see the short sketch after this list).
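As a rough illustration of that key-value model - a hypothetical write and read with the `etcdctl` v3 client (not part of the exam setup described below):

```bash
# Hypothetical example - store a value under a key and read it back
etcdctl put /demo/message "hello cluster"
etcdctl get /demo/message
```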
<!-- TOC --> - [Network Time Protocol Service](#network-time-protocol-service) - [Setting a Hostname](#setting-a-hostname) - [FirewallD](#firewalld) - [Disable SELinux Enforcement](#disable-selinux-enforcement) - [Add a FirewallD Service](#add-a-firewalld-service) - [Install Docker & Kubernetes on CentOS](#install-docker--kubernetes-on-centos) - [Configuring the Master Server](#configuring-the-master-server) - [Configuring Nodes](#configuring-nodes) - [Install Docker & Kubernetes on Debian](#install-docker--kubernetes-on-debian) - [Working with Containers](#working-with-containers) - [Provisioning a MySQL Database](#provisioning-a-mysql-database) - [Connecting to your MySQL Database](#connecting-to-your-mysql-database) - [Linking Containers](#linking-containers) - [Accessing a Container](#accessing-a-container) - [Container Logs](#container-logs) - [Listening to Docker Events](#listening-to-docker-events) - [Inspect Docker Container](#inspect-docker-container) - [Exposing Containers](#exposing-containers) - [Persistent Storage for Containers](#persistent-storage-for-containers) - [Shared persistent Storage Volumes](#shared-persistent-storage-volumes) - [Host and Container Basic Security](#host-and-container-basic-security) - [Orchestrating Containers Using Kubernetes](#orchestrating-containers-using-kubernetes) - [Working with Images](#working-with-images) - [Build a Custom Image Container](#build-a-custom-image-container) - [Managing Images](#managing-images) - [Image Lifecycle](#image-lifecycle) - [Application Workflow](#application-workflow) - [Multiple Container Application Deployment](#multiple-container-application-deployment) - [Database Installation](#database-installation) - [Wordpress Configuration](#wordpress-configuration) - [Creating the Docker Container](#creating-the-docker-container) - [Kubernetes Service Configuration](#kubernetes-service-configuration) <!-- /TOC --> ## Network Time Protocol Service NTP is a protocol that runs over UDP port 123 at the transport layer and allows computers to synchronize their clocks over a network. This service - by default - is handled by __Chrony.d__ on CentOS 7 and higher. But we are going to use the __ntp__ package instead. You can check if __Chrony.d__ is active on your system with the command `systemctl status chronyd.service`, and stop and disable it with `systemctl stop chronyd.service`, `systemctl disable chronyd.service`. To install __ntp__ run `yum install ntp -y`. Run `systemctl enable ntpd` and `systemctl start ntpd` to activate the NTP service. You can verify that the service is working with `systemctl status ntpd` and `ntpstat`: --- ![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_01.png) --- ## Setting a Hostname We now need to make sure that all of our CentOS servers can talk to each other via a hostname that is bound to their internal IP address.
This can be set in `nano /etc/hosts`: --- ![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_02.png) --- Add all servers that you want to connect by their local IP and assign a host name, by which you want to call them - add those lines to all your server's host files: ``` 192.168.2.110 instar.centos.master 192.168.2.111 instar.centos.minion1 ``` You can test if the hostname is used by pinging it: --- ![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_03.png) --- ## FirewallD ### Disable SELinux Enforcement Edit `nano /etc/selinux/config` and set `SELINUX=permissive` on both the master and minion servers - then reboot the servers. ### Add a FirewallD Service Configure FirewallD for Kubernetes to work correctly. First download the k8s-master.xml and k8s-worker.xml files to `cd /etc/firewalld/services` on your master and minion server: ```bash wget https://raw.githubusercontent.com/mpolinowski/k8s-firewalld/master/k8s-master.xml wget https://raw.githubusercontent.com/mpolinowski/k8s-firewalld/master/k8s-worker.xml firewall-cmd --reload ``` __On Master Node__ ```bash firewall-cmd --add-service=k8s-master --zone=public --permanent ``` <!-- ```bash firewall-cmd --permanent --add-port=2370/tcp firewall-cmd --permanent --add-port=2379-2380/tcp firewall-cmd --permanent --add-port=6443/tcp firewall-cmd --permanent --add-port=8472/udp firewall-cmd --permanent --add-port=10250/tcp firewall-cmd --permanent --add-port=10251/tcp firewall-cmd --permanent --add-port=10252/tcp firewall-cmd --permanent --add-port=10255/tcp firewall-cmd --reload ``` --> __On Minion Nodes__ ```bash firewall-cmd --add-service=k8s-worker --zone=public --permanent ``` <!-- ```bash firewall-cmd --add-masquerade --permanent firewall-cmd --permanent --add-port=2379/tcp firewall-cmd --permanent --add-port=2370/tcp firewall-cmd --permanent --add-port=8472/udp firewall-cmd --permanent --add-port=10250/tcp firewall-cmd --permanent --add-port=10251/tcp firewall-cmd --permanent --add-port=10255/tcp firewall-cmd --reload ``` --> ## Install Docker & Kubernetes on CentOS First we need to add the repository to pull the Docker code from - type `nano /etc/yum.repos.d/virt7-docker-common-release.repo` and add: ``` [virt7-docker-common-release] name=virt7-docker-common-release baseurl=http://cbs.centos.org/repos/virt7-docker-common-release/x86_64/os/ gpgcheck=0 ``` Then install [Docker](https://www.docker.com/), [Kubernetes](https://kubernetes.io) and [etcd](https://coreos.com/etcd/docs/latest/): ```bash yum -y install docker kubernetes etcd systemctl enable docker systemctl start docker ``` ### Configuring the Master Server Now we need to edit the __Kubernetes config__ `nano /etc/kubernetes/config` and add the following lines: ``` # How the controller-manager, scheduler, and proxy find the apiserver KUBE_MASTER="--master=http://instar.centos.master:8080" KUBE_ETCD_SERVERS="--etcd-servers=http://instar.centos.master:2379" ``` --- ![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_04.png) --- We continue with configuring __etcd__ - the configuration only needs to be added to the __master server__ and can be found under `nano /etc/etcd/etcd.conf`. Here we need to change two URLs from __localhost__ to __0.0.0.0__ to make etcd listen on all available network interfaces.
```
ETCD_LISTEN_CLIENT_URLS="http://0.0.0.0:2379"
ETCD_ADVERTISE_CLIENT_URLS="http://0.0.0.0:2379"
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_05.png)

---

Now we can edit the __Kubernetes API Server__ configuration under `nano /etc/kubernetes/apiserver`:

```
# The address on the local server to listen to.
KUBE_API_ADDRESS="--address=0.0.0.0"

# The port on the local server to listen on.
KUBE_API_PORT="--port=8080"

# Port minions listen on
KUBELET_PORT="--kubelet-port=10250"

# default admission control policies
# KUBE_ADMISSION_CONTROL="--admission-control=NamespaceLifecycle,NamespaceExists,LimitRanger,SecurityContextD$
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_06.png)

---

We now have to make sure that the services on the __master server__ are started in a particular order:

```bash
systemctl enable etcd kube-apiserver kube-controller-manager kube-scheduler
systemctl start etcd kube-apiserver kube-controller-manager kube-scheduler
systemctl status etcd kube-apiserver kube-controller-manager kube-scheduler | grep "(running)"
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_07.png)

---

### Configuring Nodes

Now we need to set up the slave server configuration. We will start with __Kubernetes__ by editing `nano /etc/kubernetes/kubelet`:

```
# kubernetes kubelet (minion) config

# The address for the info server to serve on (set to 0.0.0.0 or "" for all interfaces)
KUBELET_ADDRESS="--address=0.0.0.0"

# The port for the info server to serve on
KUBELET_PORT="--port=10250"

# You may leave this blank to use the actual hostname
KUBELET_HOSTNAME="--hostname-override=instar.centos.minion1"

# location of the api-server
KUBELET_API_SERVER="--api-servers=http://instar.centos.master:8080"

# pod infrastructure container
# KUBELET_POD_INFRA_CONTAINER="--pod-infra-container-image=registry.access.redhat.com/rhel7/pod-infrastructur$
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_08.png)

---

And `nano /etc/kubernetes/config`:

```
# How the controller-manager, scheduler, and proxy find the apiserver
KUBE_MASTER="--master=http://instar.centos.master:8080"
KUBE_ETCD_SERVERS="--etcd-servers=http://instar.centos.master:2379"
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_09.png)

---

With the configuration done, we can now start up the services on the minion server(s) with `systemctl enable kube-proxy kubelet docker` and `systemctl start kube-proxy kubelet docker`.
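For convenience, here is the same startup sequence as one block - the status check at the end mirrors the one we used on the master server:

```bash
systemctl enable kube-proxy kubelet docker
systemctl start kube-proxy kubelet docker
systemctl status kube-proxy kubelet docker | grep "(running)"
```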
Going back to your master server, you can now list all the configured nodes with `kubectl get nodes`:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_10.png)

---

__Troubleshooting__: If your node is not listed, try stopping FirewallD - `systemctl stop firewalld`

## Install Docker & Kubernetes on Debian

_Kubernetes_ (note that `apt-get update` will refuse the repository without its signing key, so we add it first):

```
apt-get update && apt-get install -y apt-transport-https ca-certificates curl gnupg2 software-properties-common
curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
echo "deb https://apt.kubernetes.io/ kubernetes-xenial main" | sudo tee -a /etc/apt/sources.list.d/kubernetes.list
apt-get update
apt-get install -y kubelet kubeadm kubectl
apt-mark hold kubelet kubeadm kubectl
```

_Kubernetes (Chinese Mirror)_:

```
apt-get update && apt-get install -y apt-transport-https
curl -s https://mirrors.aliyun.com/kubernetes/apt/doc/apt-key.gpg | apt-key add -

cat <<EOF >/etc/apt/sources.list.d/kubernetes.list
deb https://mirrors.aliyun.com/kubernetes/apt/ kubernetes-xenial main
EOF

apt-get update
apt-get install -y kubelet kubeadm kubectl
```

_Docker_:

```
apt-get remove docker docker-engine docker.io
curl -fsSL https://download.docker.com/linux/debian/gpg | sudo apt-key add -
add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/debian $(lsb_release -cs) stable"
apt-get update
apt-get install docker-ce

systemctl enable docker
systemctl start docker
systemctl status docker
```

## Working with Containers

### Provisioning a MySQL Database

We installed __Docker__ in the previous step - make sure that the service is running with `docker ps` (you should see an empty list). Let's fill this list by adding a container. We want to run a MySQL database with `docker run mysql` - but we first need to provide a name under which this container should be listed as well as a root password for the MySQL database:

```
docker run --name=test1-mysql --env="MYSQL_ROOT_PASSWORD=<PASSWORD>" mysql
```

This will run the process attached to the active terminal - the following line tells you that the database is up and running:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_11.png)

---

You can stop the process by typing `docker stop test1-mysql` inside a secondary console on your minion server. You can check all containers - running and inactive - with `docker ps -a` and check the log of a specific container with `docker logs <container name>`:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_12.png)

---

To remove the container type `docker rm test1-mysql`. And to run the container in the background add the flag `--detach`:

```
docker run --detach --name=test1-mysql --env="MYSQL_ROOT_PASSWORD=<PASSWORD>" mysql
```

You might see a `WARNING: IPv4 forwarding is disabled. Networking will not work.` -> In this case `nano /etc/sysctl.conf` and add the line `net.ipv4.ip_forward=1`. Restart the network service `systemctl restart network` and check `sysctl net.ipv4.ip_forward`:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_13.png)

---

Afterwards `docker stop test1-mysql` and `docker rm test1-mysql` - then run it again in detached mode. The error should now be gone.

### Connecting to your MySQL Database

To communicate with the database we first have to install the MySQL client via `yum install mysql`.
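As one line, with a quick check that the client binary is available afterwards:

```bash
yum install -y mysql
mysql --version
```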
And when the container is running in the background, use `docker inspect test1-mysql` to find out the IP address assigned to your MySQL database:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_14.png)

---

You can use this IP to connect to your MySQL database with `mysql -h 172.17.0.2 -p` (the h-flag adds the host address and the p-flag signifies that we have set a password that needs to be given to access the database). After typing in your password, you might be confronted by an error message `Authentication plugin 'caching_sha2_password' cannot be loaded` - in this case stop and remove the container and restart it with the flag `--default-authentication-plugin=mysql_native_password`:

```
docker run --detach --name=test1-mysql --env="MYSQL_ROOT_PASSWORD=<PASSWORD>" mysql --default-authentication-plugin=mysql_native_password
```

You should now be able to establish the connection:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_15.png)

---

You can clean out the test container with `docker stop test1-mysql` and `docker rm test1-mysql` - but you can also keep it for the next step.

## Linking Containers

We now want to run a Wordpress container that has access to the MySQL database. First run the MySQL container:

```
docker run --detach --name=test1-mysql --env="MYSQL_ROOT_PASSWORD=<PASSWORD>" mysql --default-authentication-plugin=mysql_native_password
```

Then add a Wordpress container and link it with the flag `--link test1-mysql:mysql`:

```
docker run --detach --name=test1-wordpress --link test1-mysql:mysql -e="WORDPRESS_DB_PASSWORD=<PASSWORD>" -e="WORDPRESS_DB_USER=root" wordpress
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_16.png)

---

## Accessing a Container

We can now access the Wordpress container and run a bash terminal inside it to configure the Wordpress installation with `docker exec -it test1-wordpress bash`:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_17.png)

---

Check the hosts file `cat /etc/hosts` to see that the MySQL instance is linked to the Wordpress installation.
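With the legacy `--link` flag, Docker writes an alias entry for the linked container into the hosts file. The exact IP and container ID will differ on your system - the values below are placeholders that just show the shape of the entry:

```bash
cat /etc/hosts
# ...
# 172.17.0.2    mysql 0123456789ab test1-mysql    <- entry created by --link (example values)
```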
We also see that the IP address of the Wordpress container is __172.17.0.3__ - `exit` the container and run `elinks http://172.17.0.3` (you might have to install elinks first with `yum -y install elinks` - it is a text based browser for Linux) - you can see that Wordpress is actually running:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_18.png)

---

We can verify that Wordpress is using the MySQL database by running `mysql -h 172.17.0.2 -p` again and `show databases;`:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_19.png)

---

You can also go back into the Wordpress container with `docker exec -ti test1-wordpress bash` and use the `set` command to see all the MySQL variables that have been set by linking both containers:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_20.png)

---

## Container Logs

To access the [log outputs](https://docs.docker.com/engine/reference/commandline/logs/) of detached containers run `docker logs <container name>`:

```bash
docker logs test1-wordpress //full logs
docker logs -f test1-wordpress //follow log - live streaming
docker logs --tail 5 test1-wordpress //only the last five lines
```

## Listening to Docker Events

Get real time [events](https://docs.docker.com/engine/reference/commandline/events/) from the server with the following command `docker events [OPTIONS]`:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_21.png)

---

```bash
docker events --since '2018-12-31T13:23:37' //Show events since a timestamp
docker events --until '42m' //Show events up to a (relative) timestamp
docker events --filter "name=test1-wordpress" --until '10m' //filter by container name
docker events --filter "image=mysql" --until '10m' //filter by image type
```

## Inspect Docker Container

Return low-level [information on Docker objects](https://docs.docker.com/engine/reference/commandline/inspect/) with `docker inspect [OPTIONS] NAME|ID [NAME|ID...]`. To get the full information simply run `docker inspect test1-mysql` - to get a specific line, just query a value out of the JSON array:

```bash
docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' test1-mysql //Get an instance's IP address
docker inspect --format='{{range .NetworkSettings.Networks}}{{.MacAddress}}{{end}}' test1-mysql //Get an instance's MAC address
docker inspect --format='{{.LogPath}}' test1-mysql //Get an instance's log path
docker inspect --format='{{.Id}}' test1-mysql //Get the container ID
```

## Exposing Containers

You can use the publish option to expose ports from the container to the outside world - this is one method to make the service inside a container reachable over the network:

```
docker run --detach --name=test1-mysql --env="MYSQL_ROOT_PASSWORD=<PASSWORD>" --publish 6603:3306 mysql --default-authentication-plugin=mysql_native_password
```

This will expose the internal port 3306 of the MySQL container to be reachable over port 6603 on the network.
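You can double-check the mapping before connecting from the outside - `docker port` lists all published ports of a container:

```bash
docker port test1-mysql
# 3306/tcp -> 0.0.0.0:6603
```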
You are now able to connect to your MySQL database with `mysql -h 192.168.2.111 -p -P6603`, where `-h` is the local IP of your CentOS server and `-P` is the exposed port of your docker container:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_22.png)

---

## Persistent Storage for Containers

You can use the [volume command](https://docs.docker.com/engine/reference/commandline/volume_create/) to create a volume that holds data for attached containers:

```bash
docker volume create [OPTIONS] [VOLUME] //create volumes
docker volume inspect [OPTIONS] VOLUME [VOLUME...] //Inspect volumes
docker volume ls [OPTIONS] //List volumes
docker volume prune [OPTIONS] //Remove all unused local volumes
docker volume rm [OPTIONS] VOLUME [VOLUME...] //Remove one or more volumes
```

We can now create a volume with the name __ShareVol1__ and extend an Ubuntu container with it:

```bash
docker volume create --name ShareVol1
docker run -ti -v ShareVol1:/sharevol1 ubuntu
```

You are now inside the Ubuntu container and can use the `df -h` command to check if `/sharevol1` was successfully mounted:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_23.png)

---

We can now create a file inside the shared space and exit the container:

```bash
cd /sharevol1
touch test1
echo "hello world" > test1
exit
```

To find out where the data inside the volume was stored, we can use the `docker volume inspect` command and verify that the __test1__ file was actually stored in that directory:

```bash
docker volume inspect ShareVol1
cd /var/lib/docker/volumes/ShareVol1/_data
cat test1
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_24.png)

---

### Shared persistent Storage Volumes

A volume can be attached to more than one Docker container, enabling shared usage of data. First create a new volume and an Ubuntu container that uses it:

```bash
docker volume create --name NewVol1
docker run -ti -v NewVol1:/newvol1 --name=UbuntuOne ubuntu
```

Then open a second terminal on the same host and mount that volume from __UbuntuOne__ into a second Ubuntu container:

```bash
docker run -ti --volumes-from UbuntuOne --name=UbuntuTwo ubuntu
```

Create a file inside the shared volume on __UbuntuTwo__:

```bash
cd /newvol1
touch test1
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_25.png)

---

And this file will show up inside the shared volume in the __UbuntuOne__ container:

```bash
cd /newvol1
ll
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_26.png)

---

## Host and Container Basic Security

* Docker always runs as root - when you are in the docker group, you effectively have root access to everything. To secure your server, you should run Docker inside a VM and limit the number of active processes inside a container with `docker run --pids-limit=64`.
* You can also use [apparmor](https://docs.docker.com/engine/security/apparmor/), [seccomp](https://docs.docker.com/engine/security/seccomp/) or [SELinux](https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux_atomic_host/7/html/container_security_guide/docker_selinux_security_policy) to secure your application.
<!--
https://cloud.google.com/container-optimized-os/docs/how-to/secure-apparmor
https://medium.com/lucjuggery/docker-apparmor-30-000-foot-view-60c5a5deb7b
https://medium.com/lucjuggery/docker-selinux-30-000-foot-view-30f6ef7f621
-->

* Control the privileges of new processes with `docker run --security-opt=no-new-privileges`
* Turn off inter-process communication with `docker --ipc=false`
* Disable iptables changes with `docker --iptables=false`
* Run Docker as read-only `docker run --read-only`
* Mount volumes as read-only `docker run -v $(pwd)/secrets:/secrets:ro ubuntu`
* Use a hash to pull images `docker pull debian@sha256:a35465875...`
* Limit memory and CPU shares `docker run -c 512 -m 512m`
* Define and run a user in your Dockerfile, so you are not running as root inside the container `RUN groupadd -r user && useradd -r -g user user`

## Orchestrating Containers Using Kubernetes

We now want to run two containers on our node server - the minion that provides the docker runtime environment for the containers - and manage them in pods from our Kubernetes master server. Go to your master server and use Kubernetes to run a Wordpress container:

```bash
kubectl run wordpress --image=wordpress --port=80 --hostport=81
kubectl get pods
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_27.png)

---

__tbc...__

## Working with Images

So far we only worked with containers from the Docker Hub. Let's build our own Docker container from a custom image file - a Dockerfile. Create the folder structure `~/docker/builds/test1` inside your home directory and enter it. Now create the following file `nano Dockerfile`:

```dockerfile
# a test file
FROM ubuntu
RUN apt-get update
ADD files* /mydir1/
ADD test/ /test2/
WORKDIR /path
USER administrator
CMD "Echo" "Hello from your Docker Container!"
ENTRYPOINT echo
ONBUILD ADD . /app/src
ONBUILD RUN /usr/local/bin/python-build --dir /app/src
EXPOSE 8080
ENV NAME_FOR_FILE "foo bar"
ENV SERVER_NO 4
ENV NEW_WORK_DIR /folder1
WORKDIR ${NEW_WORK_DIR}
# Or just WORKDIR $NEW_WORK_DIR
ENV abc=hello
ENV abc=bye def=$abc
ENV ghi=$abc
# this results in def=hello and ghi=bye
```

* The __FROM__ statement has to be at the top of the _Dockerfile_ to signify the image we want our container to be based on.
* The next line starts with __RUN__, which gives a command that is executed during the build process.
* The __ADD__ command takes a source and a destination argument and allows you to copy files from a source onto the container file system. If the source is an URL, the content will be downloaded during the build. If it is a local folder, the content of the folder will be copied - the path has to be relative to the location of the Dockerfile in this case. The command also accepts archives that will be unpacked during the build.
* The __COPY__ instruction is similar to the ADD command and copies new files from the source to the container fs. If you have multiple steps that use COPY, use the instruction individually so that each build step's cache is only invalidated if those files change.
* The __difference to the ADD command__ is that you cannot use COPY for sources on remote URLs and for archive extractions. You should prefer COPY over ADD when possible.
* __WORKDIR__ is a directive that sets the working directory in which the command defined with CMD is run. It can also set the destination of a following COPY command.
Multiple working directories can be used - if a relative path is used, the path is set in relation to the previous WORKDIR.
* The __USER__ directive is used to set the UID or username which is to run the container based on the image file.
* __CMD__ is similar to RUN and can be used to execute a specific command. But the command is __run after the build__, when the container is instantiated.
* The __ENTRYPOINT__ sets the default application that is started when the container is run. Both the CMD and ENTRYPOINT commands allow you to specify a startup command for an image.
* CMD can be overwritten with the `docker run` command - to overwrite the ENTRYPOINT you will have to add the `--entrypoint` flag. Use the ENTRYPOINT when you are building a container that is exclusively used to run a specific application.
* __ONBUILD__ adds a trigger instruction to be executed when the image is used as a base for another build and _does not affect the current build_. Any build instruction can be registered as a trigger.
* __EXPOSE__ is used to open a specific port and enables networking from inside the container, as well as networking between containers on the docker host. You can also use the `-p` flag with the `docker run` command - but it adds transparency when you use EXPOSE inside your image file instead.
* __ENV__ is used to set environment variables and is formatted as a key=value pair. The variables can be accessed by scripts from inside the container.
* __VOLUME__ creates a mount point inside the container for externally mounted volumes from the docker host or other containers. The value can be a JSON array or a plain string with multiple arguments. (see below)

```dockerfile
FROM ubuntu
RUN mkdir /myvol
RUN echo "hello world" > /myvol/greeting
VOLUME /myvol
```

Put this Dockerfile into the __test1__ folder, then build the image and run the container with the following commands:

```bash
docker build -t testvol1 /root/docker/builds/test1
docker run -ti testvol1 /bin/bash
```

You can verify that the greeting was written with `cat /myvol/greeting`:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_28.png)

---

The volume also exists on the host system under `/var/lib/docker/volumes` and all data in it will persist, even if the container that created it is purged.

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_29.png)

---

### Build a Custom Image Container

* We will create a _tar.gz_ file to use as content for Apache.
* Write a Dockerfile based on the _centos image_:
* Add Apache to the image
* Extract the content from our archive to the Apache folder
* Expose a port to serve the content
* Set Apache to start when the container starts

Let's start by creating a folder `mkdir apachewebsite` and adding another folder `src` inside that holds our web content - `nano /root/docker/builds/apachewebsite/src/index.html`:

```html
<!doctype html>
<html class="no-js" lang="en">

<head>
  <meta charset="utf-8">
  <meta http-equiv="x-ua-compatible" content="ie=edge">
  <title>Page Title</title>
  <meta name="description" content="page description">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <link rel="apple-touch-icon" href="icon.png">
  <!-- Place favicon.ico in the root directory -->
</head>

<body>
  <!--[if lte IE 9]>
  <p class="browserupgrade">You are using an <strong>outdated</strong> browser.
Please <a href="https://browsehappy.com/">upgrade your browser</a> to improve your experience and security.</p>
  <![endif]-->
  <!-- Add your site or application content here -->
  <p>Hello world! This is HTML5 Boilerplate.</p>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/modernizr/2.8.3/modernizr.min.js"></script>
  <script src="http://code.jquery.com/jquery-3.3.1.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
</body>

</html>
```

To compress our website content we can run `tar -czf websitesrc.tar.gz index.html`.

Now we can add our Dockerfile to the __apachewebsite__ folder - `nano /root/docker/builds/apachewebsite/Dockerfile`:

```dockerfile
# This is a template for an httpd hosted static website
FROM centos
MAINTAINER <EMAIL>
RUN yum -y install httpd elinks
ADD ./src/websitesrc.tar.gz /var/www/html
EXPOSE 80
ENTRYPOINT ["/usr/sbin/httpd", "-D", "FOREGROUND"]
```

To build the image we can now type:

```
docker build -t apachewebsite /root/docker/builds/apachewebsite/
```

And run the docker container with:

```
docker run -tid apachewebsite
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_30.png)

---

By using `docker ps` we can see that the container was assigned the name `inspiring_austin`. We can now use `docker inspect inspiring_austin | grep IP` to find out the IP address the container was given:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_31.png)

---

In this case it is __172.17.0.2__, which we can use with __elinks__ to check if our website is now hosted - `elinks http://172.17.0.2`:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_32.png)

---

To stop and remove the container run the following commands:

```bash
docker stop inspiring_austin
docker rm inspiring_austin
```

## Managing Images

### Image Lifecycle

* A need for an image occurs
* A Dockerfile is created
* The docker image is built
* The docker container is run from it and tested
* The image is modified where needed
* The now approved image is moved to production
* In case of problems the image is sent back to be modified
* An image that is no longer needed is stopped
* Old unused images are deleted

To remove all images and containers on your server run:

```bash
docker rm `docker ps --no-trunc -aq`
docker rmi $(docker images -q)
```

### Application Workflow

* Image Creation
* Image Tags
* Push to Registry
* Pull from Registry

Let's use the centos7-apache image from the last chapter to learn how to add release tags and to publish our image on the Docker Hub:

```dockerfile
# This is a template for an httpd hosted static website
FROM centos
MAINTAINER <EMAIL>
RUN yum -y install httpd elinks
ADD ./src/websitesrc.tar.gz /var/www/html
EXPOSE 80
ENTRYPOINT ["/usr/sbin/httpd", "-D", "FOREGROUND"]
```

I rebuilt the image under a different name, tagged with the Docker Hub repository that I want to push it to, and gave it a version tag:

```bash
docker build -t mpolinowski/centos7-apache:beta .
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_33.png)

---

To upload the image to the [Docker Hub](https://hub.docker.com/) you first need to create an account there.
You can then log in to your account from your console and push the image:

```
docker login
docker push mpolinowski/centos7-apache
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_34.png)

---

The image should now show up in your Docker Hub repository list:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_35.png)

---

We can then bump the version to v1 and latest with a rebuild + push:

```bash
docker build -t mpolinowski/centos7-apache:v1 -t mpolinowski/centos7-apache .
docker push mpolinowski/centos7-apache
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_36.png)

---

To pull an image from the Docker Hub use the pull command - not adding a tag will always pull the latest version:

```
docker pull mpolinowski/centos7-apache
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_37.png)

---

## Multiple Container Application Deployment

I need to build a custom container image for the deployment of an application over multiple containers - this will be a customized Wordpress application hosted with Apache with a MySQL database connection.

### Database Installation

Let's first run the MySQL container on the Minion Server:

```bash
docker run --detach --name=test1-mysql --env="MYSQL_ROOT_PASSWORD=<PASSWORD>" mysql --default-authentication-plugin=mysql_native_password
```

To connect to your database, you first have to find out the IP address of the mysql container with `docker inspect <name of the container>` (the name can be found with `docker ps`). Then use the mysql client (`yum install -y mysql`) to connect (the password is the root password we set when we ran the container):

```sql
mysql -h 172.17.0.2 -p

create database wordpress;
show databases;
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_38.png)

---

We now need to create a user for the Wordpress application and then exit the MySQL client:

```sql
CREATE USER 'wordpress'@'%' IDENTIFIED BY '<PASSWORD>';
GRANT ALL PRIVILEGES ON wordpress.* TO 'wordpress'@'%';
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_39.png)

---

### Wordpress Configuration

We now need to download Wordpress to configure it. We are going to use the configuration file later to modify the default Wordpress image from the Docker Hub:

```bash
mkdir /root/docker/builds/centos7-wordpress-shared
cd /root/docker/builds/centos7-wordpress-shared
wget http://wordpress.org/latest.tar.gz
tar -xvf latest.tar.gz
cd wordpress
cp wp-config-sample.php ../
cd ..
mv wp-config-sample.php wp-config.php
```

We now have a copy of the sample configuration file inside the project folder - let's edit it to suit our database setup `nano wp-config.php`:

```php
// ** MySQL settings - You can get this info from your web host ** //
/** The name of the database for WordPress */
define('DB_NAME', 'wordpress');

/** MySQL database username */
define('DB_USER', 'wordpress');

/** MySQL database password */
define('DB_PASSWORD', '<PASSWORD>');

/** MySQL hostname */
define('DB_HOST', '172.17.0.2');
```

The second part we need to change are the unique keys and salts that Wordpress uses for its authentication.
You can autogenerate them with the [Wordpress API](https://api.wordpress.org/secret-key/1.1/salt/) - copy and paste them to replace the following block in the config file:

```php
/**#@+
 * Authentication Unique Keys and Salts.
 *
 * Change these to different unique phrases!
 * You can generate these using the {@link https://api.wordpress.org/secret-key/1.1/salt/ WordPress.org secre$
 * You can change these at any point in time to invalidate all existing cookies. This will force all users to$
 *
 * @since 2.6.0
 */
define('AUTH_KEY', 'put your unique phrase here');
define('SECURE_AUTH_KEY', 'put your unique phrase here');
define('LOGGED_IN_KEY', 'put your unique phrase here');
define('NONCE_KEY', 'put your unique phrase here');
define('AUTH_SALT', 'put your unique phrase here');
define('SECURE_AUTH_SALT', 'put your unique phrase here');
define('LOGGED_IN_SALT', 'put your unique phrase here');
define('NONCE_SALT', 'put your unique phrase here');
/**#@-*/
```

### Creating the Docker Container

```dockerfile
# Docker file for the centos7-wordpress-shared image
FROM centos:7
MAINTAINER <EMAIL>

# Install our Apache and PHP
RUN yum -y install httpd elinks php php-mysql

# Get & Unzip Wordpress files
ADD latest.tar.gz /var/www

# Remove the Apache default page
RUN ["rm", "-rf", "/var/www/html"]

# Replace it with Wordpress
RUN ["cp", "-r", "/var/www/wordpress","/var/www/html"]

# Replace the Wordpress default Config
ADD wp-config.php /var/www/html

# Remove the Wordpress Sample Config
RUN ["rm", "/var/www/html/wp-config-sample.php"]

# Expose the Web Port
EXPOSE 80

# Run Apache when container starts
ENTRYPOINT ["/usr/sbin/httpd", "-D", "FOREGROUND"]
```

We can test the Dockerfile by building the image locally:

```bash
docker build -t centos7-wordpress-shared .
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_40.png)

---

Now we can test the image with:

```bash
docker run -tid --name=test1-wordpress centos7-wordpress-shared
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_41.png)

---

We can check that Apache and Wordpress are running and connected to our MySQL database by going into the container:

```bash
docker exec -ti test1-wordpress /bin/bash

ps -ef
cd /var/www/html
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_42.png)

---

Apache was started and all the Wordpress files have been copied over - you can also check if the default config file was replaced with `cat wp-config.php`. And to see if the Wordpress web frontend is running, we need to check the container IP `docker inspect test1-wordpress` and use __elinks__ to access it on port 80:

```bash
elinks http://172.17.0.3
```

You should see the following welcome screen if the MySQL connection was found:

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_43.png)

---

We can now push the _centos7-wordpress-shared_ image to the Docker Hub:

```bash
cd /root/docker/builds/centos7-wordpress-shared
docker build -t mpolinowski/centos7-wordpress-shared .
docker push mpolinowski/centos7-wordpress-shared
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_45.png)

---

### Kubernetes Service Configuration

Let's now create a service for Kubernetes on the __Master Server__ inside `/root/docker/services`:

```bash
nano /root/docker/services/service-centos7-wordpress-shared.yaml
```

```yaml
apiVersion: v1
kind: Service
metadata:
  name: service-centos-wordpress
spec:
  ports:
    - nodePort: 32400
      port: 8000
      protocol: TCP
      targetPort: 80
  selector:
    app: centos7-wordpress
  type: NodePort
```

```bash
kubectl create -f ./service-centos7-wordpress-shared.yaml
kubectl get services
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_44.png)

---

```bash
nano /root/docker/services/centos7-wordpress-shared.yaml
```

```yaml
apiVersion: v1
kind: ReplicationController
metadata:
  name: centos7-wordpress-shared
spec:
  replicas: 2
  selector:
    app: centos7-wordpress
  template:
    metadata:
      name: centos7-wordpress
      labels:
        app: centos7-wordpress
    spec:
      containers:
        - name: centos7-wordpress-shared1
          image: mpolinowski/centos7-wordpress-shared
          ports:
            - containerPort: 80
```

```bash
kubectl create -f centos7-wordpress-shared.yaml
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_46.png)

---

We can verify that the services for _centos7-wordpress-shared_ are running by typing on the __Master Server__:

```bash
kubectl describe replicationcontroller
kubectl get pods
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_47.png)

---

The corresponding containers should be listed on our __Minion Server__:

```bash
docker ps
```

---

![Red Hat Certified Specialist in Containerized Application Development](./Containerized_Application_Development_48.png)

---

Instead of running the MySQL container on the Minion Server we now want to make our master into the database server for our cluster. Run the following command - note that this time we need to expose the port __3306__ to have access to it over the regular network:

```bash
docker run --detach --name=mysql-wordpress --env="MYSQL_ROOT_PASSWORD=<PASSWORD>" -p 3306:3306 mysql --default-authentication-plugin=mysql_native_password
```

We want to allow access to our database from another server, so we also need to configure FirewallD:

```bash
firewall-cmd --zone=public --add-service=mysql --permanent
firewall-cmd --reload
firewall-cmd --list-services
```

To connect to your database, you first have to find out the IP address of the mysql container with `docker inspect mysql-wordpress`.
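Using the format string from the _Inspect Docker Container_ section above, you can also pull out just the address in one line:

```bash
docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' mysql-wordpress
```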
Then use the mysql client (`yum install -y mysql`) to connect (the password is the root password we set when we ran the container):

```sql
mysql -h 172.17.0.2 -p

create database wordpress;
show databases;
```

We now need to create a user for the Wordpress application and then exit the MySQL client:

```sql
CREATE USER 'wordpress'@'%' IDENTIFIED BY '<PASSWORD>';
GRANT ALL PRIVILEGES ON wordpress.* TO 'wordpress'@'%';
```

Exit the MySQL session and log back in using the _wordpress_ user with the password _<PASSWORD>_:

```bash
mysql -h 127.0.0.1 -u wordpress -p
```

To test if the database can be accessed from our node server, switch to the __Minion__ and try:

```bash
mysql -h instar.centos.master -u wordpress -p

show databases;
```

You should be able to see the `wordpress` database.
---
date: "2020-06-07"
title: "Developing WikiJS in Docker"
categories:
- Databases
- Docker
---

![Central, Hong Kong](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->
- [Preparations](#preparations)
- [Dockerfile](#dockerfile)
- [config.yml](#configyml)
- [docker-compose.yml](#docker-composeyml)
- [Spinning up Docker](#spinning-up-docker)
- [Running the App](#running-the-app)
- [Stopping the App](#stopping-the-app)
- [Building production assets](#building-production-assets)
<!-- /TOC -->

## Preparations

Based on the official [WikiJS Docker Sample](https://github.com/Requarks/wiki/tree/dev/dev/containers). Start by cloning the [WikiJS Repository](https://codeload.github.com/Requarks/wiki/zip/dev) into a folder called `wiki-source`.
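If you prefer `git` over the zip download, cloning the `dev` branch directly should give you the same file tree (assuming git is installed):

```bash
git clone -b dev https://github.com/Requarks/wiki.git wiki-source
```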
Continue by creating the following 3 files next to this folder: ### Dockerfile ```dockerfile FROM node:12 LABEL maintainer "<EMAIL>" RUN apt-get update && \ apt-get install -y bash curl git python make g++ nano openssh-server gnupg && \ mkdir -p /wiki WORKDIR /wiki ENV dockerdev 1 ENV DEVDB postgres EXPOSE 3000 CMD ["tail", "-f", "/dev/null"] ``` --- ### config.yml ```yml port: 3000 bindIP: 0.0.0.0 db: type: postgres host: db port: 5432 user: wikijs pass: <PASSWORD> db: wiki logLevel: info ``` --- ### docker-compose.yml ```yml version: "3" services: db: container_name: wiki-db image: postgres:9-alpine environment: POSTGRES_DB: wiki POSTGRES_PASSWORD: wikijsrocks POSTGRES_USER: wikijs logging: driver: "none" volumes: - db-data:/var/lib/postgresql/data ports: - "15432:5432" adminer: container_name: wiki-adminer image: adminer:latest logging: driver: "none" ports: - "3001:8080" wiki: container_name: wiki-app build: context: ./wiki-source dockerfile: ../Dockerfile depends_on: - db ports: - "3000:3000" volumes: - ./wiki-source:/wiki - /wiki/node_modules - /wiki/.git volumes: db-data: ``` --- ## Spinning up Docker Run the build command to pull / build your images: ```bash sudo docker-compose build ``` ### Running the App Run the following commands: ```bash docker-compose -f docker-compose.yml up -d docker exec wiki-app npm install # only necessary the first time docker exec wiki-app npm run dev ``` This will start Wiki.js in dev mode. Client assets are compiled first (using Webpack), then the server will start automatically. Wait for this process to complete before loading the app! See [how to build for production](#building-production-assets) below. ![Run Wiki.js in Docker](Wikijs_Docker_01.png) Browse to the site, using the configuration you defined in config.yml. For example, if using port 3000 on your local machine, you would browse to `http://localhost:3000/`. The first time you load the wiki, you'll get greeted with the setup wizard. Complete all the steps to finish the installation. 
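If the page does not come up, tailing the app container - named `wiki-app` in the compose file above - usually shows what went wrong:

```bash
docker logs -f wiki-app
```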
### Stopping the App

Run the following to shut down the app:

```bash
docker-compose -f docker-compose.yml stop
```

To completely remove the containers run:

```bash
docker-compose -f docker-compose.yml down
```

To wipe the database as well, use:

```bash
docker-compose -f docker-compose.yml down --volumes
```

## Building production assets

Once you're ready to deploy your changes, you need to build the client assets into a production optimized bundle:

```bash
docker exec wiki-app npm run build
docker exec wiki-app npm run start
```

## Running the official Wiki.js Build

__wikijs-docker-compose.yaml__

```yml
version: "3"
services:

  db:
    image: postgres:11-alpine
    environment:
      POSTGRES_DB: wiki
      POSTGRES_PASSWORD: wiki<PASSWORD>
      POSTGRES_USER: wikijs
    logging:
      driver: "none"
    restart: unless-stopped
    volumes:
      - db-data:/var/lib/postgresql/data

  wiki:
    image: requarks/wiki:2
    depends_on:
      - db
    environment:
      DB_TYPE: postgres
      DB_HOST: db
      DB_PORT: 5432
      DB_USER: wikijs
      DB_PASS: <PASSWORD>
      DB_NAME: wiki
    restart: unless-stopped
    ports:
      - "80:3000"

volumes:
  db-data:
```

Build with:

```bash
sudo docker-compose -f wikijs-docker-compose.yaml up -d
```
const btn = document.querySelector("button")
const output = document.querySelector("#output")
const intake = document.querySelector("input")
const url = "https://randomuser.me/api/"

btn.addEventListener("click", getInput)

// Request as many random users as the number entered in the input field
function getInput() {
    const xhr = new XMLHttpRequest()
    const tempURL = url + "?results=" + intake.value

    xhr.onload = function () {
        if (xhr.status === 200) {
            // Parse the JSON response and hand the results array over for output
            const data = JSON.parse(xhr.responseText).results
            outputHTML(data)
        } else {
            console.log("error")
        }
    }

    xhr.open("GET", tempURL)
    xhr.send()
}

// Append the email address of every returned user to the output element
function outputHTML(data) {
    console.log(data)
    for (let i = 0; i < data.length; i++) {
        output.innerHTML += "<br>" + data[i].email + "<br>"
    }
}
---
date: "2020-05-02"
title: "Neo4j Movie Graph"
categories:
- Databases
---

![Shenzhen, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->
- [Cypher Queries](#cypher-queries)
- [Nodes, Properties and Relationships](#nodes-properties-and-relationships)
- [Filter](#filter)
- [Comparison](#comparison)
- [Transformation](#transformation)
- [Aggregation](#aggregation)
- [Other Functions](#other-functions)
- [Mathematical Functions](#mathematical-functions)
- [Movie Database Data](#movie-database-data)
<!-- /TOC -->

The software comes with a set of movie data that we can use to work with. Go to __Projects__ and open the preinstalled __Primer Project__.
![Neo4j Movie Graph](./Neo4j_Movie_Graphs_01.png)

Start the movie database and connect to your Neo4j backend with your username and password:

![Neo4j Movie Graph](./Neo4j_Movie_Graphs_02.png)

We can take a first look at our data by running the following query:

```bash
MATCH (anything) RETURN (anything)
```

![Neo4j Movie Graph](./Neo4j_Movie_Graphs_03.png)

## Cypher Queries

### Nodes, Properties and Relationships

Only return the first five nodes with the property __Person__:

```bash
MATCH (anyone:Person) RETURN (anyone) LIMIT 5
```

or property __Movie__:

```bash
MATCH (any:Movie) RETURN (any) LIMIT 5
```

Show the first 5 relationships between nodes:

```bash
MATCH (node1)--(node2) RETURN node1, node2 LIMIT 5
```

Name the relationship so you can add it to the return and specify the direction of a relationship by adding an arrow:

```bash
MATCH (node1)-[rel]->(node2) RETURN node1, node2, rel LIMIT 5
```

Specify the kind of relationship you want to query for:

```bash
MATCH (node1)-[rel:DIRECTED]->(node2) RETURN node1, node2, rel LIMIT 5
```

We can further specify properties our nodes have to have to be returned by our query:

```bash
MATCH (producer:Person)-[rel:PRODUCED]->(movie:Movie) RETURN producer, movie, rel LIMIT 5
```

Using the __OR__ statement - return all nodes with the property Person that either acted in or directed a movie:

```bash
MATCH (actors:Person)-[rel:ACTED_IN | DIRECTED]->(movie:Movie) RETURN actors, movie, rel LIMIT 5
```

Return a list of all movie titles inside our database:

```bash
MATCH (movie:Movie) RETURN movie.title
```

Take the result of the query above and find the directors that directed those movies:

```bash
MATCH (movie:Movie)
MATCH (director:Person)-[:DIRECTED]->(movie)
RETURN director.name
```

Further specify it by only matching directors that also acted in their own movie:

```bash
MATCH (movie:Movie)
MATCH (director:Person)-[:DIRECTED]->(movie)
MATCH (director)-[:ACTED_IN]->(movie)
RETURN movie.title, director.name
```

Using an optional match:

```bash
MATCH (movie:Movie)
OPTIONAL MATCH (director:Person)-[:DIRECTED]->(movie)<-[:ACTED_IN]-(director)
RETURN movie, director
```

```bash
MATCH (p1:Person)-[:FOLLOWS]->(p2:Person)-[:REVIEWED]->(movie:Movie)
RETURN p1.name, p2.name
```

### Filter

Search for a specific node - e.g. a specific actor:

```bash
MATCH (actor{name: '<NAME>'}) RETURN actor
```

We can speed those queries up by adding the label that the node will have:

```bash
MATCH (movie:Movie{title: 'The Matrix'}) RETURN movie
```

In this case only nodes that have the correct label - in this case `Movie` - will be searched. If there is more than one node with `Matrix` in its title, the following will give us the complete trilogy:

```bash
MATCH (movie:Movie) WHERE (movie.title CONTAINS 'Matrix') RETURN movie
```

You can specify another property to narrow down the movie that you are looking for, e.g.
the release date:

```bash
MATCH (movie:Movie{released:1999}) WHERE (movie.title CONTAINS 'Matrix') RETURN movie
```

Another way is to use the __AND__ operator to add another property to your match:

```bash
MATCH (actor:Person)
WHERE actor.name = '<NAME>'
AND actor.born = 1961
RETURN actor
```

### Comparison

Using the __OR__ operator:

```bash
MATCH (movie:Movie)
WHERE movie.released = 1999
OR movie.released = 2000
RETURN movie
```

We can shorten this query:

```bash
MATCH (movie:Movie)
WHERE movie.released IN [1999, 2000]
RETURN movie
```

Search within a time window:

```bash
MATCH (actor:Person)
WHERE actor.born >= 1967
AND actor.born < 1976
RETURN actor
```

Or exclude a time window by using __NOT__:

```bash
MATCH (actor:Person)
WHERE NOT (actor.born >= 1900 AND actor.born < 1980)
RETURN actor
```

Find all persons that acted in but did not direct the movie with the title `Unforgiven`. You can check out the movie by running the following query:

```bash
MATCH (movie:Movie)
WHERE movie.title CONTAINS 'Unforgiven'
RETURN movie
```

When you click on the movie node and expand its relationships you can see that there are 4 actors listed, one of whom, <NAME>, also directed the movie. We can also query this information directly:

```bash
MATCH (person:Person)-->(movie:Movie)
WHERE movie.title = 'Unforgiven'
RETURN person
```

To exclude the director from our query we can use the __AND NOT__ operator:

```bash
MATCH (person:Person)-->(movie:Movie)
WHERE movie.title = 'Unforgiven'
AND NOT (person)-[:DIRECTED]->(movie)
RETURN person
```

Using __Regular Expressions__ to find all movies whose titles start with a `The`:

```bash
MATCH (movie:Movie)
WHERE movie.title =~ 'The.*'
RETURN movie.title
```

Or find all movies that have a `The` anywhere in their title:

```bash
MATCH (movie:Movie)
WHERE movie.title =~ '.*The.*'
RETURN movie.title
```

This will also return the movie `A League of Their Own` - if you only want to match the exact word `The` you have to add a SPACE behind it:

```bash
MATCH (movie:Movie)
WHERE movie.title =~ '.*The .*'
RETURN movie.title
```

To make the query case-insensitive use the following expression:

```bash
MATCH (movie:Movie)
WHERE movie.title =~ '(?i).*The .*'
RETURN movie.title
```

To exclude titles that start with `The` we can use `.+` at the start to match any characters - but there has to be at least one character in front of the search word:

```bash
MATCH (movie:Movie)
WHERE movie.title =~ '(?i).+The .*'
RETURN movie.title
```

This will only match titles that have the word __The__ or __the__ somewhere in the middle.
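For simple prefix or suffix matches you do not need a regular expression at all - besides `CONTAINS`, Cypher also offers the `STARTS WITH` and `ENDS WITH` predicates (note that, unlike the `(?i)` regex above, these are case-sensitive):

```bash
MATCH (movie:Movie)
WHERE movie.title STARTS WITH 'The '
RETURN movie.title
```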
### Transformation

Return the persons that acted in the movie `Top Gun` and order the results by the age of the actors:

```bash
MATCH (actor:Person)-[:ACTED_IN]->(movie:Movie{title:'Top Gun'})
RETURN actor.name, actor.born
ORDER BY actor.born DESC
LIMIT 3
```

For __Pagination__ we can use the __SKIP__ and __LIMIT__ operators:

```bash
MATCH (actor:Person)-[:ACTED_IN]->(movie:Movie{title:'Top Gun'})
RETURN actor.name, actor.born
ORDER BY actor.born DESC
SKIP 3
LIMIT 3
```

We can also make our results more readable by using the __AS__ operator:

```bash
MATCH (actor:Person)-[:ACTED_IN]->(movie:Movie{title:'Top Gun'})
RETURN actor.name AS Name, actor.born AS Born
ORDER BY actor.born DESC
SKIP 3
LIMIT 3
```

### Aggregation

|Function|Description|
| --- | --- |
|[avg() - Numeric values](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-avg)|Returns the average of a set of numeric values.|
|[avg() - Durations](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-avg-duration)|Returns the average of a set of Durations.|
|[collect()](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-collect)|Returns a list containing the values returned by an expression.|
|[count()](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-count)|Returns the number of values or rows.|
|[max()](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-max)|Returns the maximum value in a set of values.|
|[min()](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-min)|Returns the minimum value in a set of values.|
|[percentileCont()](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-percentilecont)|Returns the percentile of a value over a group using linear interpolation.|
|[percentileDisc()](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-percentiledisc)|Returns the nearest value to the given percentile over a group using a rounding method.|
|[stDev()](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-stdev)|Returns the standard deviation for the given value over a group for a sample of a population.|
|[stDevP()](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-stdevp)|Returns the standard deviation for the given value over a group for an entire population.|
|[sum() - Numeric values](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-sum)|Returns the sum of a set of numeric values.|
|[sum() - Durations](https://neo4j.com/docs/cypher-manual/current/functions/aggregating/#functions-sum-duration)|Returns the sum of a set of Durations.|

__COUNT__: Count the results instead of displaying them:

```bash
MATCH (actor:Person{name:'<NAME>'})-[:ACTED_IN]->(movie:Movie)
RETURN COUNT(movie) AS MovieCount
```

__SUM__: If you want to add up all results:

```bash
...
RETURN SUM(movie.cost) AS MovieCosts
```

__AVG__, __MIN__, __MAX__: Get the average, minimum and maximum cost of a movie in your results:

```bash
...
RETURN AVG(movie.cost) AS AVGMovieCosts, MIN(movie.cost) AS MinMovieCosts, MAX(movie.cost) AS MaxMovieCosts
```

```bash
MATCH (actor:Person)-[:ACTED_IN]->(movie:Movie)
RETURN MIN(actor.born), MAX(actor.born), toInteger(AVG(actor.born))
```

__DISTINCT__: When a query retrieves the same node multiple times, use the distinct operator to remove duplicates:

```bash
...
RETURN DISTINCT movie.title AS MovieTitle ``` ### Other Functions __String Functions__: |Function|Description| | --- | --- | |[left()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-left)|Returns a string containing the specified number of leftmost characters of the original string.| |[lTrim()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-ltrim)|Returns the original string with leading whitespace removed.| |[replace()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-replace)|Returns a string in which all occurrences of a specified string in the original string have been replaced by another (specified) string.| |[reverse()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-reverse)|Returns a string in which the order of all characters in the original string have been reversed.| |[right()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-right)|Returns a string containing the specified number of rightmost characters of the original string.| |[rTrim()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-rtrim)|Returns the original string with trailing whitespace removed.| |[split()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-split)|Returns a list of strings resulting from the splitting of the original string around matches of the given delimiter.| |[substring()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-substring)|Returns a substring of the original string, beginning with a 0-based index start and length.| |[toLower()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-tolower)|Returns the original string in lowercase.| |[toString()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-tostring)|Converts an integer, float, boolean or temporal type (i.e. 
Date, Time, LocalTime, DateTime, LocalDateTime or Duration) value to a string.|
|[toUpper()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-toupper)|Returns the original string in uppercase.|
|[trim()](https://neo4j.com/docs/cypher-manual/current/functions/string/#functions-trim)|Returns the original string with leading and trailing whitespace removed.|

__toString()__

Turn returned values into strings:

```bash
RETURN toString(11.5),
  toString('already a string'),
  toString(TRUE),
  toString(date({ year:1984, month:10, day:11 })) AS dateString,
  toString(datetime({ year:1984, month:10, day:11, hour:12, minute:31, second:14, millisecond: 341, timezone: 'Europe/Stockholm' })) AS datetimeString,
  toString(duration({ minutes: 12, seconds: -60 })) AS durationString
```

![Neo4j Cypher Queries and Functions](./Neo4j_Movie_Graphs_04.png)

__trim()__

Strip leading and trailing spaces from results:

```bash
RETURN trim(" hello ")
```

__replace()__

Replace characters inside a string `replace(original, search, replace)`:

```bash
RETURN replace('hello', 'l', 'x')
```

__Temporal Functions__:

|Function|Description|
| --- | --- |
|[date()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-current)|Returns the current *Date*.|
|[date.transaction()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-current-transaction)|Returns the current *Date* using the `transaction` clock.|
|[date.statement()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-current-statement)|Returns the current *Date* using the `statement` clock.|
|[date.realtime()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-current-realtime)|Returns the current *Date* using the `realtime` clock.|
|[date({year [, month, day]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-calendar)|Returns a calendar (Year-Month-Day) *Date*.|
|[date({year [, week, dayOfWeek]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-week)|Returns a week (Year-Week-Day) *Date*.|
|[date({year [, quarter, dayOfQuarter]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-quarter)|Returns a quarter (Year-Quarter-Day) *Date*.|
|[date({year [, ordinalDay]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-ordinal)|Returns an ordinal (Year-Day) *Date*.|
|[date(string)](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-create-string)|Returns a *Date* by parsing a string.|
|[date({map})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-temporal)|Returns a *Date* from a map of another temporal value's components.|
|[date.truncate()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/date/#functions-date-truncate)|Returns a *Date* obtained by truncating a value at a specific component boundary. [Truncation summary](https://neo4j.com/docs/cypher-manual/current/functions/temporal/instant/#functions-temporal-truncate-overview).|
|[datetime()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-current)|Returns the current *DateTime*.|
|[datetime.transaction()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-current-transaction)|Returns the current *DateTime* using the `transaction` clock.|
|[datetime.statement()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-current-statement)|Returns the current *DateTime* using the `statement` clock.|
|[datetime.realtime()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-current-realtime)|Returns the current *DateTime* using the `realtime` clock.|
|[datetime({year [, month, day, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-calendar)|Returns a calendar (Year-Month-Day) *DateTime*.|
|[datetime({year [, week, dayOfWeek, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-week)|Returns a week (Year-Week-Day) *DateTime*.|
|[datetime({year [, quarter, dayOfQuarter, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-quarter)|Returns a quarter (Year-Quarter-Day) *DateTime*.|
|[datetime({year [, ordinalDay, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-ordinal)|Returns an ordinal (Year-Day) *DateTime*.|
|[datetime(string)](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-create-string)|Returns a *DateTime* by parsing a string.|
|[datetime({map})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-temporal)|Returns a *DateTime* from a map of another temporal value's components.|
|[datetime({epochSeconds})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-timestamp)|Returns a *DateTime* from a timestamp.|
|[datetime.truncate()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/datetime/#functions-datetime-truncate)|Returns a *DateTime* obtained by truncating a value at a specific component boundary. [Truncation summary](https://neo4j.com/docs/cypher-manual/current/functions/temporal/instant/#functions-temporal-truncate-overview).|
|[localdatetime()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-current)|Returns the current *LocalDateTime*.|
|[localdatetime.transaction()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-current-transaction)|Returns the current *LocalDateTime* using the `transaction` clock.|
|[localdatetime.statement()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-current-statement)|Returns the current *LocalDateTime* using the `statement` clock.|
|[localdatetime.realtime()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-current-realtime)|Returns the current *LocalDateTime* using the `realtime` clock.|
|[localdatetime({year [, month, day, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-calendar)|Returns a calendar (Year-Month-Day) *LocalDateTime*.|
|[localdatetime({year [, week, dayOfWeek, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-week)|Returns a week (Year-Week-Day) *LocalDateTime*.|
|[localdatetime({year [, quarter, dayOfQuarter, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-quarter)|Returns a quarter (Year-Quarter-Day) *LocalDateTime*.|
|[localdatetime({year [, ordinalDay, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-ordinal)|Returns an ordinal (Year-Day) *LocalDateTime*.|
|[localdatetime(string)](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-create-string)|Returns a *LocalDateTime* by parsing a string.|
|[localdatetime({map})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-temporal)|Returns a *LocalDateTime* from a map of another temporal value's components.|
|[localdatetime.truncate()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localdatetime/#functions-localdatetime-truncate)|Returns a *LocalDateTime* obtained by truncating a value at a specific component boundary. [Truncation summary](https://neo4j.com/docs/cypher-manual/current/functions/temporal/instant/#functions-temporal-truncate-overview).|
|[localtime()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localtime/#functions-localtime-current)|Returns the current *LocalTime*.|
|[localtime.transaction()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localtime/#functions-localtime-current-transaction)|Returns the current *LocalTime* using the `transaction` clock.|
|[localtime.statement()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localtime/#functions-localtime-current-statement)|Returns the current *LocalTime* using the `statement` clock.|
|[localtime.realtime()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localtime/#functions-localtime-current-realtime)|Returns the current *LocalTime* using the `realtime` clock.|
|[localtime({hour [, minute, second, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localtime/#functions-localtime-create)|Returns a *LocalTime* with the specified component values.|
|[localtime(string)](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localtime/#functions-localtime-create-string)|Returns a *LocalTime* by parsing a string.|
|[localtime({time [, hour, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localtime/#functions-localtime-temporal)|Returns a *LocalTime* from a map of another temporal value's components.|
|[localtime.truncate()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/localtime/#functions-localtime-truncate)|Returns a *LocalTime* obtained by truncating a value at a specific component boundary. [Truncation summary](https://neo4j.com/docs/cypher-manual/current/functions/temporal/instant/#functions-temporal-truncate-overview).|
|[time()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/time/#functions-time-current)|Returns the current *Time*.|
|[time.transaction()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/time/#functions-time-current-transaction)|Returns the current *Time* using the `transaction` clock.|
|[time.statement()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/time/#functions-time-current-statement)|Returns the current *Time* using the `statement` clock.|
|[time.realtime()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/time/#functions-time-current-realtime)|Returns the current *Time* using the `realtime` clock.|
|[time({hour [, minute, …]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/time/#functions-time-create)|Returns a *Time* with the specified component values.|
|[time(string)](https://neo4j.com/docs/cypher-manual/current/functions/temporal/time/#functions-time-create-string)|Returns a *Time* by parsing a string.|
|[time({time [, hour, …, timezone]})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/time/#functions-time-temporal)|Returns a *Time* from a map of another temporal value's components.|
|[time.truncate()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/time/#functions-time-truncate)|Returns a *Time* obtained by truncating a value at a specific component boundary. [Truncation summary](https://neo4j.com/docs/cypher-manual/current/functions/temporal/instant/#functions-temporal-truncate-overview).|

__Temporal Functions (Duration)__

|Function|Description|
| --- | --- |
|[duration({map})](https://neo4j.com/docs/cypher-manual/current/functions/temporal/duration/#functions-duration-create-components)|Returns a *Duration* from a map of its components.|
|[duration(string)](https://neo4j.com/docs/cypher-manual/current/functions/temporal/duration/#functions-duration-create-string)|Returns a *Duration* by parsing a string.|
|[duration.between()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/duration/#functions-duration-between)|Returns a *Duration* equal to the difference between two given instants.|
|[duration.inMonths()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/duration/#functions-duration-inmonths)|Returns a *Duration* equal to the difference in whole months, quarters or years between two given instants.|
|[duration.inDays()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/duration/#functions-duration-indays)|Returns a *Duration* equal to the difference in whole days or weeks between two given instants.|
|[duration.inSeconds()](https://neo4j.com/docs/cypher-manual/current/functions/temporal/duration/#functions-duration-inseconds)|Returns a *Duration* equal to the difference in seconds and fractions of seconds, or minutes or hours, between two given instants.|

__Spatial Functions__

|Function|Description|
| --- | --- |
|[distance()](https://neo4j.com/docs/cypher-manual/current/functions/spatial/#functions-distance)|Returns a floating point number representing the geodesic distance between any two points in the same CRS.|
|[point() - Cartesian 2D](https://neo4j.com/docs/cypher-manual/current/functions/spatial/#functions-point-cartesian-2d)|Returns a 2D point object, given two coordinate values in the Cartesian coordinate system.|
|[point() - Cartesian 3D](https://neo4j.com/docs/cypher-manual/current/functions/spatial/#functions-point-cartesian-3d)|Returns a 3D point object, given three coordinate values in the Cartesian coordinate system.|
|[point() - WGS 84 2D](https://neo4j.com/docs/cypher-manual/current/functions/spatial/#functions-point-wgs84-2d)|Returns a 2D point object, given two coordinate values in the WGS 84 geographic coordinate system.|
|[point() - WGS 84 3D](https://neo4j.com/docs/cypher-manual/current/functions/spatial/#functions-point-wgs84-3d)|Returns a 3D point object, given three coordinate values in the WGS 84 geographic coordinate system.|

__Scalar Functions__

|Function|Description|
| --- | --- |
|[coalesce()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-coalesce)|Returns the first non-`null` value in a list of expressions.|
|[endNode()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-endnode)|Returns the end node of a relationship.|
|[head()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-head)|Returns the first element in a list.|
|[id()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-id)|Returns the id of a relationship or node.|
|[last()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-last)|Returns the last element in a list.|
|[length()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-length)|Returns the length of a path.|
|[properties()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-properties)|Returns a map containing all the properties of a node or relationship.|
|[randomUUID()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-randomuuid)|Returns a string value corresponding to a randomly-generated UUID.|
|[size()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-size)|Returns the number of items in a list.|
|[size() applied to pattern expression](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-size-of-pattern-expression)|Returns the number of sub-graphs matching the pattern expression.|
|[size() applied to string](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-size-of-string)|Returns the number of Unicode characters in a string.|
|[startNode()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-startnode)|Returns the start node of a relationship.|
|[timestamp()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-timestamp)|Returns the difference, measured in milliseconds, between the current time and midnight, January 1, 1970 UTC.|
|[toBoolean()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-toboolean)|Converts a string value to a boolean value.|
|[toFloat()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-tofloat)|Converts an integer or string value to a floating point number.|
|[toInteger()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-tointeger)|Converts a floating point or string value to an integer value.|
|[type()](https://neo4j.com/docs/cypher-manual/current/functions/scalar/#functions-type)|Returns the string representation of the relationship type.|

### Mathematical Functions

__Numeric__

|Function|Description|
| --- | --- |
|[abs()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-numeric/#functions-abs)|Returns the absolute value of a number.|
|[ceil()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-numeric/#functions-ceil)|Returns the smallest floating point number that is greater than or equal to a number and equal to a mathematical integer.|
|[floor()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-numeric/#functions-floor)|Returns the largest floating point number that is less than or equal to a number and equal to a mathematical integer.|
|[rand()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-numeric/#functions-rand)|Returns a random floating point number in the range from 0 (inclusive) to 1 (exclusive); i.e. `[0,1)`.|
|[round()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-numeric/#functions-round)|Returns the value of a number rounded to the nearest integer.|
|[sign()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-numeric/#functions-sign)|Returns the signum of a number: `0` if the number is `0`, `-1` for any negative number, and `1` for any positive number.|

__Logarithmic__

|Function|Description|
| --- | --- |
|[e()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-logarithmic/#functions-e)|Returns the base of the natural logarithm, `e`.|
|[exp()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-logarithmic/#functions-exp)|Returns `e^n`, where `e` is the base of the natural logarithm, and `n` is the value of the argument expression.|
|[log()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-logarithmic/#functions-log)|Returns the natural logarithm of a number.|
|[log10()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-logarithmic/#functions-log10)|Returns the common logarithm (base 10) of a number.|
|[sqrt()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-logarithmic/#functions-sqrt)|Returns the square root of a number.|

__Trigonometric__

|Function|Description|
| --- | --- |
|[acos()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-acos)|Returns the arccosine of a number in radians.|
|[asin()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-asin)|Returns the arcsine of a number in radians.|
|[atan()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-atan)|Returns the arctangent of a number in radians.|
|[atan2()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-atan2)|Returns the arctangent2 of a set of coordinates in radians.|
|[cos()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-cos)|Returns the cosine of a number.|
|[cot()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-cot)|Returns the cotangent of a number.|
|[degrees()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-degrees)|Converts radians to degrees.|
|[haversin()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-haversin)|Returns half the versine of a number.|
|[pi()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-pi)|Returns the mathematical constant *pi*.|
|[radians()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-radians)|Converts degrees to radians.|
|[sin()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-sin)|Returns the sine of a number.|
|[tan()](https://neo4j.com/docs/cypher-manual/current/functions/mathematical-trigonometric/#functions-tan)|Returns the tangent of a number.|
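To see a few of these functions in action, here is a small illustrative query - the values are made up for demonstration and are not taken from the reference above:

```bash
RETURN duration.between(date('1984-10-11'), date()) AS age,
  round(3.141592) AS rounded,
  coalesce(null, 'fallback') AS firstNonNull,
  size('hello') AS letterCount
```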
## Movie Database Data

The data that is written to our database is the following (see [Online Documentation](https://neo4j.com/developer/example-data) for details):

```bash
CREATE (TheMatrix:Movie {title:'The Matrix', released:1999, tagline:'Welcome to the Real World'}) CREATE (Keanu:Person {name:'<NAME>', born:1964}) CREATE (Carrie:Person {name:'<NAME>', born:1967}) CREATE (Laurence:Person
{name:'<NAME>', born:1961}) CREATE (Hugo:Person {name:'<NAME>', born:1960}) CREATE (LillyW:Person {name:'<NAME>', born:1967}) CREATE (LanaW:Person {name:'<NAME>', born:1965}) CREATE (JoelS:Person {name:'<NAME>', born:1952}) CREATE (Keanu)-[:ACTED_IN {roles:['Neo']}]->(TheMatrix), (Carrie)-[:ACTED_IN {roles:['Trinity']}]->(TheMatrix), (Laurence)-[:ACTED_IN {roles:['Morpheus']}]->(TheMatrix), (Hugo)-[:ACTED_IN {roles:['Agent Smith']}]->(TheMatrix), (LillyW)-[:DIRECTED]->(TheMatrix), (LanaW)-[:DIRECTED]->(TheMatrix), (JoelS)-[:PRODUCED]->(TheMatrix) CREATE (Emil:Person {name:"<NAME>", born:1978}) CREATE (Emil)-[:ACTED_IN {roles:["Emil"]}]->(TheMatrix) CREATE (TheMatrixReloaded:Movie {title:'The Matrix Reloaded', released:2003, tagline:'Free your mind'}) CREATE (Keanu)-[:ACTED_IN {roles:['Neo']}]->(TheMatrixReloaded), (Carrie)-[:ACTED_IN {roles:['Trinity']}]->(TheMatrixReloaded), (Laurence)-[:ACTED_IN {roles:['Morpheus']}]->(TheMatrixReloaded), (Hugo)-[:ACTED_IN {roles:['Agent Smith']}]->(TheMatrixReloaded), (LillyW)-[:DIRECTED]->(TheMatrixReloaded), (LanaW)-[:DIRECTED]->(TheMatrixReloaded), (JoelS)-[:PRODUCED]->(TheMatrixReloaded) CREATE (TheMatrixRevolutions:Movie {title:'The Matrix Revolutions', released:2003, tagline:'Everything that has a beginning has an end'}) CREATE (Keanu)-[:ACTED_IN {roles:['Neo']}]->(TheMatrixRevolutions), (Carrie)-[:ACTED_IN {roles:['Trinity']}]->(TheMatrixRevolutions), (Laurence)-[:ACTED_IN {roles:['Morpheus']}]->(TheMatrixRevolutions), (Hugo)-[:ACTED_IN {roles:['Agent Smith']}]->(TheMatrixRevolutions), (LillyW)-[:DIRECTED]->(TheMatrixRevolutions), (LanaW)-[:DIRECTED]->(TheMatrixRevolutions), (JoelS)-[:PRODUCED]->(TheMatrixRevolutions) CREATE (TheDevilsAdvocate:Movie {title:"The Devil's Advocate", released:1997, tagline:'Evil has its winning ways'}) CREATE (Charlize:Person {name:'<NAME>', born:1975}) CREATE (Al:Person {name:'<NAME>', born:1940}) CREATE (Taylor:Person {name:'<NAME>', born:1944}) CREATE (Keanu)-[:ACTED_IN {roles:['<NAME>']}]->(TheDevilsAdvocate), (Charlize)-[:ACTED_IN {roles:['<NAME>']}]->(TheDevilsAdvocate), (Al)-[:ACTED_IN {roles:['<NAME>']}]->(TheDevilsAdvocate), (Taylor)-[:DIRECTED]->(TheDevilsAdvocate) CREATE (AFewGoodMen:Movie {title:"A Few Good Men", released:1992, tagline:"In the heart of the nation's capital, in a courthouse of the U.S. government, one man will stop at nothing to keep his honor, and one will stop at nothing to find the truth."}) CREATE (TomC:Person {name:'<NAME>', born:1962}) CREATE (JackN:Person {name:'<NAME>', born:1937}) CREATE (DemiM:Person {name:'<NAME>', born:1962}) CREATE (KevinB:Person {name:'<NAME>', born:1958}) CREATE (KieferS:Person {name:'<NAME>', born:1966}) CREATE (NoahW:Person {name:'<NAME>', born:1971}) CREATE (CubaG:Person {name:'<NAME>r.', born:1968}) CREATE (KevinP:Person {name:'<NAME>', born:1957}) CREATE (JTW:Person {name:'<NAME>', born:1943}) CREATE (JamesM:Person {name:'<NAME>', born:1967}) CREATE (ChristopherG:Person {name:'<NAME>', born:1948}) CREATE (RobR:Person {name:'<NAME>', born:1947}) CREATE (AaronS:Person {name:'<NAME>', born:1961}) CREATE (TomC)-[:ACTED_IN {roles:['Lt. <NAME>']}]->(AFewGoodMen), (JackN)-[:ACTED_IN {roles:['Col. <NAME>']}]->(AFewGoodMen), (DemiM)-[:ACTED_IN {roles:['Lt. Cdr. <NAME>']}]->(AFewGoodMen), (KevinB)-[:ACTED_IN {roles:['Capt. <NAME>']}]->(AFewGoodMen), (KieferS)-[:ACTED_IN {roles:['Lt. <NAME>']}]->(AFewGoodMen), (NoahW)-[:ACTED_IN {roles:['Cpl. <NAME>']}]->(AFewGoodMen), (CubaG)-[:ACTED_IN {roles:['Cpl. <NAME>']}]->(AFewGoodMen), (KevinP)-[:ACTED_IN {roles:['Lt. 
<NAME>']}]->(AFewGoodMen), (JTW)-[:ACTED_IN {roles:['Lt. Col. <NAME>']}]->(AFewGoodMen), (JamesM)-[:ACTED_IN {roles:['Pfc. Lou<NAME>']}]->(AFewGoodMen), (ChristopherG)-[:ACTED_IN {roles:['Dr. Stone']}]->(AFewGoodMen), (AaronS)-[:ACTED_IN {roles:['Man in Bar']}]->(AFewGoodMen), (RobR)-[:DIRECTED]->(AFewGoodMen), (AaronS)-[:WROTE]->(AFewGoodMen) CREATE (TopGun:Movie {title:"Top Gun", released:1986, tagline:'I feel the need, the need for speed.'}) CREATE (KellyM:Person {name:'<NAME>', born:1957}) CREATE (ValK:Person {name:'<NAME>', born:1959}) CREATE (AnthonyE:Person {name:'<NAME>', born:1962}) CREATE (TomS:Person {name:'<NAME>', born:1933}) CREATE (MegR:Person {name:'<NAME>', born:1961}) CREATE (TonyS:Person {name:'<NAME>', born:1944}) CREATE (JimC:Person {name:'<NAME>', born:1941}) CREATE (TomC)-[:ACTED_IN {roles:['Maverick']}]->(TopGun), (KellyM)-[:ACTED_IN {roles:['Charlie']}]->(TopGun), (ValK)-[:ACTED_IN {roles:['Iceman']}]->(TopGun), (AnthonyE)-[:ACTED_IN {roles:['Goose']}]->(TopGun), (TomS)-[:ACTED_IN {roles:['Viper']}]->(TopGun), (MegR)-[:ACTED_IN {roles:['Carole']}]->(TopGun), (TonyS)-[:DIRECTED]->(TopGun), (JimC)-[:WROTE]->(TopGun) CREATE (JerryMaguire:Movie {title:'Jerry Maguire', released:2000, tagline:'The rest of his life begins now.'}) CREATE (ReneeZ:Person {name:'<NAME>', born:1969}) CREATE (KellyP:Person {name:'<NAME>', born:1962}) CREATE (JerryO:Person {name:"<NAME>", born:1974}) CREATE (JayM:Person {name:'<NAME>', born:1970}) CREATE (BonnieH:Person {name:'<NAME>', born:1961}) CREATE (ReginaK:Person {name:'<NAME>', born:1971}) CREATE (JonathanL:Person {name:'<NAME>', born:1996}) CREATE (CameronC:Person {name:'<NAME>', born:1957}) CREATE (TomC)-[:ACTED_IN {roles:['J<NAME>ire']}]->(JerryMaguire), (CubaG)-[:ACTED_IN {roles:['Rod Tidwell']}]->(JerryMaguire), (ReneeZ)-[:ACTED_IN {roles:['Dorothy Boyd']}]->(JerryMaguire), (KellyP)-[:ACTED_IN {roles:['Avery Bishop']}]->(JerryMaguire), (JerryO)-[:ACTED_IN {roles:['<NAME>']}]->(JerryMaguire), (JayM)-[:ACTED_IN {roles:['<NAME>']}]->(JerryMaguire), (BonnieH)-[:ACTED_IN {roles:['Laurel Boyd']}]->(JerryMaguire), (ReginaK)-[:ACTED_IN {roles:['<NAME>']}]->(JerryMaguire), (JonathanL)-[:ACTED_IN {roles:['<NAME>']}]->(JerryMaguire), (CameronC)-[:DIRECTED]->(JerryMaguire), (CameronC)-[:PRODUCED]->(JerryMaguire), (CameronC)-[:WROTE]->(JerryMaguire) CREATE (StandByMe:Movie {title:"Stand By Me", released:1986, tagline:"For some, it's the last real taste of innocence, and the first real taste of life. But for everyone, it's the time that memories are made of."}) CREATE (RiverP:Person {name:'<NAME>', born:1970}) CREATE (CoreyF:Person {name:'<NAME>', born:1971}) CREATE (WilW:Person {name:'<NAME>', born:1972}) CREATE (JohnC:Person {name:'<NAME>', born:1966}) CREATE (MarshallB:Person {name:'<NAME>', born:1942}) CREATE (WilW)-[:ACTED_IN {roles:['Gordie Lachance']}]->(StandByMe), (RiverP)-[:ACTED_IN {roles:['Chris Chambers']}]->(StandByMe), (JerryO)-[:ACTED_IN {roles:['Vern Tessio']}]->(StandByMe), (CoreyF)-[:ACTED_IN {roles:['Teddy Duchamp']}]->(StandByMe), (JohnC)-[:ACTED_IN {roles:['Denny Lachance']}]->(StandByMe), (KieferS)-[:ACTED_IN {roles:['Ace Merrill']}]->(StandByMe), (MarshallB)-[:ACTED_IN {roles:['Mr. 
Lachance']}]->(StandByMe), (RobR)-[:DIRECTED]->(StandByMe) CREATE (AsGoodAsItGets:Movie {title:'As Good as It Gets', released:1997, tagline:'A comedy from the heart that goes for the throat.'}) CREATE (HelenH:Person {name:'<NAME>', born:1963}) CREATE (GregK:Person {name:'<NAME>', born:1963}) CREATE (JamesB:Person {name:'<NAME>', born:1940}) CREATE (JackN)-[:ACTED_IN {roles:['Melvin Udall']}]->(AsGoodAsItGets), (HelenH)-[:ACTED_IN {roles:['<NAME>']}]->(AsGoodAsItGets), (GregK)-[:ACTED_IN {roles:['Simon Bishop']}]->(AsGoodAsItGets), (CubaG)-[:ACTED_IN {roles:['Frank Sachs']}]->(AsGoodAsItGets), (JamesB)-[:DIRECTED]->(AsGoodAsItGets) CREATE (WhatDreamsMayCome:Movie {title:'What Dreams May Come', released:1998, tagline:'After life there is more. The end is just the beginning.'}) CREATE (AnnabellaS:Person {name:'<NAME>', born:1960}) CREATE (MaxS:Person {name:'<NAME>', born:1929}) CREATE (WernerH:Person {name:'<NAME>', born:1942}) CREATE (Robin:Person {name:'<NAME>', born:1951}) CREATE (VincentW:Person {name:'<NAME>', born:1956}) CREATE (Robin)-[:ACTED_IN {roles:['<NAME>']}]->(WhatDreamsMayCome), (CubaG)-[:ACTED_IN {roles:['<NAME>']}]->(WhatDreamsMayCome), (AnnabellaS)-[:ACTED_IN {roles:['<NAME>']}]->(WhatDreamsMayCome), (MaxS)-[:ACTED_IN {roles:['The Tracker']}]->(WhatDreamsMayCome), (WernerH)-[:ACTED_IN {roles:['The Face']}]->(WhatDreamsMayCome), (VincentW)-[:DIRECTED]->(WhatDreamsMayCome) CREATE (SnowFallingonCedars:Movie {title:'Snow Falling on Cedars', released:1999, tagline:'First loves last. Forever.'}) CREATE (EthanH:Person {name:'<NAME>', born:1970}) CREATE (RickY:Person {name:'<NAME>', born:1971}) CREATE (JamesC:Person {name:'<NAME>', born:1940}) CREATE (ScottH:Person {name:'<NAME>', born:1953}) CREATE (EthanH)-[:ACTED_IN {roles:['Ishmael Chambers']}]->(SnowFallingonCedars), (RickY)-[:ACTED_IN {roles:['<NAME>']}]->(SnowFallingonCedars), (MaxS)-[:ACTED_IN {roles:['<NAME>']}]->(SnowFallingonCedars), (JamesC)-[:ACTED_IN {roles:['<NAME>']}]->(SnowFallingonCedars), (ScottH)-[:DIRECTED]->(SnowFallingonCedars) CREATE (YouveGotMail:Movie {title:"You've Got Mail", released:1998, tagline:'At odds in life... 
in love on-line.'}) CREATE (ParkerP:Person {name:'<NAME>', born:1968}) CREATE (DaveC:Person {name:'<NAME>', born:1973}) CREATE (SteveZ:Person {name:'<NAME>', born:1967}) CREATE (TomH:Person {name:'<NAME>', born:1956}) CREATE (NoraE:Person {name:'<NAME>', born:1941}) CREATE (TomH)-[:ACTED_IN {roles:['<NAME>']}]->(YouveGotMail), (MegR)-[:ACTED_IN {roles:['<NAME>']}]->(YouveGotMail), (GregK)-[:ACTED_IN {roles:['<NAME>']}]->(YouveGotMail), (ParkerP)-[:ACTED_IN {roles:['<NAME>']}]->(YouveGotMail), (DaveC)-[:ACTED_IN {roles:['<NAME>']}]->(YouveGotMail), (SteveZ)-[:ACTED_IN {roles:['<NAME>']}]->(YouveGotMail), (NoraE)-[:DIRECTED]->(YouveGotMail) CREATE (SleeplessInSeattle:Movie {title:'Sleepless in Seattle', released:1993, tagline:'What if someone you never met, someone you never saw, someone you never knew was the only someone for you?'}) CREATE (RitaW:Person {name:'<NAME>', born:1956}) CREATE (BillPull:Person {name:'<NAME>', born:1953}) CREATE (VictorG:Person {name:'<NAME>', born:1949}) CREATE (RosieO:Person {name:"<NAME>", born:1962}) CREATE (TomH)-[:ACTED_IN {roles:['Sam Baldwin']}]->(SleeplessInSeattle), (MegR)-[:ACTED_IN {roles:['Annie Reed']}]->(SleeplessInSeattle), (RitaW)-[:ACTED_IN {roles:['Suzy']}]->(SleeplessInSeattle), (BillPull)-[:ACTED_IN {roles:['Walter']}]->(SleeplessInSeattle), (VictorG)-[:ACTED_IN {roles:['Greg']}]->(SleeplessInSeattle), (RosieO)-[:ACTED_IN {roles:['Becky']}]->(SleeplessInSeattle), (NoraE)-[:DIRECTED]->(SleeplessInSeattle) CREATE (JoeVersustheVolcano:Movie {title:'Joe Versus the Volcano', released:1990, tagline:'A story of love, lava and burning desire.'}) CREATE (JohnS:Person {name:'<NAME>', born:1950}) CREATE (Nathan:Person {name:'<NAME>', born:1956}) CREATE (TomH)-[:ACTED_IN {roles:['Joe Banks']}]->(JoeVersustheVolcano), (MegR)-[:ACTED_IN {roles:['DeDe', 'Angelica Graynamore', 'Patricia Graynamore']}]->(JoeVersustheVolcano), (Nathan)-[:ACTED_IN {roles:['Baw']}]->(JoeVersustheVolcano), (JohnS)-[:DIRECTED]->(JoeVersustheVolcano) CREATE (WhenHarryMetSally:Movie {title:'When Harry Met Sally', released:1998, tagline:'Can two friends sleep together and still love each other in the morning?'}) CREATE (BillyC:Person {name:'<NAME>', born:1948}) CREATE (CarrieF:Person {name:'<NAME>', born:1956}) CREATE (BrunoK:Person {name:'<NAME>', born:1949}) CREATE (BillyC)-[:ACTED_IN {roles:['Harry Burns']}]->(WhenHarryMetSally), (MegR)-[:ACTED_IN {roles:['Sally Albright']}]->(WhenHarryMetSally), (CarrieF)-[:ACTED_IN {roles:['Marie']}]->(WhenHarryMetSally), (BrunoK)-[:ACTED_IN {roles:['Jess']}]->(WhenHarryMetSally), (RobR)-[:DIRECTED]->(WhenHarryMetSally), (RobR)-[:PRODUCED]->(WhenHarryMetSally), (NoraE)-[:PRODUCED]->(WhenHarryMetSally), (NoraE)-[:WROTE]->(WhenHarryMetSally) CREATE (ThatThingYouDo:Movie {title:'That Thing You Do', released:1996, tagline:'In every life there comes a time when that thing you dream becomes that thing you do'}) CREATE (LivT:Person {name:'<NAME>', born:1977}) CREATE (TomH)-[:ACTED_IN {roles:['Mr. White']}]->(ThatThingYouDo), (LivT)-[:ACTED_IN {roles:['Faye Dolan']}]->(ThatThingYouDo), (Charlize)-[:ACTED_IN {roles:['Tina']}]->(ThatThingYouDo), (TomH)-[:DIRECTED]->(ThatThingYouDo) CREATE (TheReplacements:Movie {title:'The Replacements', released:2000, tagline:'Pain heals, Chicks dig scars... 
Glory lasts forever'}) CREATE (Brooke:Person {name:'<NAME>', born:1970}) CREATE (Gene:Person {name:'<NAME>', born:1930}) CREATE (Orlando:Person {name:'<NAME>', born:1968}) CREATE (Howard:Person {name:'<NAME>', born:1950}) CREATE (Keanu)-[:ACTED_IN {roles:['<NAME>']}]->(TheReplacements), (Brooke)-[:ACTED_IN {roles:['<NAME>']}]->(TheReplacements), (Gene)-[:ACTED_IN {roles:['<NAME>']}]->(TheReplacements), (Orlando)-[:ACTED_IN {roles:['<NAME>']}]->(TheReplacements), (Howard)-[:DIRECTED]->(TheReplacements) CREATE (RescueDawn:Movie {title:'RescueDawn', released:2006, tagline:"Based on the extraordinary true story of one man's fight for freedom"}) CREATE (ChristianB:Person {name:'<NAME>', born:1974}) CREATE (ZachG:Person {name:'<NAME>', born:1954}) CREATE (MarshallB)-[:ACTED_IN {roles:['Admiral']}]->(RescueDawn), (ChristianB)-[:ACTED_IN {roles:['<NAME>']}]->(RescueDawn), (ZachG)-[:ACTED_IN {roles:['Squad Leader']}]->(RescueDawn), (SteveZ)-[:ACTED_IN {roles:['Duane']}]->(RescueDawn), (WernerH)-[:DIRECTED]->(RescueDawn) CREATE (TheBirdcage:Movie {title:'The Birdcage', released:1996, tagline:'Come as you are'}) CREATE (MikeN:Person {name:'<NAME>', born:1931}) CREATE (Robin)-[:ACTED_IN {roles:['<NAME>']}]->(TheBirdcage), (Nathan)-[:ACTED_IN {roles:['<NAME>']}]->(TheBirdcage), (Gene)-[:ACTED_IN {roles:['Sen. <NAME>']}]->(TheBirdcage), (MikeN)-[:DIRECTED]->(TheBirdcage) CREATE (Unforgiven:Movie {title:'Unforgiven', released:1992, tagline:"It's a hell of a thing, killing a man"}) CREATE (RichardH:Person {name:'<NAME>', born:1930}) CREATE (ClintE:Person {name:'<NAME>', born:1930}) CREATE (RichardH)-[:ACTED_IN {roles:['English Bob']}]->(Unforgiven), (ClintE)-[:ACTED_IN {roles:['<NAME>']}]->(Unforgiven), (Gene)-[:ACTED_IN {roles:['Little <NAME>']}]->(Unforgiven), (ClintE)-[:DIRECTED]->(Unforgiven) CREATE (JohnnyMnemonic:Movie {title:'Johnny Mnemonic', released:1995, tagline:'The hottest data on earth. In the coolest head in town'}) CREATE (Takeshi:Person {name:'<NAME>', born:1947}) CREATE (Dina:Person {name:'<NAME>', born:1968}) CREATE (IceT:Person {name:'Ice-T', born:1958}) CREATE (RobertL:Person {name:'<NAME>', born:1953}) CREATE (Keanu)-[:ACTED_IN {roles:['Johnny Mnemonic']}]->(JohnnyMnemonic), (Takeshi)-[:ACTED_IN {roles:['Takahashi']}]->(JohnnyMnemonic), (Dina)-[:ACTED_IN {roles:['Jane']}]->(JohnnyMnemonic), (IceT)-[:ACTED_IN {roles:['J-Bone']}]->(JohnnyMnemonic), (RobertL)-[:DIRECTED]->(JohnnyMnemonic) CREATE (CloudAtlas:Movie {title:'Cloud Atlas', released:2012, tagline:'Everything is connected'}) CREATE (HalleB:Person {name:'<NAME>', born:1966}) CREATE (JimB:Person {name:'<NAME>', born:1949}) CREATE (TomT:Person {name:'<NAME>', born:1965}) CREATE (DavidMitchell:Person {name:'<NAME>', born:1969}) CREATE (StefanArndt:Person {name:'<NAME>', born:1961}) CREATE (TomH)-[:ACTED_IN {roles:['Zachry', 'Dr. 
<NAME>', '<NAME>', '<NAME>']}]->(CloudAtlas), (Hugo)-[:ACTED_IN {roles:['<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', 'Old Georgie']}]->(CloudAtlas), (HalleB)-[:ACTED_IN {roles:['<NAME>', '<NAME>', 'Ovid', 'Meronym']}]->(CloudAtlas), (JimB)-[:ACTED_IN {roles:['<NAME>', '<NAME>', '<NAME>']}]->(CloudAtlas), (TomT)-[:DIRECTED]->(CloudAtlas), (LillyW)-[:DIRECTED]->(CloudAtlas), (LanaW)-[:DIRECTED]->(CloudAtlas), (DavidMitchell)-[:WROTE]->(CloudAtlas), (StefanArndt)-[:PRODUCED]->(CloudAtlas) CREATE (TheDaVinciCode:Movie {title:'The Da Vinci Code', released:2006, tagline:'Break The Codes'}) CREATE (IanM:Person {name:'<NAME>', born:1939}) CREATE (AudreyT:Person {name:'<NAME>', born:1976}) CREATE (PaulB:Person {name:'<NAME>', born:1971}) CREATE (RonH:Person {name:'<NAME>', born:1954}) CREATE (TomH)-[:ACTED_IN {roles:['Dr. <NAME>']}]->(TheDaVinciCode), (IanM)-[:ACTED_IN {roles:['Sir Leight Teabing']}]->(TheDaVinciCode), (AudreyT)-[:ACTED_IN {roles:['Sophie Neveu']}]->(TheDaVinciCode), (PaulB)-[:ACTED_IN {roles:['Silas']}]->(TheDaVinciCode), (RonH)-[:DIRECTED]->(TheDaVinciCode) CREATE (VforVendetta:Movie {title:'V for Vendetta', released:2006, tagline:'Freedom! Forever!'}) CREATE (NatalieP:Person {name:'<NAME>', born:1981}) CREATE (StephenR:Person {name:'<NAME>', born:1946}) CREATE (JohnH:Person {name:'<NAME>', born:1940}) CREATE (BenM:Person {name: '<NAME>', born:1967}) CREATE (Hugo)-[:ACTED_IN {roles:['V']}]->(VforVendetta), (NatalieP)-[:ACTED_IN {roles:['<NAME>']}]->(VforVendetta), (StephenR)-[:ACTED_IN {roles:['<NAME>']}]->(VforVendetta), (JohnH)-[:ACTED_IN {roles:['High Chancellor <NAME>']}]->(VforVendetta), (BenM)-[:ACTED_IN {roles:['Dascomb']}]->(VforVendetta), (JamesM)-[:DIRECTED]->(VforVendetta), (LillyW)-[:PRODUCED]->(VforVendetta), (LanaW)-[:PRODUCED]->(VforVendetta), (JoelS)-[:PRODUCED]->(VforVendetta), (LillyW)-[:WROTE]->(VforVendetta), (LanaW)-[:WROTE]->(VforVendetta) CREATE (SpeedRacer:Movie {title:'Speed Racer', released:2008, tagline:'Speed has no limits'}) CREATE (EmileH:Person {name:'<NAME>', born:1985}) CREATE (JohnG:Person {name:'<NAME>', born:1960}) CREATE (SusanS:Person {name:'<NAME>', born:1946}) CREATE (MatthewF:Person {name:'<NAME>', born:1966}) CREATE (ChristinaR:Person {name:'<NAME>', born:1980}) CREATE (Rain:Person {name:'Rain', born:1982}) CREATE (EmileH)-[:ACTED_IN {roles:['Speed Racer']}]->(SpeedRacer), (JohnG)-[:ACTED_IN {roles:['Pops']}]->(SpeedRacer), (SusanS)-[:ACTED_IN {roles:['Mom']}]->(SpeedRacer), (MatthewF)-[:ACTED_IN {roles:['Racer X']}]->(SpeedRacer), (ChristinaR)-[:ACTED_IN {roles:['Trixie']}]->(SpeedRacer), (Rain)-[:ACTED_IN {roles:['Taejo Togokahn']}]->(SpeedRacer), (BenM)-[:ACTED_IN {roles:['Cass Jones']}]->(SpeedRacer), (LillyW)-[:DIRECTED]->(SpeedRacer), (LanaW)-[:DIRECTED]->(SpeedRacer), (LillyW)-[:WROTE]->(SpeedRacer), (LanaW)-[:WROTE]->(SpeedRacer), (JoelS)-[:PRODUCED]->(SpeedRacer) CREATE (NinjaAssassin:Movie {title:'Ninja Assassin', released:2009, tagline:'Prepare to enter a secret world of assassins'}) CREATE (NaomieH:Person {name:'<NAME>'}) CREATE (Rain)-[:ACTED_IN {roles:['Raizo']}]->(NinjaAssassin), (NaomieH)-[:ACTED_IN {roles:['<NAME>']}]->(NinjaAssassin), (RickY)-[:ACTED_IN {roles:['Takeshi']}]->(NinjaAssassin), (BenM)-[:ACTED_IN {roles:['<NAME>']}]->(NinjaAssassin), (JamesM)-[:DIRECTED]->(NinjaAssassin), (LillyW)-[:PRODUCED]->(NinjaAssassin), (LanaW)-[:PRODUCED]->(NinjaAssassin), (JoelS)-[:PRODUCED]->(NinjaAssassin) CREATE (TheGreenMile:Movie {title:'The Green Mile', released:1999, tagline:"Walk a mile you'll never forget."}) CREATE 
(MichaelD:Person {name:'<NAME>', born:1957}) CREATE (DavidM:Person {name:'<NAME>', born:1953}) CREATE (SamR:Person {name:'<NAME>', born:1968}) CREATE (GaryS:Person {name:'<NAME>', born:1955}) CREATE (PatriciaC:Person {name:'<NAME>', born:1959}) CREATE (FrankD:Person {name:'<NAME>', born:1959}) CREATE (TomH)-[:ACTED_IN {roles:['<NAME>']}]->(TheGreenMile), (MichaelD)-[:ACTED_IN {roles:['<NAME>']}]->(TheGreenMile), (DavidM)-[:ACTED_IN {roles:['Brutus "Brutal" Howell']}]->(TheGreenMile), (BonnieH)-[:ACTED_IN {roles:['<NAME>']}]->(TheGreenMile), (JamesC)-[:ACTED_IN {roles:['<NAME>']}]->(TheGreenMile), (SamR)-[:ACTED_IN {roles:['"Wild Bill" Wharton']}]->(TheGreenMile), (GaryS)-[:ACTED_IN {roles:['<NAME>']}]->(TheGreenMile), (PatriciaC)-[:ACTED_IN {roles:['<NAME>']}]->(TheGreenMile), (FrankD)-[:DIRECTED]->(TheGreenMile) CREATE (FrostNixon:Movie {title:'Frost/Nixon', released:2008, tagline:'400 million people were waiting for the truth.'}) CREATE (FrankL:Person {name:'<NAME>', born:1938}) CREATE (MichaelS:Person {name:'<NAME>', born:1969}) CREATE (OliverP:Person {name:'<NAME>', born:1960}) CREATE (FrankL)-[:ACTED_IN {roles:['<NAME>']}]->(FrostNixon), (MichaelS)-[:ACTED_IN {roles:['<NAME>']}]->(FrostNixon), (KevinB)-[:ACTED_IN {roles:['<NAME>']}]->(FrostNixon), (OliverP)-[:ACTED_IN {roles:['<NAME>']}]->(FrostNixon), (SamR)-[:ACTED_IN {roles:['<NAME>, Jr.']}]->(FrostNixon), (RonH)-[:DIRECTED]->(FrostNixon) CREATE (Hoffa:Movie {title:'Hoffa', released:1992, tagline:"He didn't want law. He wanted justice."}) CREATE (DannyD:Person {name:'<NAME>', born:1944}) CREATE (JohnR:Person {name:'<NAME>', born:1965}) CREATE (JackN)-[:ACTED_IN {roles:['Hoffa']}]->(Hoffa), (DannyD)-[:ACTED_IN {roles:['Robert "Bobby" Ciaro']}]->(Hoffa), (JTW)-[:ACTED_IN {roles:['<NAME>']}]->(Hoffa), (JohnR)-[:ACTED_IN {roles:['Peter "Pete" Connelly']}]->(Hoffa), (DannyD)-[:DIRECTED]->(Hoffa) CREATE (Apollo13:Movie {title:'Apollo 13', released:1995, tagline:'Houston, we have a problem.'}) CREATE (EdH:Person {name:'<NAME>', born:1950}) CREATE (BillPax:Person {name:'<NAME>', born:1955}) CREATE (TomH)-[:ACTED_IN {roles:['<NAME>']}]->(Apollo13), (KevinB)-[:ACTED_IN {roles:['<NAME>']}]->(Apollo13), (EdH)-[:ACTED_IN {roles:['<NAME>']}]->(Apollo13), (BillPax)-[:ACTED_IN {roles:['<NAME>']}]->(Apollo13), (GaryS)-[:ACTED_IN {roles:['<NAME>']}]->(Apollo13), (RonH)-[:DIRECTED]->(Apollo13) CREATE (Twister:Movie {title:'Twister', released:1996, tagline:"Don't Breathe. Don't Look Back."}) CREATE (PhilipH:Person {name:'<NAME>', born:1967}) CREATE (JanB:Person {name:'<NAME>', born:1943}) CREATE (BillPax)-[:ACTED_IN {roles:['<NAME>']}]->(Twister), (HelenH)-[:ACTED_IN {roles:['Dr. 
<NAME>']}]->(Twister), (ZachG)-[:ACTED_IN {roles:['Eddie']}]->(Twister), (PhilipH)-[:ACTED_IN {roles:['Dustin "Dusty" Davis']}]->(Twister), (JanB)-[:DIRECTED]->(Twister) CREATE (CastAway:Movie {title:'Cast Away', released:2000, tagline:'At the edge of the world, his journey begins.'}) CREATE (RobertZ:Person {name:'<NAME>', born:1951}) CREATE (TomH)-[:ACTED_IN {roles:['Ch<NAME>']}]->(CastAway), (HelenH)-[:ACTED_IN {roles:['<NAME>']}]->(CastAway), (RobertZ)-[:DIRECTED]->(CastAway) CREATE (OneFlewOvertheCuckoosNest:Movie {title:"One Flew Over the Cuckoo's Nest", released:1975, tagline:"If he's crazy, what does that make you?"}) CREATE (MilosF:Person {name:'<NAME>', born:1932}) CREATE (JackN)-[:ACTED_IN {roles:['<NAME>']}]->(OneFlewOvertheCuckoosNest), (DannyD)-[:ACTED_IN {roles:['Martini']}]->(OneFlewOvertheCuckoosNest), (MilosF)-[:DIRECTED]->(OneFlewOvertheCuckoosNest) CREATE (SomethingsGottaGive:Movie {title:"Something's Gotta Give", released:2003}) CREATE (DianeK:Person {name:'<NAME>', born:1946}) CREATE (NancyM:Person {name:'<NAME>', born:1949}) CREATE (JackN)-[:ACTED_IN {roles:['<NAME>']}]->(SomethingsGottaGive), (DianeK)-[:ACTED_IN {roles:['<NAME>']}]->(SomethingsGottaGive), (Keanu)-[:ACTED_IN {roles:['<NAME>']}]->(SomethingsGottaGive), (NancyM)-[:DIRECTED]->(SomethingsGottaGive), (NancyM)-[:PRODUCED]->(SomethingsGottaGive), (NancyM)-[:WROTE]->(SomethingsGottaGive) CREATE (BicentennialMan:Movie {title:'Bicentennial Man', released:1999, tagline:"One robot's 200 year journey to become an ordinary man."}) CREATE (ChrisC:Person {name:'<NAME>', born:1958}) CREATE (Robin)-[:ACTED_IN {roles:['<NAME>']}]->(BicentennialMan), (OliverP)-[:ACTED_IN {roles:['<NAME>']}]->(BicentennialMan), (ChrisC)-[:DIRECTED]->(BicentennialMan) CREATE (CharlieWilsonsWar:Movie {title:"<NAME>'s War", released:2007, tagline:"A stiff drink. A little mascara. A lot of nerve. Who said they couldn't bring down the Soviet empire."}) CREATE (JuliaR:Person {name:'<NAME>', born:1967}) CREATE (TomH)-[:ACTED_IN {roles:['Rep. 
<NAME>']}]->(CharlieWilsonsWar), (JuliaR)-[:ACTED_IN {roles:['<NAME>']}]->(CharlieWilsonsWar), (PhilipH)-[:ACTED_IN {roles:['<NAME>']}]->(CharlieWilsonsWar), (MikeN)-[:DIRECTED]->(CharlieWilsonsWar) CREATE (ThePolarExpress:Movie {title:'The Polar Express', released:2004, tagline:'This Holiday Season… Believe'}) CREATE (TomH)-[:ACTED_IN {roles:['Hero Boy', 'Father', 'Conductor', 'Hobo', 'Scrooge', 'Santa Claus']}]->(ThePolarExpress), (RobertZ)-[:DIRECTED]->(ThePolarExpress) CREATE (ALeagueofTheirOwn:Movie {title:'A League of Their Own', released:1992, tagline:'Once in a lifetime you get a chance to do something different.'}) CREATE (Madonna:Person {name:'Madonna', born:1954}) CREATE (GeenaD:Person {name:'<NAME>', born:1956}) CREATE (LoriP:Person {name:'<NAME>', born:1963}) CREATE (PennyM:Person {name:'<NAME>', born:1943}) CREATE (TomH)-[:ACTED_IN {roles:['<NAME>']}]->(ALeagueofTheirOwn), (GeenaD)-[:ACTED_IN {roles:['<NAME>']}]->(ALeagueofTheirOwn), (LoriP)-[:ACTED_IN {roles:['<NAME>']}]->(ALeagueofTheirOwn), (RosieO)-[:ACTED_IN {roles:['<NAME>']}]->(ALeagueofTheirOwn), (Madonna)-[:ACTED_IN {roles:['"All the Way" <NAME>']}]->(ALeagueofTheirOwn), (BillPax)-[:ACTED_IN {roles:['<NAME>']}]->(ALeagueofTheirOwn), (PennyM)-[:DIRECTED]->(ALeagueofTheirOwn) CREATE (PaulBlythe:Person {name:'<NAME>'}) CREATE (AngelaScope:Person {name:'<NAME>'}) CREATE (JessicaThompson:Person {name:'<NAME>'}) CREATE (JamesThompson:Person {name:'<NAME>'}) CREATE (JamesThompson)-[:FOLLOWS]->(JessicaThompson), (AngelaScope)-[:FOLLOWS]->(JessicaThompson), (PaulBlythe)-[:FOLLOWS]->(AngelaScope) CREATE (JessicaThompson)-[:REVIEWED {summary:'An amazing journey', rating:95}]->(CloudAtlas), (JessicaThompson)-[:REVIEWED {summary:'Silly, but fun', rating:65}]->(TheReplacements), (JamesThompson)-[:REVIEWED {summary:'The coolest football movie ever', rating:100}]->(TheReplacements), (AngelaScope)-[:REVIEWED {summary:'Pretty funny at times', rating:62}]->(TheReplacements), (JessicaThompson)-[:REVIEWED {summary:'Dark, but compelling', rating:85}]->(Unforgiven), (JessicaThompson)-[:REVIEWED {summary:"Slapstick redeemed only by the <NAME> and <NAME>'s stellar performances", rating:45}]->(TheBirdcage), (JessicaThompson)-[:REVIEWED {summary:'A solid romp', rating:68}]->(TheDaVinciCode), (JamesThompson)-[:REVIEWED {summary:'Fun, but a little far fetched', rating:65}]->(TheDaVinciCode), (JessicaThompson)-[:REVIEWED {summary:'You had me at Jerry', rating:92}]->(JerryMaguire) WITH TomH as a MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d) RETURN a,m,d LIMIT 10;
```
<file_sep>import sys


# check every address in the given list and exit if one of them
# is not a valid unicast IPv4 address
def ip_addr_valid(ip_list):
    for ip in ip_list:
        ip = ip.rstrip("\n")
        octet_list = ip.split('.')
        # a valid address has four octets, a first octet between 1 and 223
        # (excluding loopback 127.x.x.x and link-local 169.254.x.x) and
        # values between 0 and 255 for the remaining octets
        if (len(octet_list) == 4) and (1 <= int(octet_list[0]) <= 223) and (int(octet_list[0]) != 127) and (int(octet_list[0]) != 169 or int(octet_list[1]) != 254) and (0 <= int(octet_list[1]) <= 255 and 0 <= int(octet_list[2]) <= 255 and 0 <= int(octet_list[3]) <= 255):
            continue
        else:
            print("\n* invalid IP address {} - check file \n* exiting program \n".format(ip))
            sys.exit()
<file_sep>---
date: "2017-09-03"
title: "React under the Hood"
categories:
- Javascript
- React
---

![Harbin, China](./photo-33762606594_1e4362c22c_o.png)

> A look behind the curtain of React Starters like:
>
> * [create-react-app](https://github.com/facebookincubator/create-react-app)
> * [Gatsby.js](https://github.com/gatsbyjs/gatsby)
> * [Next.js](https://github.com/zeit/next.js)
> * [Neutrino](https://neutrino.js.org)
>
> React is often said to be easy to learn, but impossible to set up in a dev
environment. Once you start reading about it, you will be faced with an exhausting amount of choices to make before you can move on to actual coding. Starter packages, like the ones named above, give you quick access to the React world. Let's take a look into that black box now.

[Github](https://github.com/mpolinowski/react-under-the-hood)

<!-- TOC -->

- [01 Pure React](#01-pure-react)
- [02 JSX and Babel](#02-jsx-and-babel)
    - [Transpilation](#transpilation)
- [03 Webpack](#03-webpack)
    - [Loading JSON](#loading-json)
    - [Adding SASS](#adding-sass)
- [04 React Components](#04-react-components)
    - [ES6 Class Syntax](#es6-class-syntax)
    - [Stateless Functions](#stateless-functions)
- [05 Adding React Icons](#05-adding-react-icons)
- [06 Working with Props](#06-working-with-props)
    - [Default Props](#default-props)
    - [PropType Validation](#proptype-validation)
- [07 Working with State](#07-working-with-state)

<!-- /TOC -->

## 01 Pure React

Create a file _/dist/index.js_ with the following React code:

```js
const { createElement } = React
const { render } = ReactDOM

const title = createElement(
    'h1',
    {id: 'title', className: 'header'},
    'Hello World'
)

render(
    title,
    document.getElementById('react-container')
)
```

The `<title />` component uses the createElement function from React to create an h1 header with the CSS class _header_, an id _title_ and the text string _Hello World_. The ReactDOM render function will then render it into the div container with the id _react-container_.

Now we need to create an HTML page called /dist/index.html that contains the container with the named id:

```html
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <!-- Force latest available IE rendering engine and Chrome Frame (if installed) -->
    <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
    <!-- Mobile Screen Resizing -->
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <!-- HTML5 Shim for IE 6-8 -->
    <!--[if lt IE 9]>
      <script src="http://html5shiv.googlecode.com/svn/trunk/html5.js"></script>
    <![endif]-->
    <script src="https://cdnjs.cloudflare.com/ajax/libs/react/15.1.0/react.min.js"></script>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/react/15.1.0/react-dom.min.js"></script>
    <title>Hello World with React</title>
</head>
<body>
    <!--[if lt IE 8]>
      <section class="container">
        Did you know that your web browser is a bit old? Some of the content on this site might not work right as a result. <a href="http://whatbrowser.org">Upgrade your browser</a> for a faster, better, and safer web experience.
      </section>
    <![endif]-->
    <div id="react-container"></div>
    <script src="./index.js"></script>
</body>
</html>
```

We add React and ReactDOM directly via CDN and link our _index.js_ inside the body tag.
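Since everything in this section runs through `createElement`, it may help to see how elements nest before we move on - the following is my own minimal sketch (not part of the original example) and uses the same React/ReactDOM globals from the CDN:

```js
const { createElement } = React
const { render } = ReactDOM

// createElement(type, props, ...children) - children can be plain
// strings or other elements created the same way
const list = createElement(
    'ul',
    {id: 'starters', className: 'list'},
    createElement('li', null, 'create-react-app'),
    createElement('li', null, 'Gatsby.js'),
    createElement('li', null, 'Next.js')
)

render(
    list,
    document.getElementById('react-container')
)
```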
Now we need to put our React app onto a webserver - for testing, we will use the npm package httpster to serve our files:

```
npm install httpster -g
```

Now start the webserver with the port and directory flags:

```
httpster -p 3000 -d /e/react-under-the-hood/dist
```

Our app can now be accessed in a web browser at _http://localhost:3000_

![](./ruth_01.png)

We can easily style our title component by creating a style object:

```js
const style = {
    backgroundColor: 'purple',
    color: 'teal',
    fontFamily: 'verdana'
}
```

And assigning the style to our component:

```js
const title = createElement(
    'h1',
    {id: 'title', className: 'header', style: style},
    'Hello World'
)
```

![](./ruth_02.png)

## 02 JSX and Babel

React offers a way to write our mark-up directly inside the Javascript component - called JSX. The title component written in JSX looks like this:

```js
render(
    <h1 id='title' className='header' style={style}>
        Hello World
    </h1>,
    document.getElementById('react-container')
)
```

Since web browsers don't understand JSX, we will have to transpile it to pure Javascript using Babel - this can be quickly done with the babel-cli transpiler.

### Transpilation

Let us first initialize our node project with _npm init -y_, then install the babel-cli both globally as well as a development dependency inside our project:

```bash
npm install -g babel-cli
npm install --save-dev babel-cli
```

Now create a folder called src inside the root dir and move the index.js file into it - since we want to use Babel to transpile all JSX files from the source directory and copy them to the distribution directory, where they can be picked up and served by our webserver.

Now we need to configure Babel to transpile JSX and all latest and proposed versions of ECMAScript, by adding a file .babelrc inside the root directory:

```json
{
  "presets": ["latest", "react", "stage-0"]
}
```

Now we need to install those presets as dev-dependencies - __be advised__: _we later throw out babel-preset-latest and babel-preset-stage-0 and replace them with [babel-preset-env](https://babeljs.io/docs/plugins/preset-env/) to work with webpack 3!_:

```bash
npm install --save-dev babel-preset-react babel-preset-latest babel-preset-stage-0
```

We can now use the cli tool to transpile our JSX source file and create the browser-readable bundle.js file from it:

```bash
babel ./src/index.js --out-file ./dist/bundle.js
```

Now open index.html inside the /dist directory and change index.js to bundle.js. Reloading our webserver will now show our app again.

To make our life easier we will add the httpster call as our npm start script inside the package.json file - then start your webserver with _npm start_:

```json
"scripts": {
    "start": "httpster -p 3000 -d ./dist"
}
```

We are now able to write our React code in JSX as well as to use ES2015 or ES2017 syntax inside our source files. Babel will transpile it into browser-friendly code inside /dist/bundle.js. But we don't want to do this by hand every time we make a small edit on our page - we need an automation solution for this process.

## 03 Webpack

Webpack is a module bundler that enables us to create static files from our React code. We can use it to automate processes like the Babel transpiling and to serve our app in a hot-reloading dev-server environment.
First we need to add a [Webpack configuration](https://webpack.js.org/guides/getting-started/) file inside the root directory - webpack.config.js:

```js
const path = require('path');
const webpack = require('webpack');

module.exports = {
    entry: path.resolve(__dirname, './src/index.js'),
    devServer: {
        contentBase: path.resolve(__dirname, './dist'),
        port: 3000,
        inline: true
    },
    module: {
        rules: [{
            test: /\.js$/,
            exclude: /(node_modules)/,
            use: {
                loader: 'babel-loader',
                options: {
                    presets: ['env', 'react']
                }
            }
        }]
    },
    output: {
        path: path.resolve(__dirname, './dist/assets/'),
        filename: 'bundle.js',
        publicPath: 'assets'
    },
};
```

Now we want to [install the latest version of Webpack](https://webpack.js.org/guides/installation/) together with the babel-loader & presets, as well as the Webpack Dev-Server to host our files:

```
npm install --save-dev webpack babel-loader babel-core babel-preset-env webpack-dev-server
```

We can create npm scripts to start Webpack from inside the repository (a global installation is not recommended). The start script hosts our webapp according to the devServer configuration inside webpack.config.js. The build script takes all js files (node_modules excluded), babel-transpiles them with the babel-loader and puts them bundled into the _./dist/assets_ directory. And the watch script will watch the directories for changes and start the loader automatically when we save an edit.

```
"scripts": {
    "start": "webpack-dev-server --open",
    "watch": "webpack --watch",
    "build": "webpack --config webpack.config.js"
}
```

We can now run our build process with _npm run build_ / _npm run watch_ and start our devServer with _npm start_.

Let us now use Webpack to load our react dependencies - instead of linking them into our HTML page. To do this we first have to install React to the project:

```
npm install --save react react-dom
```

### Loading JSON

And to demonstrate the function of module loading, we want to use some JSON data that is loaded into our React app by Webpack:

```
npm install --save-dev json-loader
```

Let's add the JSON loader to our Webpack config file:

```js
module: {
    rules: [
        {
            test: /\.js$/,
            exclude: /(node_modules)/,
            use: {
                loader: 'babel-loader',
                options: {
                    presets: ['env', 'react']
                }
            }
        },
        {
            test: /\.json$/,
            exclude: /(node_modules)/,
            use: {
                loader: 'json-loader'
            }
        }
    ]
},
```

And create an exciting JSON file _./src/titles.json_:

```json
{
  "data1": "first data",
  "data2": "second data"
}
```

And create a JSX module that uses this data in _./src/lib.js_:

```js
import React from 'react'
import text from './titles.json'

export const data1 = (
    <h1 id='title'
        className='header'
        style={{backgroundColor: 'teal', color: 'purple'}}>
        {text.data1}
    </h1>
)

export const data2 = (
    <h1 id='title'
        className='header'
        style={{backgroundColor: 'purple', color: 'teal'}}>
        {text.data2}
    </h1>
)
```

We are now using the module import statement to import React from the installed React dependency, as well as our own JSON file. This functionality is not yet integrated in JavaScript, but is available thanks to Webpack and Babel.

Now we can rewrite our _./src/index.js_ file to receive the module that we just created:

```js
import React from 'react'
import { render } from 'react-dom'

import {data1, data2} from './lib'

render(
    <div>
        {data1}
        {data2}
    </div>,
    document.getElementById('react-container')
)
```

Notice that we need to import _react-dom_ here, since __render__ is not part of _react_.

![](./ruth_03.png)
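As an aside - and this is just an equivalent sketch of my own, not a step from the original guide - the named `{ render }` import could also be written with the default export of the _react-dom_ package:

```js
import React from 'react'
import ReactDOM from 'react-dom'

import {data1, data2} from './lib'

// same result as calling the named render() import above
ReactDOM.render(
    <div>
        {data1}
        {data2}
    </div>,
    document.getElementById('react-container')
)
```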
### Adding SASS

The same principle can be applied to add styles to our React app - let's try to add some [SASS](http://sass-lang.com/) to our app with the [Kraken-Sass](http://jwebcat.github.io/kraken-sass/index.html) boilerplate:

First we want to install the Webpack loaders that preprocess the source [SASS](http://sass-lang.com/guide) into proper CSS:

```
npm install --save-dev style-loader css-loader sass-loader
```

You will get a warning that _sass-loader_ requires another dependency called _[node-sass](https://github.com/sass/node-sass)_, which is a library that provides bindings for Node.js to LibSass, the C version of the popular stylesheet preprocessor, Sass. This, on the other hand, requires - __under Windows__ - the installation of the [Windows Build Tools](https://github.com/felixrieseberg/windows-build-tools):

```
npm install -g --production windows-build-tools
```

![](./ruth_04.png)

Go and get yourself a cup of coffee - as this is going to take a while ¯\\_(ツ)_\/¯

Once this is through, continue with node-sass:

```
npm install --save-dev node-sass
```

Then add the [SASS loaders](https://webpack.js.org/loaders/sass-loader/) to our Webpack config:

```js
{
    test: /\.scss$/,
    exclude: /(node_modules)/,
    use: [{
        loader: "style-loader" // creates style nodes from JS strings
    }, {
        loader: "css-loader" // translates CSS into CommonJS
    }, {
        loader: "sass-loader" // compiles Sass to CSS
    }]
}
```

[Download the master.zip](https://github.com/jwebcat/kraken-sass/archive/master.zip) from kraken-sass and unzip the kraken.scss file (together with the lib folder - that contains all the scss components) to _./src/assets/sass_.

Now we can import the [kraken-sass styles](http://jwebcat.github.io/kraken-sass/kraken-way.html) into our _./src/index.js_ component:

```js
import React from 'react'
import { render } from 'react-dom'

import {data1, data2} from './lib'

import './assets/sass/kraken.scss'

render(
    <div>
        <h1>Webpack Styling</h1>
        <h4>With Kraken-Sass Boilerplate</h4>
        <button className="btn btn-blue btn-block">
            {data1}
        </button>
        <button className="btn btn-blue btn-block">
            {data2}
        </button>
    </div>,
    document.getElementById('react-container')
)
```

![](./ruth_05.png)

As we can see by now, React allows us to create a collection of separate [JSX components](https://reactjs.org/docs/components-and-props.html) and [CSS modules](https://github.com/css-modules/css-modules) that offer isolation for our app logic and component styles. Each piece is a building block that is then imported into our _./src/index.js_ React interface and bundled & transpiled by Webpack/Babel into a browser-ready website.

Let's clean up our folder structure to show the separation between main pages (index.js) and components and modules that can be re-used in every page (make sure to also change the relative links inside each file):

![](./ruth_06.png)

## 04 React Components

Let us now build a small component that lists how many countries there are in the world, how many we have visited and how many we want to visit in total. We can also add a little bit of math to it and calculate the completion percentage of our endeavor.

When you look at code examples on Github, you will find a couple of different ways to write such a component. The first, and oldest, one uses the __createClass__ syntax and will no longer work in React v16 - [React 15.5.0: React.createClass officially deprecated](https://facebook.github.io/react/blog/2017/04/07/react-v15.5.0.html#new-deprecation-warnings).
```js
import React from 'react'

import '../assets/sass/kraken.scss'

// cannot be rendered inside react 16 - you need to downgrade your react and
// react-dom version to react < 15.5
export const CountriesVisited = React.createClass({
    percentToDecimal(decimal) {
        return ((decimal * 100) + '%')
    },
    calcGoalProgress(total, goal) {
        return this.percentToDecimal(total/goal)
    },
    render() {
        return (
            <div className="countries-visited">
                <hr/>
                <h3>The React.createClass Syntax is no longer supported inside React v16!</h3>
                <div className="total-contries">
                    <span>{this.props.total} </span>
                    <span>total countries</span>
                </div>
                <div className="visited">
                    <span>{this.props.visited} </span>
                    <span>visited countries</span>
                </div>
                <div className="wish-list">
                    <span>{this.props.liked} </span>
                    <span>countries on wishlist</span>
                </div>
                <div>
                    <span>
                        {this.calcGoalProgress(
                            this.props.total,
                            this.props.goal
                        )}
                    </span>
                </div>
            </div>
        )
    }
})
```

Here we are working with props (properties) that are passed down from the parent component in _./src/index.js_. That means, if we want to add this component, we also have to inject those properties. If you add the following to the render function inside the parent component (see further below on how to implement it):

```js
<CountriesVisited total={196} visited={86} liked={186} goal={96}/>
```

and given that you are using React < v16, our component would be rendered inside our main component, just as our buttons were in the example before - just in case you stumble over a code bit somewhere that looks like this...

Now let's bring it up to speed and rewrite the component with ES6 syntax!

### ES6 Class Syntax
They follow the following structure: ```js const MyComponent = (props) => ( <div>{props.title}</div> ) ``` They take in property information from their parent component and return (unrendered) JSX Elements to them. That means, that we also do not have to import react anymore. But local methods - like our calculations - have to be removed from the component and put into their own functions: ```js import '../assets/sass/kraken.scss' import '../assets/sass/ui.scss' const percentToDecimal = (decimal) => { return ((decimal * 100) + '%') } const calcTravelProgress = (visited, goal) => { return percentToDecimal (visited/goal) } export const CountriesVisitedStateless = (props) => ( <div> <div className="grid-full space-bottom text-center"> <span>{props.total} </span> <span>total countries</span> </div> <div className="grid-half text-center space-bottom"> <span>{props.visited} </span> <span>visited countries</span> </div> <div className="grid-half space-bottom text-center"> <span>{props.liked} </span> <span>countries on wishlist</span> </div> <div className="grid-full space-bottom text-center"> <span>{calcTravelProgress ( props.visited, props.goal )} </span> <span> Completion</span> </div> </div> ) ``` To destructure this a little bit more, we can declaratively state only the object keys that we actually want to use from props - this way we don't have to add the __props.__ in front anymore: ```js import '../assets/sass/kraken.scss' import '../assets/sass/ui.scss' const percentToDecimal = (decimal) => { return ((decimal * 100) + '%') } const calcTravelProgress = (visited, goal) => { return percentToDecimal (visited/goal) } export const CountriesVisitedStateless = ({ total, visited, liked, goal }) => ( <div> <hr/> <div className="grid-full space-bottom text-center"> <span>{total} </span> <span> total </span> <Globe className="text-tall" /> </div> <div className="grid-half text-center space-bottom"> <span>{visited} </span> <span> visited </span> <Landing className="text-tall" /> </div> <div className="grid-half space-bottom text-center"> <span className="text-tall">{liked} </span> <span> wishlist </span> <Heart className="text-tall" /> </div> <div className="grid-full space-bottom text-center"> <span>{calcTravelProgress ( visited, goal )} </span> <Checked className="text-tall" /><br/><br/> </div> <p className="text-small text-muted">This Data is calculated inside a stateless Component</p> </div> ) ``` ## 05 Adding React Icons The [React-Icons](http://gorangajic.github.io/react-icons/) module allows you to include popular icons in your React projects. 
The module can be [installed by npm](https://www.npmjs.com/package/react-icons). React-Icons can then be imported into our component:

```js
import Globe from 'react-icons/lib/go/globe'
import Landing from 'react-icons/lib/md/flight-land'
import Heart from 'react-icons/lib/go/heart'
import Checked from 'react-icons/lib/ti/input-checked'
```

And simply be added as child components:

```js
<Globe />
<Landing />
<Heart />
<Checked />
```

## 06 Working with Props

Let's call a new component CountryList inside _./src/index.js_ and give it some props - in form of an array of objects:

```js
<CountryList countries= {
  [
    {
      country: "Japan",
      date: new Date ("10/19/2010"),
      visited: true,
      liked: true
    },
    {
      country: "Taiwan",
      date: new Date ("12/12/2006"),
      visited: true,
      liked: true
    },
    {
      country: "China",
      date: new Date ("10/20/2010"),
      visited: true,
      liked: true
    }
  ]
}/>
```

Now create this component in _./src/components/country-list.js_ :

```js
import { CountryRow } from './country-row'

export const CountryList = ({countries}) => (
  <table>
    <thead>
      <tr>
        <th>Date</th>
        <th>Country</th>
        <th>Visited</th>
        <th>Liked</th>
      </tr>
    </thead>
    <tbody>
      {countries.map((country, i) =>
        <CountryRow key={i}
                    country={country.country}
                    date={country.date}
                    visited={country.visited}
                    liked={country.liked}/>
      )}
    </tbody>
  </table>
)
```

We created another nested child component inside it to create the actual table body of our country list. All the props that were given to us inside _index.js_ have now been handed further down to the CountryRow component. And we have a map function wrapped around it to go through all values inside the _countries_ array - and create a row for every entry. Which is an excellent time to introduce __The Spread Operator__ - [three dots that changed the world](https://dmitripavlutin.com/how-three-dots-changed-javascript/) - to clean up our code:

```js
export const CountryList = ({countries}) => (
  <div className="grid-full space-bottom space-top">
    <br/><br/>
    <table className="grid-full space-bottom space-top float-center">
      <thead>
        <tr>
          <th>Date</th>
          <th>Country</th>
          <th>Visited</th>
          <th>Liked</th>
        </tr>
      </thead>
      <tbody>
        {countries.map((country, i) =>
          <CountryRow key={i} {...country} />
        )}
      </tbody>
    </table>
  </div>
)
```

And the row component, that we already imported in the list component (above), can now use the props _country, date, visited, liked_ from the _countries_ array to populate the rows of our table inside _./src/components/country-row.js_ :

```js
import Landing from 'react-icons/lib/md/flight-land'
import Heart from 'react-icons/lib/go/heart'

export const CountryRow = ({country, date, visited, liked}) => (
  <tr>
    <td>
      { date.getMonth() +1 } / { date.getDate() } / { date.getFullYear() }
    </td>
    <td>
      { country }
    </td>
    <td>
      { (visited) ? <Landing /> : null }
    </td>
    <td>
      { (liked) ? <Heart /> : null }
    </td>
  </tr>
)
```

We added icons again to show whether we visited and liked a country. The syntax of the [Inline If-Else Statement](https://reactjs.org/docs/conditional-rendering.html#inline-if-else-with-conditional-operator) __(visited) ? \<Landing /\> : null__ reads like: _if visited is true, display the Landing react-icon, otherwise, don't_.

### Default Props

Default props populate our child component with properties when they are not provided by the parent component.
Let's add them to our country-visited component to see how this looks for the createClass syntax, the ES6 class syntax and inside a stateless component:

__createClass__ (see _./src/components/countries-visited-createClass.js_)

The default props are just added like our custom methods before inside the component, by the __getDefaultProps__ function:

```js
export const CountriesVisited = React.createClass({
  getDefaultProps() {
    return {
      total : 196,
      visited : 50,
      liked : 100,
      goal : 99
    }
  },
  [...]
})
```

__ES6 Class__ (see _./src/components/countries-visited-ES6.js_)

In case of an ES6 class we have to add default props to the class instance (written below the component itself):

```js
CountriesVisitedES6.defaultProps = {
  total : 196,
  visited : 50,
  liked : 100,
  goal : 99
}
```

__stateless__ (see _./src/components/countries-visited-stateless.js_)

The way above can also be used for stateless components - just copy&paste. But you can also assign default values to the props that you give the component - they will only be used if no props are provided by the parent component:

```js
export const CountriesVisitedStateless = ({ total=196, visited=50, liked=100, goal=99 }) => ([...])
```

### PropType Validation

As your app grows, you can catch a lot of bugs with typechecking. To run typechecking on the props for a component, you can assign the special [propTypes property](https://reactjs.org/docs/typechecking-with-proptypes.html):

__createClass__ (see _./src/components/countries-visited-createClass.js_)

```js
import { createClass, PropTypes } from 'react'

export const CountriesVisited = React.createClass({
  propTypes: {
    total : PropTypes.number,
    visited : PropTypes.number,
    liked : PropTypes.number,
    goal : PropTypes.number
  },
  [...]
})
```

__ES6 Class__ (see _./src/components/countries-visited-ES6.js_)

In case of an ES6 class we have to add propTypes to the class instance (written below the component itself). For React >15.5 we also need to install _npm install --save prop-types_ separately and import it at the top of the file!

```js
import { Component } from 'react'
import PropTypes from 'prop-types';

[...]

CountriesVisitedES6.propTypes = {
  total : PropTypes.number.isRequired,
  visited : PropTypes.number.isRequired,
  liked : PropTypes.number,
  goal : PropTypes.number.isRequired
}
```

You can test it by "feeding" your component, e.g., a Boolean instead of a Number - you will now get an error message inside your console (the same would happen if you remove a prop that is tagged as __.isRequired__):

![](./ruth_07.png)

__stateless__ (see _./src/components/countries-visited-stateless.js_)

The way above can also be used for stateless components - just copy&paste:

```js
import PropTypes from 'prop-types';

[...]

CountriesVisitedStateless.propTypes = {
  total : PropTypes.number.isRequired,
  visited : PropTypes.number.isRequired,
  liked : PropTypes.number,
  goal : PropTypes.number.isRequired
}
```

We can now add Type Validation to our two list / row components:

__stateless__ (see _./src/components/country-row.js_)

```js
import PropTypes from 'prop-types';

[...]

CountryRow.propTypes = {
  country: PropTypes.string.isRequired,
  date: PropTypes.instanceOf(Date).isRequired,
  visited: PropTypes.bool,
  liked: PropTypes.bool,
}
```

__stateless__ (see _./src/components/country-list.js_)

```js
import PropTypes from 'prop-types';

[...]

CountryList.propTypes = {
  countries: PropTypes.array.isRequired,
}
```

Besides those default type checks, we can also employ __Custom Validations__ - e.g.
is _countries_ an array - and if yes, does the array have at least one record:

```js
CountryList.propTypes = {
  countries: function(props) {
    if(!Array.isArray(props.countries)) {
      return new Error (
        "Country List has to be an Array!"
      )
    } else if (!props.countries.length) {
      return new Error (
        "Country List must have at least one record!"
      )
    } else {
      return null
    }
  }
}
```

## 07 Working with State

So far we used props to pass down data into our components. The other way to do this is by handling the state of a component. Here - when you are not using state management solutions like Redux - it is very important that we limit the number of components that handle our component state. To do this, we want to create another component in _./src/components/app-createClass.js_ called \<App /\>:

```js
import {createClass} from 'react'

import { CountriesVisitedES6 } from './countries-visited-es6'
import { CountryList } from './country-list'

export const App = createClass({
  getInitialState() {
    return {
      countries: [
        { country: "Japan", date: new Date ("10/19/2010"), visited: true, liked: true },
        { country: "Taiwan", date: new Date ("12/12/2006"), visited: true, liked: true },
        { country: "China", date: new Date ("10/20/2010"), visited: true, liked: true }
      ]
    }
  },
  render() {
    return (
      <div className="app">
        <CountryList countries={this.state.countries} />
        <CountriesVisitedES6 total={196} visited={86} liked={186} />
      </div>
    )
  }
})
```

We can remove the CountryList and CountriesVisitedES6 components from _./src/index.js_ and import App instead, as their new parent component. The `countries` array is now made available to our CountryList component by the __getInitialState__ function in \<App /\>.

We can also add a filter function that allows us to count all countries that are either _visited_ or _liked_:

```js
countCountries(filter) {
  return this.state.countries.filter(function(country) {
    if(filter) {
      return country[filter]
    } else {
      return country
    }
  }).length
},
```

And again, we can use the __inline If/Else syntax__ to clean up our _countCountries_ function:

```js
countCountries(filter) {
  return this.state.countries.filter(
    (country) => (filter) ? country[filter] : country
  ).length
}
```

This filter takes the countries array and returns all countries - unless the function call uses a filter string. In that case only the countries that fulfill the filter condition are returned. The function call and filter condition can then be applied in our render function:

```js
render() {
  return (
    <div className="app">
      <CountryList countries={this.state.countries} />
      <CountriesVisitedES6 total={this.countCountries()}
                           visited={this.countCountries("visited")}
                           liked={this.countCountries("liked")} />
    </div>
  )
}
```

For _total_ we don't use a filter - we want to display the number of all countries. But for _visited_ and _liked_ we only want to count countries that have the corresponding variable set to true. This way we now managed to get rid of the hard-coded numbers. Instead we simply count those countries inside the state of our component.
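Since \<App /\> now owns the application state, it is also the natural place for any state updates. As a small sketch of where this could go - this handler is hypothetical and not part of the tutorial code - new countries would be appended through React's `setState()`, never by mutating `this.state` directly, so that the list and the counters re-render:

```js
// Hypothetical handler inside <App /> - not part of the original tutorial code.
// setState() with an updater function guarantees we work on the latest state
// and triggers a re-render of <CountryList /> and the three counters.
addCountry(newCountry) {
  this.setState(prevState => ({
    countries: [...prevState.countries, newCountry]
  }))
}
```

Calling `this.addCountry({ country: "Laos", date: new Date("04/01/2012"), visited: true, liked: false })` would then update the table and all derived counts in one go.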
Now let's rewrite our App component using ES6 classes in _./src/components/app-es6.js_:

```js
import {Component} from 'react'

import { CountriesVisitedES6 } from './countries-visited-es6'
import { CountryList } from './country-list'

export class App extends Component {
  constructor(props) {
    super(props)
    this.state = {
      countries: [
        { country: "Japan", date: new Date ("10/19/2010"), visited: true, liked: true },
        { country: "Taiwan", date: new Date ("12/12/2006"), visited: true, liked: true },
        { country: "China", date: new Date ("10/20/2010"), visited: true, liked: true },
        { country: "Austria", date: new Date ("10/20/2010"), visited: true, liked: false }
      ]
    }
  }
  countCountries(filter) {
    return this.state.countries.filter(
      (country) => (filter) ? country[filter] : country
    ).length
  }
  render() {
    return (
      <div className="app">
        <CountryList countries={this.state.countries} />
        <CountriesVisitedES6 total={this.countCountries()}
                             visited={this.countCountries("visited")}
                             liked={this.countCountries("liked")} />
      </div>
    )
  }
}
```
<file_sep>---
date: "2018-11-25"
title: "Webhooks with URL Queries"
categories:
  - IoT
  - Node-RED
---

![Sapporo, Japan](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Setup](#setup)
  - [What we want](#what-we-want)
  - [Node-RED Flow](#node-red-flow)
  - [Alarmserver Configuration](#alarmserver-configuration)
  - [Testing](#testing)

<!-- /TOC -->

INSTAR IP cameras offer an Alarmserver Function - [1080p Models](https://wiki.instar.com/Web_User_Interface/1080p_Series/Alarm/Alarm_Server/), [720p Models](https://wiki.instar.com/Web_User_Interface/720p_Series/Alarm/Alarmserver/) - that allows you to send a request to a home automation system on your network, thus notifying your smarthome and triggering a response. In a recent update they introduced parameters (__URL Queries__) that can be appended to this HTTP GET request. There are three custom parameters available, as well as a specified parameter that can tell us what triggered the alarm. The latter - called `active` - can have the following 10 values:

* Alarm Area 1 triggered: `&active=1`
* Alarm Area 2 triggered: `&active=2`
* Alarm Area 3 triggered: `&active=3`
* Alarm Area 4 triggered: `&active=4`
* Alarm-In / PIR triggered: `&active=5`
* Audio Alarm triggered: `&active=6`
* Motion Detection & Alarm-In / PIR triggered (Area 1): `&active=7`
* Motion Detection & Alarm-In / PIR triggered (Area 2): `&active=8`
* Motion Detection & Alarm-In / PIR triggered (Area 3): `&active=9`
* Motion Detection & Alarm-In / PIR triggered (Area 4): `&active=10`

We now want to build a Webhook in Node-RED - as a placeholder for one of the many smarthome solutions available - and have this Webhook trigger different actions depending on the __URL Query__ it receives.

## Setup

* First camera: IP=192.168.2.116, port=80, name=frontdoor, login=admin/instar
* Second camera: IP=192.168.2.117, port=80, name=garden, login=admin/instar
* Node-RED Server: IP=192.168.2.48, port=1880

### What we want

If our first camera is triggered, we want it to contact the Node-RED Webhook and identify itself as __frontdoor__. If the alarm was triggered by an [audio detection](https://wiki.instar.com/Web_User_Interface/1080p_Series/Alarm/Actions/) (`&active=6`), we want the camera to move to stored _position 3_ - which is where we estimate that the noise will be coming from. If the alarm was triggered by the internal motion detection (so the query will __not__ be `&active=6`), we want to contact our second camera __garden__ and have it swing around (_position 2_) to check out the area where the motion was detected.
And vice versa - if our second camera triggers an alert it should contact the Node-RED Webhook, identify as __garden__, and in case of an audio alarm move to _position 3_. If the alarm trigger was not the audio detection, the __frontdoor__ camera should be contacted instead and move to _position 2_.

To reset our setup after an alarm trigger, we can set _position 1_ to be our camera's [Park Position](https://wiki.instar.com/Web_User_Interface/1080p_Series/Features/PTZ/). Or we actively send a command to our camera to go back to the idle position - with a delay of our choosing.

### Node-RED Flow

![Alarmserver Queries in Node-RED](./INSTAR_Alarmserver_Queries_in_Node-RED_01.png)

In this flow we set up a Webhook under the URL `/as-webhook/` - so that we can contact it under the IP/Port of our Node-RED server + URL. In this case this is: `http://192.168.2.48:1880/as-webhook/`:

![Alarmserver Queries in Node-RED](./INSTAR_Alarmserver_Queries_in_Node-RED_02.png)

When we receive a GET request via this URL, we have to parse the URL queries that have been sent with it. First we check for a query called `camera` and see if it has the value `frontdoor` or `garden`, to identify the camera that contacted us. And then we have to check the `active` parameter, to see if the alarm was triggered by the audio alarm - `&active=6` - or not. If yes, we have to send the CGI command to start a video recording and move to __position 3__ to the triggering camera. Otherwise, we send the CGI command to move to __position 2__ and start recording to the other camera. And after a set time - I will choose 15s - we have to send the command to stop the recording and to move back to position 1.

For our first camera those commands are as follows:

__ALARM ACTION__

* `http://192.168.2.116/param.cgi?cmd=manualrec&-act=on&-time=60&cmd=preset&-act=goto&-number=2&-usr=admin&-pwd=<PASSWORD>` // if `&active=6`
* `http://192.168.2.117/param.cgi?cmd=manualrec&-act=on&-time=60&cmd=preset&-act=goto&-number=1&-usr=admin&-pwd=<PASSWORD>` // if __not__ `&active=6`

__RESET__

* `http://192.168.2.116/param.cgi?cmd=manualrec&-act=off&cmd=preset&-act=goto&-number=0&-usr=admin&-pwd=<PASSWORD>` // if `&active=6`
* `http://192.168.2.117/param.cgi?cmd=manualrec&-act=off&cmd=preset&-act=goto&-number=0&-usr=admin&-pwd=<PASSWORD>` // if __not__ `&active=6`

### Alarmserver Configuration

The Alarmserver inside our camera has to be configured as follows:

![Alarmserver Queries in Node-RED](./INSTAR_Alarmserver_Queries_in_Node-RED_03.png)

For the other camera we just have to set the camera name to `camera = frontdoor`. All done! Now every time the alarm on our camera is triggered it will contact our Node-RED Webhook and send both its name and the alarm trigger.

### Testing

You can use the __Inject Node__ to test that Node-RED is set up correctly.
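If you prefer the command line over the flow editor, the same request can also be fired with curl from any machine on the network - a quick sketch using the server address and the query parameters from this setup:

```bash
# simulate the 'frontdoor' camera reporting an audio alarm (active=6)
curl "http://192.168.2.48:1880/as-webhook/?camera=frontdoor&active=6"

# simulate a motion detection in area 1 reported by the 'garden' camera
curl "http://192.168.2.48:1880/as-webhook/?camera=garden&active=1"
```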
By clicking on the node, you will send a GET request to your Webhook and transmit the following parameter as query: ![Alarmserver Queries in Node-RED](./INSTAR_Alarmserver_Queries_in_Node-RED_04.png)<file_sep># Gatsby Starter: Minimal Blog Dark themed version of the [gatsby-starter-minimal-blog](https://github.com/LekoArts/gatsby-starter-minimal-blog) by [LekoArts](https://github.com/LekoArts/): ![Screenshot Preview](./screenshot.png) ## Features - Articles in MDX ([gatsby-mdx](https://github.com/ChristopherBiscardi/gatsby-mdx)) - Code highlighting (with [prism-react-renderer](https://github.com/FormidableLabs/prism-react-renderer)) and live preview (with [react-live](https://github.com/FormidableLabs/react-live)) - Styled Components 💅 - Categories - Offline Support - WebApp Manifest Support - SEO - Sitemap - Schema.org JSONLD - OpenGraph Tags - Twitter Tags ## Getting Started Check your development environment! You'll need [Node.js](https://nodejs.org/en/), the [Gatsby CLI](https://www.gatsbyjs.org/docs/) and [node-gyp](https://github.com/nodejs/node-gyp#installation) installed. The official Gatsby website also lists two articles regarding this topic: - [Gatsby on Windows](https://www.gatsbyjs.org/docs/gatsby-on-windows/) - [Check your development environment](https://www.gatsbyjs.org/tutorial/part-zero/) To copy and install this starter run this command (with "project-name" being the name of your folder you wish to install it in).<file_sep>--- date: "2018-11-28" title: "Home Assistant and MQTT" categories: - IoT - Smarthome --- ![Harbin, China](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Installation of Home Assistant on a Debian Stretch Server](#installation-of-home-assistant-on-a-debian-stretch-server) - [Installation of dependencies:](#installation-of-dependencies) - [Create a user and group](#create-a-user-and-group) - [Install Home Assistant](#install-home-assistant) - [Run Home Assistant as a Service](#run-home-assistant-as-a-service) - [Configuring Home Assistant](#configuring-home-assistant) - [Adding the MQTT Broker](#adding-the-mqtt-broker) - [The Home Assistant Lovelace UI](#the-home-assistant-lovelace-ui) - [Adding your camera's Live Video](#adding-your-cameras-live-video) - [Using Home Assistant as Alarmserver for your INSTAR Camera](#using-home-assistant-as-alarmserver-for-your-instar-camera) <!-- /TOC --> <div class="primarybox"> <h3>INSTAR MQTT Broker</h3> <p>The MQTT Interface enables you to access all camera functions through the native MQTT support from the Home Assistant Lovelace UI!</p> </div> ## Installation of Home Assistant on a Debian Stretch Server ### Installation of dependencies: ```bash sudo apt update sudo apt install python3-dev python3-pip python3-venv sudo pip3 install --upgrade virtualenv ``` --- ![Home Assistant with your INSTAR IP Camera](./HomeAssistant_01.png) --- ### Create a user and group Give the user access to serial lines (zwave, insteon, etc): ```bash sudo adduser --system homeassistant sudo addgroup homeassistant sudo adduser homeassistant dialout ``` --- ![Home Assistant with your INSTAR IP Camera](./HomeAssistant_02.png) --- 1. Create a directory to install HA in and set its ownership and permissions. ```bash sudo mkdir /opt/homeassistant sudo chown homeassistant:homeassistant /opt/homeassistant ``` --- ![Home Assistant with your INSTAR IP Camera](./HomeAssistant_03.png) --- 2. Change to the homeassistant user to do the installs. ```bash sudo su -s /bin/bash homeassistant ``` ### Install Home Assistant 1. 
Install a virtual env to sandbox the Home Assistant software and dependencies, and activate it so further installs are done inside it.

```bash
cd /opt/homeassistant
python3 -m venv /opt/homeassistant
source bin/activate
```

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_04.png)

---

2. Install HA from pip. You might get some pip install errors that do not cause an issue later on:

```bash
pip3 install --upgrade homeassistant
```

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_05.png)

---

3. Open another terminal, login with your default (sudo enabled) user and open the Home Assistant port `8123` in your Firewall:

```bash
sudo firewall-cmd --permanent --zone=public --add-port=8123/tcp
sudo firewall-cmd --reload
sudo firewall-cmd --list-all
```

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_06.png)

---

Then switch back to the homeassistant user.

4. To keep all the config and log files in the same directory (rather than having them in /home/homeassistant) we can run Home Assistant with the following command:

```bash
mkdir config
./bin/hass -c /opt/homeassistant/config --log-file /opt/homeassistant/hass.log
```

Home Assistant should install a few things and create a default config file (let it run for a little while - it takes a bit on the first start up). Hit ctrl-c to stop it. The config directory now contains a bunch of sample config files for you to edit.

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_07.png)

---

5. You can now access Home Assistant's web interface by opening the following URL (exchange the IP address __192.168.2.111__ with the IP of your Linux Server) inside your webbrowser `http://192.168.2.111:8123/`:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_08.png)

---

6. Type in your preferred login credentials and click to create the account:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_09.png)

---

7. And login to the Home Assistant web interface using your credentials:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_10.png)

---

8. Hit ctrl-c to stop Home Assistant.

### Run Home Assistant as a Service

1. Assuming it works, exit the homeassistant user, change to the `/etc/systemd/system/` directory and create a service to start Home Assistant automatically.

```bash
exit
cd /etc/systemd/system/
sudo nano homeassistant.service
```

2. Create the following `systemd init` file:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_11.png)

---

```
[Unit]
Description=Home Assistant
After=network.target mosquitto.service

[Service]
Type=simple
User=homeassistant
ExecStart=/opt/homeassistant/bin/hass -c /opt/homeassistant/config --log-file /opt/homeassistant/hass.log

[Install]
WantedBy=multi-user.target
```

<br/><br/>

3. Now update `systemd` to pick up the service file and run the service:

```bash
sudo systemctl --system daemon-reload
sudo systemctl enable homeassistant
sudo systemctl start homeassistant
```

4. If something goes wrong with the start command, check the logs:

```bash
sudo systemctl status homeassistant
```

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_12.png)

---

Finally, to make it easier to edit config files and try out code changes, give your regular user write permissions in the `homeassistant` directory.
For this we need to assign both our regular user - in my case this is `nodeadmin` - and the homeassistant user to the `homeassistant` group.

```bash
sudo groupadd homeassistant
sudo usermod -a -G homeassistant nodeadmin
sudo usermod -a -G homeassistant homeassistant
```

Make sure that both users are inside the created group:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_13.png)

---

```bash
sudo chgrp -R homeassistant /opt/homeassistant
sudo chmod -R g+w /opt/homeassistant/
```

## Configuring Home Assistant

We set up Home Assistant to be installed in `/opt/homeassistant`. The configuration file can be found in the `config` subfolder. Let's open up the file to take a look at it:

```
sudo nano /opt/homeassistant/config/configuration.yaml
```

### Adding the MQTT Broker

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_14.png)

---

The first thing we should do is add our Mosquitto server (192.168.2.111) as MQTT broker. We can do this by adding the following lines at the end of the document:

```
# MQTT Communication
mqtt:
  broker: 192.168.2.111
  port: 1883
  user: debian
  password: <PASSWORD>
```

<br/><br/>

To verify that we did not mess up the configuration we can go to the __Configuration Tab__ in Home Assistant:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_15.png)

---

Open the __General__ section:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_16.png)

---

Click on __Check Config__ and - __when it passes__ - click on __Restart Server__:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_17.png)

---

## The Home Assistant Lovelace UI

Now we can start adding components to the Home Assistant UI - called __Lovelace__. Note that the INSTAR MQTT Broker is only included in _INSTAR Full HD cameras_. If you are using a HD (720p) or VGA camera you can still use MQTT to connect your camera.

### Adding your camera's Live Video

Home Assistant comes with a __Camera Module__ that we can use to add our camera's Live Video. Let's start by accessing the `configuration.yaml`. When you followed the steps above you will find this file in `/opt/homeassistant/config/configuration.yaml`. Open the configuration file and add the following line:

```yaml
camera: !include cameras.yaml
```

Now create the `cameras.yaml` file next to the configuration file and add the following lines:

```yaml
- platform: generic
  name: 'IN-8015FHD'
  still_image_url: http://192.168.2.165/tmpfs/snap.jpg
  stream_source: 'rtsp://192.168.2.165:554/11'
  verify_ssl: false
  authentication: 'basic'
  username: 'admin'
  password: '<PASSWORD>'
```

In case you need to restrict the bandwidth this live stream requires, use `auto.jpg` or `auto2.jpg` instead of `snap.jpg` in the __still_image_url__ above. Also the RTSP __stream_source__ can use a smaller live stream by replacing the `/11` with a `/12` or `/13`.

Alternatively, you can access your camera's MJPEG stream with the following configuration:

```yaml
- platform: mjpeg
  name: 'IN-9010FHD'
  mjpeg_url: http://192.168.2.117/mjpegstream.cgi?-chn=11
  verify_ssl: false
  authentication: 'basic'
  username: 'admin'
  password: '<PASSWORD>'
```

And again, to reduce the bandwidth requirements of this live stream use `chn=12` or `chn=13` instead of `chn=11`. This will add a local INSTAR HD or Full HD camera with the IP address `192.168.2.165` on the default HTTP Port `80` and with the default RTSP Port `554`.
The camera login is `admin/instar` in this example - but note: you do not need to use the Administrator account to access the live video.

Now reload Home Assistant to see the changes you just made. Once the UI is back up, click on __Configure UI__ in the top right:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_18.png)

---

You might want to add a new tab for your camera by clicking on the small __+__ icon next to your existing tabs. Then click on the big __+ Button__ to add a new card.

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_19.png)

---

Here we have to add a new Entity and select the camera that we just added - in the case above this was `camera.in_8015fhd`:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_20.png)

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_21.png)

---

Save your settings and quit the UI configuration mode. You should see a small tile with a snapshot from your camera. Click on it to be directed to the live video:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_22.png)

---

### Using Home Assistant as Alarmserver for your INSTAR Camera

All INSTAR Full HD cameras have a function to send a POST request to a HTTP Webhook when an alarm is triggered (HD cameras currently only support GET requests, which are not supported by Home Assistant - though you will be able to use Node-RED to receive a GET request from your HD camera and trigger a POST request to Home Assistant). When such a POST request is received by Home Assistant, the event can be used as a trigger for your automation to control your INSTAR Full HD camera.

To set up a Webhook that can be used by your camera, first open the Lovelace UI, go to __Configuration__ and open the __Automation__ tab:

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_23.png)

---

Click on the __+__ icon to add a new automation and select __Webhook__ as the automation trigger - the name you choose here for the Webhook will be used to name the resulting URL, e.g. `mqtt_test` (better don't use spaces or special characters):

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_24.png)

---

Now we need to define an action that we want to trigger. For this we are going to use the __MQTT Publish__ service (`script.send_mqtt_command`) we created earlier. This service requires a MQTT topic (_Target_) and a MQTT message payload (_Message_) to be passed down:

```json
{
  "message": "{\"val\":\"1\"}",
  "target": "instar/000389888811/features/ptz/preset"
}
```

---

![Home Assistant with your INSTAR IP Camera](./HomeAssistant_25.png)

---

Here we choose the `features/ptz/preset` MQTT Topic and send the payload value `1`, which will tell the camera with the MAC address `000389888811` to move to __Preset Position 2__ (numbering of those positions starts at `0`).
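You can verify this topic/payload pair independently of Home Assistant - for example with the Mosquitto command line client against the broker configured above (the user `debian` is taken from this setup; replace the password placeholder with your own):

```bash
# publish the preset command directly to the INSTAR MQTT topic
mosquitto_pub -h 192.168.2.111 -p 1883 -u debian -P '<PASSWORD>' \
  -t "instar/000389888811/features/ptz/preset" -m '{"val":"1"}'
```

If the camera starts moving, the broker, topic and payload are correct, and any remaining problem lies inside the automation itself.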
When you hit __Save__ this automation will be saved inside the `automations.yaml` file inside the Home Assistant directory: ```yaml - id: '1571301476710' alias: TEST MQTT Webhook trigger: - platform: webhook webhook_id: mqtt_test condition: [] action: - data: message: '{"val":"1"}' target: instar/000389888811/features/ptz/preset service: script.send_mqtt_command ``` You could test triggering the above automation by sending a __POST HTTP Request__ to ```json http://your-home-assistant:8123/api/webhook/mqtt_test ``` --- ![Home Assistant with your INSTAR IP Camera](./HomeAssistant_26.png) --- Sending the command `curl -d "" http://192.168.2.43:8123/api/webhook/mqtt_test` triggers the automation and sends the camera into the second preset position. Now that we know that our automation is working and the Webhook is set up, we can configure our camera to contact Home Assistant when an Alarm is triggered by the camera internal detection systems. Please open the Alarmserver configuration inside your camera's web user interface and add the Home Assistant Webhook to the following form. You have to activate the POST Request and add a dummy request parameter - in the screenshot below we added `homeassistant=alarm`. But this parameter can be chosen freely: --- ![Home Assistant with your INSTAR IP Camera](./HomeAssistant_27.png) ![Home Assistant with your INSTAR IP Camera](./HomeAssistant_28.png) --- Now, every time an alert is triggered our camera will contact it's Alarmserver - the Home Assistant installation - and start the automation.<file_sep>--- date: "1980-01-02" title: "Frogfoot Checklists" categories: - dcs --- ![SU-25T Frogfoot](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Su-25T Key Map](#su-25t-key-map) - [Numbers](#numbers) - [Instruments](#instruments) - [Fuel](#fuel) - [Warning Lamps](#warning-lamps) - [Hydraulics](#hydraulics) - [Threat Types](#threat-types) - [RTB](#rtb) - [Airport/Airfield/Aerodrome IDs](#airportairfieldaerodrome-ids) - [HARM Ground Radar Signatures](#harm-ground-radar-signatures) - [List of Radar Designations](#list-of-radar-designations) - [Radar/IR Threat (range/height)](#radarir-threat-rangeheight) - [Enemy radar threats overview (range/height)](#enemy-radar-threats-overview-rangeheight) - [Enemy IR threats overview (range/height)](#enemy-ir-threats-overview-rangeheight) - [Su-25T Pylons](#su-25t-pylons) - [Air-To-Air (IR)](#air-to-air-ir) - [Bombs (Guided/Free fall)](#bombs-guidedfree-fall) - [Missiles (Guided)](#missiles-guided) - [Pods/Fuel Tanks](#podsfuel-tanks) - [Rockets (S - Unguided)](#rockets-s---unguided) <!-- /TOC --> ## Su-25T Key Map | Action | Standard assignment | | -- | -- | | Engines Start | RShift + Home | | Engines Stop | RShift + End | | Electric Power | RShift + L | | Canopy | LCtrl + C | | Wheel Brake On/Off | W | | Gear Retraction/extension | G | | Flaps | F | | Landing Flaps Extend | LShift + F | | Landing Flaps Raise | LCtrl + F | | Nosewheel Steering | LAlt + Q | | Air Brake | B | | Communication Menu | \ | | HUD Navigation Modes | 1 | | | __a__ Base Navigation | | | __b__ Enroute | | | __c__ Return | | | __d__ Landing | | HUD A2A Mode | 6 | | HUD A2G Mode | 7 | | HUD Gunsight | 8 | | Electro-Optical System | O | | Lazing | Ctrl+O | | Drop Tanks / Weapons | LCtrl + W | | Trim Wing Left Down | RCtrl + , | | Trim Wing Right Down | RCtrl + / | | Trim Nose Up | RCtrl + . 
| Trim Nose Down | RCtrl + ; |
| Trim Rudder Left | RCtrl + Z |
| Trim Rudder Right | RCtrl + X |
| Parachute | P |
| Active Pause | LShift+LWin+Pause |

![SU-25T Frogfoot](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.png)

## Numbers

| Airframe | Value |
| -- | -- |
| Rotate Speed | 240 km/h |
| Rotate Angle | 10 deg (max. 12 deg) |
| Flaps Up | 400 km/h |
| Cruise Speed | 500 km/h |
| Landing Speed | 300 km/h |
| Landing Speed (Heavy) | 350 km/h |
| Parachute | 280 km/h |

## Instruments

![SU-25T Instruments](./Frogfoot_Instruments_01.jpg.png)

### Fuel

If a lamp is lit, it means that the particular tank is empty.

![SU-25T Instruments](./Frogfoot_Instruments_02.jpg.png)

The meanings of the lamps:

* __I__ - outer two external tanks;
* __II__ - inner two external tanks;
* __КР__ - wing tanks;
* __3/4__ - third and fourth tanks;
* __Ц__ - central fuselage tank;
* __0.5__ - emergency fuel quantity - equal to or less than 500 kilograms.

### Warning Lamps

I've noticed only 4 warning lamps work in the Su-25T:

![SU-25T Instruments](./Frogfoot_Instruments_03.jpg)

1. `Смотри табло` - General warning lamp; indicates stall speed, pull up or low air-speed, and lights along with most any other warning lamp.
2. `Пожар` - Engine failure/fire (_currently it can be any engine_)

![SU-25T Instruments](./Frogfoot_Instruments_04.jpg)

* Engine 1 powers hydraulic system 1
* Engine 2 powers hydraulic system 2

3. `гидро1` - hydraulics failure 1 (Hydro 1 controls the nosewheel steering unit, the initial chambers of the aileron boosters, the airbrakes, the slats, the flaps and the elevators; _rudders are not controlled by the hydraulic system_)
4. `гидро2` - hydraulics failure 2 (Hydro 2 controls the undercarriage extension and retraction, mainwheel braking, the yaw damper, and the second chambers of the aileron boosters)

* Looking at the engine temperatures can also indicate a fire if the temperature is in the red zone; when an engine is on fire you should turn it off or it will cause the fuel tanks to explode.

### Hydraulics

(_The same systems as explained above, with additions for the brakes and some tips_)

![SU-25T Instruments](./Frogfoot_Instruments_05.jpg)

1. Nosewheel braking
2. Mainwheel brake
3. Hydro 1 controls the nosewheel steering unit, the initial chambers of the aileron boosters, the airbrakes, the slats, the flaps and the elevators (_rudders are not controlled by the hydraulic system_)
4. Hydro 2 controls the undercarriage extension and retraction, mainwheel braking, the yaw damper, and the second chambers of the aileron boosters

* If a hydraulic leak occurs and the fluid level risks falling below 50%, it is best to lower the gear early, otherwise it will not extend fully
* Airbrakes should also be closed quickly if a leak occurs, or they will stay open once the fluid has leaked out completely
* Flaps are not as important for landing as the gear, and they also rely on the hydraulic fluid - the gear has a higher priority than the flaps
* The hydraulic functions do not work properly below 50% fluid

### Threat Types

![SU-25T Instruments](./Frogfoot_Instruments_06.png)

* __П__ = airborne radar
* __3__ = long range radar
* __X__ = medium range radar
* __H__ = short range radar
* __F__ = early warning radar
* __C__ = AWACS

### RTB

1. Hit `1` on your keyboard until you are in __RTN__ mode.
2. Align your __HSI__.
3. Center your __ADI__.
4. Keep your Heading Indicator inside the __Circle__ in your __HUD__.
![SU-25T Instruments](./Frogfoot_Instruments_09.png) ## Airport/Airfield/Aerodrome IDs These IDs are used with the navigation system built into the HUD of the Su-25T, when in navigation mode (pressing __1__ for Route mode (`MPW` or `ENR`), Airfield beacon mode (`B3B` or `RTN`), and Landing mode (`ПОС` or `LNDC`) , pressing __Ctrl+`__ will cycle through the waypoints/ID's of Airfields in __Return Navigation Mode__ and in __Landing Mode__. ![SU-25T Instruments](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_j.png) | ID | ICAO | Aerodrome Name | Alternate | | -- | -- | -- | -- | | 01 | URKA | Anapa | 03 | | 02 | URKL | Krasnodar Center | 08 | | 03 | URKN | Novorossiysk | 06 | | 04 | URKW | Krymsk | 03 | | 05 | URKH | Maykop-Khanskaya | 08 | | 06 | URKG | Gelendzhik | 03 | | 07 | URSS | Sochi-Adler | 10 | | 08 | URKK | Krasnodar-Pashkovsky | 02 | | 09 | UGSS | Sukhumi-Babushara | 10 | | 10 | UG23 | Gudata | 09 | | 11 | UGSB | Batumi | 13 | | 12 | UGKS | Senaki-Kolkhi | 14 | | 13 | UG5X | Kobuleti | 12 | | 14 | UGKO | Kutisi-Kopitnari | 12 | | 15 | URMM | Mineralnye Vody | 16 | | 16 | URMN | Nalchik | 17 | | 17 | XRMF | Mozdkok | 21 | | 18 | UGTB | Tiblisi-Lochini | 19 | | 19 | UG24 | Tiblisi-Soganlug | 18 | | 20 | UG27 | Vaziani | 18 | | 21 | URMO | Beslan | 17 | ## HARM Ground Radar Signatures When equipped with __Fantasmagoria__ pod , going into ground attack mode (__7__) then activate the pod with (__I__) and Radar targets will appear on your HUD. Some have designations to identify them with. ![SU-25T Instruments](./Frogfoot_Instruments_08.jpg) ### List of Radar Designations | SAM System | Radar Designation | HUD Designation | | -- | -- | -- | | Patriot | AN/MPQ-53 | P | | Hawk | AN/MPQ-50 | H50 | | Hawk | AN/MPQ-46 | H46 | | Roland | Roland | G | | Roland | Roland | R | | SA-10 S-300PS SR 64N6E | Big Bird | BB | | SA-10 S-300PS SR 5N66M | Clam Shell | CS | | SA-10 S-300PS TR 30N6 | Flap Lid | FL | | SA-11 Buk SR 9S18M1 | 9S18Mq | S11 | | SA-6 Kub STR 9S91 | 1S91 | SA6 | | SA-8 Osa 9A33 | 9A33 | SA8 | | SA-15 Tor 9A331 | 9A331 | S15 | | SA-19 Tunguska 2S6 | 2S6 | S19 | | SA-3 SR P-19 | Flat Face | FLF | | SA-3 TR SNR-125 | SNR-125 | SA3 | <br/><br/> | Ships | Radar Designation | HUD Designation | | -- | -- | -- | | USS <NAME> | <NAME> | SS | | CG Ticonderoga | SM2 | SM2 | | FFG <NAME> | SM2 | SM2 | | <NAME> | SA-N-9 Gauntlet | SN9 | | Neutrashimy | SA-N-9 Gauntlet | SN9 | | Moskva | SA-N-6 Grumble | SN6 | | Albatros | SA-N-4 | SA8 | | Rezky | SA-N-4 | SA8 | ### Radar/IR Threat (range/height) ![SU-25T Frogfoot](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_g.png) #### Enemy radar threats overview (range/height) * 2km/1.5km M163 Vulcan * 2.5km/2km ZSU-23-4 Shilka * 3km/2.5km Gepard * 6km/3km Roland * 7km/6.5km OSA/Gecko * 8km/5km Tunguska/Grison * 12km/8km TOR/Gauntlet * 16km/10km MIM-23-Hawk * 16km/11km Kub/Gainful * 25km/10km Newa/Goa * 32km, 25km S-11 BOK/Gadfly * 45km/30km Volkhlov/Grumble * 90km/35km Patriot #### Enemy IR threats overview (range/height) * 3km/2.5km M6 Linebacker * 3km/2.5km Stinger (Manpad) * 3km/2.5km M1097 Avenger PMS * 4.2km/3.5km Strela-1/Gaskin * 4.5km/3km 9k38 IGLA/Grouse (Manpad) * 5km/3.5km Strela-10/Gopher * 8.5km/3km M48 Chaparral ## Su-25T Pylons The modeling of lateral and longitudinal center of mass is taken into effect. This can change depending on fuel load and weapon loads. The asymmetrical loading of weapon and fuel pylons, which influence the characteristics of lateral control (depending on flight speed, regular overload, etc), is also modeled. 
![SU-25T Instruments](./Frogfoot_Instruments_12.png) ### Air-To-Air (IR) | Weapon | Type | Guidance | Range (km) | Warhead (kg) | Weight (kg) | Speed | Intended Targets | | -- | -- | -- | -- | -- | -- | -- | -- | | R-60M | Short-range, infrared, air-to-air missile | infared | 5 | 6 | 65 | Mach 2 | Aircraft (Pylon 11,1) | | R-73 | Medium-range, infrared, air-to-air missile | infared and Inertial | 15 | 7.4 | 110 | Mach 2.5 | Aircraft (Pylon 10,2) | ### Bombs (Guided/Free fall) _Range of bombs depends on the height and horizontal speed they were dropped at_ | Weapon | Type | Guidance | Range (km) | Warhead (kg) | Weight (kg) | Intended Targets | | -- | -- | -- | -- | -- | -- | -- | | BetAB-500 | Penetration bomb with re-tarded system | Free fall | - | 75.8 | 478 | Concrete piercing, bunkers, concrete runways. (_Pylon 2,3,4,5,7,8,9,10_) | | BetAB-500ShP | Penetration bomb with rocket / parachute system | Parachute / Rocket | - | 77 | 380 | Concrete piercing, bunkers, concrete runways (_Pylon 2,3,4,5,7,8,9,10_) | | FAB-100 | General purpose bomb | Free fall | - | 44.9 | 99.8 | Wide variety of targets, including artillery, trucks, bunkers, surface-to-air missile sites, antiaircraft artillery sites, ammunition dumps, railway engines, light surface vessels and supply points. (_Pylon 2,3,4,5,7,8,9,10_) | | FAB-250 | General purpose bomb | Free fall | - | 120 | 249 | Military-industrial sites, railway junctions, field facilities, personnel, light armoured vehicles and trucks (_Pylon 2,3,4,5,7,8,9,10_) | | FAB-500 M62 | General purpose bomb | Free fall | - | 240 | 506 | Defence industry facilities, light armoured vehicles, railway junctions, fortifications and personnel. (_Pylon 2,3,4,5,7,8,9,10_) | | KAB-500kr | TV-guided bomb | TV-command | 17 | 195 | 560 | Wide variety of targets, including artillery, trucks, bunkers, Scuds, surface-to-air missile sites, antiaircraft artillery sites, early warning radars, and supply points. (__fire and forget weapon__) (_Pylon 5,7_) | | KMGU-2 96 AO 2.5RT | Dispenser | Free fall cluster disp. | 50-150m | 96x2.5kg | 525 | 96 x 2.5kg Frag bomblets - Each bomblet has a kill radius of 30m against material, 20m against unsheltered personnel and 10m against entrenched personnel. (_Pylon 2,3,4,5,7,8,9,10_) | | KMGU-2 96 PTAB 2.5KO | Dispenser | Free fall cluster disp. | 50-150m | 96x2.5kg | 525 | 96 x 2.5kg Armor piercing - Anti-tank and anti-personnel capability. (_Pylon 2,3,4,5,7,8,9,10_) | | MBD-2-67U - 4 FAB-100 | Bomb rack of 4 FAB-100's | Free fall | - | 4 x 44.9 | 4 x 99.8 | Wide variety of targets, including artillery, trucks, bunkers, surface-to-air missile sites, antiaircraft artillery sites, ammunition dumps, railway engines, light surface vessels and supply points. (_Pylon 2,3,4,5,7,8,9,10_) | | RBK-250 PTAB-2.5M | Multipurpose cluster bomb | Cluster free fall | 2.5 per 30 bmblt. | 94 | 275 | 30 x 2.5kg General purpose, armoured targets (_Pylon 2,3,4,5,7,8,9,10_) | | RBK-500 PTAB-10.5 | Anti-personnel/anti-material cluster bomb | Cluster free fall | 10 per 30 bmblt. | - | 504 | 30 x 10kg General purpose, armoured targets(_Pylon 2,3,4,5,7,8,9,10_) | | RBK-500U PTAB-1M | Anti-personnel/anti-material cluster bomb | Cluster free fall | 1 (per) 352 bmblt. | - | 504 | 352 x 1kg General purpose, armoured targets (_Pylon 2,3,4,5,7,8,9,10_) | | SAB-100 | Illumination bomb | Free fall / Parachute | 1-3km altitude | - | - | Dispenses 8 LUU-2B/B illumination flares. Each flare burn during 2 minutes, lock it onto a target and fly over it to deploy. 
(_Pylon 2,3,4,5,7,8,9,10_) | ### Missiles (Guided) | Weapon | Type | Guidance | Range(km) | Warhead (kg) | Weight (kg) | Speed | Intended Targets | | -- | -- | -- | -- | -- | -- | -- | -- | | 9A4172 Vikhr | Medium-range, anti-tank, laser-guided | Semi-Active Laser | 10 | 12 | 45 | Mach 1.8 | Armored vehicles, slow air targets (_Pylon 4,8_) | | Kh-29L | Medium-range, laser-guided, air-to-surface missile | Semi-Active Laser | 10 | 317 | 657 | Mach 2.5 | Fortifications, strong points, bridges, command and control centers, artillery and missile emplacements, boats. (_Pylon 5,7_) | | Kh-29T | Medium-range, TV-guided, air-to-surface missile | TV-command | 12 | 317 | 670 | Mach 2.5 | Fortifications, strong points, bridges, command and control centers, artillery and missile emplacements, boats. (__fire and forget weapon__) (_Pylon 5,7_) | | Kh-58 | Medium-range, anti-radar, air-to-surface missile | Inertial and Passive Radar | 70 | 320 | 640 | Mach 4 | SAM Radars - NEEDS L-081 __Fantasmagoria__ Pod (_Pylon 5,7_) | | S-25L | 340 mm laser guided aircraft rocket | Semi-Active Laser | 3 | 190 | 480 | 2520 km/h | Soft targets, concrete targets (_Pylon 2,3,4,5,7,8,9,10_) | | Kh-25 ML | Medium-range, laser-guided, air-to-surface missile | Semi-Active Laser | 11 | 90 | 300 | Mach 0.72 | Fortifications, strong points, bridges, command and control centers, artillery and missile emplacements. (_Pylon 3,4,5,9,8,7_) | | Kh-25 MP | Medium-range, anti-radar, air-to-surface missile | Semi-Active Laser | 25 | 90 | 320 | Mach 0.76 | SAM Radars - NEEDS L-081 __Fantasmagoria__ Pod Press (I) to activate/deactivate it in air-to-ground mode(__fire and forget weapon__) (7) (_Pylon 3,4,5,9,8,7_) | ![SU-25T Instruments](./photo-kt443t6d_64hdh43hfh6dgjdfhgjhgv.png) ## Pods/Fuel Tanks | Name | Type | Location | Purpose | | -- | -- | -- | -- | | MPS-410 | ECM Pod | Wingtip | Jams enemy radar homing missiles coming at you. Gives position away, (Activated with (SHIFT+E), but as of 1.2.12 ECM pods do not have an indicator inside the cabin, remember when you turn them on/off!) (_Pylon 11,1_) | | Mercury LLTV | Targeting | Centerline | Targeting pod with better low light camera in it than standard Shkval (_Pylon 6_) | | L-081 Fantasmagoria | ELINT Pod | Centerline | Passive electronics intelligence pod that receives and locates enemy radars - needed for anti-radar missiles (_Pylon 6_) | | Fuel tank 800L Wing | Fuel Tank | Stations 3, 5, 7, and 9 | Total added weight per FULL tank is 760 KG | Total added fuel weight per FULL tank is 620 KG. Means that when empty there is still 140KG per tank of useless weight on your aircraft, not to mention the additional drag. DROP WHEN EMPTY!!! 
(`Lalt` + `R`) (_Pylon 5,7,3,9_) | | SPPU - 22-2 | Gun pod | Stations 5, 4, 7, and 8 | Firing rate, rounds/minute: 2800-3000 | Muzzle velocity, m/s: 690-890 Range, km: 3 Ammunition storage, rounds: 260 Deflection method: manual or laser lock (_Pylon 5,4,7,8_) | | Smoke Generator -Color (_Blue,Green,Orange,Red,White,Yellow_) | Smoke | Stations 5, 4, 7, and 8 | Look like R-73 missiles, activated with (T) (_Pylon 1,11_) | ## Rockets (S - Unguided) ![SU-25T Instruments](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_7.png) | Weapon | Type | Guidance | Range (km) | Warhead (kg) | Weight (kg) | Speed (km/h) | Intended Targets | | -- | -- | -- | -- | -- | -- | -- | -- | | S-13 OF | 122 mm unguided aircraft rocket | Unguided | 2.5 | 32.5 | 68 (pod 505) | 2700 | 5 x APAM Fragmentation Rocket , Designed for strikes against fortified and hardened objects (pillboxes, shelters, airport aprons and runways) (_Pylon 2,3,4,5,7,8,9,10_) | | S-8 KOM | 80 mm unguided aircraft rocket | Unguided | 1.3-4 | 3,6 | 11.3 (pod 386) | 2196 | 20 x AT Fragmentation Rocket ,(400mm penetration) light anti-armor. This rocket is intended to engage modern tanks, lightly armored and soft-skinned combat materiel. Owing to the fragmentation effect, the rocket also inflicts damage on manpower. (_Pylon 2,3,4,5,7,8,9,10_) | | S-8 OFP2 | 80 mm unguided aircraft rocket | Unguided | 6-7 | 9.2 | 16.7 (pod 402) | - | 20 x HE Fragmentation Rocket , This rocket is designed to engage personnel, soft and lightly armored targets. (_Pylon 2,3,4,5,7,8,9,10_) | | S-8 TsM | 80 mm unguided aircraft rocket | Unguided | 1.3-3 | 3.6 | 11.1 (pod 382) | - | 20 x Smoke, This rocket is intended to mark ground targets, routes of movement and landing areas in daytime. (_Pylon 2,3,4,5,7,8,9,10_) | | S-24 B | 240 mm unguided aircraft rocket | Unguided | 2 | 123 | 235 | 2520 | 1 x Blast Fragmentation - frag warhead for large or area soft targets (_Pylon 2,3,4,5,7,8,9,10_) | | S-25 OFM | 340 mm unguided aircraft rocket | Unguided | 3 | 190 | 480 | 2520 | 1 x Ultra heavy FFAR (folding-fin-aerial-rocket) HE ,soft targets, concrete targets (_Pylon 2,3,4,5,7,8,9,10_) | | S-5 KO | 57mm unguided aircraft rocket | Unguided | 3 | 1,05 | 3.86 (pod 264) | 2422.8 | 32 x HEAT Fragmentation Rocket frag warhead soft /armour targets. (_Pylon 2,3,4,5,7,8,9,10_) |<file_sep>import paramiko import datetime import os.path import time import sys import re # Check username/password file user_file = input("\n# enter user login file path - e.g. ./userlogin.env : ") # Verifying file exists if os.path.isfile(user_file) == True: print("\n* login file accepted\n") else: print("\n* file {} does not exist \n* exiting program \n".format(user_file)) sys.exit() # Check commands file cmd_file = input("\n# enter commands file path - e.g. 
./commands.env : ") # Verifying file exists if os.path.isfile(cmd_file) == True: print("\n* command file accepted\n") else: print("\n* file {} does not exist \n* exiting program \n".format(cmd_file)) sys.exit() # Open SSHv2 connection to the server def ssh_connection(ip): global user_file global cmd_file try: # Get SSH user login selected_user_file = open(user_file, 'r') # Read from beginning selected_user_file.seek(0) # Get username from file username = selected_user_file.readlines()[0].split(',')[0].rstrip("\n") #Read from beginning selected_user_file.seek(0) # Get password from file password = selected_user_file.readlines()[0].split(',')[1].rstrip("\n") #Login session = paramiko.SSHClient() # This allows auto-accepting unknown host keys # Default would be RejectPolicy session.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # Connect to the device using username and password session.connect(ip.rstrip("\n"), username = username, password = <PASSWORD>) # Start an interactive shell session connection = session.invoke_shell() # Open command file for reading selected_cmd_file = open(cmd_file, 'r') # Read from beginning selected_cmd_file.seek(0) # Writing each line in the file to the shell for each_line in selected_cmd_file.readlines(): connection.send(each_line + '\n') time.sleep(2) # Close the user file selected_user_file.close() # Close the command file selected_cmd_file.close() # Checking command output for syntax errors server_response = connection.recv(65535) if re.search(b"% Invalid input", server_response): print("* There was at least one syntax error in the given command {}".format(ip)) else: print("\nServer {} Response:\n".format(ip)) # Test for reading command output print(str(server_response) + "\n") # Searching for the CPU utilization value within the output of "show processes top once" cpu = re.search(b"%Cpu\(s\):(\s)+(.+?)(\s)* us,", server_response) # Extracting the second group, which matches the actual value of the CPU utilization and decoding to the UTF-8 format from the binary data type utilization = cpu.group(2).decode("utf-8") # Printing the CPU utilization value to the screen # print(utilization) # Opening the CPU utilization text file and appending the results with open("D:\\App3\\cpu.txt", "a") as f: #f.write("{},{}\n".format(str(datetime.datetime.now()), utilization)) f.write(utilization + "\n") #Closing the connection session.close() except paramiko.AuthenticationException: print("\n* invalid username or password \n* exiting program \n")<file_sep>import React from 'react' import { Link } from 'gatsby' import styled from 'styled-components' import cv from '../assets/cv.png' import github from '../assets/github.png' import linkedin from '../assets/linkedin.png' import twitter from '../assets/twitter.png' import flickr from '../assets/flickr.png' const CategoriesButton = styled.button` background: ${props => props.theme.colors.primary}; border: none; display: inline-flex; align-items: center; border-radius: ${props => (props.big ? '.2rem' : '1rem')}; font-size: ${props => (props.big ? '1.2rem' : '1rem')}; color: ${props => props.theme.colors.white}; padding: ${props => (props.big ? 
'0.6rem 1.76rem' : '0.35rem 1.65rem')}; margin-right: 25px; transition: all ${props => props.theme.transitions.normal}; box-shadow: 0 4px 6px rgba(50, 50, 93, 0.11), 0 1px 3px rgba(0, 0, 0, 0.08); &:hover { background: ${props => props.theme.colors.primaryLight}; cursor: pointer; transform: translateY(-2px); } &:focus { outline: none; } ` const CvButton = styled.button` background-image: url(${cv}); background-color: ${props => props.theme.colors.primary}; width: 45px; height: 45px; margin-right: 7px; background-size: contain; transition: all ${props => props.theme.transitions.normal}; border-radius: .2rem; border: none; padding: -2px; color: ${props => props.theme.colors.white}; &:hover { background-color: ${props => props.theme.colors.primaryLight}; cursor: pointer; transform: translateY(-2px); } &:focus { outline: none; } ` const GithubButton = styled.button` background-image: url(${github}); background-color: ${props => props.theme.colors.primary}; width: 45px; height: 45px; margin-right: 7px; background-size: contain; transition: all ${props => props.theme.transitions.normal}; border-radius: .2rem; border: none; padding: -2px; color: ${props => props.theme.colors.white}; &:hover { background-color: ${props => props.theme.colors.primaryLight}; cursor: pointer; transform: translateY(-2px); } &:focus { outline: none; } ` const LinkedinButton = styled.button` background-image: url(${linkedin}); background-color: ${props => props.theme.colors.primary}; width: 45px; height: 45px; margin-right: 7px; background-size: contain; transition: all ${props => props.theme.transitions.normal}; border-radius: .2rem; border: none; padding: -2px; color: ${props => props.theme.colors.white}; &:hover { background-color: ${props => props.theme.colors.primaryLight}; cursor: pointer; transform: translateY(-2px); } &:focus { outline: none; } ` const TwitterButton = styled.button` background: #1a8f6e; background-image: url(${twitter}); background-color: ${props => props.theme.colors.primary}; width: 45px; height: 45px; margin-right: 7px; background-size: contain; transition: all ${props => props.theme.transitions.normal}; border-radius: .2rem; border: none; padding: -2px; color: ${props => props.theme.colors.white}; &:hover { background-color: ${props => props.theme.colors.primaryLight}; cursor: pointer; transform: translateY(-2px); } &:focus { outline: none; } ` const FlickrButton = styled.button` background: #1a8f6e; background-image: url(${flickr}); background-color: ${props => props.theme.colors.primary}; width: 45px; height: 45px; margin-right: 7px; background-size: contain; transition: all ${props => props.theme.transitions.normal}; border-radius: .2rem; border: none; padding: -2px; color: ${props => props.theme.colors.white}; &:hover { background-color: ${props => props.theme.colors.primaryLight}; cursor: pointer; transform: translateY(-2px); } &:focus { outline: none; } ` const Wrapper = styled.section` display: inline-flex; align-items: right; `; const SocialButtons = () => ( <Wrapper> <Link to="/categories/"> <CategoriesButton big> Categories </CategoriesButton> </Link> <Link to="/curriculum-vitae/"> <CvButton/> </Link> <a href="https://github.com/mpolinowski" target="_blank" rel="noopener noreferrer"> <GithubButton/> </a> <a href="https://www.linkedin.com/in/mike-polinowski-6396ba121/" target="_blank" rel="noopener noreferrer"> <LinkedinButton/> </a> <a href="https://www.flickr.com/people/149680084@N06/" target="_blank" rel="noopener noreferrer"> <FlickrButton/> </a> <a 
href="https://twitter.com/MikePolinowski" target="_blank" rel="noopener noreferrer"> <TwitterButton/> </a> </Wrapper> ) export default SocialButtons <file_sep>--- date: "2019-02-15" title: "IFTTT IP Camera" categories: - IoT - Smarthome --- ![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Basic Setup](#basic-setup) - [IFTTT](#ifttt) <!-- /TOC --> ## Basic Setup _If This Then That_, also known as [IFTTT](https://ifttt.com/) is a free web-based service to create chains of simple conditional statements, called applets. Build your own applets to connect web services with your INSTAR IP Camera. ## IFTTT 1. First you need to [create an account](https://ifttt.com/join) on IFTTT and [login to your account](https://ifttt.com/login?wp_=1). Then go to the __Create__ and click on __IF + This__: --- ![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_01.png) --- 2. We now want to set up the [Date & Time](https://ifttt.com/services/maker_webhooks) service to send scheduled commands to our camera. Search for _date_ and select the __Date & Time__: --- ![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_02.png) --- 3. We want to automate a daily camera function, for this we have to choose __Every day at__ as a trigger for our applet. --- ![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_03.png) --- 4. For example we set the trigger to _every day at 6am_: --- ![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_04.png) --- 5. Now we can add an action that is triggered by our schedule: --- ![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_05.png) --- 6. We now want to add a [webhook service](https://ifttt.com/services/maker_webhooks) - which is basically a service that contacts a web address when triggered. That address will be the INSTAR DDNS address of our INSTAR Full HD camera and carry a [CGI Command](/1080p_Series_CGI_List/) to activate/deactivate functions on our camera. (__Note__ that the camera has to be available via the internet for this to work - this means you will have to set up a [port forwarding rule inside your router](/Internet_Access/Port_Forwarding/)). Search for _web_ and select the __Webhooks Service__: --- ![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_06.png) --- 7. We now need to add our cameras DDNS address, e.g. `myaddress.ddns3-instar.de` with the `https://` prefix followed by the [HTTPS Port](/Web_User_Interface/1080p_Series/Network/IP_Configuration/) ([that has to be forwarded inside your router](/Internet_Access/Port_Forwarding/)), e.g. `8081`. --- ![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_07.png) --- We now want that our camera moves to the Preset Position 1 when our applet is triggered. The [CGI Command](/1080p_Series_CGI_List/) to do this is `/param.cgi?cmd=preset&-act=goto&-number=0` followed by your camera login `&-usr=admin&-pwd=<PASSWORD>`. ``` https://myaddress.ddns3-instar.de:8081/param.cgi?cmd=preset&-act=goto&-number=0&-usr=admin&-pwd=<PASSWORD> ``` Save your settings and you are done. 
---

![IFTTT for INSTAR Alarmserver](./IFTTT_for_INSTAR_Alarmserver_08.png)

---<file_sep>---
date: "2017-08-15"
title: "Gatsby Blog Starter"
categories:
  - Javascript
  - React
  - Gatsby
---

![Harbin, China](./photo-34475132951_e3ff74c679_o.png)

<!-- TOC -->

- [01 gatsby-source-filesystem](#01-gatsby-source-filesystem)
- [02 gatsby-transformer-remark](#02-gatsby-transformer-remark)
- [03 Creating a first Blog Post](#03-creating-a-first-blog-post)
- [04 Creating an Index Page](#04-creating-an-index-page)
- [05 Filters & Sorting With GraphQL](#05-filters--sorting-with-graphql)
- [06 Static Serve](#06-static-serve)

<!-- /TOC -->

For an overview of the project structure please refer to the [Gatsby documentation - Building with Components](https://www.gatsbyjs.org/docs/building-with-components/)

Install this starter (assuming Gatsby is installed) by running from your CLI:

```bash
gatsby new gatsby-blog
```

This is the second mini-project in which I am going to use Gatsby - check out the [first one](https://github.com/mpolinowski/gatsby-wiki) for a more detailed description of all the basic steps of setting up Gatsby.

## 01 gatsby-source-filesystem

First we install a plugin that allows us to pull in data. There are a couple of [Source Plugins](https://www.gatsbyjs.org/docs/plugins/) available to get your data from MongoDB or a CMS source like Wordpress. But we only want to grab Markdown files from our local file system -> hence we are going to install [gatsby-source-filesystem](https://www.gatsbyjs.org/packages/gatsby-source-filesystem/), using npm:

```bash
npm install --save gatsby-source-filesystem
```

The plugin then has to be added to our Gatsby build by adding it to /gatsby-config.js. It is added as an object, carrying some configuration - in this case the path to the directory that will store our Markdown files:

```js
plugins: [
  `gatsby-plugin-react-helmet`,
  {
    resolve: `gatsby-source-filesystem`,
    options: {
      name: `pages`,
      path: `${__dirname}/src/pages/`,
    },
  },
],
```

## 02 gatsby-transformer-remark

Since we want to use Markdown as a data source, we need a transformer plugin that turns the Markdown into something that GraphQL can query against. This is the [gatsby-transformer-remark](https://www.gatsbyjs.org/packages/gatsby-transformer-remark/) plugin, which can also be installed with npm:

```bash
npm install --save gatsby-transformer-remark
```

This plugin comes without any additional configuration, and can simply be added to gatsby-config.js:

```js
plugins: [
  `gatsby-transformer-remark`,
]
```

Now we can start the Gatsby development environment with:

```bash
gatsby develop
```

The app will start on http://localhost:8000

![Gatsby Blog Starter](./gatsby_01.png)

## 03 Creating a first Blog Post

Now we want to prepare our first blog post. We can add a folder to the pages directory named with today's date ~ as this might make sense for a blog (the naming convention is up to you) -> */src/pages/2017-10-05-first-post*. Now add a new file to this folder, named *index.md*.
The Markdown file needs some information at the top of the file - called [FrontMatter](https://jekyllrb.com/docs/frontmatter/):

```yaml
---
path: "/first-post"
title: "First Blog Post"
date: "2017-10-05"
---
```

This [FrontMatter](https://jekyllrb.com/docs/frontmatter/) will be used to query against with GraphQL and can carry a couple of different pieces of information - see also [my previous repo](https://github.com/mpolinowski/gatsby-wiki#11-working-with-markdown). But we only use it to add a title to our blogpost and assign a URL under which we want to be able to access the file.

Now just add some Markdown to the file and save it:

```
## Hello from Markdown!

---

### This is a first blog Post

How exciting Web Development can be, amazing!
```

The markdown represents the data that is going to be displayed. But now we need to create a style template that is used with this data. Let's start by adding a new folder inside /src called *templates*. Now add a file to it called **post.js** that will contain the structure template for every post entry. The file contains the JSX markup for our post:

```js
import React from 'react';
import Helmet from 'react-helmet';

export default function Template({data}) {
  const {markdownRemark: post} = data;
  // const post = data.markdownRemark;
  return (
    <div>
      <h1>{post.frontmatter.title}</h1>
      <div dangerouslySetInnerHTML={{__html: post.html}} />
    </div>
  )
}
```

The \<Template /\> component receives {data} props that are retrieved by a GraphQL query:

```js
export const postQuery = graphql`
  query BlogPostByPath($path: String!) {
    markdownRemark(frontmatter: { path: { eq: $path} }) {
      html
      frontmatter {
        path
        title
      }
    }
  }
`
```

The query looks for a markdown post where the called URL equals the $path given inside its frontmatter. So if the URL that you type into your browser was */first-post*, a markdown file with a *path: '/first-post'* inside its frontmatter would be a hit. The query then uses the markdownRemark plugin to transform the post markdown to HTML and make both the path and title from its frontmatter available inside {data}, which is passed down into the \<Template /\> component and then rendered.

Gatsby is already configured to route all pages inside /src/pages as pages for our website. But now we have to register the posts that are built from the markdown files and the post.js template. To do this, we have to create a file named **gatsby-node.js** inside the root directory of our app. We are going to use the [createPages](https://www.gatsbyjs.org/docs/node-apis/#createPages) Gatsby API to create pages from our post template:

```js
const path = require('path');

exports.createPages = ({boundActionCreators, graphql}) => {
  const {createPage} = boundActionCreators;
  // const createPage = boundActionCreators.createPage;
  const postTemplate = path.resolve('src/templates/post.js');

  return graphql(`{
    allMarkdownRemark {
      edges {
        node {
          html
          id
          frontmatter {
            path
            title
          }
        }
      }
    }
  }`)
  .then(res => {
    if(res.errors) {
      return Promise.reject(res.errors);
    }

    res.data.allMarkdownRemark.edges.forEach(({node}) => {
      createPage({
        path: node.frontmatter.path,
        component: postTemplate
      })
    })
  })
}
```

Save and restart your app - then open **http://localhost:8000/first-post** inside your web browser:

![Gatsby Blog Starter](./gatsby_02.png)

## 04 Creating an Index Page

Now that we have a blog post (you can duplicate the first one a couple of times - changing the title, path and date), we will need an index page with a collection of links for all our posts.
We will use the same GraphQL query used above to create this list. We can add optional sorting, limiting parameters for the GraphQL query - see also [1](https://github.com/graphql/graphiql/issues/587), [2](https://gist.github.com/DSchau/86ee9288b05d236dada81148f66db8db): ```js allMarkdownRemark(sort: { fields: [frontmatter___title], order: ASC}) allMarkdownRemark(sort: { fields: [frontmatter___date], order: DESC }) allMarkdownRemark(limit: 1000) ``` Gatsby comes with the GraphiQL debugger that allows us to test queries before we add them to our page - the debugger is running under: **http://localhost:8000/___graphql** ![Gatsby Blog Starter](./gatsby_03.png) ```js { allMarkdownRemark(limit: 10, sort: {fields: [frontmatter___date], order: DESC}) { edges { node { frontmatter { path title date } } } } } ``` This query shows the latest 10 Markdown posts in descending order - let's add it to our index page */src/pages/index.js*. First we add the GraphQL query, below the \<IndexPage /\> component: ```js export const pageQuery = graphql` query IndexQuery { allMarkdownRemark(limit: 10 sort: {fields: [frontmatter___date], order: DESC} ) { edges { node { id frontmatter { path title date } } } } } ` ``` Then we inject the data from that query into the \<IndexPage /\> component and loop through it to generate our index: ```js const IndexPage = ({data}) => ( <div> <h1>Hi people</h1> <p>Welcome to your new Gatsby site.</p> <p>Now go build something great.</p> <br/><br/> <Link to="/page-2/">Go to page 2</Link> <br/><br/><br/><br/> <h2>Index</h2> <table> <thead> <tr> <th>Date</th> <th>Link</th> </tr> </thead> <tbody> {data.allMarkdownRemark.edges.map(post => ( <tr key={post.node.id}> <td> {post.node.frontmatter.date} </td> <td> <Link to={post.node.frontmatter.path}> {post.node.frontmatter.title} </Link> </td> </tr> ))} </tbody> </table> </div> ) ``` ![Gatsby Blog Starter](./gatsby_04.png) ## 05 Filters & Sorting With GraphQL We already sorted our results by date and name in the examples above. Now let's add another value to our frontmatter, to decide whether a post is a draft or should already be shown as published. Set the first post to *published: true* and the second one to *published: false* : ``` --- path: '/second-post' title: 'Second Blog Post' date: "2017-10-07" published: false --- ``` Now we just need to configure our GraphQL query to only show post where *published* is set to true: ```js export const pageQuery = graphql` query IndexQuery { allMarkdownRemark( limit: 10 sort: {fields: [frontmatter___date], order: DESC} filter: { frontmatter: { published: {eq: true} }} ) { edges { node { id frontmatter { path title date published } } } } } ` ``` ## 06 Static Serve To serve our website we can use the command: ```bash gatsby build ``` This will build a static version of our React app inside /public. Just upload it to a webserver or to GitHub Pages and you are good to go. **After** building the static page, you can also use the command: ```bash gatsby serve ``` That - unlike our development environment - fires up the webpack server in production mode - that means the files that are served, are the optimized files from the */public* directory! 
You can access this version on *http://localhost:9000*<file_sep>---
date: "2017-12-31"
title: "Securing Elasticsearch with X-Pack"
categories:
  - LINUX
  - Elasticsearch
---

![Battambang, Cambodia](./photo-11627765794_33a4cf2a0c_o.png)

<!-- TOC -->

- [Install X-Pack](#install-x-pack)
  - [Elasticsearch Security](#elasticsearch-security)
  - [Kibana Security](#kibana-security)
  - [Enabling Anonymous Access](#enabling-anonymous-access)

<!-- /TOC -->

In an earlier project we set up Elasticsearch to only be accessible via localhost to protect our data. This worked well, since we built our static website server-side in Node/Express/EJS and sent ready HTML to our client - already containing the rendered response from the database. But what if our app runs client-side?

The official solution is part of the Elastic Extension Pack, which contains a lot of functionality that you might not really need - and comes at a price that isn't quoted anywhere... That isn't very inviting ~ but let's take a look at it before turning to some free open source alternatives:

* [Elastic X-Pack](https://www.elastic.co/downloads/x-pack)
* [SearchGuard](https://github.com/floragunncom/search-guard)
* [ReadOnlyREST](https://github.com/sscarduzio/elasticsearch-readonlyrest-plugin)

## Install X-Pack

X-Pack is a single extension that integrates handy features — security, alerting, monitoring, reporting, graph exploration, and machine learning — you can trust across the Elastic Stack.

### Elasticsearch Security

We need to add user authentication to our Elasticsearch / Kibana setup. We will do this by installing X-Pack. To get started with installing the Elasticsearch plugin, go to _/etc/elasticsearch/_ and run the following command:

```
bin/elasticsearch-plugin install x-pack
```

Now restart Elasticsearch:

```
sudo systemctl stop elasticsearch.service
sudo systemctl start elasticsearch.service
```

You can either use the auto function to generate user passwords for Elasticsearch, Kibana (and the not yet installed Logstash):

```
bin/x-pack/setup-passwords auto
```

or swap the _auto_ flag for _interactive_ to choose your own user logins. The auto output will look something like this:

```
Changed password for user kibana
PASSWORD kibana = *&$*(80gfddzg

Changed password for user logstash_system
PASSWORD logstash_system = 58#$)<PASSWORD>

Changed password for user elastic
PASSWORD elastic = jgfisg)#*%&(@*#)
```

__Now every interaction with Elasticsearch or Kibana will require you to authenticate with _username: elastic_ and _password: jgfisg)#*%&(@*#)___

### Kibana Security

Now we repeat these steps with Kibana. First navigate to _/etc/kibana/_ and run the following command:

```
bin/kibana-plugin install x-pack
```

And we have to add the login that Kibana has to use to access Elasticsearch (auto-generated above) to the _kibana.yml_ file in _/etc/kibana/_:

```
elasticsearch.username: "kibana"
elasticsearch.password: "<PASSWORD>"
```

Now restart Kibana:

```
sudo systemctl stop kibana.service
sudo systemctl start kibana.service
```

Now navigate your browser to _http://localhost:5601/_ and login with the "elastic" user we generated above.

### Enabling Anonymous Access

Incoming requests are considered to be anonymous if no authentication token can be extracted from the incoming request. By default, anonymous requests are rejected and an authentication error is returned (status code 401).
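You can verify this behaviour from the command line - a quick sketch, assuming Elasticsearch listens on the default port 9200:

```bash
# without credentials the request is rejected with a security_exception (status 401)...
curl -XGET 'http://localhost:9200/_cluster/health?pretty'

# ...while the same request succeeds when authenticated as the elastic user
curl -u elastic -XGET 'http://localhost:9200/_cluster/health?pretty'
```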
To allow anonymous users to send search queries (Read access to specified indices), we need to add the following lines to the _elasticsearch.yml_ file in _/etc/elasticsearch/_:

```
xpack.security.authc:
  anonymous:
    username: anonymous_user
    roles: wiki_reader
    authz_exception: true
```

Now we have to switch to the Kibana webUI on _http://localhost:5601/_ and create the _role:_ *wiki_reader* to allow read access to the wiki indices. First switch to the __Management__ tab and click on user:

![Add a Elasticsearch User with Read Access](./kibana_01.png)

Then click on __Add a User__ and add a user with the __watcher_user__ role:

![Add a Elasticsearch User with Read Access](./kibana_02.png)

Switch back to the __Management__ tab and click on role:

![Add a Elasticsearch User with Read Access](./kibana_03.png)

Click on __Create Role__ and add the name **wiki_reader** that we chose for the role of the anonymous user inside the elasticsearch.yml file, assign the **monitor_watcher** privilege and choose the indices that you want the anonymous user to have __READ__ access to:

![Add a Elasticsearch User with Read Access](./kibana_04.png)

Your configuration will be active after restarting Elasticsearch. Now you can use webservices to read from your ES database. But only the __elastic__ user has the privilege to __WRITE__ and to work in Kibana.<file_sep>---
date: "2018-11-28"
title: "Node-RED on Android"
categories:
  - IoT
  - Node-RED
---

![Shenzhen, China](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

<!-- /TOC -->

[Termux](https://termux.com) now allows you to install [Node-RED](https://nodered.org) directly on your Android phone or tablet - turning it into a powerful smarthome gateway.

---

![INSTAR MQTT on Android](./Node-RED_on_Android_01.jpg)

---

Start by installing [Termux](https://termux.com) on your Android device and start the app. Make sure that everything is up-to-date by running `apt update && apt upgrade`.

---

![INSTAR MQTT on Android](./Node-RED_on_Android_02.jpg)

---

[Node-RED](https://nodered.org) runs in [Node.js](https://nodejs.org/), which we can now install on our Android device by running the following command: `apt install coreutils nano nodejs`.

---

![INSTAR MQTT on Android](./Node-RED_on_Android_03.jpg)

---

Now with [Node.js](https://nodejs.org/) in place we can install [Node-RED](https://nodered.org) with the following command: `npm install -g --unsafe-perm node-red`.

---

![INSTAR MQTT on Android](./Node-RED_on_Android_04.jpg)

---

You can start up [Node-RED](https://nodered.org) by typing `node-red`.

---

![INSTAR MQTT on Android](./Node-RED_on_Android_05.jpg)

---

Now start your default Android web browser and open `http://localhost:1880` to access the [Node-RED](https://nodered.org) interface.<file_sep>---
date: "2019-06-16"
title: "Migrating from vBulletin 5 to Discourse on CentOS 8"
categories:
  - LINUX
  - Docker
  - Discourse
---

![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Preparation](#preparation)
- [Get a SQL Dump](#get-a-sql-dump)
- [Prepare the Docker Container](#prepare-the-docker-container)
- [Install Dependencies](#install-dependencies)
- [Import the Database](#import-the-database)
  - [Update](#update)
- [Run the Import Script](#run-the-import-script)

<!-- /TOC -->

## Preparation

Set up your production environment by [following the installation guide](/installing-discourse-on-centos-8). Then go to the Admin section and configure a few settings.
1. Start with backing up your settings (if you already made changes to your installation):

![Discourse Migration on CentOS 8](./Discourse_Migrastion_01.png)

2. The guide mentions that you should change your slug settings - but does not say to what. The default ASCII version works fine for me ¯\\\_(ツ)_/¯ :

![Discourse Migration on CentOS 8](./Discourse_Migrastion_02.png)

3. Enable login_required (recommended, at least until the import is finished):

![Discourse Migration on CentOS 8](./Discourse_Migrastion_03.png)

4. If you enabled `download_remote_images_to_local` you should enable `disable_edit_notifications` to prevent your users from being bombarded with edit notifications.

![Discourse Migration on CentOS 8](./Discourse_Migrastion_04.png)

## Get a SQL Dump

Export the database dump from your __vBulletin__ server:

```bash
mysqldump -u [:user-name] -p [:name-of-database] > vb5_dump.sql
```

Copy the database to your Discourse server. Now we have to get the dumped file into our Discourse container. We can do this by using the `docker cp` command:

```bash
docker cp vb5_dump.sql mycontainerID:/vb5_dump.sql
```

You can find your Docker container ID with `docker ps -a`. In my case this ID is `8c0066364a7a` and the name of my SQL dump is `Forum_DE_2019-10-31.sql` inside the folder `/oldbackup` - the corresponding command looks like this:

```bash
docker cp /oldbackup/Forum_DE_2019-10-31.sql 8c0066364a7a:/Forum_DE_2019-10-31.sql
```

![Discourse Migration on CentOS 8](./Discourse_Migrastion_07.png)

The attachments of your forum can be exported from the SQL database onto the file system:

![Discourse Migration on CentOS 8](./Discourse_Migrastion_10.png)

We can also copy this folder to our Discourse host and then copy it into the Discourse container:

```bash
docker cp /oldbackup/anhang/. 8c0066364a7a:/vb5-attachments
```

![Discourse Migration on CentOS 8](./Discourse_Migrastion_11.png)

If your forum has custom avatars, do the same with them and store them in `./vb5-avatars` inside the container.

## Prepare the Docker Container

1. Start by entering the __Discourse__ container:

```bash
cd /opt/discourse
./launcher enter app
# Now you are inside the container.
```

2. Install the MariaDB server:

```bash
apt-get update && apt-get install libmariadb-dev mariadb-server-10.3
```

After the MariaDB installation finishes, check its status:

```bash
sudo service mysql status
```

If the MySQL service is not running/active:

```bash
sudo service mysql start
```

![Discourse Migration on CentOS 8](./Discourse_Migrastion_05.png)

## Install Dependencies

```bash
echo "gem 'mysql2', require: false" >> /var/www/discourse/Gemfile
echo "gem 'php_serialize', require: false" >> /var/www/discourse/Gemfile

cd /var/www/discourse
echo "discourse ALL = NOPASSWD: ALL" >> /etc/sudoers
su discourse -c 'bundle install --no-deployment --without test --without development --path vendor/bundle'
```

![Discourse Migration on CentOS 8](./Discourse_Migrastion_06.png)

## Import the Database

Now we need to create a database inside MariaDB and import the dumped SQL data we copied into the Discourse container:

```bash
mysql -uroot -p -e 'CREATE DATABASE vb5'
mysql -uroot -p vb5 < /Forum_DE_2019-10-31.sql
```

![Discourse Migration on CentOS 8](./Discourse_Migrastion_08.png)

__You will be asked for a password - just leave it__
We can verify that our database was created successfully with the following command: ```bash mysqlcheck -c vb5 -u root -p ``` When successful you should now see a list of all tables inside your database: ![Discourse Migration on CentOS 8](./Discourse_Migrastion_09.png) ### Update __Create missing tables that the later import script is requiring__: ``` mysql MariaDB [(none)]> use vb5; MariaDB [vb5]> CREATE TABLE customprofilepic (userid INT(11), customprofileid INT(11), thumbnail VARCHAR(255)); ``` __Make sure that the discourse user has access to the database before running the import script__ ```bash su discourse mysql -u root ERROR 1698 (28000): Access denied for user 'root'@'localhost' ``` ```bash mysql -u root -p *enter your password* UPDATE mysql.user SET plugin = 'mysql_native_password' WHERE User='root'; FLUSH PRIVILEGES; EXIT; ``` ![Discourse Migration on CentOS 8](./Discourse_Migrastion_12.png) * MySQL Native Password Basically, mysql_native_password is the traditional method to authenticate- it is not very secure (it uses just a hash of the password), but it is compatible with older drivers. If you are going to start a new mysql service, you probably want to use the new plugin from the start (and TLS). If you have special needs, you can use other method- you can even program one if you have certain special needs). You can chose a different method for each individual user- for example, your normal applications can use mysql_native_password or the new sha2 one, but you can make sure your admin accounts use a 2-factor authentication token, and unix_socket for a monitoring user gathering statistics on the mysql server. Those other authentication methods may or may not use the password field on the mysql.user table, like the native one does (they may store the password elswhere, or they may not even have a concept of a password!). ```sql ALTER USER 'root'@'localhost' IDENTIFIED WITH mysql_native_password BY '<password>'; ``` and ```sql ALTER USER 'root'@'localhost' IDENTIFIED BY '<password>'; ``` Are essentially the same, mysql_native_password is normally the default authentication method. With WITH you can decide which method to use. For example, if you use GRANT USAGE ON *.* TO root@localhost IDENTIFIED WITH socket_auth, you are setting that user to use unix socket authentication. MariaDB uses a slightly different syntax: VIA unix_socket. Running those command mainly results in an update of the mysql.user table. Note ALTER / GRANT works automatically on next user login, while UPDATEing directly the mysql.user table may require a FLUSH PRIVILEGES, and has some issues on certain scenarios (Galera, etc.). __Activate Public Registration__ If you have activated the __User Approvale__ mode during development it might be a good idea to deactivate it during the import, if you don't want to have to manually review each added user: ![Discourse Migration on CentOS 8](./Discourse_Migrastion_13.png) ## Run the Import Script Create your import script based on the [official Discourse vBulletin5 Script](https://github.com/discourse/discourse/blob/master/script/import_scripts/vbulletin5.rb): ```bash nano script/import_scripts/instarvb5.rb ``` ```ruby # frozen_string_literal: true require 'mysql2' require File.expand_path(File.dirname(__FILE__) + "/base.rb") require 'htmlentities' class ImportScripts::VBulletin < ImportScripts::Base BATCH_SIZE = 1000 DBPREFIX = "vb5." 
ROOT_NODE = 2 # CHANGE THESE BEFORE RUNNING THE IMPORTER DATABASE = "vb5" TIMEZONE = "Europe/Berlin" ATTACHMENT_DIR = '/vb5-attachments/' AVATAR_DIR = '/vb5-avatars/' def initialize super @old_username_to_new_usernames = {} @tz = TZInfo::Timezone.get(TIMEZONE) @htmlentities = HTMLEntities.new @client = Mysql2::Client.new( host: "localhost", username: "root", database: DATABASE, password: "" ) end ``` Run the importer and wait until the import is done. You can restart it if it slows down. ```bash cd /var/www/discourse su discourse -c 'bundle exec ruby script/import_scripts/instarvb5.rb' ``` You should run the script in a detachable terminal - as it might run for a loooong time. See [tmux](https://mpolinowski.github.io/working-with-tmux) for example. ```bash su discourse -c 'bundle exec ruby script/import_scripts/instarvb5.rb' Loading existing groups... Loading existing users... Loading existing categories... Loading existing posts... Loading existing topics... importing groups... 17 / 17 (100.0%) [2707941 items/min] importing users 20180 / 20180 (100.0%) [706239 items/min] importing top level categories... 5 / 5 (100.0%) [6177924 items/min] importing child categories... importing topics... importing posts... 5 / 5 (100.0%) [290527 items/min] importing attachments... 20182 / 5 (403640.0%) Closing topics... Postprocessing posts... 20182 / 20182 (100.0%) Updating topic status Updating bumped_at on topics Updating last posted at on users Updating last seen at on users Updating topic reply counts... 20181 / 20181 (100.0%) [19985 items/min] ] Updating first_post_created_at... Updating user post_count... Updating user topic_count... Updating topic users Updating post timings Updating featured topic users Updating featured topics in categories 9 / 9 (100.0%) [6433 items/min] ] Updating user topic reply counts 20181 / 20181 (100.0%) [21271 items/min] ] Resetting topic counters Done (00h 02min 06sec) ``` ## SQL Database debugging ```bash sudo service mysql start mysql -h hostname -u root -p mysqladmin -u root -h hostname.blah.org -p password '<PASSWORD>' mysqldump -u root -ppassword --opt >/tmp/alldatabases.sql mysqldump -u username -ppassword --databases databasename >/tmp/databasename.sql mysqldump -c -u username -ppassword databasename tablename > /tmp/databasename.tablename.sql mysql -u username -ppassword databasename < /tmp/databasename.sql ``` ```sql MariaDB [(none)]> create database [databasename]; MariaDB [(none)]> show databases; MariaDB [(none)]> use [db name]; MariaDB [(none)]> show tables; MariaDB [(none)]> describe [table name]; MariaDB [(none)]> drop database [database name]; MariaDB [(none)]> drop table [table name]; MariaDB [(none)]> SELECT * FROM [table name]; MariaDB [(none)]> show columns from [table name]; MariaDB [(none)]> SELECT * FROM [table name] WHERE [field name] = "whatever"; MariaDB [(none)]> SELECT * FROM [table name] WHERE name = "Alex" AND phone_number = '3444444'; MariaDB [(none)]> SELECT * FROM [table name] WHERE name != "Alex" AND phone_number = '3444444' order by phone_number; MariaDB [(none)]> SELECT * FROM [table name] WHERE name like "Alex%" AND phone_number = '3444444'; MariaDB [(none)]> SELECT * FROM [table name] WHERE rec RLIKE "^a$"; MariaDB [(none)]> SELECT [col1],[col2] FROM [table name] ORDER BY [col2] DESC; MariaDB [(none)]> SELECT COUNT(*) FROM [table name]; MariaDB [(none)]> SET PASSWORD FOR 'user'@'hostname' = PASSWORD('<PASSWORD>'); MariaDB [(none)]> DELETE from [table name] where [field name] = 'whatever'; MariaDB [(none)]> FLUSH PRIVILEGES; MariaDB 
[(none)]> alter table [table name] drop column [column name];
MariaDB [(none)]> alter table [table name] add column [new column name] varchar (20);
MariaDB [(none)]> alter table [table name] change [old column name] [new column name] varchar (50);
MariaDB [(none)]> alter table [table name] modify [column name] VARCHAR(3); -- resize a column
MariaDB [(none)]> LOAD DATA INFILE '/tmp/filename.csv' replace INTO TABLE [table name] FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' (field1,field2,field3);
```<file_sep>---
date: "2017-12-17"
title: "Gatsby.js with Bootstrap 4"
categories:
  - Javascript
  - React
  - Gatsby
  - Bootstrap
---

![Harbin, China](./photo-33795713253_5b71110d90_o.png)

# Gatsby.js & Bootstrap 4

[Github](https://github.com/mpolinowski/gatsby-reactstrap)

<!-- TOC -->

- [Gatsby.js & Bootstrap 4](#gatsbyjs--bootstrap-4)
  - [Install Gatsby's command line tool](#install-gatsbys-command-line-tool)
  - [Using the Gatsby CLI](#using-the-gatsby-cli)
  - [Install reactstrap](#install-reactstrap)
    - [Optional Dependencies](#optional-dependencies)
  - [Import the Components](#import-the-components)
  - [Testing your build](#testing-your-build)
  - [Setting up a Webserver](#setting-up-a-webserver)

<!-- /TOC -->

Let's try to use the static site generator for React - [Gatsby](https://www.gatsbyjs.org) - together with the Bootstrap 4 React components from [reactstrap](https://reactstrap.github.io). To get started, I want to reproduce one of the official examples from [getbootstrap.com](http://getbootstrap.com/docs/4.0/examples/album/).

![Gatsby-reactstrap](./_gatsby-reactstrap_01.png)

## Install Gatsby's command line tool

```
npm install --global gatsby-cli
```

## Using the Gatsby CLI

1. Create a new site. __gatsby new gatsby-reactstrap__
2. __cd gatsby-reactstrap__
3. __gatsby develop__ — Gatsby will start a hot-reloading development environment accessible at localhost:8000

## Install reactstrap

```
npm install [email protected] --save
npm install --save reactstrap@next
```

### Optional Dependencies

These libraries are not included in the main distribution file reactstrap.min.js and need to be manually included when using components that require transitions or popover effects (e.g. Tooltip, Modal, etc).

* [react-transition-group](https://www.npmjs.com/package/react-transition-group)
* [react-popper](https://www.npmjs.com/package/react-popper)

## Import the Components

Import Bootstrap CSS in the ./src/layouts/index.jsx file:

```js
// import 'bootstrap/dist/css/bootstrap.min.css';
```

__UPDATE__: The import statement above works fine during development, but the Bootstrap CSS will not be imported when you build your static site with _gatsby build_. You can copy the minified CSS into the _./src/layouts_ folder and change the import accordingly:

```js
import './bootstrap.min.css';
```

Import required reactstrap components within your custom component files, e.g.
_./src/components/ReactNavbar.jsx_: ```js import { Collapse, Navbar, NavbarToggler, NavbarBrand, Nav, NavItem, NavLink, Button } from "reactstrap"; ``` And add the react class according the [reactstrap documentation](https://reactstrap.github.io/components/navbar/): ```js export default class ReactNavbar extends React.Component { constructor(props) { super(props); this.toggleNavbar = this.toggleNavbar.bind(this); this.state = { collapsed: true }; } toggleNavbar() { this.setState({ collapsed: !this.state.collapsed }); } render() { return <div> <Navbar color="dark" light> <NavbarBrand to="/" className="mr-auto"> <img src="/static/instar-logo-s.png" /> </NavbarBrand> <NavbarToggler onClick={this.toggleNavbar} className="mr-2" /> <Collapse isOpen={!this.state.collapsed} navbar> <Nav navbar> <NavItem> <Link to="/"> <Button color="primary" block> Indoor Cameras </Button> </Link> </NavItem> <NavItem> <Link to="/page-2/"> <Button color="primary" block> Outdoor Cameras </Button> </Link> </NavItem> <NavItem> <NavLink href="https://github.com/mpolinowski/gatsby-reactstrap" target="_blank"> <Button color="danger" block> Github Repository </Button> </NavLink> </NavItem> </Nav> </Collapse> </Navbar> </div>; } } ``` This component can then be imported into any page or layout you want: ```js import ReactNavbar from '../components/ReactNavbar' [...] <ReactNavbar /> ``` ## Testing your build Stop the development process and type in the following command to build the static version of your React App: ``` gatsby build ``` To quickly check your build, you can use httpster: ``` npm install httpster -g ``` Then run your build on localhost:3000 - e.g. if you have your repository in _E:\gatsby-reactstrap_ - by typing: ``` httpster -p 3000 -d /e/gatsby-reactstrap/public ``` __For Windows User__: I noticed that httpster does not seem to like my [Hyper Terminal](https://hyper.is) - it runs fine in [Git Bash](http://gitforwindows.org). ## Setting up a Webserver I want to use [Express.js](https://expressjs.com) to serve the generated static files: ``` npm install express --save npm install compression --save ``` Add an index.js file to the root directory of your app _./index.js_ and copy in the following code to serve the _./public_ folder on _localhost:8888_: ```js const express = require("express"); const app = express(); const compression = require("compression"); // compress all responses app.use(compression()); app.use(express.static("public")); app.listen(8888, () => console.log("gatsby-reactstrap listening on http://localhost:8888")); ``` Now add a start script to your _./package.json_ file to allow you to start your server by typing __npm start__: ``` [...], "scripts": { "start": "node ./index.js" }, [...] 
```<file_sep>--- date: "2019-09-10" title: "Magento 2 Dev Environment with Docker Compose" categories: - Docker - Magento --- ![<NAME>, Cambodia](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Getting Started](#getting-started) - [Download Docker Magento2](#download-docker-magento2) - [Download Magento2 Source Code](#download-magento2-source-code) - [Spinning up your Containers](#spinning-up-your-containers) - [Problems](#problems) - [Reinstalling Docker Compose](#reinstalling-docker-compose) <!-- /TOC --> ## Getting Started ### Download Docker Magento2 Clone the following [repository](https://github.com/fballiano/docker-magento2): ```bash cd /opt sudo git clone https://github.com/fballiano/docker-magento2.git cd /docker-magento2 ``` Then start all containers with `docker-compose` - make sure that you have both [Docker](https://docs.docker.com/engine/install/ubuntu/) and [Docker Compose](https://docs.docker.com/compose/install/) installed on your system. ### Download Magento2 Source Code [Download Magento 2](https://magento.com/tech-resources/download) in any way you want (zip/tgz from website, composer, etc) and extract in the __magento2__ subdirectory of this project - __right next to the docker-compose.yml__! ## Spinning up your Containers ```bash sudo docker-compose up -d ``` ### Problems ```bash sudo docker-compose up -d ERROR: Can't find a suitable configuration file in this directory or any parent. Are you in the right directory? Supported filenames: docker-compose.yml, docker-compose.yaml ``` ### Reinstalling Docker Compose First, confirm the latest version available in their [releases page](https://github.com/docker/compose/releases). At the time of this writing, the most current stable version is `1.26.0`. The following command will download the 1.26.0 release and save the executable file at /usr/local/bin/docker-compose, which will make this software globally accessible as docker-compose: ``` sudo curl -L "https://github.com/docker/compose/releases/download/1.26.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose ``` Set the correct permissions so that the docker-compose command is executable: ```bash sudo chmod +x /usr/local/bin/docker-compose ``` Verify that the installation was successful: ```bash docker-compose --version docker-compose version 1.26.0, build d4451659 ``` Retry to `sudo docker-compose up -d` - it should work now. ```bash docker-compose up -d Creating network "docker-magento2_default" with the default driver Creating volume "docker-magento2_dbdata" with default driver Pulling db (mariadb:)... latest: Pulling from library/mariadb 23884877105a: Pull complete bc38caa0f5b9: Pull complete 2910811b6c42: Pull complete 36505266dcc6: Pull complete ... 
``` <file_sep>--- date: "2017-12-13" title: "Getting started with Python Part II" categories: - Python --- ![Port Vila, Vanuatu](./photo-34607491985_e91fa7d4bc_o.png) <!-- TOC depthFrom:2 depthTo:4 --> - [IF Elif Else](#if-elif-else) - [For For-Else](#for-for-else) - [Looping through Arrays](#looping-through-arrays) - [Looping through Strings](#looping-through-strings) - [Working with Ranges](#working-with-ranges) - [While While-Else](#while-while-else) - [Nested Statements](#nested-statements) - [If](#if) - [For](#for) - [While](#while) - [Controlling a Loop](#controlling-a-loop) - [Break](#break) - [Continue](#continue) - [Pass](#pass) - [Exceptions Handling](#exceptions-handling) - [Functions](#functions) - [Arguments](#arguments) - [Namespaces](#namespaces) - [Importing Modules](#importing-modules) - [Custom Module](#custom-module) - [Thirdparty Modules](#thirdparty-modules) - [Working with Files](#working-with-files) - [Reading](#reading) - [Writing](#writing) - [Closing a File](#closing-a-file) - [Regular Expressions](#regular-expressions) - [match()](#match) - [search()](#search) - [findall()](#findall) - [sub()](#sub) - [Object Oriented Programming](#object-oriented-programming) - [Class Inheritance](#class-inheritance) - [Comprehensions](#comprehensions) - [List Comprehensions](#list-comprehensions) - [Set Comprehensions](#set-comprehensions) - [Dictionary Comprehensions](#dictionary-comprehensions) - [Lambda Functions](#lambda-functions) - [Maps](#maps) - [Filter](#filter) - [Iterator](#iterator) - [Generator](#generator) - [Generator Expressions](#generator-expressions) - [Itertools](#itertools) - [Chain](#chain) - [Count](#count) <!-- /TOC --> <br/><br/><hr/> ## IF Elif Else ![Python](./python-basics_01.png) ![Python](./python-basics_02.png) ![Python](./python-basics_03.png) <br/><br/><hr/> ## For For-Else ### Looping through Arrays ![Python](./python-basics_04.png) ### Looping through Strings ![Python](./python-basics_05.png) ### Working with Ranges ![Python](./python-basics_06.png) We can use the `len()` function to assign an index to an array, loop through the elements and print out their by their assigned index: ![Python](./python-basics_07.png) A similar result can be achieved with the `enumerate()` function, that will give us both the index, as well as the value of the array element: ![Python](./python-basics_08.png) We can also add an __Else__ statement to the __For Loop__ that is triggered when the loop reaches the end of the list: ![Python](./python-basics_09.png) <br/><br/><hr/> ## While While-Else ![Python](./python-basics_10.png) ![Python](./python-basics_11.png) <br/><br/><hr/> ## Nested Statements ### If The first __If Statement__ checks if the string has the letter __a__ in it. If true, the second if statement is triggered and checks if the length of the string is greater than 6. Only if both statements are true, we will get a printout of the string itself and it's length: ![Python](./python-basics_12.png) This code can be shortened by using the __And Operator__ instead of the inner if statement: ![Python](./python-basics_13.png) ### For ![Python](./python-basics_14.png) ![Python](./python-basics_15.png) ### While In the following nested __While Loop__ we have __x__ being incremented with each loop until x reaches the value of 10. Each time the first while loop runs, the inner while loop is triggered to run until the value of __z__ reaches 10. 
This is repeated ten times until __x = 10__:

![Python](./python-basics_16.png)

<br/><br/><hr/>

## Controlling a Loop

### Break

![Python](./python-basics_17.png)

The loop runs until the if statement becomes __True__ and the break command is executed.

![Python](./python-basics_18.png)

In the second example we have a for loop that tries to multiply every element of _list1_ with every element in _list2_. But the __Break__ command stops the execution when the last element of _list2_ is reached:

```
5 x 1 = 5
5 x 2 => break
6 x 1 = 6
6 x 2 => break
7 x 1 = 7
7 x 2 => break
8 x 1 = 8
8 x 2 => break
```

### Continue

![Python](./python-basics_19.png)

The __Continue__ command is triggered when the second element of _list2_ is reached in the multiplication. But, unlike the __Break__ command, it does not stop the execution of the following multiplication:

```
4 x 1 = 4
4 x 2 => break
4 x 3 = 12
5 x 1 = 5
5 x 2 => break
5 x 3 = 15
6 x 1 = 6
6 x 2 => break
6 x 3 = 18
```

### Pass

![Python](./python-basics_20.png)

The pass statement is just a placeholder that does nothing on execution - for debugging reasons or during the early concept phase of development.

<br/><br/><hr/>

## Exceptions Handling

![Python](./python-basics_21.png)

The function is trying to divide all members of the range by Zero, which would cause an __Exception Error__ in our code. But because we wrapped our function in a __try/except__ block, the error triggers the code below the _except command_ instead of just crashing the program. If the first function is successful, the exception code is not triggered.

To catch all possible errors, just don't specify the error you are looking for - above we only caught `ZeroDivisionError`s. It is recommended, though, to add all expected errors to the block instead, to be able to differentiate between the possible root causes (catching all errors just gives you a generic error message):

![Python](./python-basics_22.png)

For debugging purposes you can also add an __Else Clause__ at the bottom to get feedback when a function was executed successfully:

![Python](./python-basics_23.png)

The code behind a __Finally Clause__ - unlike with __Else__ - is executed whether the function inside the __Try Block__ _was successfully executed or not_.

![Python](./python-basics_24.1.png)
![Python](./python-basics_24.2.png)

<br/><br/><hr/>

## Functions

Functions are reusable code blocks that follow this general syntax:

![Python](./python-basics_25.png)

A function can be given an argument:

![Python](./python-basics_26.png)

When creating a variable inside a function, it needs to be returned to be able to process it further:

![Python](./python-basics_27.png)

### Arguments

In the example above, the position of each argument is mapped according to the parameter definition - so the __first argument__ is assigned to __x__ and the __second__ one becomes the value of __y__. This is called a __Positional Argument__.
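A minimal, copy-and-paste version of the same idea - a sketch built around a hypothetical `subtract()` function:

```python
def subtract(x, y):
    return x - y

# positional arguments: 10 is assigned to x, 4 to y - the order matters
print(subtract(10, 4))   # 6
print(subtract(4, 10))   # -6
```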
The function can also be used with __Keyword Arguments__, where the arguments are assigned explicitly: ![Python](./python-basics_28.png) Positional and Keyword arguments can be mixed - but the Keywords always have to follow the Positionals: ![Python](./python-basics_29.png) You can also define a value by a Parameter default: ![Python](./python-basics_30.png) This parameter is used as the default value - note that you can overwrite it by adding a different value as an argument when calling the function: ![Python](./python-basics_31.png) When working with an unknown amount of arguments, you can add an empty tuple as a parameter to your function definition: ![Python](./python-basics_32.png) For __Keyword Arguments__ the expression __**kwargs__ is used: ![Python](./python-basics_33.png) <br/><br/><hr/> ## Namespaces Namespaces allow you to define multiple version of the same function or class, separated by different namespaces they are in. Python comes with 3 types of namespaces: 1. The __built-in__ namespace for built-in functions like `len()`, `max()`, etc. 2. The __global__ namespace contains all functions you define or import into your program. 3. The __local__ namespace is used inside the context of a particular function. ![Python](./python-basics_34.png) <br/><br/><hr/> ## Importing Modules Python comes with a variety of modules that group sets of built-in functions. There are also plenty 3rd party modules / libraries available that can be downloaded and installed. They are brought into your program using the __import statement__: ![Python](./python-basics_35.png) ### Custom Module You can create a custom module by defining them in Python file, e.g. `my_module.py` : ```python my_var = 224 def my_function(): print("The function was called") ``` And importing it with the __import statement__ into the global namespace of your program: ```python import my_module print(my_module.my_var) my_module.my_function() ``` To keep your code light, you can also just import a single element from a module: ```python from my_module import my_function my_function() ``` ### Thirdparty Modules Thirdparty modules can be installed with the [pip package manager](https://packaging.python.org/tutorials/installing-packages/#use-pip-for-installing). For example if we want to install the open source software library for high performance numerical computation [Tensorflow](https://pypi.org/project/tensorflow/), but we want to use the newest [alpha version](https://www.tensorflow.org/alpha) of it: ```bash python -m pip install --upgrade pip pip install tensorflow==2.0.0-alpha0 ``` ![Python](./python-basics_36.png) __Note__: if you get the following error - you have Python 32bit installed. Use [64bit version](https://www.python.org/downloads/windows/) instead: ``` Could not find a version that satisfies the requirement tensorflow==2.0.0-alpha0 (from versions: ) No matching distribution found for tensorflow==2.0.0-alpha0 ``` <br/><br/><hr/> ## Working with Files ### Reading Python comes with the `open()` function that enables you to read the content from files and work with them inside your programs. You can test it by creating a text file, e.g. `test.txt`: ``` Hydrogen Helium Lithium Boron Beryllium Carbon Nitrogen Oxygen Fluorine Neon Sodium Magnesium Aluminium Silicone Phosphorus Sulfur Chlorine Argon ``` And put it next to your Python file that contains your program. 
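As a runnable preview of what the following sections show in screenshots, reading this file takes only a few lines - a minimal sketch, assuming `test.txt` sits next to your script:

```python
# open the file in read mode, print each element, then close the file again
my_file = open("test.txt", "r")
for line in my_file.readlines():
    print(line.rstrip("\n"))
my_file.close()
```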
You can now use the `open()` method to make the file content accessible to your program, choosing one of the following modes:

* r = reading
* w = writing
* a = appending
* b = binary
* x = exclusive creation (fails if the file already exists, otherwise identical to the _w-mode_)
* r+, w+, a+ = all allow both writing to a file as well as reading it

![Python](./python-basics_37.png)

To iterate over the complete list, we can use the `readlines()` method. This way we can use the complete content of the file in the form of a __List__ inside our function:

![Python](./python-basics_38.png)

### Writing

As we learnt before, we can use both the `w` or `x` mode to create a new file (make sure that you have administrative access to your computer to be allowed to create the file):

```python
my_file = open("test_data.txt", "x")
my_file.write("Scandium\nTitanium\nVanadium\nChromium\nManganese\nIron\nCobalt\nNickel\nCopper\nZinc\nYttrium\nZirconium\nNiobium\nMolybdenum\nTechnetium\nRuthenium\nRhodium\nPalladium\nSilver\nCadmium")
my_file.close()
```

![Python](./python-basics_39.png)

Check the text file to verify that it now contains the following data:

```
Scandium
Titanium
Vanadium
Chromium
Manganese
Iron
Cobalt
Nickel
Copper
Zinc
Yttrium
Zirconium
Niobium
Molybdenum
Technetium
Ruthenium
Rhodium
Palladium
Silver
Cadmium
```

> If you use the `open()` function again on this file, this will overwrite the old content - that is why the `x` mode exists, to prevent accidental data loss. If you need to modify an existing file - without overwriting the existing data - you have to use the `a` mode to append your new input:

__Appending Data__

```python
my_file = open("test_data.txt", "a")
my_file.write("\nHafnium\nTantalum\nTungsten\nRhenium\nOsmium\nIridium\nPlatinum\nGold\nMercury\nRutherfordium\nDubnium\nSeaborgium\nBohrium\nHassium\nMeitnerium\nDarmstadium\nRoentgenium\nCopernecium")
my_file.close()
```

### Closing a File

As we have seen, we can use `close()` to close a file and save our edits to it. To check the state of a file, we can use `my_file.closed` and will receive either a __True__ or __False__. To do a single read or write operation and close the file automatically afterwards, we can use the following method:

![Python](./python-basics_40.png)

<br/><br/><hr/>

## Regular Expressions

To work with regular expressions you have to import the `re` module into your program. This module offers a couple of methods that we can use:

### match()

The `match()` method __compares the beginning of the string__ to your query and returns the match object if a match was found:

![Python](./python-basics_41.png)

### search()

The `search()` method checks the entire string for a match. We are going to use a regular expression this time:

```python
import re

arp = "10.112.78.1      0     ca:fe:ca:fe:b7:ce  VLAN#345     L"
a = re.search(r"(.+?) +(\d) +(.+?)\s{2,}(\w)*", arp)
a
```

* `r` = we are going to analyze the RAW string
* First group: `(.+?)`
  * `.` represents any character, except a new-line character
  * `+?` means that the previous expression repeats one or more times - the space character after the group `(.+?) SPACE ` represents the stop signal for the expression. The added `?` stops our match before the first space `10.112.78.1` . If you remove the `?` the match will include all spaces until a new character is found `10.112.78.1 SPACE `.
![Python](./python-basics_42.png)

* Second group: `SPACE +(\d)`
  * `SPACE +` this will flexibly match any number of space characters between the IP address from our ARP request and the following number.
  * `\d` represents a single decimal digit between 0-9

![Python](./python-basics_43.png)

* Third Group `SPACE +(.+?)\s{2,}`
  * `SPACE +` matches all spaces between the `0` and the following MAC address
  * `(.+?)\s{2,}` the first part `.+?` again matches any character. The `\s{2}` represents 2 repetitions of a white space character (SPACE, TAB or New-Line) - so that this block does not only include the MAC address, but also the following VLAN number that is separated by only 1 white space character. By adding a comma `\s{2,}` we match 2 or more white spaces and `\s{1}` would only take 1 whitespace - separating the MAC address from the VLAN number in the results, as the 1 whitespace between those two would already satisfy the match.

![Python](./python-basics_44.png)

* Fourth Group `(\w)*`
  * `\w` matches any character __a-z__, __A-Z__, __0-9__ and the __\___ underscore
  * `*` represents 0 or more repetitions of the character before it (`+` matches 1 or more repetitions)

![Python](./python-basics_45.png)

To output all matches we can use the `group()` method without any argument. Or use the `groups()` method to return the result in the form of a tuple:

![Python](./python-basics_46.png)

### findall()

Given the same reply to an ARP request, we are now going to use the `findall()` method to find the IP address inside the string. This method conveniently returns its matches in the form of a list:

```python
import re

arp = "10.112.78.223      0     ca:fe:ca:fe:b7:ce  VLAN#345     L"
a = re.findall(r"\d\d\.\d{2,}\.[0-9][0-9]\.[0-9]{1,3}", arp)
a
```

* `\d\d` matches 2 single digits
* `\.` now we want to find a `.` character - since this character stands for any character in regular expression syntax, we need to escape it with a backslash.
* `\d{2}` is just a different way to write `\d\d` - note that you can extend this to `\d{2,}` if you need to accept IP addresses that have 2 or 3 digits inside an octet - as seen in the example.
* `\.` another escaped dot character
* `[0-9][0-9]` another way to represent 2 digits (a `\w` in its place would additionally accept __a-z__, __A-Z__ and the __\___ underscore)
* `\.` another escaped dot character
* `[0-9]{1,3}` and yet another way to express that we are looking for a single digit that can occur 1 - 3 times.

![Python](./python-basics_47.png)

By surrounding every block in brackets you will get a list of tuples as a result instead - __Note__ that we added a second IP address to the string we are matching:

```python
a = re.findall(r"(\d\d)\.(\d{2,})\.([0-9][0-9])\.([0-9]{1,3})", arp)
```

![Python](./python-basics_48.png)

### sub()

The `sub()` method substitutes every matching pattern with another string that you provide as an argument:

![Python](./python-basics_49.png)

## Object Oriented Programming

Python allows us to use classes and create multiple objects from them:

![Python](./python-basics_50.png)

Attributes inside objects can be updated directly:

![Python](./python-basics_51.png)

Alternatively you can also use the `getattr()` or `setattr()` methods to do this operation. To check if an attribute is present inside an object use `hasattr()`. And delete an attribute with `delattr()`:

![Python](./python-basics_52.png)

To verify what class an object belongs to, use `isinstance()` - `isinstance(router2, MyRouter)` would get you a `True` in our case.
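Collected as runnable code, the attribute helpers from the screenshots look like this - a sketch assuming the `MyRouter` class from the next section and a `router2` instance created from it:

```python
router2 = MyRouter("R2", "2600", "123456", "12.4")

print(isinstance(router2, MyRouter))   # True
print(hasattr(router2, "ios"))         # True
print(getattr(router2, "model"))       # 2600
setattr(router2, "ios", "15.1")        # update an attribute
delattr(router2, "serialno")           # remove an attribute
print(hasattr(router2, "serialno"))    # False
```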
### Class Inheritance

In the previous example we had a class that did not inherit any attributes from another class - it is convention in Python to add the _default object_ `object` to it in such a case:

```python
class MyRouter(object):
    "A descriptive string for the class"

    def __init__(self, routername, model, serialno, ios):  # class constructor
        self.routername = routername
        self.model = model
        self.serialno = serialno
        self.ios = ios

    def print_router(self, manuf_date):  # class method
        print("Router Name: ", self.routername)
        print("Router Model: ", self.model)
        print("Router Serial #: ", self.serialno)
        print("IOS version: ", self.ios)
        print("Date: ", manuf_date)
```

We can now use our class as parent class for our next example:

```python
class MyNewRouter(MyRouter):
    "A descriptive string for the class"

    # class constructor
    def __init__(self, routername, model, serialno, ios, portsno):
        MyRouter.__init__(self, routername, model, serialno, ios)
        self.portsno = portsno

    def print_new_router(self, string):  # class method
        print(string, self.model)
```

![Python](./python-basics_53.png)

## Comprehensions

### List Comprehensions

Syntactically, list comprehensions consist of an iterable containing an expression followed by a for clause. This can be followed by additional for or if clauses. Python's list comprehensions allow you to write the following for-loop (`list1`) in a single line (`list2`) - even adding conditional logic (`list3`):

![Python](./python-basics_54.png)

### Set Comprehensions

![Python](./python-basics_55.png)

### Dictionary Comprehensions

![Python](./python-basics_56.png)

## Lambda Functions

Python uses the lambda keyword to declare an anonymous function. Although they look syntactically different, lambda functions behave in the same way as regular functions that are declared using the def keyword. The following are the characteristics of Python lambda functions:

![Python](./python-basics_57.png)

* A lambda function can take any number of arguments, but they contain only a single expression. An expression is a piece of code executed by the lambda function, which may or may not return any value.
* Lambda functions can be used to return objects.
* Syntactically, lambda functions are restricted to only a single expression.

![Python](./python-basics_58.png)

## Maps

![Python](./python-basics_59.png)

## Filter

![Python](./python-basics_60.png)

## Iterator

![Python](./python-basics_61.png)

## Generator

![Python](./python-basics_62.png)

### Generator Expressions

![Python](./python-basics_63.png)

## Itertools

### Chain

![Python](./python-basics_64.png)

### Count

![Python](./python-basics_65.png)
<file_sep>---
date: "2020-05-05"
title: "GraphQL Queries and Mutations"
categories:
  - Databases
  - Javascript
---

![<NAME>](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [GraphQL Queries](#graphql-queries)
  - [Standard Query](#standard-query)
  - [Aliases](#aliases)
  - [Fragments](#fragments)
  - [Variables](#variables)
  - [Mutations](#mutations)
  - [Mutations with Variables](#mutations-with-variables)

<!-- /TOC -->

## GraphQL Queries

### Standard Query

Use the standard query to get all __Posts__ out of your database:

1. Query:

```graphql
query {
  posts {
    title
    author {
      name
    }
  }
}
```

2. Response:

```json
{
  "data": {
    "posts": [
      {
        "title": "First Post",
        "author": {
          "name": "<NAME>"
        }
      },
      {
        "title": "Second Post",
        "author": {
          "name": "<NAME>"
        }
      },
      {
        "title": "Third Post",
        "author": {
          "name": "<NAME>"
        }
      }
    ]
  }
}
```

### Aliases

Use aliases to query for multiple separate posts:

1. Query:
1. Query:

```json
query{
  firstquery: post(id:2){
    title
    author{
      name
    }
  }
  secondquery: post(id:3){
    title
    author{
      name
    }
  }
}
```

2. Response:

```json
{
  "data": {
    "firstquery": {
      "title": "Second Post",
      "author": {
        "name": "<NAME>"
      }
    },
    "secondquery": {
      "title": "Third Post",
      "author": {
        "name": "<NAME>"
      }
    }
  }
}
```

### Fragments

Use fragments to shorten your queries:

1. Query:

```json
query{
  firstquery: post(id:2){
    ...basicPostDetails
  }
  secondquery: post(id:3){
    ...basicPostDetails
  }
}

fragment basicPostDetails on Post{
  title
  author{
    name
  }
}
```

2. Response:

```json
{
  "data": {
    "firstquery": {
      "title": "Second Post",
      "author": {
        "name": "<NAME>"
      }
    },
    "secondquery": {
      "title": "Third Post",
      "author": {
        "name": "<NAME>"
      }
    }
  }
}
```

### Variables

Use variables to parameterize your queries - the query variable has to be sent alongside, e.g. the user clicked on post number two -> `{"postId": 2}`:

1. Query:

```json
query($postId: Int!) {
  post(id:$postId){
    ...basicPostDetails
  }
}

fragment basicPostDetails on Post{
  title
  author{
    name
  }
}
```

2. Response:

```json
{
  "data": {
    "post": {
      "title": "Second Post",
      "author": {
        "name": "<NAME>"
      }
    }
  }
}
```

### Mutations

Queries retrieve data from your database. A mutation can be used to make changes to your data - e.g. add a post:

1. Query:

```json
mutation {
  addPost(
    post: {
      title: "New Post",
      content: "Post content",
      author: "sd35xzdfg"
    }){
    title,
    author {
      name
    }
  }
}
```

2. Response:

```json
{
  "data": {
    "post": {
      "title": "New Post",
      "author": {
        "name": "<NAME>"
      }
    }
  }
}
```

### Mutations with Variables

With a variable the new post data has to be passed along as a JavaScript object, e.g.:

```json
{"post": {
    "title": "New Post",
    "content": "Post content",
    "author": "sd35xzdfg"
  }
}
```

1. Query:

```json
mutation ($post: PostInput!) {
  addPost(
    post: $post){
    title,
    author {
      name
    }
  }
}
```

2. Response:

```json
{
  "data": {
    "post": {
      "title": "New Post",
      "author": {
        "name": "<NAME>"
      }
    }
  }
}
```
<file_sep>---
date: "2019-08-08"
title: "MQTT Networks with Homematic"
categories:
- MQTT
- Smarthome
- IoT
---

![Central, Hong Kong](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Redmatic](#redmatic)
- [Getting Started with Node-RED](#getting-started-with-node-red)
- [Homematic Sensors (Motion Detector)](#homematic-sensors-motion-detector)
- [Motion Detection](#motion-detection)
- [Download the Node-RED Flow](#download-the-node-red-flow)
- [System Variables](#system-variables)
- [Download the Node-RED Flow](#download-the-node-red-flow-1)

<!-- /TOC -->

## Redmatic

We already prepared a tutorial on how to install Redmatic - [Node-RED](https://nodered.org) for Homematic - and take control over your camera via its HTTP Interface. You can find all the information there that you need to connect Homematic sensors to your CCU and how to set up your Homematic software to communicate with your INSTAR IP camera. This time we are going to use the same tools, but will connect them to the MQTT interface of your INSTAR Full HD camera.

### Getting Started with Node-RED

We have already worked with Node-RED and used it in conjunction with the __MQTT protocol__ to automate our IP Camera. Node-RED is very easy to install on the Homematic control panel using the [RedMatic](https://github.com/HM-RedMatic/RedMatic) addon. RedMatic combines several software components into one CCU add-on, a software package that can be conveniently installed on a Homematic CCU3 as additional software via the WebUI.
The Node-RED version installed comes with the CCU nodes for Node-RED, making it possible to implement rules, automations, scripts and connections to external services and systems for a Homematic system in a simple, visual way - and this largely without any programming knowledge. For visualization and control it includes the RedMatic WebApp and the Node-RED Dashboard. The RedMatic WebApp is a user interface that can be used immediately without further configuration.

1. First, we need to download the latest version [from Github](https://github.com/HM-RedMatic/RedMatic/releases). You need the file `redmatic-<version>.tar.gz`:

---

![Homematic CCU3](./Homematic_MQTT_01.png)

---

2. In the Homematic WebUI, go to __Control Panel > Add-on Software__:

---

![Homematic CCU3](./Homematic_MQTT_02.png)

![Homematic CCU3](./Homematic_MQTT_03.png)

---

3. Select and install the downloaded file (__can take up to 10min__):

---

![Homematic CCU3](./Homematic_MQTT_04.png)

![Homematic CCU3](./Homematic_MQTT_05.png)

---

4. Then wait until the CCU restarts - patience is required, the installation of RedMatic takes some time. Node-RED will be reachable under `http://<ccu-address>/addons/red` and the Node-RED Dashboard under `http://<ccu-address>/addons/red/ui`:

---

![Homematic CCU3](./Homematic_MQTT_06.png)

---

__The default login is `Admin` (with a capital `A`) and the admin password on your CCU3!__

## Homematic Sensors (Motion Detector)

### Motion Detection

We now want to use a Homematic Motion Detector to have our Pan&Tilt camera pan to __Preset Position 2__ when a motion is detected. After the Motion Detector falls back to its idle state, we want our camera to pan back to __Position 1__.

1. We start by dragging a CCU Value Node onto the workspace and double-clicking on it to open the configuration page.
2. Select BidCos-RF as the interface to embed your Homematic PIR motion detector.
3. If you click in the Channel field, you will see all RF devices connected to your Homematic center. When we paired our motion detector with the control panel, we saw that the name of the detector begins with NEQ1545018. We have two entries at our disposal - the first ends in :0 and the second in :1. We'll pick the second one here (if you take the first one, you'll see that there are other variables available, among them LOWBAT, a low-battery warning that we could include in our Node-RED flows).
4. As data point we choose Motion. This will always give us a message payload of true once the sensor has detected a movement.

---

![Homematic CCU3](./Homematic_MQTT_07.png)

---

The motion sensor will either send a `false` or `true` when its state changes - detected motion triggers the `true` state and after a cooldown it will fall back to `false`. To use this information we will add a __Switch Node__ that separates those two messages:

---

![Homematic CCU3](./Homematic_MQTT_08.png)

---

In case we receive a `true` from our sensor (__Alarm__) we want to change the message payload to `{"val":"1"}`. A `false` signifies that the sensor went back to sleep (__Idle__) and we have to change the message to `{"val":"0"}`:

---

![Homematic CCU3](./Homematic_MQTT_09.png)

---

This payload we can now use to trigger a preset on our camera through the MQTT interface. A value of `0` represents the first and a value of `1` the second preset position on our camera. The MQTT topic we have to send this information to is called `features/ptz/preset`.
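Before building the flow, you can verify the topic and payload with any MQTT client - here is a quick sketch using the `mosquitto_pub` command-line client (the broker address `192.168.2.165`, the login `admin/instar` and the MAC-based topic prefix `instar/000389888811` are assumptions from this example setup - substitute the values of your own camera):

```bash
# pan to preset position 2 (presets are zero-indexed, so "1")
mosquitto_pub -h 192.168.2.165 -p 1883 -u admin -P instar \
  -t "instar/000389888811/features/ptz/preset" -m '{"val":"1"}'

# ...and back to preset position 1 ("0")
mosquitto_pub -h 192.168.2.165 -p 1883 -u admin -P instar \
  -t "instar/000389888811/features/ptz/preset" -m '{"val":"0"}'
```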
We can use a __MQTT Out__ node to send this status update to our camera:

---

![Homematic CCU3](./Homematic_MQTT_10.png)

---

For Node-RED to be able to connect to your camera's MQTT Broker you first have to define the server. Type in your camera's IP address and the MQTT port that you set inside your camera's WebUI. If you choose the regular port (default `1883`) skip the following step. But if you want to use TLS encryption for your MQTT network, choose the SSL port instead (default `8883`) and click to add your camera's SSL certificate below:

---

![Homematic CCU3](./Homematic_MQTT_11.png)

---

Open your camera's WebUI and navigate to the MQTT Broker configuration menu under __Features__. There click on the __Download__ button to download the client certificate file from your camera. Back in Node-RED click on __Upload__ to upload the certificate:

---

![Homematic CCU3](./Homematic_MQTT_12.png)

---

With the TLS configuration set, switch to the __Security__ tab and enter the login you set in your MQTT broker configuration menu:

---

![Homematic CCU3](./Homematic_MQTT_13.png)

---

OK, you are all set. Every time the motion sensor is triggered your camera will now receive a command to go to __Preset Position 2__ and return to __Position 1__ when the sensor switches back to idle. We can repeat the same steps for the door sensor we previously added to our Homematic system and send our camera to __Preset Position 3__ to check out who opened the garage door:

---

![Homematic CCU3](./Homematic_MQTT_14.png)

---

#### Download the Node-RED Flow

You can download the complete flow and import it into your Node-RED admin panel. Copy the JSON code below and, in Node-RED, click on the hamburger menu in the top right. Choose __Import__ and copy the code into the input field. Click on the red __Import__ button and drag the flow into position.
```json [{"id":"d6b5f7c8.7183e8","type":"ccu-value","z":"427fc89b.e34038","name":"Motion Detector","iface":"BidCos-RF","channel":"NEQ1545018:1 HM-Sen-MDIR-O-2 NEQ1545018:1","datapoint":"MOTION","mode":"","start":true,"change":true,"cache":false,"queue":false,"on":0,"onType":"undefined","ramp":0,"rampType":"undefined","working":false,"ccuConfig":"38263145.35ea0e","topic":"${CCU}/${Interface}/${channel}/${datapoint}","x":100,"y":100,"wires":[["2471cac0.29f4d6"]]},{"id":"d3434bbe.05fa58","type":"change","z":"427fc89b.e34038","name":"Alarm","rules":[{"t":"set","p":"payload","pt":"msg","to":"{\"val\":\"1\"}","tot":"json"}],"action":"","property":"","from":"","to":"","reg":false,"x":380,"y":76,"wires":[["a8958f92.4cb8c"]]},{"id":"a8958f92.4cb8c","type":"mqtt out","z":"427fc89b.e34038","name":"features/ptz/preset","topic":"instar/000389888811/features/ptz/preset","qos":"1","retain":"false","broker":"6df4e12a.0c747","x":556,"y":99,"wires":[]},{"id":"2471cac0.29f4d6","type":"switch","z":"427fc89b.e34038","name":"","property":"payload","propertyType":"msg","rules":[{"t":"true"},{"t":"false"}],"checkall":"true","repair":false,"outputs":2,"x":252,"y":100,"wires":[["d3434bbe.05fa58"],["6197b16.f4d705"]]},{"id":"6197b16.f4d705","type":"change","z":"427fc89b.e34038","name":"Idle","rules":[{"t":"set","p":"payload","pt":"msg","to":"{\"val\":\"0\"}","tot":"json"}],"action":"","property":"","from":"","to":"","reg":false,"x":380,"y":127,"wires":[["a8958f92.4cb8c"]]},{"id":"c7056a66.400448","type":"ccu-value","z":"427fc89b.e34038","name":"Door Sensor","iface":"BidCos-RF","channel":"OEQ1985716:1 HM-Sec-SCo OEQ1985716:1","datapoint":"STATE","mode":"","start":true,"change":true,"cache":false,"queue":false,"on":0,"onType":"undefined","ramp":0,"rampType":"undefined","working":false,"ccuConfig":"38263145.35ea0e","topic":"${CCU}/${Interface}/${channel}/${datapoint}","x":90,"y":200,"wires":[["1e91dd74.85f263"]]},{"id":"daa36829.abb308","type":"change","z":"427fc89b.e34038","name":"Alarm","rules":[{"t":"set","p":"payload","pt":"msg","to":"{\"val\":\"2\"}","tot":"json"}],"action":"","property":"","from":"","to":"","reg":false,"x":380,"y":176,"wires":[["7de0a4c1.0f702c"]]},{"id":"7de0a4c1.0f702c","type":"mqtt out","z":"427fc89b.e34038","name":"features/ptz/preset","topic":"instar/000389888811/features/ptz/preset","qos":"1","retain":"false","broker":"6df4e12a.0c747","x":556,"y":199,"wires":[]},{"id":"1e91dd74.85f263","type":"switch","z":"427fc89b.e34038","name":"","property":"payload","propertyType":"msg","rules":[{"t":"true"},{"t":"false"}],"checkall":"true","repair":false,"outputs":2,"x":252,"y":200,"wires":[["daa36829.abb308"],["10361ae7.7e6fb5"]]},{"id":"10361ae7.7e6fb5","type":"change","z":"427fc89b.e34038","name":"Idle","rules":[{"t":"set","p":"payload","pt":"msg","to":"{\"val\":\"0\"}","tot":"json"}],"action":"","property":"","from":"","to":"","reg":false,"x":380,"y":227,"wires":[["7de0a4c1.0f702c"]]},{"id":"28e6bcd8.29e804","type":"comment","z":"427fc89b.e34038","name":"Homematic Sensors","info":"","x":110,"y":40,"wires":[]},{"id":"38263145.35ea0e","type":"ccu-connection","z":"","name":"localhost","host":"localhost","regaEnabled":true,"bcrfEnabled":true,"iprfEnabled":true,"virtEnabled":true,"bcwiEnabled":false,"cuxdEnabled":false,"regaPoll":true,"regaInterval":"30","rpcPingTimeout":"60","rpcInitAddress":"127.0.0.1","rpcServerHost":"127.0.0.1","rpcBinPort":"2047","rpcXmlPort":"2048"},{"id":"6df4e12a.0c747","type":"mqtt-broker","z":"","name":"IN-9010 Full 
HD","broker":"192.168.2.165","port":"8883","tls":"95eab325.77ade","clientid":"","usetls":true,"compatmode":false,"keepalive":"60","cleansession":true,"birthTopic":"","birthQos":"0","birthPayload":"","closeTopic":"","closeQos":"0","closePayload":"","willTopic":"","willQos":"0","willPayload":""},{"id":"95eab325.77ade","type":"tls-config","z":"","name":"","cert":"","key":"","ca":"","certname":"instar-cert.cert","keyname":"","caname":"","servername":"","verifyservercert":true}]
```

* You can use this flow with your own sensors - just change the selected sensor in the first node of each flow.

---

![Homematic CCU3](./Homematic_MQTT_15.png)

---

### System Variables

We can also use a __System Variable__ to send an MQTT update to our camera. As an example we want to create a variable `Daheim` (_home_) that we want to trigger every time we enter our home. When this happens we want the motion detection on our cameras to be deactivated. Let's start by creating this variable by clicking on __Einstellungen__ and __System Variable__. The variable should be `true` when I am at home and `false` when I am away:

---

![Homematic CCU3](./Homematic_MQTT_16.png)

---

Now we can switch to Node-RED and drag a __SysVar__ node onto the canvas. Select the `Daheim` variable we just created and check that you want its value to be emitted on change:

---

![Homematic CCU3](./Homematic_MQTT_17.png)

---

The rest of this flow is identical to the two we created earlier. We have to distinguish between a message with payload `true` or `false`, change the payload according to the command we want to send and assign the MQTT topic. If you are using the internal PIR sensor of your camera to detect motion, you can use the `alarm/actions/pir/enable` topic to switch it on or off by either sending `{"val":"1"}` or `{"val":"0"}`. __Note__ this works for the camera models IN-8015 FHD and IN-9008 FHD. The camera model IN-9020 FHD uses the `alarm/actions/alarmin` command instead to switch the PIR.

---

![Homematic CCU3](./Homematic_MQTT_18.png)

---

#### Download the Node-RED Flow

You can download the complete flow and import it into your Node-RED admin panel. Copy the JSON code below and, in Node-RED, click on the hamburger menu in the top right. Choose __Import__ and copy the code into the input field. Click on the red __Import__ button and drag the flow into position.
```json [{"id":"e23a5982.cd37b8","type":"ccu-sysvar","z":"427fc89b.e34038","name":"Daheim","ccuConfig":"38263145.35ea0e","topic":"ReGaHSS/${Name}","change":true,"cache":true,"x":80,"y":320,"wires":[["1bc1f6f2.fbf029"]]},{"id":"f36b8a95.38f2b8","type":"change","z":"427fc89b.e34038","name":"home","rules":[{"t":"set","p":"payload","pt":"msg","to":"{\"val\":\"0\"}","tot":"json"}],"action":"","property":"","from":"","to":"","reg":false,"x":340,"y":296,"wires":[["91a19433.03d868"]]},{"id":"91a19433.03d868","type":"mqtt out","z":"427fc89b.e34038","name":"alarm/actions/pir/enable","topic":"instar/000389888811/alarm/actions/pir/enable","qos":"1","retain":"false","broker":"6df4e12a.0c747","x":536,"y":319,"wires":[]},{"id":"1bc1f6f2.fbf029","type":"switch","z":"427fc89b.e34038","name":"","property":"payload","propertyType":"msg","rules":[{"t":"true"},{"t":"false"}],"checkall":"true","repair":false,"outputs":2,"x":212,"y":320,"wires":[["f36b8a95.38f2b8"],["da80c0a1.e74de"]]},{"id":"da80c0a1.e74de","type":"change","z":"427fc89b.e34038","name":"away","rules":[{"t":"set","p":"payload","pt":"msg","to":"{\"val\":\"1\"}","tot":"json"}],"action":"","property":"","from":"","to":"","reg":false,"x":340,"y":347,"wires":[["91a19433.03d868"]]},{"id":"38263145.35ea0e","type":"ccu-connection","z":"","name":"localhost","host":"localhost","regaEnabled":true,"bcrfEnabled":true,"iprfEnabled":true,"virtEnabled":true,"bcwiEnabled":false,"cuxdEnabled":false,"regaPoll":true,"regaInterval":"30","rpcPingTimeout":"60","rpcInitAddress":"127.0.0.1","rpcServerHost":"127.0.0.1","rpcBinPort":"2047","rpcXmlPort":"2048"},{"id":"6df4e12a.0c747","type":"mqtt-broker","z":"","name":"IN-9010 Full HD","broker":"192.168.2.165","port":"8883","tls":"95eab325.77ade","clientid":"","usetls":true,"compatmode":false,"keepalive":"60","cleansession":true,"birthTopic":"","birthQos":"0","birthPayload":"","closeTopic":"","closeQos":"0","closePayload":"","willTopic":"","willQos":"0","willPayload":""},{"id":"95eab325.77ade","type":"tls-config","z":"","name":"","cert":"","key":"","ca":"","certname":"instar-cert.cert","keyname":"","caname":"","servername":"","verifyservercert":true}] ```<file_sep>--- date: "2019-06-21" title: "User Login with Facebook for Discourse" categories: - LINUX - Discourse --- ![Bakhtapur, Nepal](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) Go to the [Facebook Developer page](https://developers.facebook.com/apps) Login with the credentials of the account you want to connect to Discourse, click on __Get Started__ and follow the wizard: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_01.png) Select Developer: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_02.png) Provide a name for the app, for example `Discourse Login` and click on __Create App ID__: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_03.png) Click Set Up below __Facebook Login__: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_04.png) From the menu on the left, exit Quickstart by clicking on Settings under Facebook Login: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_05.png) Setup the Valid OAuth redirect URI field, entering e.g. 
`https://forum.instar.com/auth/facebook/callback`: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_06.png) Navigate to __Settings/Basic__, enter your Discourse URL (`https://forum.instar.com`) in the __App Domains__ field and also enter the URL for your Discourse site privacy policy and Terms of Service in the appropriate fields and also upload the icon of your site: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_07.png) At the bottom of the page click on __+ Add Platform__ and select __Website__: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_08.png) Enter your Discourse URL here, for example `https://forum.instar.com` and click __Save Changes__: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_09.png) Click on the Status button to change your app from in development to public: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_10.png) In Discourse site settings, enter your Facebook app’s __App ID__ and __App Secret__ in the `facebook_app_id` and `facebook_app_secret` fields: ![Using Facebook Login in Discourse](./Use_Facebook_to_Login_to_Discourse_11.png)<file_sep>--- date: "2019-11-07" title: "Building an RTMP Streaming Server with Podman" categories: - LINUX - Smarthome - IoT - Docker --- ![Harbin, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Building the Docker Image](#building-the-docker-image) - [Preparing Ubuntu](#preparing-ubuntu) - [Installing NGINX](#installing-nginx) - [Configuring NGINX to use RTMP](#configuring-nginx-to-use-rtmp) - [Open Broadcaster Software](#open-broadcaster-software) - [Installing ffmpeg](#installing-ffmpeg) - [Create Directories](#create-directories) - [Configuring NGINX](#configuring-nginx) - [Backup Default NGINX Config](#backup-default-nginx-config) <!-- /TOC --> > [This is an update / modified version of this tutorial on obsproject.com](https://obsproject.com/forum/resources/how-to-set-up-your-own-private-rtmp-server-using-nginx.50/) ## Building the Docker Image ### Preparing Ubuntu Start by pulling the latest [Ubuntu Image from DockerHub](https://hub.docker.com/_/ubuntu) and recommit it with a descriptive name: ```bash docker pull ubuntu podman run ubuntu:latest ``` ```bash podman ps -a CONTAINER ID IMAGE COMMAND CREATED cd3d48f2ca99 localhost/ubuntu:latest /bin/bash 7 seconds ago ``` ```bash podman commit --author "<NAME>" cd3d48f2ca99 nginx-rtmp-streamer ``` ```bash podman images REPOSITORY TAG IMAGE ID CREATED SIZE localhost/nginx-rtmp-streamer latest 603c26d12092 30 seconds ago 66.6 MB ``` ```bash podman run \ --name rtmp \ --net=host \ --privileged \ --rm \ -ti -u root localhost/nginx-rtmp-streamer /bin/bash ``` ### Installing NGINX Make sure you have the necessary tools to build `nginx` using the following command:: ```bash apt update && apt upgrade apt install build-essential libpcre3 libpcre3-dev libssl-dev wget unzip zlibc zlib1g zlib1g-dev nano ``` From your home directory, download and unpack the [latest version of the nginx source code](http://nginx.org/en/download.html): ```bash cd ~ wget http://nginx.org/download/nginx-1.16.1.tar.gz tar -zxvf nginx-1.16.1.tar.gz ``` Next, get and unpack the RTMP module source code from git: ```bash wget https://github.com/sergey-dryabzhinsky/nginx-rtmp-module/archive/dev.zip unzip dev.zip ``` Now to building `nginx`: ```bash cd nginx-1.16.1 ./configure --with-http_ssl_module --add-module=../nginx-rtmp-module-dev make make install ``` By default it installs 
to `/usr/local/nginx`. You can start it with: ```bash /usr/local/nginx/sbin/nginx ``` And to test to make sure `nginx` is running, point your browser to `http://<your server ip>/`: ```bash Welcome to nginx! If you see this page, the nginx web server is successfully installed and working. Further configuration is required. For online documentation and support please refer to nginx.org. Commercial support is available at nginx.com. Thank you for using nginx. ``` ### Configuring NGINX to use RTMP Open your config file, located by default at `/usr/local/nginx/conf/nginx.conf` and add the following at the very end of the file: ```conf rtmp { server { listen 1935; chunk_size 4096; application live { live on; record off; } } } ``` This is an extremely basic configuration with a "live" application that simply forwards the RTMP stream on to whoever requests it. [Here's the whole configuration guide](https://github.com/arut/nginx-rtmp-module/wiki/Directives), which shows you how to forward streams to other places (such as Twitch), save recordings of uploads, output stats, etc. Restart nginx with: ```bash /usr/local/nginx/sbin/nginx -s stop /usr/local/nginx/sbin/nginx ``` ### Open Broadcaster Software ![NGINX RTMP Streaming Server](./NGINX_RTMP_Streaming_Server_01.png) We can now configure the [OBS Studio software](https://github.com/obsproject/obs-studio/wiki/Install-Instructions) to push camera streams to our RTMP server. Create a new profile in OBS, and change your Broadcast Settings: ```bash Streaming Service: Custom Server: rtmp://<your server ip>/live Play Path/Stream Key: test ``` ![NGINX RTMP Streaming Server](./NGINX_RTMP_Streaming_Server_02.png) Save your settings and click on __Start Streaming__: ![NGINX RTMP Streaming Server](./NGINX_RTMP_Streaming_Server_03.png) You can access the RTMP stream with a software like the [VLC Player](https://www.videolan.org/vlc/index.html) via the following URL: ```bash rtmp://<your server ip>/live/test ``` ![NGINX RTMP Streaming Server](./NGINX_RTMP_Streaming_Server_04.png) You should be able to see the broadcasted video stream: ![NGINX RTMP Streaming Server](./NGINX_RTMP_Streaming_Server_05.png) Adding user authentication: ![NGINX RTMP Streaming Server](./NGINX_RTMP_Streaming_Server_06.png) ## Backup Default NGINX Config ```conf #user nobody; worker_processes 1; #error_log logs/error.log; #error_log logs/error.log notice; #error_log logs/error.log info; #pid logs/nginx.pid; events { worker_connections 1024; } http { include mime.types; default_type application/octet-stream; #log_format main '$remote_addr - $remote_user [$time_local] "$request" ' # '$status $body_bytes_sent "$http_referer" ' # '"$http_user_agent" "$http_x_forwarded_for"'; #access_log logs/access.log main; sendfile on; #tcp_nopush on; #keepalive_timeout 0; keepalive_timeout 65; #gzip on; server { listen 80; server_name localhost; #charset koi8-r; #access_log logs/host.access.log main; location / { root html; index index.html index.htm; } #error_page 404 /404.html; # redirect server error pages to the static page /50x.html # error_page 500 502 503 504 /50x.html; location = /50x.html { root html; } # proxy the PHP scripts to Apache listening on 127.0.0.1:80 # #location ~ \.php$ { # proxy_pass http://127.0.0.1; #} # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000 # #location ~ \.php$ { # root html; # fastcgi_pass 127.0.0.1:9000; # fastcgi_index index.php; # fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name; # include fastcgi_params; #} # deny access to .htaccess 
files, if Apache's document root
    # concurs with nginx's one
    #
    #location ~ /\.ht {
    #    deny  all;
    #}
    }

    # another virtual host using mix of IP-, name-, and port-based configuration
    #
    #server {
    #    listen       8000;
    #    listen       somename:8080;
    #    server_name  somename  alias  another.alias;

    #    location / {
    #        root   html;
    #        index  index.html index.htm;
    #    }
    #}

    # HTTPS server
    #
    #server {
    #    listen       443 ssl;
    #    server_name  localhost;

    #    ssl_certificate      cert.pem;
    #    ssl_certificate_key  cert.key;

    #    ssl_session_cache    shared:SSL:1m;
    #    ssl_session_timeout  5m;

    #    ssl_ciphers  HIGH:!aNULL:!MD5;
    #    ssl_prefer_server_ciphers  on;

    #    location / {
    #        root   html;
    #        index  index.html index.htm;
    #    }
    #}
}

# rtmp {
#     server {
#         listen 1935;
#         chunk_size 4096;
#
#         application live {
#             live on;
#             record off;
#         }
#     }
# }
```
<file_sep>---
date: "2020-05-01"
title: "Getting to Know Neo4j"
categories:
- Databases
- Docker
---

![<NAME>](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Running in Docker](#running-in-docker)
- [Cypher](#cypher)
- [CREATE](#create)
- [MATCH](#match)
- [MERGE](#merge)
- [WHERE, WITH and AND](#where-with-and-and)
- [SET](#set)
- [REMOVE](#remove)
- [WITH](#with)
- [DELETE](#delete)
- [NULL](#null)
- [Nth Degree Relationships](#nth-degree-relationships)
- [Importing Scripts](#importing-scripts)

<!-- /TOC -->

## Running in Docker

Create the folders Neo4j will need to persist its data:

```bash
mkdir /opt/neo4j/data
mkdir /opt/neo4j/logs
mkdir /opt/neo4j/import
mkdir /opt/neo4j/plugins
```

`chown 101:101` all folders to make them available for your Docker user. Pull and run the latest version of Neo4j from Docker Hub:

```bash
docker run \
    --name neo4j \
    -p7474:7474 -p7687:7687 \
    -d \
    -v /opt/neo4j/data:/data \
    -v /opt/neo4j/logs:/logs \
    -v /opt/neo4j/import:/var/lib/neo4j/import \
    -v /opt/neo4j/plugins:/plugins \
    --env NEO4J_AUTH=<PASSWORD>/<PASSWORD> \
    neo4j:latest
```

By default, the Docker image exposes three ports for remote access:

* 7474 for HTTP
* 7473 for HTTPS
* 7687 for Bolt

Now head over to `localhost:7474` with your browser and login with `neo4j/password`.

## Cypher

### CREATE

Use the __Create__ command to create nodes with properties and relationships.

```bash
CREATE(p1: Person{name:'<NAME>'})
-[r:UNIVERSITY_BERKELEY{attr:"Susanne followed Simon"}]->
(p2: Person{name: '<NAME>'})
RETURN p1,p2,r
```

Create two nodes of type __Person__ with the property __Name__ and set them into a __Relationship__, e.g. both persons went to the same university. The RETURN command allows us to see a visualization of the result of our query.

### MATCH

Use the __Match__ command to find patterns among your nodes.

```bash
MATCH (p1: Person {name:'<NAME>'}), (p2:Person {name: '<NAME>'})
CREATE (p1) -[r:APPLIED_PHYSICS{attr:"Susanne invited Simon"}]-> (p2)
RETURN p1,p2,r
```

Find two nodes of type __Person__ with specific __Name__ properties and add a new relationship between them - a shared course at the university.

```bash
MATCH (p1:Person {name: '<NAME>'})-[r:APPLIED_PHYSICS]->(p2)
RETURN p2
```

Show me who joined Simon in his AP1 course.

```bash
MATCH ()-[r:UNIVERSITY_BERKELEY]->(p2)
RETURN p2
```

Show me everyone who was invited into Berkeley.

### MERGE

Just like with the match operator, you get the nodes or relationships returned.
But unlike match, if the node, property or relationship does not exist, it will be created by the merge query:

```bash
MATCH (person:Person{name:'<NAME>'})
RETURN person
```

Does not find anything - Merge creates the node for you:

```bash
MERGE (person:Person{name:'<NAME>'})
MERGE (bäckerei:Work{name:'<NAME>'})
MERGE (person)-[:Worked_IN]->(bäckerei)
RETURN person, bäckerei
```

__ON CREATE SET__

The Merge Operator will either create the node or update it if it already exists. Everything after ON CREATE SET will only be executed if the node does not yet exist:

```bash
MERGE (location:Location{name: 'Linz'})
ON CREATE SET location.created_at = timestamp(), location.created_by = 'GPS100x5'
RETURN location
```

__ON MATCH SET__

If we want to update a property every time we get a match:

```bash
MERGE (location:Location{name: 'Linz'})
ON CREATE SET location.created_at = timestamp(), location.update_count = 0
ON MATCH SET location.modified_at = timestamp(), location.update_count = (location.update_count + 1)
RETURN location
```

How often did <NAME> watch Top Gun? Increment a relationship property every time the query is run:

```bash
MATCH (actor:Person{name: '<NAME>'})
MATCH (movie:Movie{title: 'Top Gun'})
MERGE (actor)-[rel:VIEWED]->(movie)
ON CREATE SET rel.count = 1
ON MATCH SET rel.count = (rel.count + 1)
RETURN actor, movie
```

![Getting to know Neo4j](./Getting_to_know_Cypher_03.png)

### WHERE, WITH and AND

```bash
MATCH (a:Person),(b:Person)
WHERE a.name='<NAME>' AND b.name='<NAME>'
WITH a,b
CREATE (a) -[r:ANALYTICAL_CHEMISTRY]->(b)
RETURN a,b,r
```

### SET

Use the __Set__ command to change a property of a node.

```bash
MATCH (p:Person{name: '<NAME>'})
SET p.name= '<NAME>'
RETURN p
```

__Set__ can also be used to create a property:

```bash
MATCH (p:Person {name: '<NAME>'})
SET p.title = 'Freshman'
RETURN p
```

Or to set a relationship property:

```bash
MATCH (director:Person{name:'<NAME>'}), (movies:Movie)<-[rel:DIRECTED]-(director)
SET rel.rating = 5
```

Or to set another label:

```bash
MATCH (director:Person{name:'<NAME>'}), (movies:Movie)<-[rel:DIRECTED]-(director)
SET director:Hero
```

Setting a relationship to a new type is not supported by Neo4j. But you can create a new relationship and copy over the old properties:

```bash
MATCH (actor1:Person{name:'<NAME>'})-[old_rel:HAS_CONTACT]->(actor2:Person{name:'<NAME>'})
CREATE (actor1)-[new_rel:OLD_CONTACT]->(actor2)
SET new_rel=old_rel
DELETE old_rel
RETURN actor1, actor2
```

### REMOVE

Remove a relationship property:

```bash
MATCH (director:Person{name:'<NAME>'}), (movies:Movie)<-[rel:DIRECTED]-(director)
REMOVE rel.rating
```

Remove a label:

```bash
MATCH (director:Person{name:'<NAME>'}), (movies:Movie)<-[rel:DIRECTED]-(director)
REMOVE director:Hero
```

### WITH

Set dynamic properties to nodes with the __WITH__ operator:

```bash
MATCH (actor:Person{name:"<NAME>"})-[rel:ACTED_IN]->(movie)
WITH actor, count(movie) AS num_of_movies
SET actor.num_of_movies = num_of_movies
RETURN actor
```

![Getting to know Neo4j](./Getting_to_know_Cypher_02.png)

### DELETE

```bash
MATCH (n)
DELETE n
```

Only works if nodes don't have any relations - otherwise those have to be selected and deleted as well.

__Delete All__:

```bash
MATCH (n)
OPTIONAL MATCH (n)-[r]-()
DELETE n,r
```

Select all nodes and relationships and delete the selection.
Or shorter - remove a node and cut all relations that were attached to it:

```bash
MATCH (node)
DETACH DELETE node
```

Selective delete:

```bash
MATCH (actor:Person{name:'<NAME>'}), (movies:Movie)<-[:ACTED_IN]-(actor)
DETACH DELETE movies
```

### NULL

Give me all person nodes that have an earnings property that is not undefined:

```bash
MATCH (person:Person)
WHERE person.earnings IS NOT NULL
RETURN person
```

### Nth Degree Relationships

Get all 1st degree contacts of actor <NAME>:

```bash
MATCH (actor:Person{name:'<NAME>'})-[:HAS_CONTACT]->(contact)
RETURN actor, contact
```

Get all 2nd degree contacts:

```bash
MATCH (actor:Person{name:'<NAME>'})-[:HAS_CONTACT]->(contact)-[:HAS_CONTACT]->(contact2)
RETURN actor, contact2
```

Get the nth degree contacts by simply adding __*n__ to the relationship in question - e.g. get the 5th degree contacts:

```bash
MATCH (actor:Person{name:'<NAME>'})-[rel:HAS_CONTACT*5]->(contact)
RETURN actor, rel, contact
LIMIT 1
```

Show all 0 - 2nd degree contacts:

```bash
MATCH (actor:Person{name:'<NAME>'})-[rel:HAS_CONTACT*0..2]->(contact)
RETURN actor, rel, contact
LIMIT 5
```

__PATH__

How many contacts are between <NAME> and <NAME>:

```bash
MATCH (neo:Person{name:'<NAME>'})
MATCH (maverick:Person{name:'<NAME>'})
MATCH path=((neo)-[rel:HAS_CONTACT*]->(maverick))
RETURN length(path)
LIMIT 1
```

To find the shortest path, limit the degree of contact until no path can be found anymore:

```bash
MATCH (neo:Person{name:'<NAME>'})
MATCH (maverick:Person{name:'<NAME>'})
MATCH path=((neo)-[rel:HAS_CONTACT*..2]->(maverick))
RETURN length(path) AS path_length
LIMIT 1
```

The minimum path length is 2. The easier way to find the path length is the __shortestPath__ function:

```bash
MATCH (neo:Person{name:'<NAME>'})
MATCH (maverick:Person{name:'<NAME>'})
MATCH path=shortestPath((neo)-[rel:HAS_CONTACT*..20]->(maverick))
RETURN length(path) AS path_length
```

Since there might be several shortest paths we can use the __allShortestPaths__ function to resolve all of them:

```bash
MATCH (neo:Person{name:'<NAME>'})
MATCH (maverick:Person{name:'<NAME>'})
MATCH path=allShortestPaths((neo)-[rel:HAS_CONTACT*..20]->(maverick))
RETURN path, length(path) AS path_length
```

Get the shortest path between actors that acted in the movie The Matrix and actors from the movie Top Gun:

```bash
MATCH (actor1:Person)-[:ACTED_IN]->(movie1:Movie{title:'Top Gun'}),(actor2:Person)-[:ACTED_IN]->(movie2:Movie{title:'The Matrix'}),
path = shortestPath((actor1)-[:HAS_CONTACT*..20]->(actor2))
RETURN path, movie1, movie2, length(path) AS pathLength
ORDER BY pathLength
LIMIT 2
```

To exclude paths where an actor worked in both movies:

```bash
MATCH (actor1:Person)-[:ACTED_IN]->(movie1:Movie{title:'Top Gun'}),(actor2:Person)-[:ACTED_IN]->(movie2:Movie{title:'The Matrix'}),
path = shortestPath((actor1)-[:HAS_CONTACT*..20]->(actor2))
WHERE actor1 <> actor2
RETURN path, movie1, movie2, length(path) AS pathLength
ORDER BY pathLength
LIMIT 1
```

![Getting to know Neo4j](./Getting_to_know_Cypher_04.png)

## Importing Scripts

Just write down all your Cypher queries into a text file.
Add a comment on top to name the script: ```bash // Neo4j Sample Data CREATE(p1: Person{name:'<NAME>'}) -[r:UNIVERSITY_BERKELEY{attr:"Susanne followed Simon"}]-> (p2: Person{name: '<NAME>'}) MATCH (p1: Person {name:'<NAME>'}), (p2:Person {name: '<NAME>'}) CREATE (p1) -[r:APPLIED_PHYSICS{attr:"Susanne invited Simon"}]-> (p2) MATCH (p1:Person {name: '<NAME>'})-[r:APPLIED_PHYSICS]->(p2) MATCH (a:Person),(b:Person) WHERE a.name='<NAME>' AND b.name='<NAME>' WITH a,b CREATE (a) -[r:ANALYTICAL_CHEMISTRY]->(b) MATCH (p:Person{name: '<NAME>'}) SET p.name= '<NAME>' MATCH (p:Person {name: '<NAME>'}) SET p.title = 'Freshman' ``` And drag&drop them into the favorite section of the Neo4j web interface: ![Getting to know Neo4j](./Getting_to_know_Cypher_01.png) You can run all scripts at once by clicking on the play button next to your scripts name. <file_sep>--- date: "2018-11-17" title: "A Collection of Node-RED MQTT Recipes" categories: - IoT - Node-RED - MQTT --- ![Tanna Island, Vanuatu](./photo-34221455520_6e72413b0c_o.jpg) <!-- TOC --> - [Connect to an MQTT Broker](#connect-to-an-mqtt-broker) <!-- /TOC --> We are going to use a [MQTT](https://thenewstack.io/mqtt-protocol-iot/) Broker to test our Node-RED flows under Windows - the MQTT.fx Software can be [downloaded here](https://mqttfx.jensd.de/index.php/download). ## Connect to an MQTT Broker The Node-RED flow that we are going to write in a moment, is going to broadcast `@sensors/livingroom/mqtt07` - so the first step is to subscribe our broker to it: ![MQTT.fx](./node-red-mqtt_01.png) Import the following flow to Node-RED - It uses the __MQTT Input__ input and __MQTT Output__ node and an associated __MQTT Config__ node to connect to an MQTT broker. ```json [{"id":"1be4a3d2.9a1e7c","type":"mqtt in","z":"9bdc093c.fd4b08","name":"","topic":"sensors/livingroom/mqtt07","qos":"2","broker":"47feb3e4.56f11c","x":100,"y":100,"wires":[["7c273733.6783e8"]]},{"id":"7c273733.6783e8","type":"debug","z":"9bdc093c.fd4b08","name":"","active":true,"tosidebar":true,"console":false,"complete":"false","x":290,"y":100,"wires":[]},{"id":"47feb3e4.56f11c","type":"mqtt-broker","z":"","broker":"localhost","port":"1883","clientid":"","usetls":false,"compatmode":true,"keepalive":"60","cleansession":true,"birthTopic":"","birthQos":"0","birthPayload":"","willTopic":"","willQos":"0","willPayload":""}] ``` ![MQTT.fx](./node-red-mqtt_02.png) Triggering the flow will send a JSON Message via the MQTT protocol, that will now be caught by MQTT.fx ![MQTT.fx](./node-red-mqtt_03.png) We can now use the __MQTT Output__ node to publish messages to a topic: ```json [{"id":"9c138886.116928","type":"mqtt out","z":"eda2a949.74ea98","name":"","topic":"sensors/kitchen/mqtt07","qos":"","retain":"","broker":"61de5090.0f5d9","x":430,"y":100,"wires":[]},{"id":"ff654e7f.32e9e","type":"inject","z":"eda2a949.74ea98","name":"temperature","topic":"","payload":"22","payloadType":"num","repeat":"","crontab":"","once":false,"x":230,"y":100,"wires":[["9c138886.116928"]]},{"id":"61de5090.0f5d9","type":"mqtt-broker","z":"","broker":"localhost","port":"1883","clientid":"","usetls":false,"compatmode":true,"keepalive":"60","cleansession":true,"willTopic":"","willQos":"0","willPayload":"","birthTopic":"","birthQos":"0","birthPayload":""}] ``` You can use the __MQTT Input__ node and a __JSON node__ to receive a parsed JSON message: ```json [{"id":"8024cb4.98c5238","type":"mqtt 
in","z":"eda2a949.74ea98","name":"","topic":"sensors/#","qos":"2","broker":"61de5090.0f5d9","x":260,"y":580,"wires":[["b5098b7f.2361d8"]]},{"id":"15d727dd.33e808","type":"debug","z":"eda2a949.74ea98","name":"","active":true,"console":"false","complete":"false","x":530,"y":580,"wires":[]},{"id":"2aed678c.3de738","type":"mqtt out","z":"eda2a949.74ea98","name":"","topic":"sensors/livingroom/temp","qos":"","retain":"false","broker":"61de5090.0f5d9","x":310,"y":520,"wires":[]},{"id":"3b613a69.a247c6","type":"inject","z":"eda2a949.74ea98","name":"temp json","topic":"","payload":"{\"sensor_id\":1234,\"temperature\":13}","payloadType":"json","repeat":"","crontab":"","once":false,"x":120,"y":520,"wires":[["2aed678c.3de738"]]},{"id":"b5098b7f.2361d8","type":"json","z":"eda2a949.74ea98","name":"","pretty":false,"x":390,"y":580,"wires":[["15d727dd.33e808"]]},{"id":"61de5090.0f5d9","type":"mqtt-broker","z":"","broker":"localhost","port":"1883","clientid":"","usetls":false,"compatmode":true,"keepalive":"60","cleansession":true,"willTopic":"","willQos":"0","willPayload":"","birthTopic":"","birthQos":"0","birthPayload":""}] ``` Use the __MQTT Input__ node to subscribe to messages sent to a topic or a topic pattern: ```json [{"id":"31edb119.efc91e","type":"mqtt in","z":"9bdc093c.fd4b08","name":"","topic":"hello/sensor","qos":"2","broker":"520ad9a0.e248f8","x":80,"y":360,"wires":[["ebdeaa8d.d92938"]]},{"id":"d2e90de6.cdb69","type":"debug","z":"9bdc093c.fd4b08","name":"","active":true,"tosidebar":true,"console":false,"complete":"false","x":350,"y":360,"wires":[]},{"id":"ebdeaa8d.d92938","type":"json","z":"9bdc093c.fd4b08","name":"","pretty":false,"x":210,"y":360,"wires":[["d2e90de6.cdb69"]]},{"id":"a3d8c237.39c09","type":"mqtt in","z":"9bdc093c.fd4b08","name":"","topic":"hello/sensor","qos":"2","broker":"7564ccbd.a16c44","x":80,"y":300,"wires":[["f8f863ee.c6892"]]},{"id":"f8f863ee.c6892","type":"debug","z":"9bdc093c.fd4b08","name":"","active":true,"tosidebar":true,"console":false,"complete":"false","x":230,"y":300,"wires":[]},{"id":"520ad9a0.e248f8","type":"mqtt-broker","z":"","broker":"localhost","port":"1883","clientid":"","usetls":false,"compatmode":true,"keepalive":"60","cleansession":true,"birthTopic":"","birthQos":"0","birthPayload":"","willTopic":"","willQos":"0","willPayload":""},{"id":"7564ccbd.a16c44","type":"mqtt-broker","z":"","broker":"localhost","port":"1883","clientid":"","usetls":false,"compatmode":true,"keepalive":"60","cleansession":true,"birthTopic":"","birthQos":"0","birthPayload":"","willTopic":"","willQos":"0","willPayload":""}] ``` ![MQTT.fx](./node-red-mqtt_04.png) We are now able to receive the MQTT message from our broker in Node-RED: ![MQTT.fx](./node-red-mqtt_05.png)<file_sep>--- date: "2020-02-02" title: "Running Gitlab as Docker Registry" categories: - LINUX - Docker --- ![<NAME>, Cambodia](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg) <!-- TOC --> - [Install the Local Docker Registry](#install-the-local-docker-registry) - [Copy an image from Docker Hub to your Registry](#copy-an-image-from-docker-hub-to-your-registry) - [Podman and insecure Registries](#podman-and-insecure-registries) - [Pushing Images into your local Registry](#pushing-images-into-your-local-registry) - [Pulling Images from your local Registry](#pulling-images-from-your-local-registry) - [Enable the Container Registry in Gitlab](#enable-the-container-registry-in-gitlab) <!-- /TOC --> ## Install the Local Docker Registry Use a command like the following to start the [registry 
container](https://docs.docker.com/registry/deploying/):

```bash
podman run -d -p 5000:5000 --restart=always --name registry registry:2

podman ps -a
CONTAINER ID  IMAGE                         STATUS             PORTS                   NAMES
1a14df139a2f  docker.io/library/registry:2  Up 28 seconds ago  0.0.0.0:5000->5000/tcp  registry
```

### Copy an image from Docker Hub to your Registry

Pull the hello-world:latest image from Docker Hub:

```bash
podman pull hello-world:latest
```

Tag the image as `localhost:5000/my-world`. This creates an additional tag for the existing image. When the first part of the tag is a hostname and port, Docker interprets this as the location of a registry when pushing.

```bash
podman tag hello-world:latest localhost:5000/my-world

podman images
REPOSITORY                  TAG     IMAGE ID      CREATED       SIZE
docker.io/gitlab/gitlab-ce  latest  8065f4b39790  4 days ago    2.06 GB
docker.io/library/registry  2       708bc6af7e5e  3 months ago  26.3 MB
localhost:5000/my-world     latest  bf756fb1ae65  4 months ago  20 kB
```

### Podman and insecure Registries

We can add our local and non-TLS protected Docker Registry through the system-wide registries configuration file. On CentOS 8, that file resides at `/etc/containers/registries.conf`:

```conf
# The only valid categories are: 'registries.search', 'registries.insecure',
# and 'registries.block'.

[registries.search]
registries = ['docker.io', 'quay.io']

# If you need to access insecure registries, add the registry's fully-qualified name.
# An insecure registry is one that does not have a valid SSL certificate or only does HTTP.
[registries.insecure]
registries = ['localhost:5000']
```

Here you can see I have two registries defined under the search header and a single registry defined as an insecure registry - our local registry on port `5000`. The registries under the search header are registries that Podman will search when you try to find an image that is not fully-qualified.

### Pushing Images into your local Registry

Push the image to the local registry running at `localhost:5000`:

```bash
podman push localhost:5000/my-world

Getting image source signatures
Copying blob 9c27e219663c done
Copying config bf756fb1ae done
Writing manifest to image destination
Storing signatures
```

### Pulling Images from your local Registry

Remove the locally-cached `hello-world:latest` and `localhost:5000/my-world` images, so that you can test pulling the image from your registry. This does not remove the `localhost:5000/my-world` image from your registry.

```bash
podman rmi hello-world:latest
podman rmi localhost:5000/my-world
podman images
```

Pull the `localhost:5000/my-world` image from your local registry:

```bash
podman pull localhost:5000/my-world
podman run localhost:5000/my-world
```

## Enable the Container Registry in Gitlab

After the installation is complete, you will have to configure the Registry's settings in gitlab.yml in order to enable it.

```bash
nano /srv/gitlab/data/gitlab-rails/etc/gitlab.yml
```

Scroll down to the __Container Registry__ section and add the following information:

```yml
## Container Registry

registry:
  enabled: true
  host: centos8.fritz.box
  port: 5005
  api_url: http://localhost:5000/
  key: /var/opt/gitlab/gitlab-rails/etc/gitlab-registry.key
  path: shared/registry
  issuer: omnibus-gitlab-issuer
```

Your `gitlab.rb` (open it with `nano /srv/gitlab/config/gitlab.rb`) should contain the Registry URL as well as the path to the existing TLS certificate and key used by GitLab:

```ruby
##########################################################################
## Container Registry settings
##! Docs: https://docs.gitlab.com/ee/administration/container_registry.html
###########################################################################

registry_external_url 'http://centos8.fritz.box:5005'
```

Your local registry is now available from inside your repository:

![Running Gitlab as Docker Registry](./Gitlab_Docker_Registry_01.png)
<file_sep>---
date: "2019-08-05"
title: "MQTT Networks with OpenHAB 2"
categories:
- MQTT
- Smarthome
- IoT
---

![Bhaktapur, Nepal](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [OpenHAB Configuration through Paper UI](#openhab-configuration-through-paper-ui)
- [MQTT Binding](#mqtt-binding)

<!-- /TOC -->

## OpenHAB Configuration through Paper UI

The Paper UI is a new interface that helps setting up and configuring your openHAB instance. It does not (yet) cover all aspects, so you still need to resort to textual configuration files.

__Add-on management__: Easily install or uninstall [openHAB add-ons](https://www.openhab.org/docs/configuration/%7B%7Bdocu%7D%7D/addons/uis/paper/readme.html):

### MQTT Binding

Go to the __Add-Ons__ Menu and click on the __Bindings Tab__. We are going to use the MQTT protocol to communicate with our INSTAR IP camera from OpenHAB. To be able to connect to the [INSTAR MQTT Server](https://wiki.instar.com/Advanced_User/INSTAR_MQTT_Broker) we need to install the [MQTT Binding](https://www.openhab.org/addons/bindings/mqtt/) - make sure that you choose __Version 2__ (at the moment of writing this is the _2.5.0_ version).

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_1.png)

---

Now switch to your OpenHAB Inbox:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_2.png)

---

Click on the __+__ icon to add the MQTT Binding:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_3.png)

---

Find the MQTT Binding inside the list and select it:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_4.png)

---

Click on __Add Manually__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_5.png)

---

And choose the __MQTT Broker__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_6.png)

---

Add your broker's IP address and the broker port - by default this is __1883__. Note that we are first not going to use [TLS encryption](https://community.openhab.org/t/mqtt-binding-and-ssl/40622). We can add it once we are able to establish the connection:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_7.png)

---

Your INSTAR Full HD camera's MQTT login is the one you added in the [MQTT Menu](https://wiki.instar.com/Advanced_User/INSTAR_MQTT_Broker/). Then save your settings by clicking on the blue confirmation button:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_8.png)

---

Back in the __Things Menu__ click on __+__ again - this time to add a __Thing__ that we can use for a _PaperUI_ button later on:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_9.png)

---

Click on __MQTT Binding__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_10.png)

---

And __Add Manually__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_11.png)

---

This time we need to add a __Generic MQTT Thing__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_12.png)

---

I am going to create a switch that activates or deactivates the alarm areas of an INSTAR IN-8015FHD camera. The `Thing ID` represents the MQTT topic that we will have to address to send the command to the selected camera. As __Bridge__ you need to select the MQTT broker you just created.
Click on the blue button to confirm:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_13.png)

---

Back in the __Thing Menu__ click to edit the __Thing__ you just created:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_14.png)

---

Click on the blue __+__ button to add a __Channel__ to the __Thing__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_15.png)

---

I am going to use an __On/Off Switch__. The MQTT topic to switch the first alarm area is created by adding the prefix `instar` and your camera's [LAN MAC Address](https://wiki.instar.com/Web_User_Interface/1080p_Series/System/Info/) (e.g. `00:03:89:88:88:11` - without the colons) to the corresponding [INSTAR MQTT topic](https://wiki.instar.com/Advanced_User/INSTAR_MQTT_Broker/) `alarm/area1/enable` -> `instar/000389888811/alarm/area1/enable` for the __COMMAND Topic__. As message payload we need to add `{"val":"1"}` and `{"val":"0"}` to either switch the area on or off. To set the state of our switch, we can use the __STATUS Topic__ `instar/000389888811/status/alarm/area1/enable`:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_16.png)

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_16a.png)

---

In the expanded settings we have to set the Quality-of-Service `QoS` to deliver every update __At-least-once__. Now click on Save to continue. By clicking on the blue icon in front of the created channel we are now able to link it to a switch in the __OpenHAB PaperUI__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_17.png)

---

Click on __Create new item...__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_18.png)

---

Click on __Link__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_19.png)

---

You can now click on __Control__ to open the __PaperUI__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_20.png)

---

Our Alarm Areas can now be switched from the OpenHAB __Paper UI__. Clicking the switch will send the MQTT message to our MQTT server, which is going to notify our camera to either switch [area 1 on or off](https://wiki.instar.com/Web_User_Interface/1080p_Series/Alarm/Areas/). All right, that's it - the Binding was successfully added and we are now able to control all of our camera functions with the MQTT interface.
<file_sep>---
date: "2018-11-13"
title: "Node-RED with IFTTT"
categories:
- Node-RED
- IoT
---

import GifContainer from "../../src/components/ImageContainer";

![Shenzhen, China](./photo-15491736416_6abd8de751_o.png)

<!-- TOC -->

- [Basic Setup](#basic-setup)
- [Node-RED Introduction](#node-red-introduction)
- [IFTTT Setting up a Webhook](#ifttt-setting-up-a-webhook)
- [Node-RED Contacting the external Webhook](#node-red-contacting-the-external-webhook)
- [IFTTT Contacting a Node-RED Endpoint](#ifttt-contacting-a-node-red-endpoint)
- [Real-world example](#real-world-example)
- [Deactivate the Alarm when I am Home](#deactivate-the-alarm-when-i-am-home)

<!-- /TOC -->

## Basic Setup

_If This Then That_, also known as [IFTTT](https://ifttt.com/), is a free web-based service to create chains of simple conditional statements, called applets. Build your own applets to connect web services with your INSTAR IP Camera.

### Node-RED Introduction

Use your Node-RED installation under Windows or Linux (e.g. on a Raspberry Pi) to serve as an interface between your camera and the [IFTTT services](https://ifttt.com/discover).
Node-RED will serve both to send messages to the IFTTT Webhook API when an event occurs on your local network, as well as to provide an HTTP Endpoint for your IFTTT Applets to trigger events on your local network. An example would be: a camera triggers an alarm and contacts Node-RED, which forwards a message to IFTTT, which in turn sends a message to your smartphone. Or the other way around - your phone's GPS detects that you are arriving home, the IFTTT smartphone app contacts the IFTTT server, and your local Node-RED installation is informed to deactivate the motion detection on your INSTAR IP cameras.

## IFTTT Setting up a Webhook

1. First you need to [create an account](https://ifttt.com/join) on IFTTT and [login to your account](https://ifttt.com/login?wp_=1). Then go to the __My Applets__ tab and click on __New Applet__:

---

![IFTTT](./IFTTT_01.png)

---

2. Click on __this__ to create the conditional part of the script - we want to select what needs to happen for this script to be executed:

---

![IFTTT](./IFTTT_02.png)

---

3. As mentioned above, we want to use the [webhook service](https://ifttt.com/services/maker_webhooks) - which is basically just a web address that your Node-RED installation needs to contact to trigger the IFTTT applet. Search for _web_ and select the __Webhooks Service__:

---

![IFTTT](./IFTTT_03.png)

![IFTTT](./IFTTT_04.png)

---

4. Then name the event - this will be part of the URL that we will later have to contact with Node-RED to trigger the event:

---

![IFTTT](./IFTTT_05.png)

---

5. With the trigger set up we now need to assign an IFTTT service we want to trigger. To do this click on __that__:

---

![IFTTT](./IFTTT_06.png)

---

6. Let's use the __Email service__ - when the URL (webhook) for this applet is contacted we want to receive an Email to the address that we used to register with IFTTT:

---

![IFTTT](./IFTTT_07.png)

![IFTTT](./IFTTT_08.png)

---

7. Now we can configure the subject and body text the email should have. Notice the words that are highlighted in white - these are variables that will be filled with data before the mail is sent. __EventName__ will be the name that you chose in step 4 above, __OccuredAt__ will be filled with a time stamp. The __Value 1-3__ below are variables that we can define in Node-RED and send whenever the event is triggered in our flow - the naming, as well as the amount of variables that you want to add, is up to you:

---

![IFTTT](./IFTTT_09.png)

---

8. Confirm your setup and save your applet:

---

![IFTTT](./IFTTT_10.png)

---

### Node-RED Contacting the external Webhook

Now we need to set up Node-RED to contact the webhook that we just configured whenever a local event is fired (we will be using a manual inject node to trigger it for now). The flow that we are going to build will consist of the following nodes:

---

![IFTTT](./IFTTT_11.png)

---

1.
First, copy and import the following code into the Node-RED Admin Panel: ```json [{"id":"87f67c26.0ff7b","type":"inject","z":"b8ce5e67.23af6","name":"","topic":"","payload":"","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":"","x":90,"y":40,"wires":[["3b378299.bdbede"]]},{"id":"e8acbbcd.763f78","type":"http request","z":"b8ce5e67.23af6","name":"Web Req","method":"POST","ret":"txt","url":"https://maker.ifttt.com/trigger/{{event}}/with/key/API_KEY","tls":"","x":374,"y":40,"wires":[["9fe779e1.db2bb8"]]},{"id":"9fe779e1.db2bb8","type":"debug","z":"b8ce5e67.23af6","name":"","active":true,"tosidebar":true,"console":false,"complete":"false","x":521,"y":40,"wires":[]},{"id":"3b378299.bdbede","type":"function","z":"b8ce5e67.23af6","name":"Req Params","func":"msg.event = \"test_request\";\nmsg.payload = {\"value1\": \"testvalue1\", \"value2\": \"testvalue2\", \"value3\": \"testvalue3\"};\nreturn msg;","outputs":1,"noerr":0,"x":226,"y":40,"wires":[["e8acbbcd.763f78"]]}] ``` 2. The Inject Node is set up to insert an empty string when clicked manually. It follows a __Function Node__ that has to set the __Event Name__ - needed to set the right URL for our IFTTT Webhook and a message payload that carries our three variables __Value 1-3__ that we want to display in the IFTTT Email notification: ```js msg.event = "test_request"; msg.payload = {"value1": "testvalue1", "value2": "testvalue2", "value3": "testvalue3"}; return msg; ``` --- ![IFTTT](./IFTTT_12.png) --- 3. Next we have a __HTTP Request Node__ that sets the URL for our IFTTT Webhook as a __POST__ message: ``` https://maker.ifttt.com/trigger/{{event}}/with/key/API_KEY ``` __Note__ that this is using the __Event Name__ that we set as `msg.event` in the previous __Function Node__. But to identify our IFTTT account we first need to find out our personal __API_KEY__ and add it at the end of the URL: --- ![IFTTT](./IFTTT_13.png) --- 4. To get access to our personal API key we first have to sign in to our IFTTT account and open the following page `https://ifttt.com/services/maker_webhooks/settings`. Copy the alpha-numeric key in the Account Info - e.g. if you find the `URL: https://maker.ifttt.com/use/c3oL9WnAbz8Z08KumJWS`, then `<KEY>` is your API key: --- ![IFTTT](./IFTTT_14.png) --- 5. Copy your API key and replace the `API_KEY` part in the URL for the __HTTP Request Node__ in Node-RED. Then confirm the edit by hitting __Done__ and the __Deploy__ your flow: --- ![IFTTT](./IFTTT_15.png) --- 6. Hit the __Inject Node__ to trigger an event. The __HTTP Request Node__ will contact your IFTTT webhook and you will see a _success message_ outputted into the debug panel from the __Debug Node__ connected to the request node - `Congratulation! You fired the test_request event` - telling you that the IFTTT server was contacted and your Applet was executed: --- ![IFTTT](./IFTTT_16.png) --- 7. Check your Email (the one you used to register your IFTTT account) - you will have received a message from IFTTT that follows the structure we set up in the Applet configuration: --- ![IFTTT](./IFTTT_17.png) --- ### IFTTT Contacting a Node-RED Endpoint 1. We now want to set up a Node-RED sequence that contacts an IFTTT Webhook - just as in the step before - but instead of sending an Email, we want IFTTT to contact our own Webhook in Node-RED and trigger an event there that we can consume in a Flow. 
__Note__ that this is just a proof of concept - later we want to use one of the many IFTTT services to replace the manual trigger, enabling us to connect the Node-RED webhook to services like _Amazon Alexa_, _Google Home_, _Nest Home Automation_, etc.

To set up the manual trigger, we could simply recycle the node sequence we built previously. But to explore a different approach, we are going to install a third-party node called [node-red-contrib-ifttt](https://www.npmjs.com/package/node-red-contrib-ifttt) that basically just combines the __Function Node__ and __HTTP Request Node__ we used earlier. (__Note__: with this setup we cannot send custom variables with the message payload - we would still have to add another function node to realize the same sequence we created earlier.)

```json
[{"id":"2efd9020.529d9","type":"inject","z":"b8ce5e67.23af6","name":"","topic":"","payload":"","payloadType":"str","repeat":"","crontab":"","once":false,"onceDelay":"","x":92,"y":139,"wires":[["7abb924d.fcfabc"]]},{"id":"7abb924d.fcfabc","type":"ifttt out","z":"b8ce5e67.23af6","eventName":"ifttt_webhook","key":"<KEY>","x":236,"y":139,"wires":[]},{"id":"d<KEY>","type":"ifttt-key","z":""}]
```

---

![IFTTT](./IFTTT_18.png)

---

2. Double-click the __IFTTT Node__ to add an __Event Name__ - we choose `ifttt_webhook` - and click on the pencil icon to add the __API key__ that we determined earlier:

---

![IFTTT](./IFTTT_19.png)

---

3. Just as in the [previous example](#ifttt-setting-up-a-webhook), we now create an Applet on IFTTT that will use a __Webhook__ to listen to the `ifttt_webhook` event. Go to the __My Applets__ tab on IFTTT and click on __New Applet__. Click on __this__ and choose a __Webhook__ as trigger:

---

![IFTTT](./IFTTT_20.png)

---

4. You have to give this __Webhook__ the __Event Name__ that you chose in Node-RED in the previous step and give it the URL to your Node-RED server. This can be done via the WAN IP address (__Note__ that this usually changes every 24hrs) or via your INSTAR camera's DDNS address (as long as the camera is on the same network as your Node-RED server!). This is followed by the port used by your Node-RED installation - by default this is __Port 1880__. __Note__ that this port needs to be forwarded in your router for IFTTT to be able to access it! We complete the URL by adding `/pushalarm/trigger/recording` to identify the Node-RED webhook that we will have to create later.

5. We can also add a message payload to the webhook request sent by IFTTT - this way we will be able to use the same webhook in Node-RED for more than one IFTTT Applet - e.g. when you leave home, __Applet 1__ sends the payload `{"val": "on"}` to activate the Motion Detection on your IP cameras; when you arrive home, __Applet 2__ sends `{"val": "off"}` to switch it off again. For this we have to switch to the __POST Method__, choose the __JSON Content Type__ and set the value that we need - like `{"val": "on"}`:

---

![IFTTT](./IFTTT_21.png)

---

6. We have now created an IFTTT Applet that, when triggered, will try to contact a webhook (__HTTP IN Node__) in our Node-RED installation - make sure that the URL is reachable from the internet (see step 4) before you continue.
Let's create that HTTP Endpoint in our Node-RED Flow:

```json
[{"id":"988f4c1b.cfac3","type":"http in","z":"b8ce5e67.23af6","name":"IFTTT Trigger","url":"/pushalarm/trigger/recording","method":"post","upload":false,"swaggerDoc":"","x":86,"y":238,"wires":[["650a9d50.1e9294","df518ee6.55111"]]},{"id":"df518ee6.55111","type":"http response","z":"b8ce5e67.23af6","name":"","x":242,"y":239,"wires":[]},{"id":"650a9d50.1e9294","type":"json","z":"b8ce5e67.23af6","name":"","property":"payload","action":"","pretty":false,"x":242,"y":203,"wires":[["c2531a9f.8b5378"]]},{"id":"c2531a9f.8b5378","type":"debug","z":"b8ce5e67.23af6","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":381,"y":203,"wires":[]}]
```

---

![IFTTT](./IFTTT_22.png)

---

The sequence starts with an __HTTP IN Node__ that expects a __POST Request__ on `/pushalarm/trigger/recording` - that is the URL we added to our IFTTT Applet as the webhook that needs to be contacted. We need to add an empty __HTTP Response Node__ at the end for the webhook to be operational:

---

![IFTTT](./IFTTT_23.png)

---

Besides the __HTTP Response Node__, we also connect a __Debug Node__ that receives the payload that we set in the IFTTT Applet - the JSON payload `{"val": "on"}` will be received as a JavaScript object that can be used by the following nodes.

We can now test the chain that we built. Clicking on the __Inject Node__ will contact the __IFTTT Webhook__ that the __IFTTT Applet__ is listening to. The Applet, when triggered, will then contact the __HTTP IN Node__ in our Node-RED Flow and transmit the JSON payload `{"val": "on"}`, as seen in the debug panel.

<GifContainer gifUrl="/assets/gif/IFTTT_24.gif" alt="IFTTT and Node-RED" />

## Real-world example

We are now able to trigger IFTTT Applets from a Node-RED Flow, as well as trigger the Flow through an IFTTT Applet - which opens up the whole variety of [services](https://ifttt.com/services) IFTTT offers to automate our INSTAR IP camera.

### Deactivate the Alarm when I am Home

IFTTT offers an [Android](https://play.google.com/store/apps/details?id=com.ifttt.ifttt) and [iOS](https://itunes.apple.com/jp/app/ifttt/id660944635?mt=8) App that can use the sensors on your phone to trigger your IFTTT Applets. For example, create a new Applet, choose __this__ to add a trigger and search for __location__:

---

![IFTTT](./IFTTT_25.png)

---

Select the __Location Trigger__ and choose if you want this trigger to go off when you enter an area or when you are leaving it - in the end we will have to add two applets for both cases. They can be combined into one Node-RED sequence later on. Choose the _Enter Condition_ for now and point it to your home:

---

![IFTTT](./IFTTT_26.png)

---

Now we have to select a __Webhook__ as the __that__ condition - just as we have done in the earlier example. We will create a new __HTTP IN Node__ listening on `/alarm/md/areas` in Node-RED for it - the URL for the webhook will be `https://<DDNS Address>:1880/alarm/md/areas` accordingly. The Applet has to use this URL to __POST__ the `{"val":"off"}` message:

---

![IFTTT](./IFTTT_27.png)

---

Repeat those steps for a second Applet that posts `{"val":"on"}` whenever you leave the area.
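Once the Node-RED flow shown below is deployed, you can simulate the POST request that IFTTT will send to your webhook - a quick sanity check from any shell. The host and port here are assumptions; replace them with your own Node-RED server's address:

```bash
# Simulate the IFTTT webhook call against the local Node-RED endpoint
# (192.168.2.48 is a placeholder for your Node-RED server)
curl -X POST \
  -H "Content-Type: application/json" \
  -d '{"val":"off"}' \
  "http://192.168.2.48:1880/alarm/md/areas"
```

If the endpoint is wired up correctly, the parsed payload object will show up in the Node-RED debug panel.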
We can now use a variation of the Alarm Flow that we [already used a couple of times](/Advanced_User/OpenHAB_Home_Automation/IP_Camera_Control/#setting-up-node-red) to switch the alarm areas of our INSTAR IP camera on and off when the message `{"val":"on"}` or `{"val":"off"}` is posted to our __HTTP IN Node__ webhook - you can [download the Flow from here](/Node-RED_Flows/nodered_flow_alarm_areas_IFTTT.json) and add your camera login to the __Login Node__. When an IFTTT Applet triggers our Node-RED webhook, the CGI commands for activating or deactivating the alarm areas will be sent to your camera. __Note__ that the commands used here only work with HD and Full HD cameras - you can replace them with any other CGI command you need:

<GifContainer gifUrl="/assets/gif/IFTTT_28.gif" alt="IFTTT and Node-RED" /><file_sep>---
date: "2019-12-03"
title: "Podman FTP Server Container"
categories:
  - LINUX
  - Smarthome
  - IoT
  - Docker
---

![Victory Harbour, Hong Kong](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Building the Docker Image](#building-the-docker-image)
- [Using the Docker Image](#using-the-docker-image)

<!-- /TOC -->

## Building the Docker Image

* Create the `Dockerfile`:

```dockerfile
# Dockerfile for vsftpd on CentOS7
FROM centos:7
MAINTAINER <EMAIL>

RUN yum -y update; yum -y install which vsftpd net-tools vsftpd-sysvinit; yum clean all
COPY vusers.txt /etc/vsftpd/
RUN db_load -T -t hash -f /etc/vsftpd/vusers.txt /etc/vsftpd/vsftpd-virtual-user.db; rm -v /etc/vsftpd/vusers.txt; \
    chmod 600 /etc/vsftpd/vsftpd-virtual-user.db
COPY vsftpd.conf /etc/vsftpd/
COPY vsftpd.virtual /etc/pam.d/
RUN mkdir -p /home/vftp/ftpuser; chown -R ftp:ftp /home/vftp

EXPOSE 20 21
CMD ["/usr/sbin/vsftpd","-obackground=NO"]
```

Before building the image we need to create three more files: a PAM configuration for the vsftpd virtual users, the `vsftpd.conf` server configuration and a file with the virtual user logins:

* `vsftpd.conf` - Server Configuration

```conf
anonymous_enable=NO
local_enable=YES
virtual_use_local_privs=YES
write_enable=YES
local_umask=022
pam_service_name=vsftpd.virtual
guest_enable=YES
user_sub_token=$USER
local_root=/home/vftp/$USER
chroot_local_user=YES
allow_writeable_chroot=YES
hide_ids=YES
xferlog_enable=YES
xferlog_file=/var/log/vsftpd.log
```

* `vsftpd.virtual` - User Configuration

```bash
#%PAM-1.0
auth       required     pam_userdb.so db=/etc/vsftpd/vsftpd-virtual-user
account    required     pam_userdb.so db=/etc/vsftpd/vsftpd-virtual-user
session    required     pam_loginuid.so
```

* `vusers.txt` - User Login

```bash
ftpuser
mypassword
```
With those 4 files in place, run the following command from the same directory:

```bash
podman build -t centos-vsftpd .

Sending build context to Docker daemon 5.632kB
Step 1/10 : FROM centos:7
7: Pulling from library/centos
ab5ef0e58194: Already exists
Digest: sha256:4a701376d03f6b39b8c2a8f4a8e499441b0d567f9ab9d58e4991de4472fb813c
Status: Downloaded newer image for centos:7
 ---> 5e35e350aded
Step 2/10 : MAINTAINER <EMAIL>
 ---> Running in 1c61a249529e
Removing intermediate container 1c61a249529e
 ---> 36e28ef538a6
Step 3/10 : RUN yum -y update; yum -y install which vsftpd net-tools vsftpd-sysvinit; yum clean all
 ---> Running in ae775834a509
Loaded plugins: fastestmirror, ovl
Determining fastest mirrors
 * base: ftp.jaist.ac.jp
 * extras: ftp.jaist.ac.jp
 * updates: ftp.jaist.ac.jp
Resolving Dependencies
--> Running transaction check

...

Successfully built 76812707f80e
Successfully tagged centos-vsftpd:latest
```

## Using the Docker Image

We can transfer this image to an offline machine by exporting it:

```bash
podman save -o centos-vsftpd.docker centos-vsftpd:latest
```

Copy the exported `centos-vsftpd.docker` file to your offline PC and import it:

```bash
podman load -i centos-vsftpd.docker

Getting image source signatures
Copying blob 1f3e340aabdd done
Copying blob b6845f432261 done
Copying blob d83f2fa44778 done
Copying blob 77b174a6a187 done
Copying blob d0419636bcc7 done
Copying blob e5dd2d926baa done
Copying blob 9721a5c27b58 done
Copying config 76812707f8 done
Writing manifest to image destination
Storing signatures
Loaded image(s): localhost/centos-vsftpd:latest
```

Make sure that the FTP user directory exists and can be written to by your podman user:

```bash
mkdir /opt/vsftpd
mkdir /opt/vsftpd/ftpuser
chmod 775 /opt/vsftpd/*
```

Verify that the image is available and run the container with:

```bash
podman images

REPOSITORY                TAG     IMAGE ID      CREATED      SIZE
localhost/centos-vsftpd   latest  76812707f80e  2 hours ago  307 MB
```

```bash
podman run \
    --name vsftpd \
    --net=host \
    --privileged \
    --rm \
    -v /opt/vsftpd/ftpuser/:/home/vftp/ftpuser/ \
    localhost/centos-vsftpd
```

![Podman FTP Server Container](./vsftpd_in_Docker_01.png)<file_sep>---
date: "2018-11-24"
title: "Node-RED Webhooks"
categories:
  - IoT
  - Node-RED
---

![<NAME>, Cambodia](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Setting up a Webhook in Node-RED](#setting-up-a-webhook-in-node-red)

<!-- /TOC -->

In IoT devices it is often necessary to propagate a state from one device to another. A very established way of doing this is by using an HTTP GET request. The receiving device needs to provide a URL that can be contacted by the device that detects the state change - e.g. motion detected. We are going to use Node-RED as our Alarmserver that will be contacted by an IP camera in case of an alarm.

## Setting up a Webhook in Node-RED

Start by [importing the following flow](/node-red-flows-and-how-to-import-them) to your Node-RED panel:

```json
[{"id":"58bb17f6.7582b8","type":"http response","z":"f90096c5.9db488","name":"","x":430,"y":1040,"wires":[]},{"id":"e422f5bb.9081d8","type":"template","z":"f90096c5.9db488","name":"page","field":"payload","fieldType":"msg","format":"handlebars","syntax":"mustache","template":"<html>\n <head></head>\n <body>\n <h2>Hallo von Node-RED</h2>\n </body>\n</html>","x":310,"y":1040,"wires":[["58bb17f6.7582b8"]]},{"id":"d453a0d1.bfe63","type":"http in","z":"f90096c5.9db488","name":"Webhook","url":"/test-webhook/","method":"get","upload":false,"swaggerDoc":"","x":185,"y":1080,"wires":[["e422f5bb.9081d8","99d441b.1dd64c"]]},{"id":"99d441b.1dd64c","type":"http request","z":"f90096c5.9db488","name":"Go to Position","method":"GET","ret":"txt","paytoqs":false,"url":"http://192.168.2.117/param.cgi?cmd=preset&-act=goto&-number=2&-usr=admin&-pwd=instar","tls":"","persist":false,"proxy":"","authType":"","x":340,"y":1080,"wires":[[]]}]
```

The flow consists of 4 nodes:

![Node-RED Webhooks and Alarmserver](./Node-RED_Webhooks_01.png)

The HTTP entry node on the left gives us our webhook and we have to set the URL that we want to expose to our network here - `"url":"/test-webhook/"`. I added some HTML and an HTTP response node to make it a valid URL that can be contacted by external devices and used to inject information into our flow.

Connected to our webhook is an HTTP request node that contacts an IP camera (`http://192.168.2.117/`) on our network and sends a [CGI command](https://wiki.instar.com/1080p_Series_CGI_List/) to have it do something - the command `/param.cgi?cmd=preset&-act=goto&-number=2&-usr=admin&-pwd=<PASSWORD>` sends our camera to preset position 3. Here you can use __ANY__ action supported by Node-RED as a response to someone contacting our webhook.

To test that our webhook is operational we can call it from our browser's address bar - `http://Node-RED-Server-Address-and-Port/test-webhook/`:

![Node-RED Webhooks and Alarmserver](./Node-RED_Webhooks_02.png)

The URL should lead you to the HTML page created by the Node-RED webhook. Once you open the website the flow is triggered and the action that you connected to the hook will be executed. In my case I have a camera start moving into a preset position. Now all you have to do is add this URL to your sensor device.
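The browser is not required, by the way - any HTTP client can fire the hook. A quick test from a shell could look like this (host and port are placeholders for your own Node-RED server):

```bash
# Trigger the webhook - this runs the connected flow just like a browser visit
curl "http://192.168.2.48:1880/test-webhook/"
```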
As an example I will use a second IP camera that, once it detects motion, will contact the first camera and tell it to turn around to see what is happening:

![Node-RED Webhooks and Alarmserver](./Node-RED_Webhooks_03.png)<file_sep>---
date: "2017-12-14"
title: "Gatsby.js Knowledgebase"
categories:
  - Javascript
  - React
  - Gatsby
  - Elasticsearch
---

![Tanna Island, Vanuatu](./photo-34445605492_751dc45f2a_o.png)

# gatsby-starter-default

The default Gatsby starter

[Github](https://github.com/mpolinowski/gatsby-wiki)

For an overview of the project structure please refer to the [Gatsby documentation - Building with Components](https://www.gatsbyjs.org/docs/building-with-components/)

Install this starter (assuming Gatsby is installed) by running from your CLI:

```
gatsby new gatsby-wiki
```

<!-- TOC -->

- [gatsby-starter-default](#gatsby-starter-default)
  - [01 Start your Gatsby development environment](#01-start-your-gatsby-development-environment)
  - [02 Adding content and Linking Pages](#02-adding-content-and-linking-pages)
  - [03 Styling your JSX](#03-styling-your-jsx)
  - [04 Adding Interactive Components](#04-adding-interactive-components)
  - [05 Importing Components to your Sites](#05-importing-components-to-your-sites)
  - [06 Passing down Props](#06-passing-down-props)
    - [Changing Headers](#changing-headers)
    - [Changing Styles](#changing-styles)
    - [Setting Default Props](#setting-default-props)
  - [07 Gatsby Plugins](#07-gatsby-plugins)
    - [Progress Animation](#progress-animation)
    - [Offline Support and Manifest](#offline-support-and-manifest)
  - [08 Single-Page-Application](#08-single-page-application)
  - [09 GraphQL](#09-graphql)
  - [10 Adding File Data](#10-adding-file-data)
  - [11 Working with Markdown](#11-working-with-markdown)
    - [Post Template for our Markdown Data](#post-template-for-our-markdown-data)
    - [Nested Routes with Markdown](#nested-routes-with-markdown)
    - [Creating an Index Page](#creating-an-index-page)
    - [Catching Links from Markdown](#catching-links-from-markdown)
  - [12 Adding Material-UI](#12-adding-material-ui)
  - [13 Adding Elasticsearch](#13-adding-elasticsearch)
  - [14 Build the Static Page](#14-build-the-static-page)

<!-- /TOC -->

## 01 Start your Gatsby development environment

Now change into your site directory and run the Gatsby development environment using npm:

```
cd gatsby-wiki
npm run develop
```

You can now access your website at http://localhost:8000:

![Gatsby.js Wiki Starter](./gatsby_01.png)

## 02 Adding content and Linking Pages

The _/src/pages/index.js_ file contains regular JSX - add any HTML inside the `<div/>` tag to make it appear inside your website (Gatsby is hot-reloading).

```js
import React from 'react'
import Link from 'gatsby-link'

const IndexPage = () => (
  <div>
    <h1>Hi people</h1>
    <p>Welcome to your new Gatsby site.</p>
    <p>Now go build something great.</p>
    <Link to="/page-2/">Go to page 2</Link>
  </div>
)

export default IndexPage
```

You need to import Link from gatsby-link to use the Link Component and link to other pages - above you see the:

```js
<Link to="/page-2/">Go to page 2</Link>
```

component, linking our __index.js__ page to another page inside the same folder with the name __page-2.js__. Every js file inside the _/src/pages_ folder will automagically be routed by Gatsby!

![Gatsby.js Wiki Starter](./gatsby_02.png)

## 03 Styling your JSX
You can simply add inline styles to your component, e.g.:

```js
const IndexPage = () => (
  <div style={{color: 'tomato', background: 'blue'}}>
    <h1>Hi people</h1>
    <p>Welcome to your new Gatsby site.</p>
    <p>Now go build something great.</p>
    <Link to="/page-2/">Go to page 2</Link>
  </div>
)
```

For some advanced styles check out the Gatsby plugins [Glamor](https://www.gatsbyjs.org/packages/gatsby-plugin-glamor/) or [Styled Components](https://www.gatsbyjs.org/packages/gatsby-plugin-styled-components/). How to install those plugins is explained below - [Gatsby Plugins](#07-gatsby-plugins).

## 04 Adding Interactive Components

React allows you to add interaction to your page - we want to add a counter, set its state to 0 on load and have two buttons that use onClick events to increment or decrement the state of the counter. We can just add a new file _/src/pages/counter.js_ and link to it from the index page `<Link to="/counter/">Go to Counter</Link>`:

```js
import React from 'react'

class Counter extends React.Component {
  constructor() {
    super()
    this.state = { count: 0 }
  }

  render() {
    return <div>
            <h1>Counter</h1>
            <p>current count: {this.state.count}</p>
            <button onClick={() => this.setState({ count: this.state.count + 1 })}>plus</button>
            <button onClick={() => this.setState({ count: this.state.count - 1 })}>minus</button>
          </div>
  }
}

export default Counter
```

![Gatsby.js Wiki Starter](./gatsby_03.png)

## 05 Importing Components to your Sites

So far, we used every file inside the pages directory as a separate site. But React.js allows us to take the default component - that is exported at the bottom of the file - and import it into another page. For example, we could take the `<Counter />` component above and add it to the index page (instead of just linking to it). We just need to add an import line to the beginning of _/src/pages/index.js_:

```js
import React from 'react'
import Link from 'gatsby-link'
import Counter from './counter'
```

And reference the Counter inside the JSX code of index.js, like this:

```js
const IndexPage = () => (
  <div>
    <h1>Hi people</h1>
    <p>Welcome to your new Gatsby site.</p>
    <p>Now go build something great.</p>
    <Link to="/page-2/">Go to Page 2</Link><br/><br/>
    <Counter />
  </div>
)
```

![Gatsby.js Wiki Starter](./gatsby_04.png)

## 06 Passing down Props

We can now pass properties from the parent component down to the Counter component - e.g. we can change the title of our counter, depending on the page it is displayed on:

### Changing Headers

```js
<Counter header="This is the Index Counter" />
```

The prop header is now available to the render function inside the Counter component. Now we can get different headers for our Counter component, depending on the parent component that called it - awesome!
```js
render() {
  return <div>
          <h3>{this.props.header}</h3>
          <p>current count: {this.state.count}</p>
          <button onClick={() => this.setState({ count: this.state.count + 1 })}>plus</button>
          <button onClick={() => this.setState({ count: this.state.count - 1 })}>minus</button>
        </div>
}
```

### Changing Styles

The same goes for styles - if we want the header to match the colour scheme of our parent component, we can just pass down a color prop to the Counter component:

```js
<Counter header="This is the Index Counter" color="rebeccapurple" />
```

And add the necessary inline styles in the component itself:

```js
render() {
  return <div>
          <h3 style={{color: this.props.color}}>{this.props.header}</h3>
          <p>current count: {this.state.count}</p>
          <button onClick={() => this.setState({ count: this.state.count + 1 })}>plus</button>
          <button onClick={() => this.setState({ count: this.state.count - 1 })}>minus</button>
        </div>
}
```

### Setting Default Props

To be able to still open the _localhost:8000/counter_ URL, we now have to define a default prop inside the counter component - the header tag and font colour will be undefined if there is no parent component passing down props! This can be done with Prop-Types, which we need to install:

```
npm install --save prop-types
```

Now we can import it into _/src/pages/counter.js_:

```js
import React from 'react'
import PropTypes from 'prop-types'
```

And define a default value for the header prop below the Counter component (above the export statement):

```js
Counter.defaultProps = {
  header: 'Default Counter',
  color: 'black'
}
```

## 07 Gatsby Plugins

[Plugins](https://www.gatsbyjs.org/docs/plugins/) are Node.js packages that implement Gatsby APIs. They enable you to easily solve common website build problems, e.g. set up Sass, add markdown support, process images, etc.

### Progress Animation

In this example, we want to use a plugin for [NProgress.js](http://ricostacruz.com/nprogress/) to add a loading animation to our site. You install the [NProgress plugin](https://www.gatsbyjs.org/packages/gatsby-plugin-nprogress/) with npm:

```
npm install --save gatsby-plugin-nprogress
```

Now we have to tell Gatsby to use the plugin by editing (creating, if the file doesn't exist) the gatsby-config.js file inside the root directory of our app. Coming from the starter template, we already have the react-helmet plugin installed (this plugin is described below: [Single-Page-Application](#08-single-page-application)). Now simply add the gatsby-plugin-nprogress to the array:

```js
module.exports = {
  siteMetadata: {
    title: `Gatsby Wiki`,
  },
  plugins: [
    `gatsby-plugin-react-helmet`,
    {
      resolve: `gatsby-plugin-nprogress`,
      options: {
        // Setting a color is optional.
        color: `rebeccapurple`,
        // Disable the loading spinner.
        showSpinner: false,
      }
    }
  ],
}
```

### Offline Support and Manifest

We now want to add a Serviceworker to our site that helps us cache important parts of our application, giving us a certain amount of offline support - as the [Offline Plugin](https://www.gatsbyjs.org/packages/gatsby-plugin-offline/) tells us, we will also install the [Manifest Plugin](https://www.gatsbyjs.org/packages/gatsby-plugin-manifest/) (make sure that it is listed before the Offline Plugin!).

```
npm install --save gatsby-plugin-manifest
npm install --save gatsby-plugin-offline
```

Now we add them to our Gatsby configuration:

```js
module.exports = {
  siteMetadata: {
    title: `Gatsby Wiki`,
  },
  plugins: [
    `gatsby-plugin-react-helmet`,
    {
      resolve: `gatsby-plugin-nprogress`,
      options: {
        // Setting a color is optional.
        color: `rebeccapurple`,
        // Disable the loading spinner.
        showSpinner: false,
      }
    },
    {
      resolve: `gatsby-plugin-manifest`,
      options: {
        name: "Gatsby Wiki",
        short_name: "Gatsby Wiki",
        start_url: "/",
        background_color: "white",
        theme_color: "rebeccapurple",
        display: "minimal-ui",
        icons: [
          {
            // Everything in /static will be copied to an equivalent
            // directory in /public during development and build, so
            // assuming your favicons are in /static/favicons,
            // you can reference them here
            src: `/apple-touch-icon.png`,
            sizes: `180x180`,
            type: `image/png`,
          },
          {
            src: `/favicon.ico`,
            sizes: `256x256`,
            type: `image/png`,
          },
        ],
      },
    },
    `gatsby-plugin-offline`,
  ],
}
```

## 08 Single-Page-Application

Gatsby offers an easy way to create Single-Page-Applications (__SPA's__) with its layout feature. You can find the JSX and CSS inside _/src/layout_. The Gatsby starter that we are using already uses a header navbar that is defined inside the index.js file (and comes with the necessary CSS). You can see that the app already uses [React-Helmet](https://github.com/nfl/react-helmet) as a Gatsby plugin. This reusable React component will manage all of your changes to the document `<head>`. Helmet takes plain HTML tags and outputs plain HTML tags.

The layout defines a `<Header />` component that - together with the `<Helmet />` component - is used inside the `<TemplateWrapper />`. All your content from the pages that we created so far is then injected into the wrapper via the {children} tag. This way, you can create top-navbars, headers, side-navigations and footers that are then displayed on all of your websites.

## 09 GraphQL

We can define some global variables inside gatsby-config.js in the root directory of our app:

```js
module.exports = {
  siteMetadata: {
    title: `Gatsby Wiki`,
    author: `<NAME>`,
    description: `Trying out Gatsby`
  }
}
```

This data will be available to every page and can be queried using __GraphQL__. Just add the following GraphQL query to _/src/pages/index.js_ to get hold of those values:

```js
export const query = graphql`
  query FirstQuery {
    site {
      siteMetadata {
        title
        author
        description
      }
    }
  }
`
```

Then we have to inject this `{data}` into the parent component `<IndexPage />`:

```js
const IndexPage = ({data}) =>
```

Now we are able to query this data inside the component:

```js
<h1>{data.site.siteMetadata.description}</h1>
```

Why is it __data.site.siteMetadata__? Gatsby's graphql debugger is running at `http://localhost:8000/___graphql` - you can also use it to test your queries and see how the results look. Just open the debugger and try out our previous query:

![Gatsby.js Wiki Starter](./gatsby_05.png)

## 10 Adding File Data

With Gatsby you can use GraphQL to query data from your files directly. Transformer plugins transform File nodes into various other types of data, e.g. [gatsby-transformer-json](https://www.gatsbyjs.org/packages/gatsby-transformer-json/) transforms JSON files into JSON data nodes and [gatsby-transformer-remark](https://www.gatsbyjs.org/packages/gatsby-transformer-remark/) transforms markdown files into MarkdownRemark nodes from which you can query an HTML representation of the markdown.

In this case we will use [gatsby-source-filesystem](https://www.gatsbyjs.org/packages/gatsby-source-filesystem/) to create file nodes from our file system.

```
npm install --save gatsby-source-filesystem
```

After installation, add the plugin to gatsby-config.js. You can have multiple instances of this plugin to read source nodes from different locations on your filesystem.
The following sets up the Jekyll pattern of having a _pages_ directory for __Markdown files__ and a _data_ directory for __.json__, __.yaml__ and __.csv__ files:

```js
{
  resolve: `gatsby-source-filesystem`,
  options: {
    name: `pages`,
    path: `${__dirname}/src/pages/`,
  },
},
{
  resolve: `gatsby-source-filesystem`,
  options: {
    name: `data`,
    path: `${__dirname}/src/data/`,
  },
}
```

You can now open the GraphiQL debugger and put in curly brackets - when you start typing allFile, it should offer autocompletion. Just press enter to accept and __CTRL + ENTER__ again to fill out the query for all page IDs:

```
{
  allFile {
    edges {
      node {
        id
      }
    }
  }
}
```

![Gatsby.js Wiki Starter](./gatsby_06.png)

When you delete _id_ and press __CTRL + SPACE__, you will be given a drop-down menu with all options that you can query:

![Gatsby.js Wiki Starter](./gatsby_07.png)

Using the _parent_, _children_ and _relativePath_ attributes enables you to create e.g. a breadcrumb navigation:

![Gatsby.js Wiki Starter](./gatsby_08.png)

We can now add a GraphQL query to _/src/pages/page-2.js_ to loop through all of our pages and display some data:

```js
export const query = graphql`
  query MyFilesQuery {
    allFile {
      edges {
        node {
          relativePath
          prettySize
          extension
          birthTime(fromNow: true)
        }
      }
    }
  }
`
```

Don't forget to inject the `{data}` into the page component:

```js
const SecondPage = ({data}) =>
```

Now we can add some JSX that loops through all of our files and outputs the information inside a `<table>`:

```js
<table>
  <thead>
    <tr>
      <th>relativePath</th>
      <th>prettySize</th>
      <th>extension</th>
      <th>birthTime</th>
    </tr>
  </thead>
  <tbody>
    {data.allFile.edges.map(({node}, index) =>
      <tr key={index}>
        <td>
          {node.relativePath}
        </td>
        <td>
          {node.prettySize}
        </td>
        <td>
          {node.extension}
        </td>
        <td>
          {node.birthTime}
        </td>
      </tr>
    )}
  </tbody>
</table>
```

![Gatsby.js Wiki Starter](./gatsby_09.png)

## 11 Working with Markdown

Now we are able to access information about all of our pages. But as mentioned at the beginning of the last paragraph, we are also able to use __Gatsby Transformer Plugins__ to look into files and make their content available to GraphQL. In this case we want to use Markdown files and transform them to be able to display their content in our website. The transformer plugin needed for this is [gatsby-transformer-remark](https://www.gatsbyjs.org/packages/gatsby-transformer-remark/).

First we need to install the plugin:

```
npm install --save gatsby-transformer-remark
```

And add it to our _gatsby-config.js_:

```js
plugins: [
  `gatsby-transformer-remark`,
]
```

Then create a markdown page inside _/src/pages/FirstMDpost/index.md_ that contains some __FrontMatter__ (metadata at the beginning of the file that can later be queried by GraphQL) and some text:

```
---
path: '/md-posts'
title: 'My first Post'
date: '2017-10-05'
author: '<NAME>'
chapter: 'Index'
---

# This is my first markdown Post!
```

Now we have Markdown available in GraphQL - as before, just start typing allMarkdownRemark (ENTER autocompletes) and then press __CTRL + ENTER__ to complete your query:

![Gatsby.js Wiki Starter](./gatsby_10.png)

Now we can query for the FrontMatter as well as the MD-to-HTML transformed content of each MD file we add to our pages folder:

![Gatsby.js Wiki Starter](./gatsby_11.png)

### Post Template for our Markdown Data

The markdown represents the data that is going to be displayed. But now we need to create a style template that is used with this data. Let's start by adding a new folder inside _/src_ called templates.
Now add a file to it called __post.js__ that will contain the structure template for every post entry. The file contains the JSX markup for our post:

```js
import React from 'react'

export default function Template({data}) {
  const {markdownRemark: post} = data
  return (
    <div>
      <h1>{post.frontmatter.title}</h1>
      <div dangerouslySetInnerHTML={{__html: post.html}} />
    </div>
  )
}

export const postQuery = graphql`
  query BlogPostByPath($path: String!) {
    markdownRemark(frontmatter: { path: { eq: $path} }) {
      html
      frontmatter {
        path
        title
      }
    }
  }
`
```

The `<Template />` component receives `{data}` props that are retrieved by a GraphQL query. The query looks for a markdown post where the called URL equals the $path given inside its frontmatter. So if the URL that you type into your browser was _/md-posts_, a markdown file with a path: _/md-posts_ inside its frontmatter would be a hit.

The query then uses the markdownRemark plugin to transform the post markdown to HTML and make both the path and title from its frontmatter available inside `{data}`, which is passed down into the `<Template />` component and then rendered.

Gatsby is already configured to route all pages inside /src/pages as pages for our website. But now we have to register the posts that are created from the markdown files and the post.js template. To do this, we have to create a file named __gatsby-node.js__ inside the root directory of our app. We are going to use the [createPages Gatsby API](https://www.gatsbyjs.org/docs/node-apis/#createPages) to create pages from our post template:

```js
const path = require('path');

exports.createPages = ({boundActionCreators, graphql}) => {
  const {createPage} = boundActionCreators;
  // const createPage = boundActionCreators.createPage;
  const postTemplate = path.resolve('src/templates/post.js');

  return graphql(`{
    allMarkdownRemark {
      edges {
        node {
          html
          id
          frontmatter {
            path
            title
          }
        }
      }
    }
  }`)
  .then(res => {
    if(res.errors) {
      return Promise.reject(res.errors);
    }

    res.data.allMarkdownRemark.edges.forEach(({node}) => {
      createPage({
        path: node.frontmatter.path,
        component: postTemplate
      })
    })
  })
}
```

Save and restart your app - then open _http://localhost:8000/md-posts_ inside your web browser - voilà!

### Nested Routes with Markdown

To create child posts for the _./src/pages/FirstMDpost/index.md_ file, we can simply add more files to the folder and define nested routes inside their frontmatter - e.g. _./src/pages/FirstMDpost/myfirstpost.md_:

```
---
path: '/md-posts/first-post'
title: 'First Blog Post'
date: '2017-10-05'
author: '<NAME>'
chapter: 'Markdown Posts'
---

# This is my first markdown Post!
```

and _./src/pages/FirstMDpost/mysecondpost.md_:

```
---
path: '/md-posts/second-post'
title: 'Second Blog Post'
date: '2017-10-05'
author: '<NAME>'
chapter: 'Markdown Posts'
---

# A dive into Markdown Syntax
```

They will be accessible via _http://localhost:8000/md-posts/first-post_ and _http://localhost:8000/md-posts/second-post_ respectively.

### Creating an Index Page

We can now use GraphQL to retrieve all of our Markdown pages and apply filters to them.
For this test, we will just add a table to our start page, showing the last 10 posts (I know we only made 3 so far...). We want to order them descending by date and only display pages that are inside the _chapter: 'Markdown Posts'_, which will exclude our _index.md_:

```js
const IndexPage = ({data}) => (
  <div>
    <h2>Markdown Index</h2>
    <p>The table below sorts out all Markdown pages that are not inside the "Markdown Posts" chapter - as defined inside their frontmatter. It also applies a filter, to only display the latest 10 posts. Click here to display &nbsp;
      <Link to="/md-posts/">
        all Markdown pages
      </Link>
    .</p>
    <table>
      <thead>
        <tr>
          <th>Date</th>
          <th>Link</th>
        </tr>
      </thead>
      <tbody>
        {data.allMarkdownRemark.edges.map(post => (
          <tr key={post.node.id}>
            <td>
              {post.node.frontmatter.date}
            </td>
            <td>
              <Link to={post.node.frontmatter.path}>
                {post.node.frontmatter.title}
              </Link>
            </td>
          </tr>
        ))}
      </tbody>
    </table>
  </div>
)

export const pageQuery = graphql`
  query IndexQuery {
    allMarkdownRemark(limit: 10
      sort: {fields: [frontmatter___date], order: DESC}
      filter: { frontmatter: { chapter: {eq: "Markdown Posts"} }}
    ) {
      edges {
        node {
          id
          frontmatter {
            path
            title
            date
          }
        }
      }
    }
  }
`
```

### Catching Links from Markdown

Once you start adding links inside your Markdown files, you will notice that clicking them will reload your application - which isn't good :( But no worries, here is [gatsby-plugin-catch-links](https://www.gatsbyjs.org/packages/gatsby-plugin-catch-links/) coming to your rescue! And the nice thing about it - you install it, add it to your Gatsby plugins inside _./gatsby-config.js_ and it just works:

```
npm install --save gatsby-plugin-catch-links
```

```js
// In your gatsby-config.js
plugins: [
  `gatsby-plugin-catch-links`,
]
```

Sweet!

## 12 Adding Material-UI

To make our life easier, we want to include ready-to-use material design components from the guys @ [Material-UI](https://material-ui-next.com/getting-started/installation/). We are going to install the beta version of v.1.0.0 - which also requires the [Roboto Fontface](https://material-ui-next.com/style/typography/#general) and the [Material-UI Icons](https://www.npmjs.com/package/material-ui-icons):

```
npm install material-ui@next --save
npm install typeface-roboto --save
npm install material-ui-icons --save
```

We can now easily import Material-UI components into our app:

```js
import React from 'react'
import { render } from 'react-dom'
import Button from 'material-ui/Button'
import 'typeface-roboto'

function AppWithButton() {
  return (
    <Button>
      Hello World
    </Button>
  );
}

render(<AppWithButton />, document.querySelector('#app'));
```

## 13 Adding Elasticsearch

One of the prerequisites for this project is that we need to create a lightning-fast interface for [our ElasticSearch Index](https://github.com/mpolinowski/express-static/tree/master/elasticsearch). We already built the [ES6 Class component](https://github.com/mpolinowski/elasticsearch-react-example) for it. And adding it to Gatsby / Material-UI turned out to be surprisingly straight-forward.
First, add _./src/pages/search.jsx_ and modify the [ElasticSearch Component](https://github.com/mpolinowski/elasticsearch-react-example) to play nice with our UI:

```js
import React, { Component } from 'react'
import Link from 'gatsby-link'
import elasticsearch from 'elasticsearch'
import { withStyles } from 'material-ui/styles'
import Grid from 'material-ui/Grid'
import Button from 'material-ui/Button'
import ResultCards from '../components/ResultCards'

const connectionString = 'localhost:9200'
const _index = 'wiki2_de_2017_09_09'
const _type = 'article'

let client = new elasticsearch.Client({
  host: connectionString,
  log: "trace"
})

const rootStyle = {
  flexGrow: 1,
  marginTop: 30,
}

export class Search extends Component {
  constructor(props) {
    super(props)
    this.state = { results: [] };
    this.handleChange = this.handleChange.bind(this)
  }

  handleChange(event) {
    const search_query = event.target.value;
    client.search({
      index: _index,
      type: _type,
      body: {
        query: {
          multi_match: {
            query: search_query,
            fields: ['title^100', 'tags^100', 'abstract^20', 'description^10', 'chapter^5', 'title2^10', 'description2^10'],
            fuzziness: 1,
          },
        },
      },
    }).then(function(body) {
      this.setState({ results: body.hits.hits });
    }.bind(this), function(error) {
      console.trace(error.message);
    });
  }

  render() {
    return (
      <div className="container">
        <input type="text" onChange={this.handleChange} />
        <SearchResults results={this.state.results} />
      </div>
    );
  }
}

const SearchResults = ({results}) => (
  <div className="search_results">
    <br/><hr/>
    <div className={rootStyle}>
      <Grid container spacing={24}>
        {results.map((result , i) =>
          <ResultCards
            key={i}
            image={result._source.image}
            title={result._source.title2}
            link={result._source.link}
            abstract={result._source.abstract}/>
        )}
      </Grid>
    </div>
    <br/><br/><Link to="/" style={{ textDecoration: 'none' }}><Button raised color="primary">Go back to the homepage</Button></Link>
  </div>
)

export default Search
```

The `<SearchResults />` component iterates over the Material UI card inside `<ResultCards />`:

```js
import React from 'react'
import Link from 'gatsby-link'
import Card, { CardActions, CardContent, CardMedia } from 'material-ui/Card'
import Button from 'material-ui/Button'
import Typography from 'material-ui/Typography'
import Grid from 'material-ui/Grid'

const ResultCards = ({image, title, abstract, link}) => (
  <Grid item xs={12} sm={6} lg={4}>
    <Card style={{ maxWidth: 345 }}>
      <CardMedia
        style={{ height: 200 }}
        image={image}
        title={abstract}
      />
      <CardContent>
        <Typography type="headline" component="h4" style={{ minHeight: 60, marginBottom: "10px" }}>
          {title}
        </Typography>
        <Typography component="p" style={{ minHeight: 50, marginBottom: "10px" }}>
          {abstract}
        </Typography>
      </CardContent>
      <CardActions>
        <Link to={link} style={{ textDecoration: 'none' }}>
          <Button dense color="primary">
            Read
          </Button>
        </Link>
        <Button dense color="primary">
          Learn More
        </Button>
      </CardActions>
    </Card>
  </Grid>
)

export default ResultCards
```

and adds the results from the ElasticSearch JSON response - giving us a nice responsive card grid (the images used below are not inside this repository - just add a few PNG files (597x382) to _./public/images/Search_, named according to the image URL defined inside [our ElasticSearch Index](https://github.com/mpolinowski/express-static/tree/master/elasticsearch)):

![Gatsby.js Wiki Starter](./gatsby_12.png)

## 14 Build the Static Page

We now want to move our website from the development environment to our webserver.
Gatsby offers us a simple command to render our React.js page into a static website:

```
npm run build
```

You can find the output inside the _/public_ folder of your Gatsby App.<file_sep>---
date: "2020-06-17"
title: "Salt Pillars & Formulas"
categories:
  - LINUX
---

![Guangzhou, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Working with Pillars](#working-with-pillars)
- [Working with Formulas](#working-with-formulas)

<!-- /TOC -->

## Working with Pillars

Pillars in Salt are arbitrary, minion-specific data. There is a large volume of [Pillar Modules](https://docs.saltstack.com/en/master/ref/pillar/all/index.html) available to pull this data into Salt from external sources. Instructions on how to pull this data in are stored in `*.sls` files. Start by creating the directory `/srv/pillar` and add the following files:

__top.sls__

```yaml
base:
  '*':
    - name
```

The `Top File` sets the permissions - which minions have access to a specific file. In this case all minions will have access to the file `name.sls`:

__name.sls__

```yaml
{% if grains.os == 'Ubuntu' %}
name: Ubuntu Rocks
{% elif grains.os == 'CentOS' %}
name: CentOS Rocks
{% endif %}
```

We can clean this up by using a dictionary:

```yaml
{% set lookup = {
    'Ubuntu': 'Ubuntu Rocks',
    'CentOS': 'CentOS Rocks'
} %}

{% set name = lookup[grains.os] %}

name: {{ name | json() }}
```

Or:

```yaml
{% set os = salt.grains.filter_by({
    'Ubuntu': {
        'name': 'Ubuntu Rocks'
    },
    'CentOS': {
        'name': 'CentOS Rocks'
    }
}) %}

name: {{ os.name }}
```

Run the following command to update all minions:

```bash
sudo salt '*' saltutil.refresh_pillar

ubuntuAsus:
    True
```

We can use this data for example in our Apache landing page (see previous tutorial):

__welcome.sls__

```yaml
# Adding a blank front page
{% set name = salt.pillar.get('name') %}

check_pillar_values:
  test.check_pillar:
    - present:
      - name
    - failhard: True

welcome_page:
  file.managed:
    - name: /var/www/html/index.html
    - contents: |
        <!doctype html>
        <body>
        <h1>{{ name }}.</h1>
        </body>
```

You should be able to see that your minions have access to your pillars:

```bash
sudo salt '*' pillar.items
```

And check that your front page was updated:

```bash
sudo salt '*' state.sls apache.welcome
```

You can also manually set the value of `name` - but this data will be sent to all minions and is NOT PRIVATE:

```bash
sudo salt '*' state.sls apache.welcome pillar='{name: Override}'
```

## Working with Formulas

[Formulas are pre-written Salt States](https://docs.saltstack.com/en/latest/topics/development/conventions/formulas.html). They are as open-ended as Salt States themselves and can be used for tasks such as installing a package, configuring and starting a service, setting up users or permissions, and many other common tasks.

All official Salt Formulas are found as separate Git repositories in the ["saltstack-formulas" organization on GitHub](https://github.com/saltstack-formulas). They can be downloaded using the [GIT Fileserver Backend](https://docs.saltstack.com/en/master/ref/file_server/all/salt.fileserver.gitfs.html#module-salt.fileserver.gitfs). To be able to use Git you first have to uncomment it in your `/etc/salt/master` config.
Or use a `local.conf` in `/etc/salt/master.d/local.conf`:

```yaml
fileserver_backend:
  - git
  - roots

gitfs_remotes:
  - https://github.com/saltstack-formulas/memcached-formula
```

After adding your [desired Formulas](https://github.com/saltstack-formulas) restart the Salt master and use the `cp.list_master` or `cp.list_states` command to get a list of all available configuration files - to make sure that `memcached` was successfully cloned from Github:

```bash
sudo systemctl restart salt-master
sudo salt ubuntuAsus cp.list_states

ubuntuAsus:
    - apache
    - apache.map
    - apache.mods
    - apache.welcome
    - memcached
    - memcached.config
    - memcached.libmemcached
    - memcached.macros
    - memcached.python_memcached
    - memcached.uninstall
```

Continue by installing the following packages:

__on CentOS__

```
yum install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
yum install GitPython
```

__on Ubuntu__

```
sudo apt install python-git-doc
```<file_sep>---
date: "2017-08-07"
title: "Google Analytics"
categories:
  - Google Analytics
---

![Harbin, China](./photo-34605550695_a2aebdc705_o.png)

<!-- TOC -->

- [01 Set up Google Analytics](#01-set-up-google-analytics)
- [02 Tracking ID](#02-tracking-id)
- [03 amp-analytics](#03-amp-analytics)
- [04 Search Console](#04-search-console)
- [05 Search Console link to Google Analytics](#05-search-console-link-to-google-analytics)

<!-- /TOC -->

## 01 Set up Google Analytics

Create a Gmail account and log in to [Google Analytics](https://analytics.google.com). Open the admin panel and add a new property:

---

![Google Analytics](./GA_01.png)

---

## 02 Tracking ID

Fill out the form and click to generate a Tracking ID - which will look something like this: "UA-88888888-1":

---

![Google Analytics](./GA_02.png)

---

## 03 amp-analytics

Now we need to add the [AMP Analytics](https://www.ampproject.org/docs/reference/components/amp-analytics) script to our website inside the \<head\> tag:

```html
<script async custom-element="amp-analytics" src="https://cdn.ampproject.org/v0/amp-analytics-0.1.js"></script>
```

The tracking ID can now be implemented inside the \<header\> tag of your AMP page, by adding the \<amp-analytics\> component:

```html
<amp-analytics type="googleanalytics">
  <script type="application/json">
    {
      "vars": {
        "account": "UA-88888888-1"
      },
      "triggers": {
        "trackPageview": {
          "on": "visible",
          "request": "pageview"
        }
      }
    }
  </script>
</amp-analytics>
```

## 04 Search Console

Now we want to link our new property to the [Search Console](https://www.google.com/webmasters/tools/search-analytics) to get access to reports (e.g. you get information about AMP errors and general tips to improve your HTML structure / SEO) and a list of keywords that lead Google Search users to your website.

Go back to the Property Settings:

---

![Google Analytics](./GA_03.png)

---

And scroll down to "Adjust Search Console":

---

![Google Analytics](./GA_04.png)

---

Click on "Edit":

---

![Google Analytics](./GA_05.png)

---

And add your new property to your Search Console (this page will **stay open in the background** and you will have to refresh it after you added your new property - **see final step**):

---

![Google Analytics](./GA_06.png)

![Google Analytics](./GA_07.png)

![Google Analytics](./GA_08.png)

---

You will then be asked to download an HTML file and upload it to the public /root directory of your website - to verify your ownership. Click on Verify and you are DONE!
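Before clicking Verify you can check that the uploaded file is actually reachable - a quick test from any shell. The domain and file name below are hypothetical placeholders; use the verification file Google generated for you:

```bash
# The verification file must be served from the site root -
# "google1234567890abcdef.html" is a placeholder name
curl -I "https://www.your-domain.com/google1234567890abcdef.html"
```

A `200 OK` response means Google's crawler will be able to fetch the file as well.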
## 05 Search Console link to Google Analytics

Now return to the previous page (still open in the background), refresh and select your new property - then click save:

---

![Google Analytics](./GA_09.png)

![Google Analytics](./GA_10.png)

---

As seen above, your Property is now linked in Google Analytics and you will start to see search results in your reports. It might take a while for crawling information and potential Error Reports to arrive - give it a month... You will be notified by email if something is found that needs fixing.<file_sep>---
date: "2020-01-05"
title: "CentOS 8 Cockpit Web Console"
categories:
  - LINUX
---

![<NAME>, Cambodia](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Installing Cockpit Web Console](#installing-cockpit-web-console)
- [System Performance Logging](#system-performance-logging)
- [Changing the Hostname](#changing-the-hostname)
- [Changing the Timezone](#changing-the-timezone)
- [User Management](#user-management)
- [Networking](#networking)
- [Podman Containers](#podman-containers)
- [Remote Server Management](#remote-server-management)

<!-- /TOC -->

## Installing Cockpit Web Console

Install [cockpit](https://cockpit-project.org) on your system with the command below, which pulls in its required dependencies:

```bash
yum install cockpit cockpit-storaged cockpit-podman cockpit-dashboard
```

Enable and start the cockpit.socket service to connect to the system through the web console:

```bash
systemctl start cockpit.socket
systemctl enable --now cockpit.socket
systemctl status cockpit.socket
```

If you are running firewalld on the system, you need to open the cockpit port 9090 in the firewall:

```bash
firewall-cmd --permanent --zone=public --add-service=cockpit
firewall-cmd --reload
```

Open the Cockpit web console in your web browser on port 9090, proceed past the `NET::ERR_CERT_AUTHORITY_INVALID` warning (or install a CA certificate on your CentOS server) and log in with your Linux user:

> The console uses a _.cert_ certificate file from the `/etc/cockpit/ws-certs.d` directory. To avoid browser security warnings, install a signed certificate - e.g. from Let's Encrypt. Check the [documentation](https://cockpit-project.org/guide/latest/https.html#https-certificates) for details.
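A minimal sketch of how that could look with existing Let's Encrypt files - the paths are assumptions from a standard certbot layout, and the key is appended to the `.cert` file, which this Cockpit version expects:

```bash
# Combine certificate chain and private key into a single .cert file
# (paths are placeholders - adjust them to your domain and setup)
cat /etc/letsencrypt/live/your-domain/fullchain.pem \
    /etc/letsencrypt/live/your-domain/privkey.pem \
    > /etc/cockpit/ws-certs.d/50-letsencrypt.cert

# Restart Cockpit to pick up the new certificate
systemctl restart cockpit
```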
![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_01.png)

## System Performance Logging

You can activate System Logging from the interface:

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_02.png)

## Changing the Hostname

You can change your system hostname by selecting the option inside the __System__ tab and typing in a name:

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_03.png)

To verify that the hostname was changed switch to the __Terminal__ tab and type `hostnamectl`:

```bash
hostnamectl

   Static hostname: centos8.localdomain
   Pretty hostname: centos8
Transient hostname: CentOS8.fritz.box
         Icon name: computer-desktop
           Chassis: desktop
        Machine ID: ae04edc8f0ed429095167d2e34a046e1
           Boot ID: e58f2916a4b94f6ea365ae1296e7493c
  Operating System: CentOS Linux 8 (Core)
       CPE OS Name: cpe:/o:centos:centos:8
            Kernel: Linux 4.18.0-147.5.1.el8_1.x86_64
      Architecture: x86-64
```

You can also use the __Services__ tab to see that `systemd-hostnamed.service` did its job:

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_03.2.png)

## Changing the Timezone

You can change your system time settings by selecting the option inside the __System__ tab and typing in your time zone:

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_04.png)

Again, you can verify your settings inside the __Terminal__:

```bash
timedatectl

               Local time: Fri 2020-04-10 19:18:43 HKT
           Universal time: Fri 2020-04-10 11:18:43 UTC
                 RTC time: Fri 2020-04-10 11:18:43
                Time zone: Asia/Hong_Kong (HKT, +0800)
System clock synchronized: no
              NTP service: n/a
          RTC in local TZ: no
```

## User Management

Create and manage user accounts from the __Accounts__ tab:

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_05.png)

## Networking

Set your server's IPv4 configuration - DHCP, static IP, DNS server and gateway:

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_06.png)

## Podman Containers

We already installed `cockpit-podman` on our machine - if you are using Docker install `cockpit-docker` instead!

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_07.png)

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_08.png)

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_10.png)

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_09.png)

## Remote Server Management

Make sure that you have `cockpit-dashboard` installed and click on the __Dashboard__ button:

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_11.png)

Click on the __Add Server__ button and type in your remote server's IP address (if you don't use the default SSH port, add it behind the IP, e.g. `123.123.123.45:6969`):

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_12.png)

And type in your user login:

![CentOS8 Cockpit Web Console](./CentOS8-Cockpit_13.png)<file_sep>---
date: "2019-09-25"
title: "Podman Cheat Sheet"
categories:
  - LINUX
  - Docker
---

![Shenzhen, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Finding Images](#finding-images)
- [Building Images](#building-images)
- [Running Containers on Images](#running-containers-on-images)
- [Working with Container Processes](#working-with-container-processes)
- [Working with the Container Filesystem](#working-with-the-container-filesystem)
- [Removing Images](#removing-images)
- [Miscellaneous](#miscellaneous)

<!-- /TOC -->

For more information about podman, visit the [Red Hat Developer website](https://developers.redhat.com/). This cheat sheet was written by <NAME>, with huge thanks to <NAME> and <NAME>.

In the following, `container` is either a container name or a container ID. If `tag` is omitted in `image:tag`, the default value is `latest`.
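Before diving into the tables, a typical session combining a few of the commands might look like this (the `alpine` image is just an example):

```bash
# Pull a small test image from a remote registry
podman pull docker.io/library/alpine:latest

# Run an interactive container on it and remove it on exit
podman run --rm -it alpine:latest /bin/sh

# List all containers, including ones that have exited
podman ps --all
```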
## Finding Images

| Command | Description |
|---|---|
| podman images | List all local images |
| podman history image:tag | Display information about how an image was built |
| podman login registryURL -u username [-p password] | Log in to a remote registry |
| podman pull registry/username/image:tag | Pull an image from a remote registry |
| podman search searchString | Search local cache and remote registries for images |
| podman logout | Log out of the current remote registry |

_The list of registries is defined in `/etc/containers/registries.conf`_

## Building Images

| Command | Description |
|---|---|
| podman build -t image:tag . | Build and tag an image using the instructions in the Dockerfile in the current directory |
| podman build -t image:tag -f Dockerfile2 | Same as above, but with a different Dockerfile |
| podman tag image:tag image:tag2 | Add an additional name to a local image |
| podman tag image:tag registry/username/image:tag | Same as above, but the additional name includes a remote registry |
| podman push registry/username/image:tag | Push an image to a remote registry |

## Running Containers on Images

| Command | Description |
|---|---|
| podman run --rm -it [--name name] image:tag command | Run a container based on a given image. |

* `--rm` Remove the container after it exits
* `-it` Connect the container to the terminal
* `--name name` Give the container a name
* `image:tag` The image used to create the container
* `command` A command to run (/bin/bash for example)
* `-d` Run the container in the background
* `-p 8080:32000` Expose container port 8080 as localhost:32000
* `-v /var/lib/mydb:/var/lib/db` Map the /var/lib/mydb directory on localhost to a volume named /var/lib/db inside the container

| Command | Description |
|---|---|
| podman commit container newImage:tag | Create a new image based on the current state of a running container |
| podman create [--name name] image:tag | Create (but don't start) a container from an image |
| podman start container | Start an existing container from an image |
| podman restart container | Restart an existing container |
| podman wait container1 [container2… ] | Wait on one or more containers to stop |
| podman stop container | Stop a running container gracefully |
| podman kill container | Send a signal to a running container |
| podman rm [-f] container | Remove a container (use -f if the container is running) |
| podman stats container | Display a live stream of a container's resource usage |
| podman inspect container | Return metadata (in JSON) about a running container |

## Working with Container Processes

| Command | Description |
|---|---|
| podman ps [--all] | List the running containers on the system (use --all to include non-running containers) |
| podman attach container | Attach to a running container and view its output or control it (Ctrl + P, Ctrl + Q detaches from the container but leaves it running) |
| podman exec container command | Execute a command in a running container |
| podman top container | Display the running processes of a container |
| podman logs [--tail] container | Display the logs of a container |
| podman pause container / podman unpause container | Pause/unpause all the processes in a container |
| podman port container | List the port mappings from a container to localhost |

## Working with the Container Filesystem

| Command | Description |
|---|---|
| podman diff container | Display all the changes to a container's filesystem |
| podman cp source target | Copy files and folders between a container and localhost |
| podman mount container / podman umount container | Mount or unmount a container's root filesystem |
| podman import tarball | Import a tarball and save it as a filesystem image |
| podman export [-o outputFile] container | Export the container's filesystem to a tar file |
| podman save [-o archiveFile] [--format docker-archive \| oci-archive \| oci-dir \| docker-dir] image:tag | Save an image in docker-archive (default) or another format |
| podman load -i archiveFile | Load a saved image from docker-archive or another format |

## Removing Images

| Command | Description |
|---|---|
| podman rmi [-f] image:tag | Remove a local image from local cache (use -f to force removal) |
| podman rmi [-f] registry/username/image:tag | Remove a remote image from local cache (use -f to force removal) |

## Miscellaneous

| Command | Description |
|---|---|
| podman version | Display podman version information |
| podman info | Display information about the podman environment |<file_sep>---
date: "2019-02-07"
title: "MotionEye Video Surveillance"
categories:
  - IoT
  - Smarthome
---

![Harbin, China](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Debian Installation](#debian-installation)
- [SSH Access](#ssh-access)
- [Set a static IP Address](#set-a-static-ip-address)
- [motionEye Installation](#motioneye-installation)
- [Adding your IP Camera](#adding-your-ip-camera)
- [Enabling Actions](#enabling-actions)
- [HTTP Request Example](#http-request-example)
- [Alarm - Full HD Models](#alarm---full-hd-models)
- [Alarm - VGA Models](#alarm---vga-models)
- [PTZ - Full HD Models](#ptz---full-hd-models)
- [Preset Positions - Full HD Models](#preset-positions---full-hd-models)
- [Configuring MotionEye](#configuring-motioneye)
- [Accessing MotionEye over HTTPS](#accessing-motioneye-over-https)
- [Installation of FirewallD on Debian](#installation-of-firewalld-on-debian)
- [Installation of NGINX on Debian](#installation-of-nginx-on-debian)
- [Set up a Virtual Host for MotionEye](#set-up-a-virtual-host-for-motioneye)
- [Create the SSL Certificate](#create-the-ssl-certificate)
- [Configure Nginx to Use SSL](#configure-nginx-to-use-ssl)
- [Adjust the Nginx Configuration to Use SSL](#adjust-the-nginx-configuration-to-use-ssl)

<!-- /TOC -->

[Motion](https://motion-project.github.io) is a highly configurable program that monitors video signals from many types of cameras (network cameras via HTTP, RTSP and RTMP). Set it up to monitor your INSTAR IP camera, watch birds, check in on your pet, create timelapse videos and more.

[motionEye](https://github.com/ccrisan/motioneye/wiki) is a web frontend for the motion daemon, written in Python.
We are going to install both on a regular Intel Core machine - like an [Intel NUC](https://www.intel.com/content/www/us/en/products/boards-kits/nuc/kits.html), [Asus P-Series](https://www.asus.com/Mini-PCs/Mini-PC-PN60/) or [Gigabyte Brix](https://www.gigabyte.com/Mini-PcBarebone/BRIX). This decision was made due to the high CPU demands of working with 1080p video. But we already wrote a tutorial on how to install [motionEye on a Raspberry Pi](/Advanced_User/IOBroker_on_Raspberry_Pi/motionEye/#installation-of-motioneye) in case this amount of processing power is sufficient for your use case.

## Debian Installation

We choose to install __Debian 9 Linux__ on our mini PC - you can check the [motionEye Wiki](https://github.com/ccrisan/motioneye/wiki/Installation) for more Linux flavours. To create the installation USB stick we [downloaded the minimal amd64 image](https://www.debian.org/CD/netinst/) and used the tool __Etcher__ to prepare the USB stick. We used the same approach to prepare an SD card with the openHABian image - you can just [follow the steps from that tutorial](/Advanced_User/openHABian_on_Raspberry_Pi/#install-openhabian-on-a-raspberry-pi).

Once the USB stick is ready, deactivate secure boot in your mini PC's BIOS and boot from the stick. We are not going to need a desktop environment. But __make sure that you install the SSH service__, as we are going to use it to set up the Debian server.

### SSH Access

To enable SSH login for the root user on a Debian Linux system, you first need to configure the SSH server. Open `/etc/ssh/sshd_config` and change the following line:

```bash
FROM:
PermitRootLogin without-password
TO:
PermitRootLogin yes
```

Once you have made the above change, restart your SSH server:

```bash
# /etc/init.d/ssh restart
[ ok ] Restarting ssh (via systemctl): ssh.service.
```

From now on you will be able to log in via SSH as root:

```bash
$ ssh [email protected]
[email protected]'s password:

The programs included with the Debian GNU/Linux system are free software;
the exact distribution terms for each program are described in the
individual files in /usr/share/doc/*/copyright.

Debian GNU/Linux comes with ABSOLUTELY NO WARRANTY, to the extent
permitted by applicable law.
```

### Set a static IP Address

Edit the `/etc/network/interfaces` network config file and assign a static address to the primary network interface:

```
source /etc/network/interfaces.d/*

# The loopback network interface
auto lo
iface lo inet loopback

# The primary network interface
allow-hotplug eth0
iface eth0 inet static
    address 192.168.2.111
    netmask 255.255.255.0
    gateway 192.168.2.1
```

<div class="dangerbox">
    <h3>Be aware:</h3>
    <p>In newer versions of Debian the Ethernet interface will not be called <strong>eth0</strong> anymore. The file contains the line <code>iface eth0 inet dhcp</code> before you set it from <strong>dhcp</strong> to <strong>static</strong> - the identifier between <strong>iface</strong> and <strong>inet</strong> is the name of your Ethernet interface (don't change it to eth0!).</p>
</div>

To configure a static DNS server, edit the `/etc/resolv.conf` file and include the IP address of your preferred nameserver, e.g.:

```
nameserver 8.8.8.8
```

<br/><br/>

## motionEye Installation

_All commands require root; use sudo before each command or become root using su._

1. Install motion, ffmpeg and v4l-utils:

```bash
apt-get install motion ffmpeg v4l-utils
```

2. 
Install the dependencies from the repositories:

```bash
apt-get install python-pip python-dev python-setuptools curl libssl-dev libcurl4-openssl-dev libjpeg-dev libz-dev
```

3. Install motioneye, which will automatically pull Python dependencies (tornado, jinja2, pillow and pycurl):

```bash
pip install motioneye
```

4. Prepare the configuration directory:

```bash
mkdir -p /etc/motioneye
cp /usr/local/share/motioneye/extra/motioneye.conf.sample /etc/motioneye/motioneye.conf
```

5. Prepare the media directory:

```bash
mkdir -p /var/lib/motioneye
```

6. Add an init script, configure it to run at startup and start the motionEye server:

```bash
cp /usr/local/share/motioneye/extra/motioneye.systemd-unit-local /etc/systemd/system/motioneye.service
systemctl daemon-reload
systemctl enable motioneye
systemctl start motioneye
```

7. To upgrade to the newest version of motionEye once it has been released, just issue:

```bash
pip install motioneye --upgrade
systemctl restart motioneye
```

<br/><br/>

## Adding your IP Camera

Open the MotionEye interface with your web browser by typing in your motionEye server's IP address followed by port 8765 - e.g. `http://192.168.2.115:8765`. The default login is __admin__ __without a password__.

We already covered how [to add JPG, MJPEG and RTSP cameras](/Advanced_User/IOBroker_on_Raspberry_Pi/motionEye/#adding-your-ip-camera) earlier. Follow the instructions there and add all your INSTAR VGA, HD and Full HD cameras. We now want to go a step further and add buttons to control the basic camera functions. MotionEye offers a list of __Action Buttons__ that we can use with our camera's [CGI commands](/Advanced_User/CGI_Commands/). They automatically map to shell scripts that we have to put into the `/etc/motioneye` directory. The following actions are defined:

* lock
* unlock
* light_on
* light_off
* alarm_on
* alarm_off
* up
* right
* down
* left
* zoom_in
* zoom_out
* preset1 to preset9

While the available actions are limited to the above set, the commands executed can be practically anything. We will choose to execute HTTP GET commands to send [CGI commands](/Advanced_User/CGI_Commands/) to our cameras.

### Enabling Actions

motionEye will look inside its configuration folder `/etc/motioneye` for executable files named [action]_[cameraid], where action is one of the available actions (listed above) and cameraid is the id of the camera on top of which the action button will be displayed.

For example, on a setup using the default configuration, the presence of the executable file `/etc/motioneye/alarm_on_1` tells motionEye to show an alarm bell button for camera number one. The file will be executed upon pressing the button. Buttons will have distinctive icons that correspond to the name of the action.

#### HTTP Request Example

Let's say that you want to issue an HTTP request to your INSTAR IP camera when you click the "turn alarm on" button of your first camera (with id 1). 
Create the following bash script and make it executable:

##### Alarm - Full HD Models

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_01.png)

---

```bash
touch /etc/motioneye/alarm_on_1
chmod +x /etc/motioneye/alarm_on_1
nano /etc/motioneye/alarm_on_1
```

Then type in (or paste) the following contents, save and exit nano (Ctrl-O, Enter, Ctrl-X):

```bash
#!/bin/bash

URL="http://admin:[email protected]/param.cgi?cmd=setscheduleex&-ename=md&-week0=PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP&-week1=PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP&-week2=PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP&-week3=PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP&-week4=PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP&-week5=PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP&-week6=PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP"
METHOD="GET"
TIMEOUT="5"

curl -X $METHOD --connect-timeout $TIMEOUT "$URL" > /dev/null
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_02.png)

---

We will use the [Alarm Schedule CGI Command](https://wiki.instar.com/1080p_Series_CGI_List/Alarm_Menu/Schedule/) for an INSTAR Full HD Camera in this example - but the URL can be set to any [CGI command](https://wiki.instar.com/Advanced_User/CGI_Commands/) that you need!

Note that the command activates the motion alarm detection of your camera 24/7. If you are already using an alarm schedule that deactivates your camera alarm during certain times - just replace some of those __P__'s with __N__'s to reflect your personal schedule and set MotionEye up to switch between __Alarm Always ON__ / __Your Alarm Schedule__.

To switch the alarm off we repeat those steps for:

```bash
touch /etc/motioneye/alarm_off_1
chmod +x /etc/motioneye/alarm_off_1
nano /etc/motioneye/alarm_off_1
```

Then type in (or paste) the following contents, save and exit nano (Ctrl-O, Enter, Ctrl-X):

```bash
#!/bin/bash

URL="http://admin:[email protected]/param.cgi?cmd=setscheduleex&-ename=md&-week0=NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN&-week1=NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN&-week2=NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN&-week3=NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN&-week4=NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN&-week5=NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN&-week6=NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN"
METHOD="GET"
TIMEOUT="5"

curl -X $METHOD --connect-timeout $TIMEOUT "$URL" > /dev/null
```

Reload the MotionEye interface - the buttons should now be visible in the camera 1 viewport (you might have to click onto the video area once to have them appear). If the buttons are not displayed, make sure that you made the shell scripts executable.
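If in doubt, you can verify the permissions from the shell - a quick check, assuming the file locations used above:

```bash
# both action scripts should show the executable bit (-rwxr-xr-x)
ls -l /etc/motioneye/alarm_on_1 /etc/motioneye/alarm_off_1
```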
---

![Motion Eye and INSTAR IP Cameras](./MotionEye_03.png)

---

##### Alarm - VGA Models

```bash
touch /etc/motioneye/alarm_on_3
chmod +x /etc/motioneye/alarm_on_3
nano /etc/motioneye/alarm_on_3
```

Then type in (or paste) the following contents, save and exit nano (Ctrl-O, Enter, Ctrl-X):

```bash
#!/bin/bash

URL="http://admin:[email protected]/set_alarm.cgi?motion_armed=1"
METHOD="GET"
TIMEOUT="5"

curl -X $METHOD --connect-timeout $TIMEOUT "$URL" > /dev/null
```

```bash
touch /etc/motioneye/alarm_off_3
chmod +x /etc/motioneye/alarm_off_3
nano /etc/motioneye/alarm_off_3
```

Then type in (or paste) the following contents, save and exit nano (Ctrl-O, Enter, Ctrl-X):

```bash
#!/bin/bash

URL="http://admin:[email protected]/set_alarm.cgi?motion_armed=0"
METHOD="GET"
TIMEOUT="5"

curl -X $METHOD --connect-timeout $TIMEOUT "$URL" > /dev/null
```

##### PTZ - Full HD Models

```bash
touch /etc/motioneye/left_2 /etc/motioneye/right_2 /etc/motioneye/down_2 /etc/motioneye/up_2
chmod +x /etc/motioneye/left_2 /etc/motioneye/right_2 /etc/motioneye/down_2 /etc/motioneye/up_2
```

__STEP LEFT__ *left_2*

```bash
#!/bin/bash

URL="http://admin:[email protected]/ptzctrl.cgi?-step=&-act=left"
METHOD="GET"
TIMEOUT="5"

curl -X $METHOD --connect-timeout $TIMEOUT "$URL" > /dev/null
```

__STEP RIGHT__ *right_2*

```bash
#!/bin/bash

URL="http://admin:[email protected]/ptzctrl.cgi?-step=&-act=right"
METHOD="GET"
TIMEOUT="5"

curl -X $METHOD --connect-timeout $TIMEOUT "$URL" > /dev/null
```

__STEP DOWN__ *down_2*

```bash
#!/bin/bash

URL="http://admin:[email protected]/ptzctrl.cgi?-step=&-act=down"
METHOD="GET"
TIMEOUT="5"

curl -X $METHOD --connect-timeout $TIMEOUT "$URL" > /dev/null
```

__STEP UP__ *up_2*

```bash
#!/bin/bash

URL="http://admin:[email protected]/ptzctrl.cgi?-step=&-act=up"
METHOD="GET"
TIMEOUT="5"

curl -X $METHOD --connect-timeout $TIMEOUT "$URL" > /dev/null
```

##### Preset Positions - Full HD Models

```bash
touch /etc/motioneye/preset1_2 /etc/motioneye/preset2_2
chmod +x /etc/motioneye/preset1_2 /etc/motioneye/preset2_2
```

__Go to Preset 1__ *preset1_2*

```bash
#!/bin/bash

URL="http://admin:[email protected]/param.cgi?cmd=preset&-act=goto&-number=0"
METHOD="GET"
TIMEOUT="5"

curl -X $METHOD --connect-timeout $TIMEOUT "$URL" > /dev/null
```

__Go to Preset 2__ *preset2_2*

```bash
#!/bin/bash

URL="http://admin:[email protected]/param.cgi?cmd=preset&-act=goto&-number=1"
METHOD="GET"
TIMEOUT="5"

curl -X $METHOD --connect-timeout $TIMEOUT "$URL" > /dev/null
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_04.png)

---

<br/><br/>

## Configuring MotionEye

Open the side menu and select the camera you want to edit from the drop down menu `1` - activate the __Advanced Settings__ `2`. Let's go through a few interesting menus that MotionEye offers:

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_05.png)

---

`3` MotionEye gives you both an administrator and a regular user account. You should add a password for the admin account here. The regular user will not have access to this part of the menu.

`4` We set up MotionEye to grab the 1080p RTSP stream from our camera. Here you can scale down the resolution - to save storage and - of particular interest if you are using a Raspberry Pi to run MotionEye - to reduce the demand on your CPU. The framerate here is set to 2fps by default - which means that you end up with very choppy video. Whether you can go higher again depends on your CPU and the number of cameras you want to add. You can also rotate your video image in 90 degree increments.
`5` Set an upper and lower detection threshold for the integrated motion detection. What percentage of the image has to change to register as an alarm? And do you want to discard the alarm when more than a set percentage of the image changes - as it will probably just be a light that turned on and does not interest you.

`6` Do you have a constant movement in the background of your image that keeps triggering the alarm? Use the mask feature to have MotionEye learn which part of the image it should not use for the motion detection.

`7` Have MotionEye draw a box around the change it detects inside the frame - this is useful to debug false alerts. Also looks really fancy.

`8` The Email notification is self-explanatory - the fields are identical to the [SMTP server configuration](https://wiki.instar.com/Web_User_Interface/720p_Series/Alarm/SMTP_Server/) of an INSTAR IP camera. More interesting is the web hook function below that is identical to the [Alarmserver](https://wiki.instar.com/Web_User_Interface/1080p_Series/Alarm/Alarm_Server/) of your INSTAR camera.

`9` Make sure to set the video recording to __h.264 (.mp4)__ if you want to display the recorded video in the MotionEye user interface! Otherwise you can use the __HEVC (.mp4)__ to apply the highly effective h.265 compression and make your videos as small as possible.

`10` Choose between a continuous recording and a recording triggered by motion detection. Alarm videos will continue to record as long as the motion continues. It makes sense to limit the maximum file size - this will create several smaller videos instead of one huge file that is hard to handle. You can also choose to automatically delete files again after a set time.

`11` Upload your alarm videos to an __FTP__ or __SFTP Server__. Or mirror them to __Dropbox__ or __Google Drive__.

`12` MotionEye re-streams all your cameras' video streams in a browser compatible format.

`13` A photo camera icon will appear in the camera viewport when you activate the __Manual Capture__, allowing you to take a quick snapshot.

<br/><br/>

## Accessing MotionEye over HTTPS

If you want to access the MotionEye interface over the internet, it is necessary to first protect your server. In the following we want to do two things:

1. Close all unnecessary ports with a firewall
2. Install a web proxy that enables us to add SSL encryption

We are going to use [FirewallD](https://firewalld.org) as firewall and [NGINX](http://nginx.org/) as web proxy. Running behind the Nginx webserver (using Nginx as a reverse proxy) can be useful when:

* you want to use HTTPS with motionEye
* you want to serve multiple websites (including motionEye) on the same webserver (accessible at the same IP address over port 80)
* you want to take advantage of various Nginx modules (such as rate limiting or HTTP authentication)

### Installation of FirewallD on Debian

Install FirewallD by running the following commands:

```bash
apt-get update
apt-get install firewalld
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_06.png)

---

To open the default __http__ and __https ports__ run the following commands:

```bash
firewall-cmd --permanent --zone=public --add-service=https --add-service=http
firewall-cmd --reload
firewall-cmd --list-all
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_07.png)

---

If you now try to access the MotionEye interface on port __8765__ you will see that it is going to time out - as the request will be blocked by FirewallD.
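If you are not sure which ports your motionEye installation actually uses, you can list the listening sockets first - a quick check using the `ss` utility from Debian's iproute2 package:

```bash
# list all TCP listening sockets together with the owning process
ss -tlnp
# look for port 8765 (web UI) and the per-camera streaming ports (8081, 8082, ...)
```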
We can now open all ports that MotionEye uses - in my case this is the main UI port 8765 and the streaming ports for three cameras __8081__, __8082__, __8083__.

```bash
firewall-cmd --permanent --zone=public --add-port=8765/tcp --add-port=8081-8083/tcp
firewall-cmd --reload
firewall-cmd --list-all
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_08.png)

---

You should now be able to access the MotionEye interface again - __be aware__ that this command used the __--permanent__ flag. That means the rules will survive a firewall reload or a server restart; omit the flag if you only want to open the ports temporarily.

### Installation of NGINX on Debian

```bash
apt-get install nginx
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_09.png)

---

At the end of the installation process, Debian 9 starts Nginx. The web server should already be up and running. We can check with the systemd init system to make sure the service is running by typing:

```bash
systemctl status nginx
```

When you have your server's IP address, enter it into your browser's address bar. You should see the default Nginx landing page:

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_10.png)

---

#### Set up a Virtual Host for MotionEye

Here's the content of a sample virtual host file that normally goes to `/etc/nginx/sites-enabled/motioneye.local`:

```
touch /etc/nginx/sites-enabled/motioneye.local
nano /etc/nginx/sites-enabled/motioneye.local
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_11.png)

---

_motioneye.local_

```
server {
    listen 80;
    listen [::]:80;

    location /dashboard/ {
        proxy_pass http://127.0.0.1:8765/;
        proxy_read_timeout 1800;
        proxy_connect_timeout 1800;
        access_log off;
    }
}
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_12.png)

---

Make sure that your NGINX configuration passes the test and reload the service:

```
nginx -t
service nginx reload
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_13.png)

---

Your MotionEye UI will be available at `http://192.168.2.111/dashboard/` (__this has to be changed to the IP address of your MotionEye server!__). It's important to note the trailing slashes at location /dashboard/ and at `http://127.0.0.1:8765/`. They make sure paths are correctly passed around when forwarding the HTTP requests to motionEye.

#### Create the SSL Certificate

We can create a self-signed key and certificate pair with OpenSSL in a single command:

```bash
openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout /etc/ssl/private/nginx-selfsigned.key -out /etc/ssl/certs/nginx-selfsigned.crt
```

These options will create both a key file and a certificate. We will be asked a few questions about our server in order to embed the information correctly in the certificate.

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_14.png)

---

Both of the files you created will be placed in the appropriate subdirectories of the `/etc/ssl` directory.

While we are using OpenSSL, we should also create a strong Diffie-Hellman group, which is used in negotiating [Perfect Forward Secrecy](https://en.wikipedia.org/wiki/Forward_secrecy) with clients. We can do this by typing:

```bash
openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048
```

This may take a few minutes, but when it's done you will have a strong DH group at `/etc/ssl/certs/dhparam.pem` that we can use in our configuration.

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_15.png)

---

#### Configure Nginx to Use SSL

We have created our key and certificate files under the `/etc/ssl` directory.
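Before wiring these files into Nginx it is worth verifying that they were actually written - a quick check using standard OpenSSL tooling:

```bash
ls -l /etc/ssl/private/nginx-selfsigned.key /etc/ssl/certs/nginx-selfsigned.crt
# print the subject and validity period of the new certificate
openssl x509 -in /etc/ssl/certs/nginx-selfsigned.crt -noout -subject -dates
```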
Now we just need to modify our Nginx configuration to take advantage of these.

We will make a few adjustments to our configuration:

* We will create a configuration snippet containing our SSL key and certificate file locations.
* We will create a configuration snippet containing strong SSL settings that can be used with any certificates in the future.
* We will adjust our Nginx server blocks to handle SSL requests and use the two snippets above.

First, let's create a new Nginx configuration snippet in the `/etc/nginx/snippets` directory:

```bash
nano /etc/nginx/snippets/self-signed.conf
```

Within this file, we just need to set the ssl_certificate directive to our certificate file and the ssl_certificate_key to the associated key. In our case, this will look like this:

```
ssl_certificate /etc/ssl/certs/nginx-selfsigned.crt;
ssl_certificate_key /etc/ssl/private/nginx-selfsigned.key;
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_16.png)

---

Next, we will create another snippet that will define some SSL settings. This will set Nginx up with a strong SSL cipher suite and enable some advanced features that will help keep our server secure. The parameters we will set can be reused in future Nginx configurations, so we will give the file a generic name:

```bash
nano /etc/nginx/snippets/ssl-params.conf
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_17.png)

---

```
# from https://cipherli.st/
# and https://raymii.org/s/tutorials/Strong_SSL_Security_On_nginx.html

ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_prefer_server_ciphers on;
ssl_ciphers "EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH";
ssl_ecdh_curve secp384r1;
ssl_session_cache shared:SSL:10m;
ssl_session_tickets off;
ssl_stapling on;
ssl_stapling_verify on;
resolver 8.8.8.8 8.8.4.4 valid=300s;
resolver_timeout 5s;
# Disable preloading HSTS for now. You can use the commented out header line that includes
# the "preload" directive if you understand the implications.
#add_header Strict-Transport-Security "max-age=63072000; includeSubdomains; preload";
add_header Strict-Transport-Security "max-age=63072000; includeSubdomains";
add_header X-Frame-Options DENY;
add_header X-Content-Type-Options nosniff;

ssl_dhparam /etc/ssl/certs/dhparam.pem;
```

Because we are using a self-signed certificate, the SSL stapling will not be used. Nginx will simply output a warning, disable stapling for our self-signed cert, and continue to operate correctly.

##### Adjust the Nginx Configuration to Use SSL

Now that we have our snippets, we can adjust our Nginx configuration to enable SSL. In this guide we are using the default server block file in the `/etc/nginx/sites-available` directory. If you are using a different server block file, substitute its name in the below commands. Before we go any further, let's back up our current server block file:

```bash
cp /etc/nginx/sites-available/default /etc/nginx/sites-available/default.bak
```

Now, open the server block file to make adjustments:

```bash
nano /etc/nginx/sites-available/default
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_18.png)

---

We will be modifying this configuration so that unencrypted HTTP requests are automatically redirected to encrypted HTTPS. This offers the best security for our sites. If you want to allow both HTTP and HTTPS traffic, simply omit the redirect shown below. Next, we need to start a new server block directly below to contain the remaining configuration. We can uncomment the two listen directives that use port 443.
We can add http2 to these lines in order to enable HTTP/2 within this block. Afterwards, we just need to include the two snippet files we set up:

```bash
server {
    listen 80 default_server;
    listen [::]:80 default_server;
    return 301 https://$server_name$request_uri;
}

server {
    # SSL configuration

    listen 443 ssl http2 default_server;
    listen [::]:443 ssl http2 default_server;

    include snippets/self-signed.conf;
    include snippets/ssl-params.conf;
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_19.png)

---

Now we have to add the SSL configuration to our MotionEye server block in `/etc/nginx/sites-enabled/motioneye.local`:

```
server {
    listen 80;
    return 301 https://$server_name$request_uri;
}

server {
    listen 443 ssl http2;
    server_name 192.168.2.111;

    include snippets/self-signed.conf;
    include snippets/ssl-params.conf;

    location /dashboard/ {
        proxy_pass http://127.0.0.1:8765/;
        proxy_read_timeout 1800;
        proxy_connect_timeout 1800;
        access_log off;
    }
}
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_20.png)

---

Now we can test our new configuration for syntax errors:

```bash
nginx -t
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_21.png)

---

Notice the warning in the beginning. As noted earlier, this particular setting throws a warning since our self-signed certificate can't use SSL stapling. This is expected and our server can still encrypt connections correctly. If your output matches the above, your configuration file has no syntax errors. We can safely reload Nginx to implement our changes:

```bash
service nginx reload
```

---

![Motion Eye and INSTAR IP Cameras](./MotionEye_22.png)

---<file_sep>import sys
import os.path

# Check if the IP address file is available and valid
def ip_file_valid():
    # Prompt for the file path
    ip_file = input("\n# please enter server IP file - e.g. ./serverip.env : ")

    # Check that the file exists
    if os.path.isfile(ip_file):
        print("\n* filepath is valid")
    else:
        print("\n* file {} does not exist. \n* exiting program \n".format(ip_file))
        sys.exit()

    # Open the IP file for reading, start at the beginning and read all lines
    with open(ip_file, 'r') as selected_ip_file:
        selected_ip_file.seek(0)
        ip_list = selected_ip_file.readlines()

    return ip_list
<file_sep>---
date: "2010-02-02"
title: "Using SVN Subversion on Windows 10"
categories:
  - Windows
---

## Installing Subversion

Download and install [the latest version of Subversion](https://tortoisesvn.net/downloads.html)

## Checkout, Update and Commit

1. Create a folder for your project
2. Right-click inside this folder and select __SVN Checkout__

![SVN Subversion on Windows 10](./SVN_Subversion_01.png)

3. Copy & paste in the URL to your project repository and add your user credentials when asked

![SVN Subversion on Windows 10](./SVN_Subversion_02.png)

4. Right-click inside this folder and select __SVN Commit__

![SVN Subversion on Windows 10](./SVN_Subversion_03.png)
<file_sep>---
date: "2019-06-18"
title: "User Login with Google OAuth2 for Discourse"
categories:
  - LINUX
  - Discourse
---

![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Creating a new Google Cloud Platform Project](#creating-a-new-google-cloud-platform-project)
- [Setup OAuth2](#setup-oauth2)
- [Verify your Domain](#verify-your-domain)

<!-- /TOC -->

Configure Discourse to allow login and registration with Google OAuth2.
## Creating a new Google Cloud Platform Project

Open the [Google Developer Console](https://console.developers.google.com) and select __Create a new Project__:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_01.png)

Fill in the Project Name field, then click on __Create__:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_02.png)

## Setup OAuth2

Under __APIs and Services__, select the __OAuth consent screen__:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_03.png)

Select __External__ and then click on __Create__:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_04.png)

Fill out the form, then click Save:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_05.png)

Go to the __Credentials__ tab on the left, click on __+ CREATE CREDENTIALS__ then click on __OAuth client ID__:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_06.png)

Choose __Web application__ as the Application Type. In the __Authorized JavaScript Origins__ section, add your site's base url, including http:// or https://. In the __Authorized Redirect URI__ section, add the base url with `/auth/google_oauth2/callback`. Click the __Create__ button:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_07.png)

The OAuth client dialog will appear with client ID and secret:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_08.png)

In your Discourse site settings, check enable google oauth2 logins, and fill in your google oauth2 client id and google oauth2 client secret provided earlier:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_09.png)

## Verify your Domain

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_10.png)

Your site must be registered on [Search Console](https://www.google.com/webmasters/tools) with an `https://` URL or with the domain name provider verification method __before you can proceed__:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_11.png)

Add your domain to your [Search Console](https://www.google.com/webmasters/tools) properties:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_12.png)

You can now add an HTML tag to your Discourse page template to verify your ownership of the domain. To do this copy the meta tag Google provides for you:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_13.png)

Go to the Discourse Admin and go to the following page: `Admin>Customize>Themes`, then select __Components__. Click on __Install__, then __+ Create new__:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_14.png)

Give it a meaningful name (e.g. `GoogleSearchConsoleVerification`):

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_15.png)

In your new component, select __Edit CSS/HTML__, under __Custom CSS/HTML__. In the `</head>` section, paste the HTML tag generated by the GSC verification wizard, then save this change:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_16.png)

Back in the Theme menu add your new component to your default Discourse theme:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_17.png)

Back in GSC click on __Verify__ to add your domain to your properties:

![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_18.png)

Back in the OAuth2 configuration you can now add your verified domain without further issues!
![Using OAuth2 with Discourse](./Discourse_OAuth2_Login_19.png)<file_sep>---
date: "2018-12-01"
title: "OpenHAB2 and MQTT"
categories:
  - IoT
  - Smarthome
---

![<NAME>](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [OpenHAB 2 Installation](#openhab-2-installation)
  - [How to install Java with Apt on Debian 9](#how-to-install-java-with-apt-on-debian-9)
    - [Installing the Default JRE/JDK](#installing-the-default-jrejdk)
- [OpenHAB Configuration through Paper UI](#openhab-configuration-through-paper-ui)
  - [MQTT Binding](#mqtt-binding)
  - [OpenHAB Cloud Connector](#openhab-cloud-connector)

<!-- /TOC -->

## OpenHAB 2 Installation

We are going to use the apt-get package manager to install the latest OpenHAB 2 snapshot on our Debian Linux server. For more installation options - different Linux versions, stable builds, etc. - check out the [OpenHAB Download Section](https://www.openhab.org/download/).

1. We will start by adding the needed repository key:

```bash
wget -qO - 'https://bintray.com/user/downloadSubjectPublicKey?username=openhab' | sudo apt-key add -
```

2. Add the HTTPS transport for APT:

```bash
sudo apt-get install apt-transport-https
```

3. Add the repository:

```bash
echo 'deb https://openhab.jfrog.io/openhab/openhab-linuxpkg unstable main' | sudo tee /etc/apt/sources.list.d/openhab2.list
```

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_01.png)

---

4. Update the package lists and install the openHAB distribution package:

```bash
sudo apt-get update && sudo apt-get install openhab2
```

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_02.png)

---

5. Execute the following statements to configure openHAB to start automatically using `systemd`:

```bash
sudo /bin/systemctl daemon-reload
sudo /bin/systemctl enable openhab2.service
```

6. You can start openhab2 by executing:

```bash
sudo /bin/systemctl start openhab2.service
```

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_03.png)

---

7. Open the OpenHAB default port in your Firewall:

```bash
sudo firewall-cmd --permanent --zone=public --add-port=8080/tcp
sudo firewall-cmd --reload
sudo firewall-cmd --list-all
```

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_04.png)

---

8. Navigate with a web browser to `http://<ip-address>:8080` and select the __Standard Package__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_09.png)

---

If the OpenHAB user interface does not load, you are probably missing Java on your Linux server. You can check the OpenHAB service status by typing:

```bash
systemctl status openhab2
```

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_05.png)

---

### How to install Java with Apt on Debian 9

In this guide, you will install the Java Runtime Environment (JRE) and the Java Development Kit (JDK) using `apt`.

#### Installing the Default JRE/JDK

The easiest option for installing Java is to use the version packaged with Debian. By default, Debian 9 includes OpenJDK, which is an open-source variant of the JRE and JDK.

This package will install OpenJDK version 1.8, which is compatible with Java 8. Java 8 is the current Long Term Support version and is still widely supported, though public maintenance ends in January 2019.

1. To install this version, first update the package index:

```bash
sudo apt update
```

2. Next, check if Java is already installed:

```bash
java -version
```

If Java is not currently installed, you'll see the following output:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_06.png)

---

3. 
Execute the following command to install OpenJDK:

```bash
sudo apt install default-jre
```

This command will install the Java Runtime Environment (JRE). This will allow you to run almost all Java software. Verify the installation with `java -version`:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_07.png)

---

4. You may need the Java Development Kit (JDK) in addition to the JRE in order to compile and run some specific Java-based software. To install the JDK, execute the following command, which will also install the JRE:

```bash
sudo apt install default-jdk
```

Verify that the JDK is installed by checking the version of `javac`, the Java compiler:

```bash
javac -version
```

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_08.png)

---

You can now navigate with a web browser to `http://<ip-address>:8080` and select the __Standard Package__ of OpenHAB2:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_09.png)

---

We highly recommend that you read the [Concepts Introduction](https://www.openhab.org/docs/concepts/). It introduces a number of important ideas that will help you as you install and begin to configure openHAB for the first time.

Once you have openHAB up and running, the [Configuration Guide](https://www.openhab.org/docs/configuration/) contains everything you need to know to get your openHAB installation talking to different devices around your home. For instance, you can use [Sitemaps](https://www.openhab.org/docs/configuration/sitemaps.html) to control how the status of these devices is displayed on different openHAB User Interfaces, and you can begin to experiment with [Rules](https://www.openhab.org/docs/configuration/rules-dsl.html) in order to get the most out of your installation. There are many other ways of interacting with your openHAB smart home: the most popular are presented in the openHAB documentation.

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_10.png)

---

## OpenHAB Configuration through Paper UI

The Paper UI is a new interface that helps setting up and configuring your openHAB instance. It does not (yet) cover all aspects, so you still need to resort to textual configuration files.

__Add-on management__: Easily install or uninstall [openHAB add-ons](https://www.openhab.org/addons/):

### MQTT Binding

Go to the __Add-Ons__ Menu and click on the __Bindings Tab__. We are going to use the MQTT protocol to communicate with our INSTAR IP camera from OpenHAB. To be able to connect to the Mosquitto MQTT Server we need to install the [MQTT Binding](https://www.openhab.org/addons/bindings/mqtt/) - make sure that you choose __Version 2__ (at the moment of writing this is the _2.4.0_ version).

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_11.png)

---

Now switch to your OpenHAB Inbox:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_12.png)

---

Click on the __+__ icon to add the MQTT Binding:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_13.png)

---

Find the MQTT Binding inside the list and select it:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_14.png)

---

Click on __Add Manually__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_15.png)

---

And choose the __MQTT Broker__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_16.png)

---

With the newest firmware for your INSTAR Full HD camera, you can use the camera as MQTT broker. For older camera models please install an MQTT broker like Mosquitto first (see the sketch below).
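A minimal broker installation on a Debian-based system might look like this - a sketch, assuming the distribution's stock Mosquitto packages:

```bash
sudo apt-get install mosquitto mosquitto-clients

# quick smoke test: subscribe in one terminal ...
mosquitto_sub -h localhost -t 'test/topic' -v

# ... and publish from a second terminal; the message should appear in the subscriber
mosquitto_pub -h localhost -t 'test/topic' -m 'hello broker'
```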
Add your broker's IP address and the broker port - by default this is __1883__. Note that we are first not going to use [TLS encryption](https://community.openhab.org/t/mqtt-binding-and-ssl/40622). We will add it once we are able to establish the connection:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_17.png)

---

Your INSTAR Full HD camera's MQTT default login is the Administrator login you added to your camera (if you are using Mosquitto). Then save your setting by clicking on the blue confirmation button:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_18.png)

---

Back in the __Things Menu__ click on __+__ again - this time to add a __Thing__ that we can use for a _PaperUI_ button later on:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_19.png)

---

Click on __MQTT Binding__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_20.png)

---

And __Add Manually__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_21.png)

---

This time we need to add a __Generic MQTT Thing__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_22.png)

---

I am going to create a button that triggers the alarm on my INSTAR Full HD camera and name it here accordingly. As __Bridge__ you need to select the MQTT broker you just created. Click on the blue button to confirm:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_23.png)

---

Back in the __Thing Menu__ click to edit the __Thing__ you just created:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_24.png)

---

Click on the blue __+__ button to add a __Channel__ to the __Thing__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_25.png)

---

I am going to use an __On/Off Switch__ which is not ideal for the function - the Alarm-Trigger command does not have an On/Off state. Unfortunately OpenHAB does not offer a simple button. The MQTT topic to trigger the alert is `alarm/pushalarm` and as message payload we need to add `{"val":"1"}`. Click on __Save__ to confirm:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_26.png)

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_26a.png)

---

By clicking on the blue icon in front of the created channel we are now able to link it to a switch in the __OpenHAB PaperUI__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_27.png)

---

Click on __Create new item...__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_28.png)

---

Click on __Link__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_29.png)

---

You can now click on __Control__ to open the __PaperUI__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_30.png)

---

Our Alarm Trigger now appears in the __Paper UI__ and clicking the switch will send the MQTT message to our MQTT server that is going to notify our camera to trigger an alert. Note the MQTT.fx program at the bottom of the frame can be used to verify that the command was actually sent - but the installation __is optional__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_31.png)

---

Instead of using MQTT.fx you can also just check your camera's logbook that should now show an __Audio Alarm__ (the manual alarm trigger uses the audio alarm to trigger all your programmed Alarm Actions):

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_32.png)

---

All right, that's it - the Binding was successfully added and we are now able to control all of our camera functions with the MQTT interface.

### OpenHAB Cloud Connector

You can use the OpenHAB Cloud to access your Smarthome over the internet.
First we need to install the __openHAB Cloud Connector__, which can be found in the __Add-ons Menu__ in the __Miscellaneous__ section:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_33.png)

---

Now you need to create a free account on the OpenHAB Cloud Service. Once your account is set up and you are signed in, go to the [Account Page](https://myopenhab.org/account). Here you need to add your __OpenHAB UUID__ and __OpenHAB Secret__. Those can be found on your OpenHAB installation under the following paths (make sure that you install the __Cloud Connector__ first for them to be generated):

```bash
cat /var/lib/openhab2/uuid
cat /var/lib/openhab2/openhabcloud/secret
```

Add your UUID and Secret on your [Account Page](https://myopenhab.org/account) and update your configuration:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_35.png)

---

Now, back in the __Paper UI__, go to __Configuration__ and __Services__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_36.png)

---

You can find the __Cloud Connector__ in the __IO Section__. Click to configure it:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_37.png)

---

And make sure that the __Remote Access__ is active and the __Base URL__ is set to `https://myopenhab.org/`:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_38.png)

---

Now go back to your terminal and reboot your Linux system that is running OpenHAB with `sudo reboot`. Once OpenHAB is back up, refresh the [Cloud Page](https://myopenhab.org/events?source=openhab) - you should see that your status is now set to __ONLINE__:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_39.png)

---

When you now switch to the [dashboard URL](https://home.myopenhab.org/start/index) you will be able to see your regular OpenHAB interface. And this URL will work from anywhere where you have internet access:

---

![OpenHAB 2 with your INSTAR IP Camera](./OpenHAB2_40.png)

---<file_sep>---
date: "2019-09-16"
title: "Working with SQL Dumps"
categories:
  - SQL
  - Windows
---

![Harbin, China](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Exporting/Importing SQL files](#exportingimporting-sql-files)
- [MySQL Docker Container](#mysql-docker-container)
- [HeidiSQL](#heidisql)

<!-- /TOC -->

## Exporting/Importing SQL files

```bash
mysqldump -u username -p database_name > file.sql
mysql -u username -p database_name < file.sql
```

## MySQL Docker Container

We can start the MySQL database on our system using Docker. [Download the MySQL Container](https://hub.docker.com/_/mysql) by typing `docker pull mysql` into your Terminal / PowerShell. To start the container run the following commands:

```bash
docker volume create crv_mysql

docker run \
-e MYSQL_ROOT_PASSWORD=<PASSWORD> \
-e MYSQL_DATABASE=devdb \
-e MYSQL_USER=dbuser \
-e MYSQL_PASSWORD=<PASSWORD> \
--mount type=volume,src=crv_mysql,dst=/var/lib/mysql \
-p 3306:3306 \
-d \
mysql:latest
```

This will create a volume to store your data in `/var/lib/mysql` and also create a non-root user and a database that can be accessed with this user.

If you just need a quick look at an SQL dump, simplify this command to:

```bash
docker run \
-e MYSQL_ROOT_PASSWORD=<PASSWORD> \
-e MYSQL_PASSWORD=<PASSWORD> \
-p 3306:3306 \
-d \
mysql:latest
```

You can now connect to the database with `root` and `dbpassword` on `127.0.0.1:3306`.

## HeidiSQL

Under Windows we can use [HeidiSQL](https://www.heidisql.com/download.php) to work with our database.
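If you prefer the command line instead, a dump can be imported into the containerized server directly - a sketch, assuming the user and database names from the first `docker run` command above:

```bash
# import a dump into the devdb database of the running container
mysql -h 127.0.0.1 -P 3306 -u dbuser -p devdb < file.sql
```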
Once you have downloaded and installed HeidiSQL, connect it to the database service:

![HeidiSQL](./HeidiSQL_01.png)

Now right-click to add a new database. Once created, select the database and click on the folder icon to add your SQL file:

![HeidiSQL](./HeidiSQL_02.png)

![HeidiSQL](./HeidiSQL_03.png)
<file_sep>---
date: "2017-10-01"
title: "Using NGINX as proxy for your nodejs apps"
categories:
  - LINUX
  - NGINX
---

![Hongkong](./photo-34607920835_e26fff721f_o.jpg)

<!-- TOC -->

- [01 Useful links](#01-useful-links)
- [02 Install Nginx and Adjust the Firewall](#02-install-nginx-and-adjust-the-firewall)
- [03 FirewallD](#03-firewalld)
- [04 Create a login](#04-create-a-login)
- [05 nginx.conf](#05-nginxconf)
- [06 virtual.conf](#06-virtualconf)
- [07 GoDaddy Certs](#07-godaddy-certs)
  - [Generate a CSR and Private Key](#generate-a-csr-and-private-key)
  - [Download your key from GoDaddy](#download-your-key-from-godaddy)
  - [Install Certificate On Web Server](#install-certificate-on-web-server)
- [08 LetsEncrypt and Certbot](#08-letsencrypt-and-certbot)
  - [Install Certbot on CentOS 7](#install-certbot-on-centos-7)
  - [Run Certbot](#run-certbot)
  - [Setting Up Auto Renewal](#setting-up-auto-renewal)
    - [Systemd](#systemd)
    - [Cron.d](#crond)
  - [TLS-SNI-01 challenge Deactivated](#tls-sni-01-challenge-deactivated)

<!-- /TOC -->

## 01 Useful links
___

* [Apache2-Utils](https://kyup.com/tutorials/set-http-authentication-nginx/)
* [SSL Labs](https://www.ssllabs.com/ssltest/)
* [Set up NGINX with http/2](https://www.digitalocean.com/community/tutorials/how-to-set-up-nginx-with-http-2-support-on-ubuntu-16-04)
* [Create a self-signed Certificate](https://www.digitalocean.com/community/tutorials/how-to-create-a-self-signed-ssl-certificate-for-nginx-on-centos-7/)
* [How To Secure Nginx with Let's Encrypt on CentOS 7](https://www.digitalocean.com/community/tutorials/how-to-secure-nginx-with-let-s-encrypt-on-centos-7)
* [Installing Elasticsearch](https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html)
* [Installing Kibana](https://www.elastic.co/guide/en/kibana/current/install.html)
* [Installing X-Pack](https://www.elastic.co/downloads/x-pack)

## 02 Install Nginx and Adjust the Firewall
___

* **Step One** — Nginx is not available in CentOS's default repositories - but we can install it from the EPEL (Extra Packages for Enterprise Linux) repository.

```
sudo yum install epel-release
```

* **Step Two** — Next, we can install Nginx.

```
sudo yum install nginx
```

* **Step Three** — Start the Nginx service and test it inside your browser http://server_domain_name_or_IP/

```
sudo systemctl start nginx
```

* **Step Four** — Check that the service is up and running by typing:

```
systemctl status nginx
```

* **Step Five** — You will also want to enable Nginx, so it starts when your server boots:

```
sudo systemctl enable nginx
```

## 03 FirewallD
___

* **Step One** — Installation: Open ports 80 and 443 in [FirewallD](http://www.firewalld.org/)

To start the service and enable FirewallD on boot:

```
sudo systemctl start firewalld
sudo systemctl enable firewalld
```

To stop and disable it:

```
sudo systemctl stop firewalld
sudo systemctl disable firewalld
```

Check the firewall status.
The output should say either running or not running:

```
sudo firewall-cmd --state
```

To view the status of the FirewallD daemon:

```
sudo systemctl status firewalld
```

To reload a FirewallD configuration:

```
sudo firewall-cmd --reload
```

* **Step Two** — Configuration

Add the http/s rule to the permanent set and reload FirewallD.

```
sudo firewall-cmd --zone=public --add-service=https --permanent
sudo firewall-cmd --zone=public --add-service=http --permanent
sudo firewall-cmd --reload
```

Allow traffic / block traffic over ports:

```
sudo firewall-cmd --zone=public --add-port=12345/tcp --permanent
sudo firewall-cmd --zone=public --remove-port=12345/tcp --permanent
```

Verify open ports:

```
firewall-cmd --list-ports
```

Check the firewall status:

```
sudo firewall-cmd --state
```

To view the status of the FirewallD daemon:

```
sudo systemctl status firewalld
```

To reload a FirewallD configuration:

```
sudo firewall-cmd --reload
```

## 04 Create a login
___

```
sudo htpasswd -c /etc/nginx/.htpasswd USERNAME

New password: <PASSWORD>
Re-type new password: <PASSWORD>
```

## 05 nginx.conf

/etc/nginx/nginx.conf

```nginx
user nginx;
worker_processes 8;
error_log /var/log/nginx/error.log;
pid /run/nginx.pid;

events {
    worker_connections 1024;
}

http {
    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for"';

    access_log /var/log/nginx/access.log main;

    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;

    gzip on;
    gzip_vary on;
    gzip_proxied any;
    gzip_comp_level 6;
    gzip_buffers 16 8k;
    gzip_http_version 1.1;
    gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;

    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    # Load modular configuration files from the /etc/nginx/conf.d directory.
    # See http://nginx.org/en/docs/ngx_core_module.html#include
    # for more information.
    include /etc/nginx/conf.d/*.conf;
    # include /etc/nginx/sites-enabled/*;

    # Hide nginx version token
    server_tokens off;

    # Configure buffer sizes
    client_body_buffer_size 16k;
    client_header_buffer_size 1k;
    client_max_body_size 8m;
    large_client_header_buffers 4 8k;
}
```

## 06 virtual.conf

/etc/nginx/conf.d/virtual.conf

Set up virtual server instances for our 2 node/express apps, Elasticsearch and Kibana

```nginx
# redirect http/80 traffic to https/443 for our node apps
server {
    listen 80;
    listen [::]:80;
    server_name example.de example2.de;
    return 301 https://$server_name$request_uri;
}

# point to our first node app that is running on port 8888 and accept calls over https://example.de:443
upstream myApp_en {
    # point to the running node
    server 127.0.0.1:8888;
}

server {
    # users using this port and domain will be directed to the node app defined above
    # listen 80 default_server;
    # listen [::]:80 default_server ipv6only=on;
    listen 443 ssl http2 default_server;
    listen [::]:443 ssl http2 default_server;
    # If you want to run more than one node app, they either have to be assigned different web domains (server_name) or ports!
server_name example.de; # Adding the SSL Certificates ssl_prefer_server_ciphers on; ssl_ciphers EECDH+CHACHA20:EECDH+AES128:RSA+AES128:EECDH+AES256:RSA+AES256:EECDH+3DES:RSA+3DES:!MD5; ssl_dhparam /etc/nginx/ssl/dhparam.pem; ssl_certificate /etc/nginx/ssl/nginx-selfsigned.crt; ssl_certificate_key /etc/nginx/ssl/nginx-selfsigned.key; # set the default public directory for your node root /opt/myApp_en/build/public; # Optimizing Nginx for Best Performance ssl_session_cache shared:SSL:5m; ssl_session_timeout 1h; location / { proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header Host $http_host; proxy_set_header X-NginX-Proxy true; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "upgrade"; proxy_max_temp_file_size 0; proxy_pass http://myApp_en; proxy_redirect off; proxy_read_timeout 240s; # Authentication can be activated during development # auth_basic "Username and Password <PASSWORD>"; # the user login has to be generated # auth_basic_user_file /etc/nginx/.htpasswd; } # use NGINX to cache static resources that are requested regularly location ~* \.(css|js|jpg|png|ico)$ { expires 168h; } } # point to our second node app that is running on port 8484 and accept calls over https://example2.de:443 upstream myApp_de { # point to the second running node server 127.0.0.1:8484; } server { # users using this port and domain will be directed to the second node app # listen 80; # listen [::]:8080 ipv6only=on; listen 443 ssl http2; # The IPv6 address is unique - only one app can use the default port 443! listen [::]:444 ssl http2; server_name example2.de; # adding the SSL Certificates ssl_prefer_server_ciphers on; ssl_ciphers EECDH+CHACHA20:EECDH+AES128:RSA+AES128:EECDH+AES256:RSA+AES256:EECDH+3DES:RSA+3DES:!MD5; ssl_dhparam /etc/nginx/ssl/dhparam.pem; ssl_certificate /etc/nginx/ssl/nginx-selfsigned.crt; ssl_certificate_key /etc/nginx/ssl/nginx-selfsigned.key; # set the default public directory for your second node root /opt/myApp_de/build/public; # optimizing Nginx for Best Performance ssl_session_cache shared:SSL:5m; ssl_session_timeout 1h; location / { proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header Host $http_host; proxy_set_header X-NginX-Proxy true; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "upgrade"; proxy_max_temp_file_size 0; proxy_pass http://myApp_de; proxy_redirect off; proxy_read_timeout 240s; # auth_basic "Username and Password <PASSWORD>"; # auth_basic_user_file /etc/nginx/.htpasswd; } # use NGINX to cache static resources that are requested regularly location ~* \.(css|js|jpg|png|ico)$ { expires 168h; } } # point to our Elasticsearch database that is running on port 9200 and accept calls over 8080 upstream elasticsearch { # point to the second running node server 127.0.0.1:9200; } server { # users using this port will be directed to Elasticsearch listen 8080; listen [::]:8080 ipv6only=on; server_name SERVER_IP_ADDRESS; location / { proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header Host $http_host; proxy_set_header X-NginX-Proxy true; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "upgrade"; proxy_max_temp_file_size 0; proxy_pass http://elasticsearch; proxy_redirect off; proxy_read_timeout 240s; auth_basic "Username and Password are required"; auth_basic_user_file /etc/nginx/.htpasswd; } } # point to our Kibana instance that is running on port 5601 and accept calls over 
8181
server {
    # users using this port will be directed to Elasticsearch/Kibana
    listen 8181;
    listen [::]:8181 ipv6only=on;
    server_name SERVER_IP_ADDRESS;

    auth_basic "Restricted Access";
    auth_basic_user_file /etc/nginx/.htpasswd;

    location / {
        proxy_pass http://localhost:5601;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;
    }
}
```

## 07 GoDaddy Certs

When you ordered a wildcard certificate from GoDaddy you will receive two files: Your SSL Certificate with a random name (Ex. 93rfs8dhf834hts.crt) and the GoDaddy intermediate certificate bundle (gd_bundle-g2-g1.crt). Let's install them on our server.

### Generate a CSR and Private Key

Create a folder to put all our ssl certificates:

```
mkdir /etc/nginx/ssl
cd /etc/nginx/ssl
```

Generate our private key, called example.com.key, and a CSR, called example.com.csr:

```
openssl req -newkey rsa:2048 -nodes -keyout example.com.key -out example.com.csr
```

At this point, you will be prompted for several lines of information that will be included in your certificate request. The most important part is the Common Name field which should match the name that you want to use your certificate with — for example, example.com, www.example.com, or (for a wildcard certificate request) *.example.com.

### Download your key from GoDaddy

The files you receive will look something like this:

- 93rfs8dhf834hts.crt
- gd_bundle-g2-g1.crt

Upload both to the /etc/nginx/ssl directory and rename the first one to your domain name example.com.crt

### Install Certificate On Web Server

You can use the following command to create a combined file from both GoDaddy files called example.com.chained.crt:

```
cat example.com.crt gd_bundle-g2-g1.crt > example.com.chained.crt
```

And now you should change the access permissions on this folder:

```
cd /etc/nginx
sudo chmod -R 600 ssl/
```

To complete the configuration you have to make sure your NGINX config points to the right cert file and to the private key you generated earlier. Add the following lines inside the server block of your NGINX config:

```
# adding the SSL Certificates
ssl_prefer_server_ciphers on;
ssl_ciphers EECDH+CHACHA20:EECDH+AES128:RSA+AES128:EECDH+AES256:RSA+AES256:EECDH+3DES:RSA+3DES:!MD5;
ssl_certificate /etc/nginx/ssl/example.com.chained.crt;
ssl_certificate_key /etc/nginx/ssl/example.com.key;
```

Always test your configuration first:

```
nginx -t
```

and then reload:

```
service nginx reload
```

## 08 LetsEncrypt and Certbot

### Install Certbot on CentOS 7

**yum install certbot-nginx**

```
Dependencies Resolved

==============================================================================================
 Package                      Arch          Version                  Repository          Size
==============================================================================================
Installing:
 python2-certbot-nginx        noarch        0.14.1-1.el7             epel                52 k
Installing for dependencies:
 pyparsing                    noarch        1.5.6-9.el7              base                94 k

Transaction Summary
==============================================================================================
Install  1 Package (+1 Dependent package)

Complete!
``` ### Run Certbot **certbot --nginx -d wiki.instar.fr** ``` Saving debug log to /var/log/letsencrypt/letsencrypt.log Enter email address (used for urgent renewal and security notices) (Enter 'c' to cancel): ``` **<EMAIL>** ``` ------------------------------------------------------------------------------- Please read the Terms of Service at https://letsencrypt.org/documents/LE-SA-v1.1.1-August-1-2016.pdf. You must agree in order to register with the ACME server at https://acme-v01.api.letsencrypt.org/directory ------------------------------------------------------------------------------- ``` **(A)gree/(C)ancel: A** ``` Starting new HTTPS connection (1): supporters.eff.org Obtaining a new certificate Performing the following challenges: tls-sni-01 challenge for wiki.instar.fr Waiting for verification... Cleaning up challenges Deployed Certificate to VirtualHost /etc/nginx/conf.d/virtual.conf for set(['wiki.instar.fr']) Please choose whether HTTPS access is required or optional. ------------------------------------------------------------------------------- 1: Easy - Allow both HTTP and HTTPS access to these sites 2: Secure - Make all requests redirect to secure HTTPS access ------------------------------------------------------------------------------- Select the appropriate number [1-2] then [enter] (press 'c' to cancel): 2 The appropriate server block is already redirecting traffic. To enable redirect anyway, uncomment the redirect lines in /etc/nginx/conf.d/virtual.conf. ------------------------------------------------------------------------------- Congratulations! You have successfully enabled https://wiki.instar.fr ------------------------------------------------------------------------------- ``` ``` IMPORTANT NOTES: - Congratulations! Your certificate and chain have been saved at /etc/letsencrypt/live/wiki.instar.fr/fullchain.pem. Your cert will expire on 2017-12-13. To obtain a new or tweaked version of this certificate in the future, simply run certbot again with the "certonly" option. To non-interactively renew *all* of your certificates, run "certbot renew" - Your account credentials have been saved in your Certbot configuration directory at /etc/letsencrypt. You should make a secure backup of this folder now. This configuration directory will also contain certificates and private keys obtained by Certbot so making regular backups of this folder is ideal. ``` ### Setting Up Auto Renewal #### Systemd Go to _/etc/systemd/system/_ and create the following two files _certbot-nginx.service_ ``` [Unit] Description=Renew Certbot certificates (nginx) After=network-online.target [Service] Type=oneshot ExecStart=/usr/bin/certbot-2 renew --deploy-hook "systemctl reload nginx" ``` _certbot-nginx.timer_ ``` [Unit] Description=Renew Certbot certificate (nginx) [Timer] OnCalendar=daily Persistent=true RandomizedDelaySec=86400 [Install] WantedBy=multi-user.target ``` Now activate the service ``` $ systemctl daemon-reload $ systemctl start certbot-nginx.service # to run manually $ systemctl enable --now certbot-nginx.timer # to use the timer ``` #### Cron.d Add Certbot renewal to Cron.d in /etc/cron.d - we want to run it twice daily at 13:22 and 04:17: ``` # Example of job definition: # .---------------- minute (0 - 59) # | .------------- hour (0 - 23) # | | .---------- day of month (1 - 31) # | | | .------- month (1 - 12) OR jan,feb,mar,apr ... 
# |  |  |  |  .---- day of week (0 - 6) (Sunday=0 or 7) OR sun,mon,tue,wed,thu,fri,sat
# |  |  |  |  |
# *  *  *  *  * user-name command to be executed
17 4 * * * /usr/bin/certbot-2 renew --quiet
22 13 * * * /usr/bin/certbot-2 renew --quiet
```

### TLS-SNI-01 challenge Deactivated

If you are receiving the following error when trying to add a certificate to your domain:

```
Client with the currently selected authenticator does not support any combination of challenges that will satisfy the CA.
```

Follow the instructions given [here](https://community.letsencrypt.org/t/solution-client-with-the-currently-selected-authenticator-does-not-support-any-combination-of-challenges-that-will-satisfy-the-ca/49983) and if you’re serving files for that domain out of a directory on that server, you can run the following command:

```
sudo certbot --authenticator webroot --webroot-path <path to served directory> --installer nginx -d <domain>
```

If you’re not serving files out of a directory on the server, you can temporarily stop your server while you obtain the certificate and restart it after Certbot has obtained the certificate. This would look like:

```
sudo certbot --authenticator standalone --installer nginx -d <domain> --pre-hook "service nginx stop" --post-hook "service nginx start"
```

e.g.

1. Create your virtual server conf - the given config below routes a node/express app running on localhost:7777 with a public directory in /opt/mysite-build/app:

```nginx
server {
    listen 80;
    listen [::]:80;
    server_name my.domain.com;
    return 301 https://$server_name$request_uri;
}

upstream app_test {
    # point to the running node
    server 127.0.0.1:7777;
}

server {
    listen 443 ssl http2;
    listen [::]:443 ssl http2;
    server_name my.domain.com;

    # set the default public directory for your node
    root /opt/mysite-build/app;

    # Optimizing Nginx for Best Performance
    ssl_session_cache shared:SSL:5m;
    ssl_session_timeout 1h;

    location / {
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $http_host;
        proxy_set_header X-NginX-Proxy true;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_max_temp_file_size 0;
        proxy_pass http://app_test;
        proxy_redirect off;
        proxy_read_timeout 240s;
    }

    # use NGINX to cache static resources that are requested regularly
    location ~* \.(css|js|jpg|png|ico)$ {
        expires 168h;
    }
}
```

Test your site by opening my.domain.com inside your browser - you should be automatically redirected to https://my.domain.com and be given a certificate warning. Click to proceed anyway to access your site. Now run:

```
sudo certbot --authenticator webroot --webroot-path /opt/mysite-build/app --installer nginx -d my.domain.com
```

certbot will modify your NGINX config automatically!<file_sep>---
date: "2019-09-24"
title: "Installing ioBroker on CentOS8 with Podman"
categories:
  - LINUX
  - Smarthome
  - IoT
  - Docker
---

![Shanghai, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Installation through Podman on CentOS8](#installation-through-podman-on-centos8)

<!-- /TOC -->

## Installation through Podman on CentOS8

ioBroker for Docker is a Docker image for the [ioBroker IoT platform](http://www.iobroker.net). The automatically built images are available on [Docker Hub](https://github.com/buanet/docker-iobroker).
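Podman is the daemonless container engine that ships with CentOS 8, and for everything in this article its command line is interchangeable with Docker's. If you keep typing `docker` out of habit, a shell alias (purely optional) saves you the trouble:

```bash
# optional: map the familiar docker command onto Podman
alias docker=podman
```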
Download the image with the following command:

```bash
podman pull buanet/iobroker:latest
```

And run it:

```bash
podman run -p 8081:8081 --name iobroker buanet/iobroker:latest
```

![ioBroker CentOS8 Podman](./ioBroker_CentOS8_Podman_01.png)

### Persist your Configuration

1. Find out your container ID (or just use the name you have assigned when running the image)

```bash
podman ps

CONTAINER ID  IMAGE                             COMMAND               CREATED         STATUS             PORTS                   NAMES
5582e69da175  localhost/buanet/iobroker:latest  /opt/scripts/iobr...  13 minutes ago  Up 13 minutes ago  0.0.0.0:8081->8081/tcp  iobroker
```

2. Get into the container and find the ioBroker location

```bash
[root@CentOS8 ~]# podman exec -ti 5582e69da175 /bin/bash
root@5582e69da175:/opt/iobroker# find / -name iobroker

/etc/sudoers.d/iobroker
/home/iobroker
/opt/iobroker
/opt/iobroker/iobroker
/usr/bin/iobroker
/iobroker
```

3. Stop and delete the container and change the run script to mount the folders onto your host system:

```bash
podman stop iobroker
podman rm iobroker

podman run -p 8081:8081 \
 -p 8082:8082 \
 -p 8087:8087 \
 -p 1883:1883 \
 --name iobroker \
 -v /opt/iobroker:/opt/iobroker \
 -v /opt/iobroker/home:/home/iobroker \
 --detach=true \
 --restart="always" \
 buanet/iobroker:latest
```

> Port `8082` added for the ioBroker visualization, port `8087` for the REST API and port `1883` for MQTT. Make sure that the folders `/opt/iobroker/home` and `/opt/iobroker` exist on your host system (`chmod -R 777 /opt/iobroker` or `755` to give Podman all necessary permissions and `chcon -Rt svirt_sandbox_file_t /opt/iobroker`).

> I noticed that Podman changes the owner:group of those folders on my host system to iobroker:iobroker. But it sets the permissions wrong and ioBroker no longer loads - you can check it with `podman logs iobroker`. Rerunning `chmod -R 777 /opt/iobroker` solves the issue.<file_sep>---
date: "2017-07-06"
title: "Windows CMD"
categories:
  - Windows
---

![<NAME>](./photo-11627014666_359f04f9db_o.png)

> You can create a bootable USB flash drive to use to deploy your OS of choice. The first step is to prepare the USB flash drive by using DiskPart, which is a command-line utility.

<!-- TOC -->

- [CHKDSK your drive](#chkdsk-your-drive)
- [Create a Bootable USB Flash Drive](#create-a-bootable-usb-flash-drive)
- [DiskPart Commands](#diskpart-commands)

<!-- /TOC -->

## CHKDSK your drive

1. Press the Windows key to open the Start Menu
2. Type-in cmd
3. Right-click on Command Prompt from the search results list
4. Click Run as administrator
5. When Command Prompt launches, type the command: `chkdsk H: /f /r /x`

> The parameters for this command are:
>
> * `/f` option will attempt to fix any found errors
> * `/r` option will locate bad sectors and recover any readable information
> * `/x` option will force the volume you’re about to check to be dismounted before the utility begins a scan
>
> If the drive is in use, type Y to run a scan at your PC’s next restart. If so, exit Command Prompt and restart the computer.

![CHKDSK](./cmd-disk_01.png)

## Create a Bootable USB Flash Drive

DiskPart is a text-mode command interpreter in Windows Vista, Windows® XP, and the Windows Server 2003® family. This tool enables you to manage objects (disks, partitions, or volumes) by using scripts or direct input at a command prompt.
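Besides interactive use, DiskPart can also execute a prepared script file via its `/s` switch - a minimal sketch (the file name `listdisks.txt` is just an example):

```
rem listdisks.txt - example DiskPart script
list disk
list volume
```

Run it with `diskpart /s listdisks.txt` from an elevated Command Prompt.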
### DiskPart Commands

Before you can use [DiskPart commands](https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-vista/cc766465(v=ws.10)) on a disk, partition, or volume, you must first list and then select the object to give it focus. When an object has focus, any [DiskPart commands](https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-vista/cc766465(v=ws.10)) that you type act on that object.

You can list the available objects and determine an object's number or drive letter by using the `list disk`, `list volume`, and `list partition` commands. The `list disk` and `list volume` commands display all disks and volumes on the computer. However, the `list partition` command displays only partitions on the disk that has focus. When you use the `list` commands, an asterisk (*) appears next to the object with focus. You select an object by its number or drive letter, such as disk 0, partition 1, volume 3, or volume C.

1. Insert a USB flash drive into a running computer.
2. Open a Command Prompt window as an administrator.
3. Type `diskpart`.
4. In the new command line window that opens, to determine the USB flash drive number or drive letter, at the command prompt, type `list disk`, and then press `ENTER`. The `list disk` command displays all the disks on the computer. Note the drive number or drive letter of the USB flash drive.
5. At the command prompt, type `select disk <X>`, where X is the drive number or drive letter of the USB flash drive, and then press `ENTER`.
6. Type `clean`, and then press `ENTER`. This command deletes all data from the USB flash drive.
7. To create a new primary partition on the USB flash drive, type `create part pri`, and then press `ENTER`.
8. To select the partition that you just created, type `select part 1`, and then press `ENTER`.
9. To format the partition, type `format fs=ntfs quick`, and then press `ENTER`.

> Important
>
> If your server platform supports Unified Extensible Firmware Interface (UEFI), you should format the USB flash drive as FAT32 rather than as NTFS. To format the partition as FAT32, type `format fs=fat32 quick`, and then press `ENTER`.

10. Type `active`, and then press `ENTER`.
11. Type `exit`, and then press `ENTER`.
12. When you finish preparing your custom image, save it to the root of the USB flash drive.

![DISKPART](./cmd-disk_02.png)

<file_sep>---
date: "2020-01-05"
title: "Installing FirewallD on CentOS 8"
categories:
  - LINUX
---

![Shenzhen, China](./photo-kt443t6d_64hdh43hfh6dgjdfhg4_d.jpg)

<!-- TOC -->

- [Installing FirewallD](#installing-firewalld)
- [Enable FirewallD](#enable-firewalld)
- [Opening Ports and Services](#opening-ports-and-services)
- [For Docker](#for-docker)

<!-- /TOC -->

## Installing FirewallD

firewalld is installed by default on some Linux distributions, including many images of CentOS 8.
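You can quickly check whether the package and service are already there before installing anything (a simple sanity check):

```bash
rpm -q firewalld                # is the package installed?
systemctl is-active firewalld   # is the service already running?
```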
However, it may be necessary for you to install firewalld yourself:

```bash
yum install firewalld
```

## Enable FirewallD

After you install firewalld, you can enable it as a system service:

```bash
systemctl enable firewalld
systemctl start firewalld
firewall-cmd --state
```

## Opening Ports and Services

Enable available services:

```bash
firewall-cmd --permanent --zone=public --add-service=cockpit --add-service=http --add-service=https
firewall-cmd --reload
firewall-cmd --zone=public --list-services
```

And open specific ports:

```bash
firewall-cmd --permanent --zone=public --add-port=12345/tcp
firewall-cmd --reload
firewall-cmd --zone=public --list-ports
```

## For Docker

```bash
firewall-cmd --zone=public --change-interface=docker0 --permanent
firewall-cmd --zone=public --add-masquerade --permanent
firewall-cmd --reload
firewall-cmd --list-all
```<file_sep>user_says = input("Please enter a string: ")
print(user_says)<file_sep>---
date: "2018-11-27"
title: "Node-RED SQL Logging Datastreams"
categories:
  - IoT
  - Node-RED
  - Databases
  - SQL
---

![Annapurna, Nepal](./photo-kt456d_645dhfh6dgjkhg4_d.jpg)

<!-- TOC -->

- [Working with AlaSQL in Node-RED](#working-with-alasql-in-node-red)
- [Connect our Camera to Node-RED](#connect-our-camera-to-node-red)
- [Writing the Data to our SQL Database](#writing-the-data-to-our-sql-database)
- [Backup the Database Table to a File](#backup-the-database-table-to-a-file)
- [Backup your camera's System Log](#backup-your-cameras-system-log)
- [Flow Export](#flow-export)
- [Light Sensor Readout](#light-sensor-readout)
- [System Log Backup](#system-log-backup)

<!-- /TOC -->

In this tutorial we want to use Node-RED to retrieve information from an INSTAR Full HD camera, display its value on the Node-RED dashboard and feed it into a SQL database. In the example we will read the current brightness level detected by the internal light sensor. This value can be used to detect when your camera switches the Infrared Cutfilter (IRCut), which is useful information if you want to change the camera configuration based on the night and day mode (e.g. change the sensitivity of the motion detection or move to a different P&T position). The exact value at which the switch happens depends on your camera model and has to be determined first:

---

![alasql in node-red](./node-red_alasql_01.png)

---

The same mechanism can be used to create backups of your camera configuration or system logs. As an example we will use Node-RED to backup the camera's system logbook.

> [Scroll to the bottom](#Flow-Export) of this page to find the complete flows and import them to your Node-RED installation to follow along. But make sure to first install `node-red-contrib-alasql` as described below.

## Working with AlaSQL in Node-RED

[AlaSQL](https://github.com/agershun/alasql) is an open source SQL database for JavaScript with a strong focus on query speed and data source flexibility for both relational data and schemaless data. It can be added to [Node-RED](https://nodered.org) by installing [node-red-contrib-alasql](https://www.npmjs.com/package/node-red-contrib-alasql):

---

![alasql in node-red](./node-red_alasql_02.png)
![alasql in node-red](./node-red_alasql_03.png)

---

The package provides us with a node that enables us to use SQL queries like CREATE, SELECT, INSERT and DROP to create tables, add data to them and re-feed it to our Node-RED project at a later stage.
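As a minimal sketch, the whole lifecycle we are going to build below comes down to four statements (using the example table from this article):

```sql
CREATE TABLE lightSensor (camera string, brightness number);
INSERT INTO lightSensor VALUES ('192.168.2.115', 208);
SELECT * FROM lightSensor;
DROP TABLE lightSensor;
```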
There is also a handy Out-File and In-File node that enables us to back up our tables to a file and recreate our database from this backup.

To prepare a table for our soon-to-be-created data, we can use an AlaSQL node with the following command:

```sql
CREATE TABLE lightSensor (camera string, brightness number)
```

---

![alasql in node-red](./node-red_alasql_04.png)

---

Click on the Inject node next to the __CREATE__ node to create the table if it does not exist yet (the inject node is set to trigger 5s after Node-RED started - so you should already be good to go).

## Connect our Camera to Node-RED

As an example we can connect to the HTTP API of an INSTAR IP Full HD camera using a [CGI command](/Advanced_User/CGI_Commands/) like (__don't forget to adjust the IP address and your camera login!__):

```bash
http://user:[email protected]/param.cgi?cmd=getsaradcstate
```

---

![alasql in node-red](./node-red_alasql_05.png)

---

The command will retrieve the current brightness level recorded by the internal light sensor of the camera. We need to add some RegEx `var brightness = msg.payload.replace(/^\D+|\D+$/g, "");` to clean up the response:

---

![alasql in node-red](./node-red_alasql_06.png)

---

The regular expression takes the camera response `var saradc_state="208";`, extracts the value (in this case `208`) and sets it as the `msg.payload`, which can then be used directly in a [node-red-dashboard](https://flows.nodered.org/node/node-red-dashboard) node:

---

![alasql in node-red](./node-red_alasql_07.png)
![alasql in node-red](./node-red_alasql_08.png)

---

## Writing the Data to our SQL Database

For the AlaSQL node we can use an insert command to add the measurement to our database table (`CREATE TABLE lightSensor (camera string, brightness number)`):

```sql
INSERT INTO lightSensor VALUES ('192.168.2.115',?)
```

---

![alasql in node-red](./node-red_alasql_09.png)

---

The Inject node is configured to trigger once every minute. You can check if there are already values stored inside the SQL table by clicking on the Inject node next to the [SELECT node](#Working-with-AlaSQL-in-Node-RED) we created above - `SELECT * FROM lightSensor`. Open the debug panel in Node-RED to see the output - it should list every row inside the `lightSensor` table.

## Backup the Database Table to a File

Once we have collected all the data we need, we can use an AlaSQL node to select all data from one camera `SELECT * FROM lightSensor WHERE camera IN ('192.168.2.115')` and use an AlaSQL Out-File node to write this information to a file (xlsx, csv, json, etc.). Make sure that the folder `/home/pi/.node-red/sql/` exists and Node-RED has the necessary rights to write to it.

---

![alasql in node-red](./node-red_alasql_10.png)

---

## Backup your camera's System Log

To get your camera's System Log you can use the following path (_please change the login and the IP address according to your camera setup_):

```bash
http://admin:[email protected]/tmpfs/syslog.txt
```

---

![alasql in node-red](./node-red_alasql_11.png)

---

Once we send this command our camera will answer with a simple text file, listing all recorded camera events. To be able to write this information to a database, we first have to clean it up using regular expressions:

---

![alasql in node-red](./node-red_alasql_12.png)

---

Now we are able to use an AlaSQL node to insert the information into our database table - e.g.
`sysLog116`: ```bash INSERT INTO sysLog116 VALUES (CURRENT_TIMESTAMP,?); ``` --- ![alasql in node-red](./node-red_alasql_13.png) --- The following node inside the sequence will delete the log file on our camera to prevent duplicated event entries in our database: ```bash http://admin:[email protected]/param.cgi?cmd=cleanlog&-name=sys ``` Just as before we first need to create this table by triggering the __CREATE__ node if the table does not exist already: ```bash CREATE TABLE sysLog116 (ts TIMESTAMP VARCHAR(80), event string); ``` --- ![alasql in node-red](./node-red_alasql_14.png) --- ## Flow Export __How to import a Flow__ _Open the top menu then select import -> clipboard, this will open a dialogue box where you can import the flow. Copy the JSON flow above to your system clipboard. When the file content has been pasted into the dialogue box press the import button._ ### Light Sensor Readout ![alasql in node-red](./node-red_alasql_15.png) ```json [{"id":"54e58600.85c09c","type":"alasql","z":"4e79509.5da71b","name":"SELECT","query":"SELECT * FROM lightSensor","x":260,"y":220,"wires":[["f74b0716.9b1e78"]]},{"id":"f74b0716.9b1e78","type":"debug","z":"4e79509.5da71b","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":450,"y":129,"wires":[]},{"id":"c7d0d4c9.343eb8","type":"inject","z":"4e79509.5da71b","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":123,"y":220,"wires":[["54e58600.85c09c"]]},{"id":"64f88aa9.618984","type":"alasql","z":"4e79509.5da71b","name":"CREATE","query":"CREATE TABLE lightSensor (camera string, brightness number)","x":280,"y":100,"wires":[["f74b0716.9b1e78"]]},{"id":"3f95a33a.3bc3fc","type":"inject","z":"4e79509.5da71b","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":true,"onceDelay":"5","x":130,"y":100,"wires":[["64f88aa9.618984"]]},{"id":"88a397a8.148378","type":"alasql","z":"4e79509.5da71b","name":"INSERT","query":"INSERT INTO lightSensor VALUES ('192.168.2.115',61),('192.168.2.116',66),('192.168.2.117',67),('192.168.2.118',2),('192.168.2.15',59),('172.16.31.10',2)","x":260,"y":160,"wires":[["f74b0716.9b1e78"]]},{"id":"c0deb77.6afab48","type":"inject","z":"4e79509.5da71b","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":123,"y":160,"wires":[["88a397a8.148378"]]},{"id":"20947549.54031a","type":"alasql","z":"4e79509.5da71b","name":"DROP","query":"DROP TABLE lightSensor","x":260,"y":40,"wires":[["f74b0716.9b1e78"]]},{"id":"d73e17f7.7800f8","type":"inject","z":"4e79509.5da71b","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":123,"y":40,"wires":[["20947549.54031a"]]},{"id":"2f0fb36e.ef2d1c","type":"http request","z":"4e79509.5da71b","name":"192.168.2.115","method":"GET","ret":"txt","paytoqs":false,"url":"http://admin:[email protected]/param.cgi?cmd=getsaradcstate","tls":"","proxy":"","authType":"basic","x":320,"y":340,"wires":[["8e66245b.715848"]]},{"id":"8e66245b.715848","type":"function","z":"4e79509.5da71b","name":"brightness","func":"msg.topic = \"brightness\";\n\nvar brightness = msg.payload.replace(/^\\D+|\\D+$/g, \"\");\n\nmsg.payload = brightness;\n\nreturn 
msg;","outputs":1,"noerr":0,"x":488,"y":340,"wires":[["eed46a9f.8c8608","79b12832.8cfbf8","72dc17cb.ff6178"]]},{"id":"eed46a9f.8c8608","type":"ui_chart","z":"4e79509.5da71b","name":"Brightness","group":"58c9cffc.1484f","order":1,"width":"0","height":"0","label":"Brightness (IN-9008)","chartType":"line","legend":"false","xformat":"HH:mm:ss","interpolate":"bezier","nodata":"","dot":false,"ymin":"","ymax":"","removeOlder":1,"removeOlderPoints":"","removeOlderUnit":"3600","cutout":0,"useOneColor":false,"colors":["#1f77b4","#aec7e8","#ff7f0e","#2ca02c","#98df8a","#d62728","#ff9896","#9467bd","#c5b0d5"],"useOldStyle":false,"outputs":1,"x":653,"y":339,"wires":[[]]},{"id":"79b12832.8cfbf8","type":"ui_text","z":"4e79509.5da71b","group":"58c9cffc.1484f","order":3,"width":0,"height":0,"name":"","label":"Current Brightness","format":"{{msg.payload}}","layout":"row-spread","x":672,"y":379,"wires":[]},{"id":"72dc17cb.ff6178","type":"alasql","z":"4e79509.5da71b","name":"lightSensor","query":"INSERT INTO lightSensor VALUES ('192.168.2.115',?)","x":630,"y":300,"wires":[[]]},{"id":"1a556423.56677c","type":"inject","z":"4e79509.5da71b","name":"1min Trigger","topic":"","payload":"","payloadType":"date","repeat":"60","crontab":"","once":false,"onceDelay":0.1,"x":140,"y":340,"wires":[["2f0fb36e.ef2d1c"]]},{"id":"5bfab4c7.fead1c","type":"inject","z":"4e79509.5da71b","name":"20 Min Trigger","topic":"","payload":"","payloadType":"date","repeat":"1200","crontab":"","once":false,"onceDelay":0.1,"x":140,"y":480,"wires":[["6c59f580.919d0c"]]},{"id":"6c59f580.919d0c","type":"alasql","z":"4e79509.5da71b","name":"SELECT 115","query":"SELECT * FROM lightSensor WHERE camera IN ('192.168.2.115')","x":310,"y":480,"wires":[["2d6e1036.6d267"]]},{"id":"2d6e1036.6d267","type":"alafile out","z":"4e79509.5da71b","name":"xlsx","filename":"/home/pi/.node-red/sql/lightSensor_115","format":"xlsx","columns":"*","headers":true,"x":450,"y":480,"wires":[]},{"id":"58c9cffc.1484f","type":"ui_group","z":"","name":"192.168.2.115","tab":"32dce8be.80f688","disp":true,"width":"6","collapse":false},{"id":"32dce8be.80f688","type":"ui_tab","z":"","name":"Brightness","icon":"fa-sun-o","order":11,"disabled":false,"hidden":false}] ``` ### System Log Backup ![alasql in node-red](./node-red_alasql_16.png) ```json [{"id":"98abb348.a5e48","type":"inject","z":"e3ee2ba4.3b3d68","name":"24hrs Trigger","topic":"","payload":"","payloadType":"date","repeat":"86400","crontab":"","once":false,"onceDelay":0.1,"x":120,"y":100,"wires":[["3e24cc90.b66634"]]},{"id":"3e24cc90.b66634","type":"http request","z":"e3ee2ba4.3b3d68","name":"Get Log 116","method":"GET","ret":"txt","paytoqs":false,"url":"http://admin:[email 
protected]/tmpfs/syslog.txt","tls":"","proxy":"","authType":"basic","x":220,"y":53,"wires":[["eb0870ff.5a5e1"]]},{"id":"ccf12822.059a18","type":"inject","z":"e3ee2ba4.3b3d68","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":100,"y":380,"wires":[["bd6d09c3.782788"]]},{"id":"40908c9f.0b1474","type":"inject","z":"e3ee2ba4.3b3d68","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":true,"onceDelay":"5","x":110,"y":205,"wires":[["efc4323e.4c5bc"]]},{"id":"73c283ff.d3003c","type":"inject","z":"e3ee2ba4.3b3d68","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":100,"y":265,"wires":[["bded183e.8941a8"]]},{"id":"aca8074.2662bf8","type":"inject","z":"e3ee2ba4.3b3d68","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":100,"y":325,"wires":[["43e99a76.7f6154"]]},{"id":"43e99a76.7f6154","type":"alasql","z":"e3ee2ba4.3b3d68","name":"SELECT","query":"SELECT * FROM sysLog116","x":237,"y":325,"wires":[["8d1cd7bf.de6d98"]]},{"id":"b333dff4.6e8f","type":"alasql","z":"e3ee2ba4.3b3d68","name":"INSERT","query":"INSERT INTO sysLog116 VALUES (CURRENT_TIMESTAMP,?);","x":360,"y":265,"wires":[[]]},{"id":"efc4323e.4c5bc","type":"alasql","z":"e3ee2ba4.3b3d68","name":"CREATE","query":"CREATE TABLE sysLog116 (ts TIMESTAMP VARCHAR(80), event string);","x":263,"y":205,"wires":[[]]},{"id":"bd6d09c3.782788","type":"alasql","z":"e3ee2ba4.3b3d68","name":"DROP","query":"DROP TABLE sysLog116","x":237,"y":380,"wires":[[]]},{"id":"8d1cd7bf.de6d98","type":"debug","z":"e3ee2ba4.3b3d68","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","x":390,"y":325,"wires":[]},{"id":"3f7d0a1e.da1be6","type":"inject","z":"e3ee2ba4.3b3d68","name":"20 Min Trigger","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":110,"y":500,"wires":[["317d8b07.06a1a4"]]},{"id":"317d8b07.06a1a4","type":"alasql","z":"e3ee2ba4.3b3d68","name":"SELECT 116","query":"SELECT * FROM sysLog116;","x":278,"y":500,"wires":[["c31a719a.72d4a"]]},{"id":"c31a719a.72d4a","type":"alafile out","z":"e3ee2ba4.3b3d68","name":"json","filename":"/home/pi/.node-red/sql/syslog_116","format":"json","columns":"*","headers":true,"x":418,"y":500,"wires":[]},{"id":"eb0870ff.5a5e1","type":"function","z":"e3ee2ba4.3b3d68","name":"syslog","func":"msg.topic = \"syslog\";\n\nstr = msg.payload.replace(/(\\r\\n|\\n|\\r)/gm,\" \").replace(/\\[/g, '').replace(/\\]/g, \"\").replace(/\\./g, \";\");\n\nvar syslog = str.split(/;\\s/);\n\nmsg.payload = syslog;\n\nreturn msg;","outputs":1,"noerr":0,"x":318,"y":100,"wires":[["7e28ffaa.12763"]]},{"id":"7e28ffaa.12763","type":"alasql","z":"e3ee2ba4.3b3d68","name":"sysLog116","query":"INSERT INTO sysLog116 VALUES (CURRENT_TIMESTAMP,?);","x":417,"y":54,"wires":[["ac0d4d61.2265a"]]},{"id":"bded183e.8941a8","type":"function","z":"e3ee2ba4.3b3d68","name":"","func":"msg.payload = [\"2019_06_17 09:08:48 ipc_server start\", \"2019_06_17 09:09:00 ircut: display switch(color -> blackwhite)\", \"2019_06_17 09:09:02 ircut: display switch(blackwhite -> color)\", \"2019_06_17 09:09:02 user=admin login for live stream\", \"2019_06_17 09:09:10 user=%61%64%6D%69%6E login for mjpeg stream\", \"2019_06_17 09:15:50 motion detection(area=1) alarm\", \"2019_06_17 09:26:52 motion detection(area=1) alarm\", \"2019_06_17 09:31:25 motion detection(area=1) alarm\", \"2019_06_17 
09:31:51 motion detection(area=1) alarm\", \"2019_06_17 09:32:12 motion detection(area=1) alarm\", \"2019_06_17 09:33:29 motion detection(area=1) alarm\", \"2019_06_17 09:43:52 motion detection(area=1) alarm\", \"2019_06_17 09:45:43 user=%61%64%6D%69%6E logout from mjpeg stream\", \"2019_06_17 10:29:59 motion detection(area=1) alarm\", \"2019_06_17 10:30:01 motion detection(area=1) alarm\", \"2019_06_17 10:30:11 motion detection(area=1) alarm\", \"2019_06_17 10:30:13 motion detection(area=2) alarm\", \"2019_06_17 10:30:20 motion detection(are\"]\nreturn msg;","outputs":1,"noerr":0,"x":230,"y":265,"wires":[["b333dff4.6e8f"]]},{"id":"ac0d4d61.2265a","type":"http request","z":"e3ee2ba4.3b3d68","name":"Delete Log","method":"GET","ret":"txt","paytoqs":false,"url":"http://admin:[email protected]/param.cgi?cmd=cleanlog&-name=sys","tls":"","proxy":"","authType":"basic","x":507,"y":100,"wires":[[]]}] ```<file_sep>--- date: "2018-11-26" title: "Node-RED Dashboard and Videostreams" categories: - IoT - Node-RED --- ![Guangzhou, China](./photo-kt456d_645dhfh6dgjkhg4_d.jpg) <!-- TOC --> - [Using your Cameras Snapshot](#using-your-cameras-snapshot) - [Flow Export](#flow-export) - [Using your Cameras ONVIF Service](#using-your-cameras-onvif-service) - [Flow Export](#flow-export-1) - [Using your Cameras MJPEG Stream](#using-your-cameras-mjpeg-stream) - [Flow Export](#flow-export-2) <!-- /TOC --> This tutorial explores several ways to add your INSTAR cameras live image to the [Node-RED Dashboard](https://flows.nodered.org/node/node-red-dashboard). You might have to install the corresponding Nodes to use the flows below: * [node-red-node-base64](https://flows.nodered.org/node/node-red-node-base64) * [node-red-contrib-onvif](https://flows.nodered.org/node/node-red-contrib-onvif) * [node-red-contrib-multipart-stream-encoder](https://flows.nodered.org/node/node-red-contrib-multipart-stream-encoder) ## Using your Cameras Snapshot The simplest way to add your camera's live video is by requesting a live snapshot in an settable interval and using an image tag to display it as a stream on the Node-RED dashboard. For this we need to use an HTTP Request Node to GET the image from our camera and then convert the it using [node-red-node-base64](https://flows.nodered.org/node/node-red-node-base64) to feed it into an HTML template node. ![Node-RED Dashboard and INSTAR IP Camera Live Video](./Node-RED_INSTAR_Camera_Live_Video_01.png) The snapshot path for your INSTAR HD or Full HD camera is: * http://IP-Address:Port/tmpfs/snap.jpg?usr=admin&pwd=instar _Snapshot (1080p)_ * http://IP-Address:Port/tmpfs/auto.jpg?usr=admin&pwd=instar _Snapshot (320p)_ * http://IP-Address:Port/tmpfs/auto2.jpg?usr=admin&pwd=instar _Snapshot (160p)_ You can import the flow below and change the IP address and login to match that of your personal camera - `http://192.168.178.21/tmpfs/snap.jpg?usr=admin&pwd=instar`. 
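Under the hood the dashboard template node only needs a single image tag whose `src` is fed with the base64-encoded snapshot - the same markup you will find inside the flow export below:

```html
<img alt="HTTP Snap" src="data:image/jpg;base64,{{msg.payload}}" />
```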
### Flow Export ```json [{"id":"86fe9b81.31c418","type":"http request","z":"fafa85fb.98fd68","name":"","method":"GET","ret":"bin","paytoqs":false,"url":"http://192.168.178.21/tmpfs/snap.jpg?usr=admin&pwd=<PASSWORD>","tls":"","proxy":"","authType":"","x":265,"y":343,"wires":[["4068ec45.05b034"]]},{"id":"54d0ea94.c1b9a4","type":"ui_template","z":"fafa85fb.98fd68","group":"73b331f4.a8bda","name":"Display image","order":1,"width":"6","height":"6","format":"\n<img alt=\"HTTP Snap\" src=\"data:image/jpg;base64,{{msg.payload}}\" />\n","storeOutMessages":true,"fwdInMessages":true,"templateScope":"local","x":575.3645515441895,"y":343.6041603088379,"wires":[[]]},{"id":"4068ec45.05b034","type":"base64","z":"fafa85fb.98fd68","name":"","action":"","property":"payload","x":415,"y":343,"wires":[["54d0ea94.c1b9a4"]]},{"id":"3fa57e6a.6819f2","type":"inject","z":"fafa85fb.98fd68","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":0.1,"x":115,"y":343,"wires":[["86fe9b81.31c418"]]},{"id":"73b331f4.a8bda","type":"ui_group","z":"","name":"HTTP Snap","tab":"d75e440d.a22678","disp":true,"width":"6","collapse":false},{"id":"d75e440d.a22678","type":"ui_tab","z":"fafa85fb.98fd68","name":"Cameras","icon":"camera_alt","order":2}] ``` ## Using your Cameras ONVIF Service Use the ONVIF snapshot node to connect to your camera's ONVIF service. Make sure that the [Service is activated](/Web_User_Interface/1080p_Series/Network/ONVIF/), the authentication is set and note the ONVIF port that your camera is using. Use your camera's IP address, ONVIF port and your camera administrator login to configure the ONVIF node. ![Node-RED Dashboard and INSTAR IP Camera Live Video](./Node-RED_INSTAR_Camera_Live_Video_02.png) ### Flow Export ```json [{"id":"a1428a3e.9b4d78","type":"ONVIF Snapshot","z":"3ad90760.562918","name":"ONVIF 9020","url":"http://192.168.2.165:8080/","interval":"30","username":"admin","password":"<PASSWORD>","active":false,"x":290,"y":183,"wires":[["5f85bc5d.988a14"]]},{"id":"5f85bc5d.988a14","type":"ui_template","z":"3ad90760.562918","group":"b51595ca.fcd178","name":"Display image","order":1,"width":"6","height":"6","format":"<img width=\"16\" height=\"16\" alt=\"9020 ONVIF\" src=\"{{msg.payload}}\" />\n","storeOutMessages":true,"fwdInMessages":true,"templateScope":"local","x":467,"y":183,"wires":[[]]},{"id":"b51595ca.fcd178","type":"ui_group","z":"fafa85fb.98fd68","name":"ONVIF","tab":"d75e440d.a22678","order":1,"disp":true,"width":"6","collapse":false},{"id":"d75e440d.a22678","type":"ui_tab","z":"fafa85fb.98fd68","name":"Cameras","icon":"camera_alt","order":2}] ``` ## Using your Cameras MJPEG Stream The following flow uses both the [node-red-node-base64](https://flows.nodered.org/node/node-red-node-base64) and [node-red-contrib-multipart-stream-encoder](https://flows.nodered.org/node/node-red-contrib-multipart-stream-encoder) node to embed your cameras MJPEG stream in the Node-RED dashboard. ![Node-RED Dashboard and INSTAR IP Camera Live Video](./Node-RED_INSTAR_Camera_Live_Video_03.png) The MJPEG path for your INSTAR HD and Full HD camera is: * http://IP-Address:Port/mjpegstream.cgi?-chn=11&-usr=admin&-pwd=<PASSWORD> _MJPEG Stream 11_ * http://IP-Address:Port/mjpegstream.cgi?-chn=12&-usr=admin&-pwd=instar _MJPEG Stream 12_ * http://IP-Address:Port/mjpegstream.cgi?-chn=13&-usr=admin&-pwd=<PASSWORD> _MJPEG Stream 13_ Please change the IP address and login according to your camera setup: `http://192.168.2.117/mjpegstream.cgi?-chn=13`. 
The URL is set inside the __set msg.url__ node as marked in the screenshot above. ### Flow Export ```json [{"id":"bcfdc528.e179f8","type":"base64","z":"3ad90760.562918","name":"Encode","action":"","property":"payload","x":900,"y":652,"wires":[["9ac64835.1ca948"]]},{"id":"9ac64835.1ca948","type":"ui_template","z":"3ad90760.562918","group":"9f4bace2.7b09a","name":"Display image","order":1,"width":"6","height":"6","format":"<img alt=\"stream test\" src=\"data:image/jpg;base64,{{msg.payload}}\" />\n","storeOutMessages":true,"fwdInMessages":true,"templateScope":"local","x":1063.256923675537,"y":652.4166660308838,"wires":[[]]},{"id":"58723502.20fb1c","type":"inject","z":"3ad90760.562918","name":"Start Stream","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"onceDelay":"","x":280,"y":589,"wires":[["f3dc375f.005998"]]},{"id":"f3dc375f.005998","type":"change","z":"3ad90760.562918","name":"","rules":[{"t":"set","p":"url","pt":"msg","to":"http://192.168.2.117/mjpegstream.cgi?-chn=13","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":466,"y":589,"wires":[["ac461010.24496"]]},{"id":"ac461010.24496","type":"multipart-decoder","z":"3ad90760.562918","name":"MJPEG Encoder","ret":"bin","url":"","tls":"","delay":"5","maximum":"10000000","blockSize":"1","x":720,"y":652,"wires":[["bcfdc528.e179f8"]]},{"id":"4821ba30.2f4374","type":"inject","z":"3ad90760.562918","name":"Pause stream","topic":"","payload":"true","payloadType":"bool","repeat":"","crontab":"","once":false,"x":279.00001525878906,"y":628,"wires":[["e32d48eb.0b1358"]]},{"id":"ffa25a37.8b6208","type":"inject","z":"3ad90760.562918","name":"Resume stream","topic":"","payload":"true","payloadType":"bool","repeat":"","crontab":"","once":false,"x":290,"y":668,"wires":[["8d2e7e10.68e59"]]},{"id":"69f879bd.8d8c38","type":"inject","z":"3ad90760.562918","name":"Stop stream","topic":"","payload":"true","payloadType":"bool","repeat":"","crontab":"","once":false,"x":279.00001525878906,"y":708,"wires":[["b9d3d5af.e7d4f8"]]},{"id":"e32d48eb.0b1358","type":"change","z":"3ad90760.562918","name":"","rules":[{"t":"set","p":"pause","pt":"msg","to":"payload","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":476.00001525878906,"y":628,"wires":[["ac461010.24496"]]},{"id":"8d2e7e10.68e59","type":"change","z":"3ad90760.562918","name":"","rules":[{"t":"set","p":"resume","pt":"msg","to":"payload","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":476.00001525878906,"y":668,"wires":[["ac461010.24496"]]},{"id":"b9d3d5af.e7d4f8","type":"change","z":"3ad90760.562918","name":"","rules":[{"t":"set","p":"stop","pt":"msg","to":"payload","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":466.00001525878906,"y":708,"wires":[["ac461010.24496"]]},{"id":"9f4bace2.7b09a","type":"ui_group","z":"","name":"MJPEG Encoder","tab":"d75e440d.a22678","disp":true,"width":"6","collapse":false},{"id":"d75e440d.a22678","type":"ui_tab","z":"fafa85fb.98fd68","name":"Cameras","icon":"camera_alt","order":2}] ```<file_sep>--- date: "2019-02-05" title: "NGINX Ingress with Cert-Manager" categories: - LINUX - Docker - Kubernetes - NGINX --- ![Port Vila, Vanuatu](./photo-34477006171_65093dd884_o.jpg) <!-- TOC --> - [Creating the Ingress Controller](#creating-the-ingress-controller) - [Creating the Ingress](#creating-the-ingress) - [Setting up Cert-Manager](#setting-up-cert-manager) - [Installing via Helm](#installing-via-helm) - [Manual Installation](#manual-installation) - [Verifying 
the installation](#verifying-the-installation)
- [Setting up ACME Test Issuers](#setting-up-acme-test-issuers)
- [Creating Certificate Resources](#creating-certificate-resources)
- [Updating the Ingress](#updating-the-ingress)
- [Testing the TLS Access](#testing-the-tls-access)
- [Going into Production](#going-into-production)

<!-- /TOC -->

The goal of this adventure is to add an [NGINX Ingress](https://github.com/kubernetes/ingress-nginx) to our Kubernetes cluster that routes HTTPS traffic to cluster internal services. I want to use [cert-manager](https://github.com/jetstack/cert-manager) to provision and manage TLS certificates for encrypting HTTP traffic to the Ingress. I already have several apps and services running - all I need now is an Ingress that allows me to assign a domain name and TLS certificate to them and start routing traffic.

## Creating the Ingress Controller

To create these mandatory resources, use `kubectl apply` and the `-f` flag to specify the manifest file hosted on GitHub (or download it and apply it locally - `kubectl apply -f nginx-ingress-mandatory.yaml`):

```bash
kubectl apply -f https://raw.githubusercontent.com/kubernetes/ingress-nginx/master/deploy/mandatory.yaml
```

We use apply instead of create here so that in the future we can incrementally apply changes to the Ingress Controller objects instead of completely overwriting them.

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_01.png)

Since I am not using a cloud provider like AWS, GCE or Azure I cannot rely on an external load-balancer to spin up once I provide a service for my Ingress. Instead, I am going to add the __WAN IP address__ of the Master server - e.g. `192.168.3.11` - to expose my cluster through it directly. Also I am planning to use the same namespace for the ingress and all pods/services that are going to be served by it - in this walkthrough the `default` namespace. Create the following file - `nano nginx-ingress-service.yaml`:

__nginx-ingress-service.yaml__

```yaml
apiVersion: v1
kind: Service
metadata:
  name: ingress-nginx
  namespace: default
  labels:
    app.kubernetes.io/name: ingress-nginx
    app.kubernetes.io/part-of: ingress-nginx
spec:
  type: NodePort
  ports:
    - name: http
      port: 80
      targetPort: 80
      protocol: TCP
    - name: https
      port: 443
      targetPort: 443
      protocol: TCP
  selector:
    app.kubernetes.io/name: ingress-nginx
    app.kubernetes.io/part-of: ingress-nginx
  externalIPs:
    - 192.168.3.11
---
```

I can apply the service with `kubectl apply -f nginx-ingress-service.yaml`:

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_02.png)

We can check that the service was created and the external IP assigned with the following command `kubectl get service ingress-nginx -n default`:

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_03.png)

We have now created an endpoint that can receive traffic over HTTP and HTTPS ports 80 and 443, and forward it to the Ingress Controller Pod. The Ingress Controller will then route the traffic to the appropriate backend Service. We can now point our DNS records at this WAN IP address of our cluster and create the Ingress to implement routing rules.

## Creating the Ingress

Now we can create the Ingress to route traffic directed at our domain to the corresponding backend service. I'll create a rule to route traffic directed at my.domain.com to the my-app1 backend service.
An app like my-app1 can be set up according to a [previous Tutorial](/kubernetes-nginx-ingress/#create-two-web-apps-to-respond-to-the-incoming-traffic) if you don't have anything running on your cluster yet. The Ingress configuration for this looks like this:

__nginx-ingress.yaml__

```yaml
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
  name: nginx-ingress
  namespace: default
  annotations:
    kubernetes.io/ingress.class: nginx
    nginx.ingress.kubernetes.io/app-root: "/"
spec:
  rules:
  - host: my.domain.com
    http:
      paths:
      - backend:
          serviceName: my-app1
          servicePort: 80
        path: /
```

An HTTP request Host header specifies the domain name of the target server. Requests with host my.domain.com will be directed to the my-app1 backend. Note that it is very likely that your service doesn't use port 80 - e.g. you have a container that hosts a website on port 3000 and a service that exposes it - just add that port here as `servicePort` and your website will be exposed by the Ingress on port 80 (http).

Apply the ingress with `kubectl apply -f nginx-ingress.yaml` and check if it was created with `kubectl get ingress -n default`.

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_04.png)

To verify that the Ingress is working, open the page URL inside your browser or run a curl command on your server:

```bash
curl my.domain.com
```

## Setting up Cert-Manager

I want to try using [Helm](https://helm.sh) to install [cert-manager](https://docs.cert-manager.io/en/venafi/tutorials/quick-start/index.html) into the Kubernetes cluster. cert-manager is a Kubernetes service that provisions TLS certificates from [Let's Encrypt](https://letsencrypt.org/) and other certificate authorities and manages their lifecycles.

### Installing via Helm

> __Note__: I ran into a lot of difficulties using this setup and I could not find the error. [Troubleshooting](https://docs.cert-manager.io/en/latest/getting-started/troubleshooting.html) led me back to the [Installation Page](https://docs.cert-manager.io/en/latest/getting-started/install.html) - so I ended up abandoning Helm and did the [manual installation](#manual-installation). If you are a Helm expert and spot my error - please let me know. My assumption is that it might be namespace related, as I tried to move everything into a custom namespace instead of going with the default `cert-manager`? I will opt for all-default with the manual installation.

Before using Helm to install cert-manager into our cluster, we need to create the cert-manager [Custom Resource Definitions (CRDs)](https://kubernetes.io/docs/concepts/extend-kubernetes/api-extension/custom-resources/). Create these by applying them directly from the cert-manager [GitHub repository](https://github.com/jetstack/cert-manager/):

```bash
kubectl apply -f https://raw.githubusercontent.com/jetstack/cert-manager/release-0.7/deploy/manifests/00-crds.yaml
```

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_05.png)

In order to provide advanced resource validation, cert-manager includes a `ValidatingWebhookConfiguration` which is deployed into the cluster as its own pod:

```bash
kubectl label namespace kube-system certmanager.k8s.io/disable-validation="true"
```

Helm now has an installer script that will automatically grab the latest version of the Helm client and [install it locally](https://raw.githubusercontent.com/helm/helm/master/scripts/get).
You can fetch that script, and then execute it locally:

```bash
curl https://raw.githubusercontent.com/helm/helm/master/scripts/get > get_helm.sh
chmod 700 get_helm.sh
./get_helm.sh
```

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_06.png)

__INSTALLING TILLER__

Tiller, the server portion of Helm, typically runs inside of your Kubernetes cluster. But for development, it can also be run locally, and configured to talk to a remote Kubernetes cluster. The easiest way to install __tiller__ into the cluster is simply to run `helm init`. This will validate that helm's local environment is set up correctly (and set it up if necessary). Then it will connect to whatever cluster kubectl connects to by default (`kubectl config view`). Once it connects, it will install tiller into the `kube-system` namespace.

After `helm init`, you should be able to run `kubectl get pods --namespace kube-system` and see Tiller running:

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_07.png)

Finally, we can install the [cert-manager Helm chart](https://github.com/helm/charts/tree/master/stable/cert-manager) into the `kube-system` namespace:

```bash
helm install --name cert-manager --namespace kube-system stable/cert-manager
```

__Note__: if you get the following __ERROR__:

```bash
Error: release cert-manager failed: namespaces "kube-system" is forbidden: User "system:serviceaccount:kube-system:default" cannot get resource "namespaces" in API group "" in the namespace "kube-system"
```

Run the following command (see [Github Issue](https://github.com/fnproject/fn-helm/issues/21)) and rerun the `helm install`:

```bash
kubectl patch deploy --namespace kube-system tiller-deploy -p '{"spec":{"template":{"spec":{"serviceAccount":"tiller"}}}}'
```

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_08.png)

This indicates that the cert-manager installation succeeded.

### Manual Installation

cert-manager is deployed using regular YAML manifests, like any other application on Kubernetes. In order to install cert-manager, we must first create a namespace to run it within. This guide will install cert-manager into the cert-manager namespace. It is possible to run `cert-manager` in a different namespace, although you will need to make modifications to the deployment manifests.

```bash
kubectl create namespace cert-manager
```

As part of the installation, cert-manager also deploys a __ValidatingWebhookConfiguration__ resource in order to validate that the Issuer, ClusterIssuer and Certificate resources we will create after installation are valid. In order to deploy the __ValidatingWebhookConfiguration__, cert-manager creates a number of internal Issuer and Certificate resources in its own namespace. This creates a chicken-and-egg problem, where cert-manager requires the webhook in order to create the resources, and the webhook requires cert-manager in order to run. This problem is avoided by disabling resource validation on the namespace that cert-manager runs in:

```bash
kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true
```

We can now go ahead and install cert-manager.
All resources (the CustomResourceDefinitions, cert-manager, and the webhook component) are included in a single YAML manifest file: ``` kubectl apply -f https://raw.githubusercontent.com/jetstack/cert-manager/release-0.7/deploy/manifests/cert-manager.yaml ``` ![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_12.png) ### Verifying the installation Once you’ve installed cert-manager, you can verify it is deployed correctly by checking the `cert-manager` namespace for running pods: ```bash kubectl get pods --namespace cert-manager ``` ![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_13.png) You should see both the cert-manager and webhook component in a Running state, and the ca-sync pod is Completed. If the webhook has not Completed but the cert-manager pod has recently started, wait a few minutes for the ca-sync pod to be retried. If you experience problems, please check the [troubleshooting guide](https://docs.cert-manager.io/en/latest/getting-started/troubleshooting.html). The following steps will confirm that cert-manager is set up correctly and able to issue basic certificate types: ```bash # Create a ClusterIssuer to test the webhook works okay cat <<EOF > test-resources.yaml apiVersion: v1 kind: Namespace metadata: name: cert-manager-test --- apiVersion: certmanager.k8s.io/v1alpha1 kind: Issuer metadata: name: test-selfsigned namespace: cert-manager-test spec: selfSigned: {} --- apiVersion: certmanager.k8s.io/v1alpha1 kind: Certificate metadata: name: selfsigned-cert namespace: cert-manager-test spec: commonName: example.com secretName: selfsigned-cert-tls issuerRef: name: test-selfsigned EOF # Create the test resources kubectl apply -f test-resources.yaml # Check the status of the newly created certificate # You may need to wait a few seconds before cert-manager processes the # certificate request kubectl describe certificate -n cert-manager-test ... Name: selfsigned-cert Namespace: cert-manager-test Labels: <none> Annotations: kubectl.kubernetes.io/last-applied-configuration: {"apiVersion":"certmanager.k8s.io/v1alpha1","kind":"Certificate","metadata":{"annotations":{},"name":"selfsigned-cert","namespace":"cert-m... API Version: certmanager.k8s.io/v1alpha1 Kind: Certificate Metadata: Creation Timestamp: 2019-03-29T08:52:04Z Generation: 3 Resource Version: 5871553 Self Link: /apis/certmanager.k8s.io/v1alpha1/namespaces/cert-manager-test/certificates/selfsigned-cert UID: ed15e107-51ff-11e9-b7a0-960000184ec8 Spec: Common Name: example.com Issuer Ref: Name: test-selfsigned Secret Name: selfsigned-cert-tls Status: Conditions: Last Transition Time: 2019-03-29T08:52:05Z Message: Certificate is up to date and has not expired Reason: Ready Status: True Type: Ready Not After: 2019-06-27T08:52:05Z Events: Type Reason Age From Message ---- ------ ---- ---- ------- Normal CertIssued 31s cert-manager Certificate issued successfully # Clean up the test resources kubectl delete -f test-resources.yaml ``` If all the above steps have completed without error, you are good to go! ## Setting up ACME Test Issuers When you create a new ACME Issuer, cert-manager will generate a private key which is used to identify you with the ACME server. To set up a basic ACME issuer, you should create a new Issuer (bound to a namespace) or ClusterIssuer (can be referenced over all namespaces) resource. 
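Before creating the issuer, it cannot hurt to confirm that the cert-manager custom resource types are actually registered in the cluster (a quick sanity check, assuming the release-0.7 manifests from above):

```bash
kubectl get crd | grep certmanager.k8s.io
```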
In this example, we will create a non-namespaced ClusterIssuer resource for the [Let’s Encrypt staging endpoint](https://letsencrypt.org/docs/staging-environment/) that has only the [HTTP01 Challenge Provider](https://docs.cert-manager.io/en/latest/tasks/acme/configuring-http01.html) enabled. You should read the guides linked at the bottom of this page to learn more about the ACME challenge validation mechanisms that cert-manager supports and how to configure the various DNS01 provider implementations.

Let's create a test Issuer to make sure the certificate provisioning mechanism is functioning correctly. Open a file named `cert-staging-issuer.yaml` in your favorite text editor - `nano cert-staging-issuer.yaml`:

__cert-staging-issuer.yaml__

```yaml
apiVersion: certmanager.k8s.io/v1alpha1
kind: ClusterIssuer
metadata:
  name: letsencrypt-staging
spec:
  acme:
    # The ACME server URL
    server: https://acme-staging-v02.api.letsencrypt.org/directory
    # Email address used for ACME registration
    email: your_email_address_here
    # Name of a secret used to store the ACME account private key
    privateKeySecretRef:
      name: letsencrypt-staging-secret
    # Enable the HTTP-01 challenge provider
    http01: {}
```

Here we specify that we'd like to create a `ClusterIssuer` object called `letsencrypt-staging`, and use the Let's Encrypt staging server. We'll later use the production server to roll out our certificates. We then specify an email address to register the certificate, and create a Kubernetes Secret called `letsencrypt-staging-secret` to store the ACME account's private key. We also enable the __HTTP-01 challenge mechanism__. To learn more about these parameters, consult the [official cert-manager documentation](https://cert-manager.readthedocs.io/en/latest/reference/issuers.html).

Go on and create the Issuer:

```bash
kubectl apply -f cert-staging-issuer.yaml
```

To verify that the account has been registered successfully, you can run `kubectl describe` and check the `Ready` condition:

```bash
kubectl describe clusterissuer letsencrypt-staging
```

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_09.png)

### Creating Certificate Resources

The [Certificate resource](https://docs.cert-manager.io/en/latest/tasks/acme/issuing-certificates.html) type is used to request certificates from different Issuers. A Certificate resource specifies fields that are used to generate certificate signing requests which are then fulfilled by the issuer type you have referenced. Certificates specify which issuer they want to obtain the certificate from by specifying the `certificate.spec.issuerRef` field.

Below is a basic Certificate resource for the my.domain.com and www.my.domain.com DNS names. We must configure our Certificate resource with the `ingress class` that will be used to solve the __ACME HTTP01__ challenges:

__cert-staging.yaml__

```yaml
apiVersion: certmanager.k8s.io/v1alpha1
kind: Certificate
metadata:
  name: my-domain-com
  namespace: default
spec:
  secretName: my-domain-com-tls
  commonName: my.domain.com
  dnsNames:
  - my.domain.com
  - www.my.domain.com
  issuerRef:
    name: letsencrypt-staging
    # We can reference ClusterIssuers by changing the kind here.
    # The default value is Issuer (i.e. a locally namespaced Issuer)
    kind: ClusterIssuer
  acme:
    config:
    - http01:
        ingressClass: nginx
      domains:
      - my.domain.com
      - www.my.domain.com
```

The signed certificate will be stored in a Secret resource named `my-domain-com-tls` once the issuer has successfully issued the requested certificate.
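Once cert-manager has fulfilled the request you can peek into that secret and confirm that the `tls.crt` and `tls.key` entries were populated (a quick check):

```bash
kubectl get secret my-domain-com-tls -n default -o yaml
```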
The Certificate will be issued using the __ClusterIssuer__ named `letsencrypt-staging` in the `default` namespace (the same namespace as the Certificate resource).

### Updating the Ingress

We can now modify our Ingress and enable TLS encryption for the my.domain.com path - `nano nginx-ingress.yaml`:

__nginx-ingress.yaml__

```yaml
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
  name: nginx-ingress
  namespace: default
  annotations:
    kubernetes.io/ingress.class: nginx
    certmanager.k8s.io/cluster-issuer: letsencrypt-staging
    nginx.ingress.kubernetes.io/app-root: /
spec:
  tls:
  - hosts:
    - my.domain.com
    secretName: my-domain-com-tls
  rules:
  - host: my.domain.com
    http:
      paths:
      - backend:
          serviceName: my-app1
          servicePort: 80
```

Here we add the annotations to specify an `ingress.class`, which determines the Ingress Controller. In addition, we define the cluster-issuer to be `letsencrypt-staging`. Finally, we add a tls block to specify the hosts for which we want to acquire certificates, and specify a secretName. This secret will contain the TLS private key and issued certificate. Apply your edits with `kubectl apply -f nginx-ingress.yaml`.

### Testing the TLS Access

Run `kubectl describe ingress -n default` to see that the certificate has been successfully created:

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_10.png)

Once the certificate has been successfully created, you can run an additional `kubectl describe certificate -n default` on it to further confirm its successful creation. You should see the following output in the Events section:

![NGINX Ingress for your Kubernetes Cluster](./nginx-ingress-cert-manager_11.png)

I am seeing some error messages that the TLS certificate could not be saved after the order comes through. But the problem seems to go away and the certificate is successfully issued. The same happens again when I restart the service - I am not sure right now if this is a bigger problem.

We're now ready to send a request to our backend server to test that HTTPS is functioning correctly. Run the following __wget command__ to send a request to _my.domain.com_:

```bash
wget --save-headers -O- my.domain.com
```

You should see the following output:

```bash
wget --save-headers -O- my.domain.com
--2019-03-28 11:26:30--  http://my.domain.com
Resolving my.domain.com (my.domain.com)... 192.168.3.11
Connecting to my.domain.com (my.domain.com)|192.168.3.11|:80... connected.
HTTP request sent, awaiting response... 308 Permanent Redirect
2019-03-28 11:26:30 ERROR 308: Permanent Redirect.
```

Accessing our backend over __http / port 80__ no longer works - the ingress now tries to redirect us to use __https / port 443__.

```bash
wget --save-headers -O- https://my.domain.com
```

We can prefix the request with https to see the response header for a TLS-encrypted request:

```bash
--2019-03-29 11:27:00--  https://my.domain.com/
Resolving my.domain.com (my.domain.com)... 192.168.3.11
Connecting to my.domain.com (my.domain.com)|192.168.3.11|:443... connected.
ERROR: cannot verify my.domain.com's certificate, issued by ‘/CN=Fake LE Intermediate X1’:
  Unable to locally verify the issuer's authority.
To connect to my.domain.com insecurely, use `--no-check-certificate'.
```

This indicates that HTTPS has successfully been enabled, but the certificate cannot be verified as it's a fake temporary certificate issued by the Let's Encrypt staging server.
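If you prefer not to parse wget's error text, openssl can print the certificate issuer directly - for the staging setup it should report the `Fake LE Intermediate X1` CA (a sketch):

```bash
echo | openssl s_client -connect my.domain.com:443 -servername my.domain.com 2>/dev/null \
  | openssl x509 -noout -issuer
```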
Now that we've tested that everything works using this temporary fake certificate, we can roll out production certificates for the host `my.domain.com`.

__Note__: As mentioned earlier, I tried to install cert-manager using Helm - and this is where I first noticed that the installation wasn't working. Instead of the `Intermediate X1` certificate, I was seeing the default __Kubernetes Ingress Controller Fake Certificate__ - telling me that the whole thing wasn't working:

```bash
wget --save-headers -O- https://my.domain.com
--2019-03-28 11:26:41--  https://my.domain.com
Resolving my.domain.com (my.domain.com)... 192.168.3.11
Connecting to my.domain.com (my.domain.com)|192.168.3.11|:443... connected.
ERROR: cannot verify my.domain.com's certificate, issued by ‘/O=Acme Co/CN=Kubernetes Ingress Controller Fake Certificate’:
  Unable to locally verify the issuer's authority.
ERROR: no certificate subject alternative name matches
  requested host name ‘my.domain.com’.
To connect to my.domain.com insecurely, use `--no-check-certificate'.
```

### Going into Production

In this step we'll modify the procedure used to provision staging certificates and generate a valid, verifiable production certificate for our Ingress hosts. To begin, we'll first create a production ClusterIssuer - `nano cert-production-issuer.yaml`:

__cert-production-issuer.yaml__

```yaml
apiVersion: certmanager.k8s.io/v1alpha1
kind: ClusterIssuer
metadata:
  name: letsencrypt-production
spec:
  acme:
    # The ACME server URL
    server: https://acme-v02.api.letsencrypt.org/directory
    # Email address used for ACME registration
    email: your_email_address_here
    # Name of a secret used to store the ACME account private key
    privateKeySecretRef:
      name: letsencrypt-production-secret
    # Enable the HTTP-01 challenge provider
    http01: {}
```

We are changing the server address to receive a production-ready certificate and storing the ACME account key inside the secret `letsencrypt-production-secret`. To make use of our new production issuer we now have to edit our Ingress and point the cluster-issuer annotation at the new issuer (the `secretName` stays the same) - `nano nginx-ingress.yaml`:

__nginx-ingress.yaml__

```yaml
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
  name: nginx-ingress
  namespace: default
  annotations:
    kubernetes.io/ingress.class: nginx
    certmanager.k8s.io/cluster-issuer: letsencrypt-production
spec:
  tls:
  - hosts:
    - my.domain.com
    secretName: my-domain-com-tls
  rules:
  - host: my.domain.com
    http:
      paths:
      - backend:
          serviceName: my-app1
          servicePort: 80
```

> Because I already used my domain for the staging environment, I am first going to delete the prior certificate resource with `kubectl delete -f cert-staging.yaml` and then apply the following production cert definition with `kubectl apply -f cert-production.yaml`:

__cert-production.yaml__

```yaml
apiVersion: certmanager.k8s.io/v1alpha1
kind: Certificate
metadata:
  name: my-domain-com
  namespace: default
spec:
  secretName: my-domain-com-tls
  commonName: my.domain.com
  dnsNames:
  - my.domain.com
  - www.my.domain.com
  issuerRef:
    name: letsencrypt-production
    # We can reference ClusterIssuers by changing the kind here.
    # The default value is Issuer (i.e. a locally namespaced Issuer)
    kind: ClusterIssuer
  acme:
    config:
    - http01:
        ingressClass: nginx
      domains:
      - my.domain.com
      - www.my.domain.com
```

Then apply both remaining changes with `kubectl apply -f cert-production-issuer.yaml` and `kubectl apply -f nginx-ingress.yaml`.
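Production issuance can take a minute or two while Let's Encrypt validates the HTTP-01 challenge. One way to follow along is to watch the Certificate resource until it becomes ready - a minimal sketch, again assuming the resource name `my-domain-com` from above:

```bash
# Watch the Certificate list until issuance completes (Ctrl+C to stop)
kubectl get certificate -n default -w

# The Events section lists the individual issuance steps and any errors
kubectl describe certificate my-domain-com -n default
```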
We can re-test the access and see if we now receive a valid certificate from our server:

```bash
wget --save-headers -O- https://my.domain.com
```

You should now see the expected response from the application that you bound to the Ingress (the exact output will vary depending on your application):

```bash
--2019-03-29 12:09:28--  https://my.domain.com/en
Resolving my.domain.com (my.domain.com)... 192.168.3.11
Connecting to my.domain.com (my.domain.com)|192.168.3.11|:443... connected.
HTTP request sent, awaiting response... 301 Moved Permanently
Location: /en/ [following]
--2019-03-29 12:09:28--  https://my.domain.com/en/
Reusing existing connection to my.domain.com:443.
HTTP request sent, awaiting response... 200 OK
Length: 170474 (166K) [text/html]
Saving to: ‘STDOUT’

HTTP/1.1 200 OK
Server: nginx/1.15.9
Date: Fri, 29 Mar 2019 11:09:28 GMT
Content-Type: text/html; charset=UTF-8
Content-Length: 170474
Connection: keep-alive
Vary: Accept-Encoding
X-DNS-Prefetch-Control: off
X-Frame-Options: SAMEORIGIN
Strict-Transport-Security: max-age=15724800; includeSubDomains
X-Download-Options: noopen
X-Content-Type-Options: nosniff
X-XSS-Protection: 1; mode=block
Accept-Ranges: bytes
Cache-Control: public, max-age=0
Last-Modified: Tue, 26 Mar 2019 08:55:34 GMT
ETag: W/"299ea-169b9363f70"
Vary: Accept-Encoding
```
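If you prefer curl over wget for a final spot check, the one-liner below performs a fully verified TLS handshake and prints only the status line - a small sketch that assumes curl is available on your workstation:

```bash
# curl exits non-zero if certificate verification fails,
# so a clean "HTTP/1.1 200 OK" confirms the production certificate is trusted
curl -sI https://my.domain.com | head -n 1
```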
90a97c5ca382e8483c7b7411c31c84cbb1dd232c
[ "Ruby", "JavaScript", "Markdown", "Python", "Dockerfile", "Shell" ]
177
JavaScript
mpolinowski/gatsby-starter-minimal-blog
cdb628357e806be211c3220bfb7b50ac0c204a6c
c1902c673cf334ee896e07d05ffa7b7896dae1b1
refs/heads/master
<repo_name>ns-agency/bing<file_sep>/drive.bing/config.py # Put your configuration options here TITLE = "Bing Drive" THEME = "litera" SESSION_COOKIE_HTTPONLY = False SECRET_KEY = "wow_so_very_secret_lmao" <file_sep>/drive.bing/app/bing_drive/__init__.py #!/usr/bin/env python3 # This is the entrypoint to bing_drive from flask_core.app import create_app from flask_core.config import Config from flask_swagger_ui import get_swaggerui_blueprint from . import models # If you add more blueprints, add them here from .main import bp as main_bp config = Config() app = create_app(config) # Swagger BS swaggerui_blueprint = get_swaggerui_blueprint( "/staff/secret/1gbdfte/swagger", # Swagger UI static files will be mapped to '{SWAGGER_URL}/dist/' "/docs/swag.json", config={"app_name": "Prism Api"}, ) app.register_blueprint(swaggerui_blueprint, url_prefix="/staff/secret/1gbdfte/swagger") app.register_blueprint(main_bp) <file_sep>/pastebing/run.py #!/usr/bin/env python3 try: import pastebing except ImportError: raise RuntimeError("Couldn't import pastebing, have you run pip install -e app?") import os if __name__ == "__main__": config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config.py") print("Loading config from", config_path) pastebing.app.config.from_pyfile(config_path, silent=True) pastebing.app.run(host="0.0.0.0", port=4141, debug=True, use_evalex=False) <file_sep>/drive.bing/app/bing_drive/README.md # bing_drive This is the root of the bing_drive package. <file_sep>/drive.bing/app/run.sh #!/usr/bin/env bash gunicorn bing_drive:app -b 0.0.0.0:8000 <file_sep>/pastebing/app/pastebing/main/__init__.py #!/usr/bin/env python3 from flask import Blueprint bp = Blueprint("main", __name__, template_folder="templates", static_folder="static", static_url_path="/static/main") from . import views <file_sep>/pastebing/Pipfile [[source]] name = "pypi" url = "https://pypi.org/simple" verify_ssl = true [dev-packages] black = "*" [packages] appdirs = "==1.4.3" click = "==6.7" flask = "==0.12.1" itsdangerous = "==0.24" jinja2 = "==2.9.6" markupsafe = "==1.0" packaging = "==16.8" pyparsing = "==2.2.0" six = "==1.10.0" werkzeug = "==0.12.1" psutil = "==5.4.3" sqlalchemy = "*" dnspython = "*" psycopg2-binary = "*" gunicorn = "*" requests = "*" psycopg2 = "*" flask-core = "~=1.0" pastebing = {path = "./app",editable = true} base58 = "*" [requires] python_version = "3.7" [pipenv] allow_prereleases = true <file_sep>/drive.bing/db/Dockerfile FROM mysql:8 RUN echo "[mysqld]\nsecure-file-priv=""\ndefault_authentication_plugin=mysql_native_password\n" > /etc/mysql/conf.d/fuck.cnf ENV MYSQL_ROOT_PASSWORD "<PASSWORD>" COPY messages /var/log/messages COPY ./sql-scripts/ /docker-entrypoint-initdb.d/<file_sep>/drive.bing/app/frontend/src/middleware/auth.js function parseCookies(cookie) { return cookie.split("; ").map(x=>x.split("=")).reduce((acc,v)=>{acc[v[0]]=v[1];return acc},{}); } export default function auth({ next, router }) { const session = parseCookies(document.cookie).session; if (session === undefined) { return router.push({ name: 'login' }); } return next(); }<file_sep>/pastebing/app/run.sh #!/usr/bin/env bash while ! pg_isready -h database -U postgres; do echo "Waiting for database.." 
sleep 5 done psql -h database -U postgres < schema.sql gunicorn pastebing:app -b 0.0.0.0:8000 <file_sep>/drive.bing/app/Dockerfile FROM python:3.7 RUN mkdir /app # fucken mother fuckern mysql i swear to god RUN apt-get update && apt-get install -y -q python3-dev default-libmysqlclient-dev mysql-server # Time for the fun of ~~javascwipt~~ RUN apt-get install curl gnupg -yq RUN curl -sL https://deb.nodesource.com/setup_8.x | bash RUN apt-get install nodejs -yq # Copy this explicitly first so we can cache COPY requirements.txt /tmp RUN pip install -r /tmp/requirements.txt # hack to get this working cause i can't get it to install locally RUN pip install mysqlclient COPY . /app/web RUN pip install /app/web/ EXPOSE 8000 WORKDIR /app/web # install frontend RUN cd frontend && npm install RUN cd frontend && npm run build CMD ["./run.sh"] <file_sep>/drive.bing/app/requirements.txt -i https://pypi.org/simple flask-core~=1.0 appdirs==1.4.3 base58==1.0.3 certifi==2018.11.29 chardet==3.0.4 click==6.7 dnspython==1.16.0 flask==0.12.1 gunicorn==19.9.0 idna==2.8 itsdangerous==0.24 jinja2==2.9.6 markupsafe==1.0 packaging==16.8 psutil==5.4.3 psycopg2-binary==2.7.7 psycopg2==2.7.7 pyparsing==2.2.0 requests==2.21.0 six==1.10.0 sqlalchemy==1.3.0 urllib3==1.24.1 werkzeug==0.12.1 Flask-Cors==3.0.7 pycrypto==2.6.1 flask-swagger-ui==3.20.9 <file_sep>/pastebing/app/Dockerfile FROM python:3.7 # Install postgres client to enable db waiting RUN apt-get update && apt-get install -y -q postgresql-client RUN mkdir /app # Copy this explicitly first so we can cache COPY requirements.txt /tmp RUN pip install -r /tmp/requirements.txt COPY . /app/web RUN pip install /app/web/ EXPOSE 8000 WORKDIR /app/web CMD ["./run.sh"] <file_sep>/README.md ## Drive.bing To run this site first you must install docker, then you can do ``` $ cd drive.bing $ docker-compose build ``` This command will take a while the first time you run it, but will take significantly less time in subsequent runs. Once the stack has been built you can launch it via the `up` command and take it down via the `down` command. Note that you can give the up command the argument `-d` to leave the site running in the background. ``` $ docker-compose up -d ``` Everytime you make a change to the code you will need to rebuild the image and relaunch it ``` $ docker-compose build $ docker-compose up -d ``` You may find it useful to not detach from the process so you can see live logs of the site. ``` $ docker-compose build $ docker-compose up ``` Once the site is running you can visit it at `localhost:8010` Once it is up you can run a set of tests via ``` $ cd drive.bing # .. source into a virtual env if you want to (optional) $ pip install -r test_requirements.txt $ python3 tests.py ``` Replace the global host variable in the source code if you are running the tests on a different machine from where the image is running. ## Pastebing To run this site first you must install docker, then you can do ``` $ cd pastebing $ docker-compose build ``` This command will take a while the first time you run it, but will take significantly less time in subsequent runs. Once the stack has been built you can launch it via the `up` command and take it down via the `down` command. Note that you can give the up command the argument `-d` to leave the site running in the background. 
``` $ docker-compose up -d ``` Everytime you make a change to the code you will need to rebuild the image and relaunch it ``` $ docker-compose build $ docker-compose up -d ``` You may find it useful to not detach from the process so you can see live logs of the site. ``` $ docker-compose build $ docker-compose up ``` Once the site is running you can visit it at `localhost:8009` Once it is up you can run a set of tests via ``` $ cd pastebing # .. source into a virtual env if you want to (optional) $ pip install -r test_requirements.txt $ python3 tests.py ``` Replace the global host variable in the source code if you are running the tests on a different machine from where the image is running.<file_sep>/pastebing/app/pastebing/README.md # pastebing This is the root of the pastebing package. <file_sep>/pastebing/app/config.py # Put your configuration options here TITLE = "Pastebing" THEME = "litera" <file_sep>/drive.bing/app/bing_drive/main/views.py #!/usr/bin/env python3 import functools import re import datetime import os import logging import base58 import sqlalchemy from flask import ( render_template_string, request, render_template, current_app, flash, redirect, url_for, session, make_response, Response, jsonify, send_from_directory, ) import base64 from flask_cors import CORS from . import bp as app # Note that app = blueprint, current_app = flask context CORS(app) ROOT = os.path.normpath(os.path.join(__file__, "../../../frontend/dist")) def get_files_for_acc(username, role): r = list(current_app.db.execute("SELECT name, content FROM files where author=%s", (username,))) if role == "Staff": r += list(current_app.db.execute("SELECT name, content FROM files where author='staff_account'")) return r @app.route("/", methods=["GET"]) def home(): return send_from_directory(f"{ROOT}", "index.html") @app.route("/docs/<path>", methods=["GET"]) def docs_serve(path): return send_from_directory(f"/app/web/docs", path) @app.route("/js/<path>", methods=["GET"]) def js_serve(path): return send_from_directory(f"{ROOT}/js", path) @app.route("/css/<path>", methods=["GET"]) def css_serve(path): return send_from_directory(f"{ROOT}/css", path) @app.route("/img/<path>", methods=["GET"]) def img_serve(path): return send_from_directory(f"{ROOT}/img", path) @app.route("/register", methods=["POST"]) def register(): payload = request.json username = payload.get("username", None) password = payload.get("password", None) if not username or not password: return "invalid username/password", 400 if username == "admin" or username == "staff_account": return "username taken", 400 if current_app.db.execute("SELECT * FROM users WHERE username = %s", (username,)).first() is not None: return "username taken", 400 current_app.db.execute("INSERT INTO users VALUES(%s,%s,'User')", (username, password)) session["username"] = username session["role"] = "User" return ":)", 200 @app.route("/login", methods=["POST"]) def login(): p = request.json res = current_app.db.execute( "SELECT username,role FROM users WHERE username = %s AND password = %s", (p["username"], p["password"]) ).first() if res is None: return "invalid username/password", 400 [username, role] = res session["username"] = username session["role"] = role return ":)", 200 @app.route("/logout", methods=["POST"]) def logout(): session.clear() return ":)", 200 @app.route("/upload", methods=["POST"]) def upload(): p = request.json me = session["username"] name = p["name"] text = p["text"] if len(p["text"]) > 64: return "File contents > 64 chars", 400 if len(p["name"]) > 64: 
return "File name > 64 chars", 400 files = get_files_for_acc(me, session["role"]) if len(files) > 5: return "max files for acc reached", 400 if current_app.db.execute("SELECT * FROM files WHERE name = %s AND author = %s", (name, me)).first() is not None: return "file name taken", 400 current_app.db.execute("INSERT INTO files(name,content,author) VALUES(%s,%s,%s)", (name, text, me)) return ":)", 200 @app.route("/me", methods=["GET"]) def me(): results = get_files_for_acc(session["username"], session["role"]) files = [{"name": r[0], "content": r[1]} for r in results] if session["role"] == "Admin": files += [ { "name": "BREAK2{f46728bf-0f09-4a39-822a-4cf231325e90}", "content": "cheeky, no flag tho", "author": "admin", } ] return jsonify({"username": session["username"], "role": session["role"], "files": files}) @app.route("/admin", methods=["GET", "POST"]) def admin(): success = False if request.method == "POST": if request.form.get("pin", "0000") == "2941": success = True return render_template("admin.html", success=success) @app.route("/document/<name>", methods=["GET"]) def view_document(name): username = base64.b64decode(request.args["r"]).decode("utf-8") content = current_app.db.execute( "SELECT content FROM files WHERE name = %s AND author = %s", (name, username) ).first() if not content and session["role"] == "Staff": content = current_app.db.execute( "SELECT content FROM files WHERE name = %s AND author = 'staff_account'", (name,) ).first() return content[0] if content else "ruh roh" @app.route("/api/secret/no/really/give_staff_access") def staff_access(): username = request.args["username"] current_app.db.execute("UPDATE users set role='Staff' WHERE username = %s", (username,)) session["role"] = "Staff" return ":)", 200 @app.route("/api/peek/<username>", methods=["GET", "POST"]) def peek(username): if session["role"] != "Staff" and session["role"] != "Admin": return ":(", 404 rows = current_app.db.execute("SELECT id,name,author from files where author = %s", (username,)) return jsonify([{"id": r[0], "name": r[1], "author": r[2]} for r in rows]) @app.route("/api/peek/file", methods=["GET", "POST"]) def peekFile(): if session["role"] != "Staff" and session["role"] != "Admin": return ":(", 404 file_id = request.args["file_id"] res = current_app.db.execute("SELECT id,name,content,author from files where id = " + file_id).first() if res is None: return ":(", 400 return jsonify({"id": res[0], "name": res[1], "content": res[2], "author": res[3]}) @app.route("/ping") def ping(): return "pong" @app.route("/flag_debug", methods=["POST"]) def flag_debug(): if request.form.get("flag_secret", "") == current_app.config["FLAG_SECRET"]: return current_app.config["FLAG"] return ":(", 401 <file_sep>/drive.bing/test.py #!/usr/bin/env python3 import unittest import sys import requests import uuid import base64 host = "http://localhost:8010" flag1 = "cheeky_bugger_u_aint_fixed_the_bug_innit" flag2 = "absolute_lad_what_a_legend" class TestBingDrive(unittest.TestCase): def test_check_permissions(self): s = requests.Session() rand = str(uuid.uuid4()) s.post(f'{host}/register',json={'username': rand, 'password': <PASSWORD>}) r = s.get(f'{host}/document/flag?r=QWRtaW4=') self.assertNotIn(flag1, r.text) def test_can_make_new_file(self): s = requests.Session() rand = str(uuid.uuid4()) s.post(f'{host}/register',json={'username': rand, 'password': <PASSWORD>}) s.post(f'{host}/upload', json={'name': 'A', 'text': rand}) user = base64.b64encode(bytes(rand ,'utf-8')).decode('utf-8') r = 
s.get(f'{host}/document/A?r={user}') self.assertIn(rand, r.text) def test_cant_brute_force(self): s = requests.Session() blocked = False # shouldn't be allowed to keep trying till you get it for code in range(0,25): r = s.post(f'{host}/admin',data={'pin': f'{code:04}'}) if r.status_code != 200: blocked = True # check that we got blocked at some point self.assertTrue(blocked) def test_can_use_admin_page(self): # can we still login at all lol s = requests.Session() r = s.post(f'{host}/admin',data={'pin': f'2941'}) self.assertIn(flag2, r.text) def test_no_sqli(self): s = requests.Session() s.post(f'{host}/login',json={'username': 'testStaff', 'password': '<PASSWORD>'}) r = s.get(f'{host}/api/peek/file?file_id=\'') self.assertNotEqual(r.status_code, 500) r = s.get(f"{host}/api/peek/file?file_id=123456678%20UNION%20SELECT%20NULL,NULL,NULL,load_file(%27/var/log/messages%27)") self.assertEqual(r.text,":(") def test_peek_still_works(self): s = requests.Session() s.post(f'{host}/login',json={'username': 'testStaff', 'password': '<PASSWORD>'}) r = s.get(f'{host}/api/peek/file?file_id=1') self.assertIn(flag1, r.text) if __name__ == "__main__": unittest.main()<file_sep>/pastebing/test.py #!/usr/bin/env python3 import unittest import sys import requests import uuid import base58 import re def dist(a, b): diff = 0 for i in range(0,min(len(a),len(b))): if a[i] != b[i]: diff += 1 return diff + abs(len(a) - len(b)) host = "http://localhost:8009" class TestPasteBing(unittest.TestCase): # Can we guess the ID of a post def test_idor_basic(self): s = requests.Session() rand = str(uuid.uuid4()) s.post(f'{host}/register',data={'username': rand, 'password': <PASSWORD>}) s.post(f'{host}/new', data={'title': rand, 'contents': rand}) for i in range(10): code = base58.b58encode(f"{i}:1").decode("UTF-8") r = s.get(f'{host}/raw/{code}') self.assertNotIn(rand, r.text) # are the paste ids super related def test_unique_ids(self): s = requests.Session() rand = str(uuid.uuid4()) s.post(f'{host}/register',data={'username': rand, 'password': <PASSWORD>}) s.post(f'{host}/new', data={'title': rand, 'contents': rand}) s.post(f'{host}/new', data={'title': rand, 'contents': rand}) r = s.get(f'{host}/pastes') links = re.findall(r'/raw/[a-zA-Z0-9_-]+', r.text) self.assertEqual(len(links), 2) links = [link.split('/')[-1] for link in links] # there should be _at_ least 3 different characters between the ids self.assertGreater(dist(links[0],links[1]), 2) # The point of pastebing is you can share links # So make sure you can still do that def test_can_share(self): # user 1 s = requests.Session() rand = str(uuid.uuid4()) s.post(f'{host}/register',data={'username': rand, 'password': <PASSWORD>}) s.post(f'{host}/new', data={'title': rand, 'contents': rand}) r = s.get(f'{host}/pastes') link = re.search(r'/raw/[a-zA-Z0-9_-]+', r.text) self.assertIsNotNone(link) link = link.group(0) # user 2 s2 = requests.Session() rand2 = str(uuid.uuid4()) s2.post(f'{host}/register',data={'username': rand2, 'password': <PASSWORD>}) r = s2.get(f'{host}{link}') self.assertIn(rand, r.text) if __name__ == "__main__": unittest.main()<file_sep>/pastebing/app/pastebing/main/views.py #!/usr/bin/env python3 import functools import re import datetime import base58 import sqlalchemy from flask import ( render_template_string, request, render_template, current_app, flash, redirect, url_for, session, make_response, Response, ) from . 
import bp as app

# Note that app = blueprint, current_app = flask context


def require_auth(f):
    @functools.wraps(f)
    def wrap(*args, **kwargs):
        if "username" not in session:
            flash("You must login to view this page.", "danger")
            return redirect(url_for("main.login"))

        return f(*args, **kwargs)

    return wrap


@app.route("/", methods=["GET", "POST"])
def home():
    return render_template("home.html")


@app.route("/register", methods=["GET", "POST"])
def register():
    if request.method == "POST":
        username = request.form["username"]
        password = request.form["password"]

        try:
            with current_app.db.isolate() as conn:
                conn.execute("INSERT INTO users (username, password) VALUES (%s, %s)", (username, password))
        except sqlalchemy.exc.IntegrityError:
            flash("Username already taken!", "danger")
            return render_template("register.html")
        else:
            session["username"] = username
            return redirect(url_for("main.pastes"))

    return render_template("register.html")


@app.route("/login", methods=["GET", "POST"])
def login():
    if request.method == "POST":
        username = request.form["username"]
        password = request.form["password"]

        try:
            with current_app.db.isolate() as conn:
                r = conn.execute("SELECT id FROM users WHERE username=%s AND password=%s", (username, password)).first()

                if not r:
                    raise RuntimeError("Incorrect username and/or password!")
        except RuntimeError as e:
            flash(str(e), "danger")
            return render_template("login.html")
        else:
            session["username"] = username
            return redirect(url_for("main.pastes"))

    return render_template("login.html")


@app.route("/pastes", methods=["GET"])
@require_auth
def pastes():
    # Fetch all posts by this user
    with current_app.db.isolate() as conn:
        pastes = conn.execute(
            "SELECT * FROM pastes WHERE author IN (SELECT id FROM users WHERE username=%s)", (session["username"],)
        ).fetchall()

    # Coerce them all into dicts for ez indexing
    pastes = sorted([dict(x) for x in pastes], key=lambda p: p["created_at"], reverse=True)

    return render_template("pastes.html", pastes=pastes)


@app.route("/new", methods=["GET", "POST"])
@require_auth
def new_paste():
    if request.method == "POST":
        title = request.form["title"]
        contents = request.form["contents"]

        # Get the authors ID, and number of posts they've made
        with current_app.db.isolate() as conn:
            author_id = conn.execute("SELECT id FROM users WHERE username=%s", (session["username"],)).first()[0]
            post_count = conn.execute("SELECT COUNT(*) FROM pastes WHERE author=%s", (author_id,)).first()[0]

        # Generate the new post ID
        post_id = base58.b58encode(f"{author_id}:{post_count + 1}").decode("UTF-8")

        # Insert new paste
        try:
            with current_app.db.isolate() as conn:
                conn.execute(
                    "INSERT INTO pastes (id, title, author, contents) VALUES (%s, %s, %s, %s)",
                    (post_id, title, author_id, contents),
                )
        except Exception as e:
            flash("Something went wrong while creating your paste, try again later.", "danger")
            return render_template("new_paste.html")
        else:
            flash("Created paste successfully!", "success")
            return redirect(url_for("main.pastes"))

    return render_template("new_paste.html")


@app.route("/raw/<paste_id>")
def raw_paste(paste_id):
    # Get the paste, if it exists
    with current_app.db.isolate() as conn:
        paste = conn.execute("SELECT contents FROM pastes WHERE id=%s", (paste_id,)).first()

    if not paste:
        resp = Response("Paste not found.")
        resp.headers["Content-Type"] = "text/plain"
        resp.status_code = 404
        return resp

    resp = Response(paste[0])
    resp.headers["Content-Type"] = "text/plain"
    return resp


@app.route("/ping")
def ping():
    return "pong"


@app.route("/flag_debug", methods=["POST"])
def flag_debug():
    if request.form.get("flag_secret", "") == current_app.config["FLAG_SECRET"]:
        return current_app.config["FLAG"]

    return ":(", 401
<file_sep>/spec.md
# Patch 1

## Introduction

You know the drill, fix up some of the bugs you were cracking open for BREAK 2. If you were not able to find all the bugs, see lab 7 for the solutions.

> Note this is only for 6443 students and not extended students

## Deadline

This is due by midday (12:00) May 1st.

## Source

https://github.com/ns-agency/bing

There are 2 folders each with 1 site under them; fix the source for both to be secure and pass the provided tests. We will mark your diff but will not run any further tests. Make sure you check if the repo has been updated whenever you sit down to work on the patch, in case there was a fix pushed out.

## Running

In the source code above there is a README.md in the root dir on how to run the site. Because of the complexity of these sites we are providing you with the [docker](https://www.docker.com/) containers for the sites. As such we require you to install docker.

> Note if you have trouble running these images locally you can simply clone and run them on an AWS/Digital Ocean box, which will work better; simply run it on the box and you can connect to the box via the 8009 and 8010 ports. (You may need to open these ports for AWS boxes.)

## Fixing

There are multiple different ways to patch these programs, but as long as you patch it in a way that is justifiable you'll get the marks. We will be reading through your diffs to determine this, so try to keep your diff as short as possible. If you have any questions / need clarifications you can email the class account or ping the slack channel.

Note you do not have to fix _every_ issue with these sites as some are trivial (i.e. removing the client side site that gives you access to a staff api end point); see the tests to get an idea of what we do want you to fix.

## Testing

In the README.md in the root dir there are also instructions on how to run the tests. These rely on the site running on your machine so the tests can connect to it. You may read the source code of the tests to see what each test is looking for.

If you pass all these tests you are set for full marks, assuming that your fixes did not simply make the test pass rather than fixing the issue. I.e. changing it from base 58 to base 64 does _not_ fix the issue although it will pass the test.

## Files

You will most likely need to edit views.py in each site, although you aren't limited to this if you feel something else needs to change as well.

*Pastebing* : `pastebing/app/pastebing/main/views.py`

*Drive.bing* : `drive.bing/app/bing_drive/main/views.py`

## Submission

Submit your diff as a patch.

```bash
git diff > z5555555.patch
give cs6443 patch2 z5555555.patch
```
<file_sep>/pastebing/app/pastebing/__init__.py
#!/usr/bin/env python3

# This is the entrypoint to pastebing

from flask_core.app import create_app
from flask_core.config import Config

from . 
import models # If you add more blueprints, add them here from .main import bp as main_bp config = Config() app = create_app(config) app.register_blueprint(main_bp) <file_sep>/pastebing/app/schema.sql DROP DATABASE IF EXISTS pastebing; CREATE DATABASE pastebing; \c pastebing CREATE TABLE users ( id serial PRIMARY KEY, username TEXT UNIQUE, password TEXT ); CREATE TABLE pastes ( id TEXT PRIMARY KEY, -- base58 encoded tuple, userid:pasteid [vry secure - carey 2019] title TEXT DEFAULT '', author INTEGER REFERENCES users(id), contents TEXT, created_at TIMESTAMP DEFAULT now() ); INSERT INTO users (id, username, password) VALUES (0, 'test', '<PASSWORD>'); -- Reset sequence numbers SELECT setval(pg_get_serial_sequence('users', 'id'), MAX(id)) FROM users; DROP USER IF EXISTS pastebing; CREATE USER pastebing WITH ENCRYPTED PASSWORD '<PASSWORD>'; GRANT SELECT ON ALL TABLES IN SCHEMA public TO pastebing; GRANT INSERT ON ALL TABLES IN SCHEMA public TO pastebing; GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO pastebing; GRANT CREATE ON DATABASE pastebing TO pastebing;<file_sep>/drive.bing/db/sql-scripts/drive.sql DROP DATABASE IF EXISTS drive; CREATE DATABASE drive; use drive; DROP TABLE IF EXISTS users; CREATE TABLE users ( username TEXT, password TEXT, role TEXT ); DROP TABLE IF EXISTS files; CREATE TABLE files ( id SERIAL PRIMARY KEY, name TEXT, content TEXT, author TEXT ); INSERT INTO files(name,content,author) VALUES ('flag', 'BREAK2{cheeky_bugger_u_aint_fixed_the_bug_innit}, Note to self, delete /admin','admin'), ('staff_api_a456h7dvra','BREAK2{call_the_govnr_we_got_ourselfs_a_hacker_innit} also /staff/secret/1gbdfte/swagger','staff_account'); INSERT INTO users VALUES ('testStaff', 'testStaff', 'Staff'); DROP USER IF EXISTS 'driveuser'@'%'; CREATE USER 'driveuser'@'%' IDENTIFIED WITH mysql_native_password BY '<PASSWORD>'; GRANT SELECT, INSERT, UPDATE ON *.* TO 'driveuser'@'%'; GRANT FILE on *.* TO 'driveuser'@'%'; FLUSH PRIVILEGES; <file_sep>/pastebing/app/pastebing/models.py #!/usr/bin/env python3 # models.py # # Used for sqlalchemy style models <file_sep>/drive.bing/docker-compose.yml version: '3' services: web_service: build: ./app restart: unless-stopped links: - database environment: - FLAG=COMP{REPLACE_ME} - FLAG_SECRET=COMP{INVALID_SECRET} - FLASK_CORE_CONFIG=/app/web/config.py - FLASK_CORE_ENABLE_AUTH=false - DB_CONNECTION_STRING=mysql://driveuser:4e34134344b-5834242a-9229-ddf2d6432426b0c45@database/drive - PGPASSWORD=<PASSWORD> ports: - 0.0.0.0:8010:8000 database: build: ./db restart: on-failure
9a57549602549023127969069984e4390278e771
[ "SQL", "YAML", "Markdown", "TOML", "JavaScript", "Python", "Text", "Dockerfile", "Shell" ]
26
Python
ns-agency/bing
2775f8b3c181d54cae415926078ef63a6e5e5ece
959b9621cf5babac2341d0d6bde558a7e5c760de
refs/heads/master
<repo_name>KunWardhana/tugasscss<file_sep>/README.md
# tugasscss

The HTML is built against a mobile base: F12 -> toggle device toolbar -> Galaxy S5
<file_sep>/userStyle/user_script.js
const myClass = document.getElementById("myClass")

// NOTE: scheme assumed; without "http://" fetch() treats the address as a relative path
const urlClass = "http://172.31.248.108:9000/Onlinetutorial/api/v1/class"

fetch(urlClass)
    .then((response) => response.json())
    .then((json) => {
        console.log(json)
        json.map((product) => {
        })
    })
    .catch((error) => console.log(error)); // catch errors
247650322091c5e51e183ef4987353ef95688387
[ "Markdown", "JavaScript" ]
2
Markdown
KunWardhana/tugasscss
8fe82d0c94a78263547f7105f2206621fa52a810
425b0dcb300679dd8b8b6841ed57f9d2ea93a823
refs/heads/main
<repo_name>Bernardo-Angelo98/mccourse<file_sep>/src/main/java/com/bern6angel/mccourse/repositories/ClienteRepository.java package com.bern6angel.mccourse.repositories; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; import com.bern6angel.mccourse.domain.Cliente; @Repository public interface ClienteRepository extends JpaRepository<Cliente, Integer> { }
7302ee89a04fcf763b6f65fb8429c77274a2f3e5
[ "Java" ]
1
Java
Bernardo-Angelo98/mccourse
e8200f241c27b4058913fa66404fed0a5c719c3f
a4350c18476adb8da0ab83d6dd57c2ca45dfdd90
refs/heads/master
<file_sep>#include "tcp_exception.h" tcp_exception::tcp_exception(const char * ms) { message = ms; } const char * tcp_exception::what() const throw() { return message; } <file_sep>#ifndef TCP_SOCKET_H #define TCP_SOCKET_H #include <sys/types.h> #include <sys/socket.h> #include <netdb.h> #include <arpa/inet.h> #include <netinet/in.h> #include <unistd.h> #include <fcntl.h> #include <string> #include "tcp_exception.h" class tcp_socket { public: tcp_socket(); tcp_socket(int fd); ~tcp_socket(); int get_descriptor() const; void close(); bool is_open() const; void make_non_blocking(); int read_data(char* msg, int max_size); int write_data(const char* msg, int max_size); const char * read_all(); void write_all(const char* msg, int size); private: static const int CHUNK_SIZE = 512; static const int MAX_SIZE = 100000; int fd; bool open; }; #endif // TCP_SOCKET_H <file_sep>#ifndef TCP_SERVER_H #define TCP_SERVER_H #include <sys/types.h> #include <sys/socket.h> #include <netdb.h> #include <arpa/inet.h> #include <netinet/in.h> #include <unistd.h> #include <fcntl.h> #include <exception> #include <sys/epoll.h> #include <iostream> #include <stdio.h> #include <string.h> #include <signal.h> #include <sys/eventfd.h> #include <errno.h> #include <map> #include <algorithm> #include <functional> #include "cstdio" #include "cstring" #include "cstdlib" #include "tcp_exception.h" #include "tcp_socket.h" struct tcp_server { public: ~tcp_server(); bool begin_listening(char * address, char * service); void set_max_pending_connections(int max); void set_new_connection(void (*f) (tcp_socket*)); void set_func(void (*f) (tcp_socket*)); int event_fd; private: bool running; const int MAX_EVENTS = 20; const int MAX_LENGTH = 20; int max_pending_connections = 10; int epoll_fd; int socket_fd; std::function<void (tcp_socket*)> new_connection; std::function<void (tcp_socket*)> func; static void check_error(int, const char*); static int create_and_bind(char * address, char * service); static int make_socket_non_blocking(int socket_fd); void create_event_fd(); }; #endif // TCP_SERVER_H <file_sep>#include <iostream> #include <stdio.h> #include <string.h> #include <sys/types.h> #include <sys/socket.h> #include <netdb.h> #include <arpa/inet.h> #include <netinet/in.h> #include <sys/epoll.h> #include <signal.h> #include <unistd.h> #include "tcp_server.h" #define MAXDATASIZE 10 #define MAX_EPOLL_EVENTS 10 using namespace std; tcp_server* server; static void sig_handler(int sig, siginfo_t *si, void* unused) { cout << std::endl <<"signal" << std::endl; fflush(stdout); write(server->event_fd, "1", 2); cout << "signal ok" << std::endl; fflush(stdout); } void just(tcp_socket* x) { } void reverse(tcp_socket* x) { const char * msg = x->read_all(); if (!x->is_open()) { delete[] msg; return; } int bytes = strlen(msg); std::cout << bytes << std::endl; std::cout << msg << std::endl; char * msg2 = new char[bytes]; for (int i = 0; i < bytes; i++) { msg2[i] = msg[bytes - 1 - i]; } x->write_data(msg2, bytes); x->close(); std::cout << bytes << std::endl; std::cout << "/" << msg2 << "/" << std::endl; fflush(stdout); delete[] msg; delete[] msg2; } int main() { struct sigaction sa; sa.sa_flags = SA_SIGINFO; sigemptyset(&sa.sa_mask); sa.sa_sigaction = sig_handler; sigaction(SIGINT, &sa, NULL); sigaction(SIGTERM, &sa, NULL); //sigaction(SIGINT, ) server = new tcp_server(); server->set_new_connection(just); server->set_func(reverse); server->begin_listening("127.0.0.1", "23010"); delete server; return 0; } <file_sep>#project(TCP) 
#cmake_minimum_required(VERSION 2.8) #aux_source _directory(. SRC_LIST) #add_executable(${PROJECT_NAME} ${SRC_LIST}) project(TCP) cmake_minimum_required(VERSION 2.8) aux_source_directory(. SRC_LIST) add_library(tcp_server STATIC tcp_server.cpp) add_library(tcp_client STATIC tcp_client.cpp) add_library(tcp_socket STATIC tcp_socket.cpp) add_library(tcp_exception STATIC tcp_exception.cpp) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") add_executable(tcp_server_exec main_server.cpp) add_executable(tcp_client_exec main_client.cpp) target_link_libraries(tcp_server_exec tcp_server tcp_socket tcp_exception) target_link_libraries(tcp_client_exec tcp_client) <file_sep>#ifndef TCP_CLIENT_H #define TCP_CLIENT_H class tcp_client { public: tcp_client(); }; #endif // TCP_CLIENT_H <file_sep>#ifndef TCP_EXCEPTION_H #define TCP_EXCEPTION_H #include <exception> struct tcp_exception: public std::exception { public: tcp_exception(const char* message); virtual const char* what() const throw(); private: const char* message; }; #endif // TCP_EXCEPTION_H <file_sep>#include <iostream> #include <stdio.h> #include <string.h> #include <sys/types.h> #include <sys/socket.h> #include <netdb.h> #include <arpa/inet.h> #include <netinet/in.h> #include <unistd.h> #define MAXDATASIZE 10 using namespace std; int main() { fork(); fork(); fork(); struct addrinfo hints, *res; struct addrinfo* servinfo; socklen_t addr_size; struct sockaddr their_addr; char msg[MAXDATASIZE]; memset(&hints, 0, sizeof hints); hints.ai_family = AF_UNSPEC; hints.ai_socktype = SOCK_STREAM; hints.ai_flags = AI_PASSIVE; getaddrinfo("127.0.0.1", "23010", &hints, &res); int s = socket(res->ai_family, res->ai_socktype, res->ai_protocol); connect(s, res->ai_addr, res->ai_addrlen); int bytes = send(s, "zader", 5, 0); cout << bytes << "\n"; bytes = recv(s, msg, MAXDATASIZE-1, 0); cout << bytes << "\n"; cout << string(msg).substr(0,bytes) << "\n"; fflush(stdout); close(s); } <file_sep>#include "tcp_socket.h" tcp_socket::tcp_socket() { } tcp_socket::tcp_socket(int fd) { this->fd = fd; open = true; } tcp_socket::~tcp_socket() { if (is_open()) { ::close(fd); open = false; } } int tcp_socket::get_descriptor() const { return fd; } void tcp_socket::close() { ::close(fd); open = false; } bool tcp_socket::is_open() const { return open; } void tcp_socket::make_non_blocking() { int flags = fcntl(fd, F_GETFL, 0); if (flags == -1) { throw tcp_exception("error in fcntl 1"); } flags |= O_NONBLOCK; int status = fcntl(fd, F_SETFL, flags); if (status == -1) { throw tcp_exception("error in fcntl 2"); } } int tcp_socket::read_data(char *msg, int max_size) { if (is_open()) { int x = recv(fd, msg, max_size, 0); if (x == 0) { ::close(fd); open = false; return 0; } else { return x; } } else { throw tcp_exception("read from closed socket"); } } const char * tcp_socket::read_all() { if (is_open()) { char * msg = new char[MAX_SIZE]; int size = 0; while (true) { int x = read_data(msg + size, CHUNK_SIZE); if (x != -1) { if (x == 0) { break; } else { size += x; } } else { if (errno == EACCES) { continue; } else { break; } } } msg[size] = '\0'; return msg; } else { throw tcp_exception("read from closed socket"); } } int tcp_socket::write_data(const char *msg, int max_size) { if (is_open()) { return send(fd, msg, max_size, 0); } else { throw tcp_exception("write to closed socket"); } } void tcp_socket::write_all(const char *msg, int size) { if (is_open()) { int count = 0; while (count < size) { count += send(fd, msg + count, CHUNK_SIZE, 0); } } else { throw tcp_exception("write to 
closed socket"); } } <file_sep>#include "tcp_client.h" tcp_client::tcp_client() { } <file_sep>#include "tcp_server.h" using namespace std; tcp_server::~tcp_server() { } bool tcp_server::begin_listening(char* address, char* service) { //char their_msg[MAX_LENGTH]; epoll_fd = epoll_create(MAX_EVENTS); check_error(epoll_fd, "epoll_fd not created"); create_event_fd(); socket_fd = create_and_bind(address, service); check_error(socket_fd, "socket_fd not created"); check_error(listen(socket_fd, max_pending_connections), "listen"); epoll_event listen_ev; memset(&listen_ev, 0, sizeof listen_ev); listen_ev.events = EPOLLIN | EPOLLPRI | EPOLLET; listen_ev.data.fd = socket_fd; check_error(epoll_ctl(epoll_fd, EPOLL_CTL_ADD, socket_fd, &listen_ev), "epoll_ctl"); socklen_t client; epoll_event events[MAX_EVENTS]; memset(events, 0, sizeof events); epoll_event connev; memset(&connev, 0, sizeof(epoll_event)); sockaddr cliaddr; int events_cout = 2; running = true; std::map<int, tcp_socket*> sockets; while (running) { int nfds = epoll_wait(epoll_fd, events, MAX_EVENTS, -1); if (nfds == -1 && errno == EINTR) { cout << "stupid exit" << endl; running = false; break; } cout << nfds << " new events" << std::endl; fflush(stdout); for (int n = 0; n < nfds; n++) { if (events[n].data.fd == socket_fd) { client = sizeof cliaddr; int connfd = accept(socket_fd, (sockaddr*) &cliaddr, &client); cout << "new connection accepted connfd= " << connfd << std::endl; fflush(stdout); if (events_cout == max_pending_connections) { std::cout << "full"; close(connfd); continue; } tcp_socket* current_socket = new tcp_socket(connfd); current_socket->make_non_blocking(); sockets[connfd] = current_socket; //make_socket_non_blocking(connfd); connev.data.fd = current_socket->get_descriptor(); connev.events = EPOLLIN | EPOLLET; check_error(epoll_ctl(epoll_fd, EPOLL_CTL_ADD, current_socket->get_descriptor(), &connev), "CTL"); new_connection(current_socket); if (!current_socket->is_open()) { epoll_ctl(epoll_fd, EPOLL_CTL_DEL, current_socket->get_descriptor(), &connev); events_cout--; sockets.erase(sockets.find(current_socket->get_descriptor())); delete current_socket; } events_cout++; } else if (events[n].data.fd == event_fd) { fflush(stdout); running = false; break; } else { tcp_socket* current_socket = sockets[events[n].data.fd]; cout << current_socket->is_open() << std::endl; func(current_socket); if (!current_socket->is_open()) { epoll_ctl(epoll_fd, EPOLL_CTL_DEL, current_socket->get_descriptor(), &connev); events_cout--; sockets.erase(sockets.find(current_socket->get_descriptor())); delete current_socket; } } } } close(socket_fd); close(epoll_fd); close(event_fd); for (map<int, tcp_socket*>::iterator i = sockets.begin(); i != sockets.end(); i++) { delete i->second; } } void tcp_server::set_max_pending_connections(int max) { max_pending_connections = max; } void tcp_server::set_new_connection(void (*f)(tcp_socket*)) { new_connection = f; } void tcp_server::set_func(void (*f)(tcp_socket*)) { func = f; } void tcp_server::check_error(int x, const char * msg) { if (x == -1) { throw tcp_exception(msg); } } int tcp_server::create_and_bind(char* address, char* service) { struct addrinfo hints, *res; memset(&hints, 0, sizeof hints); hints.ai_family = AF_UNSPEC; hints.ai_socktype = SOCK_STREAM; hints.ai_flags = AI_PASSIVE; check_error(getaddrinfo(address, service, &hints, &res), "getaddrinfo in create and bind"); int s = socket(res->ai_family, res->ai_socktype, res->ai_protocol); make_socket_non_blocking(s); check_error(bind(s, res->ai_addr, 
res->ai_addrlen), "bind in create and bind"); ::free(res); return s; } int tcp_server::make_socket_non_blocking(int socket_fd) { int flags = fcntl(socket_fd, F_GETFL, 0); if (flags == -1) { //TODO: check error return -1; } flags |= O_NONBLOCK; int status = fcntl(socket_fd, F_SETFL, flags); if (status == -1) { //TODO: check error return -1; } return 0; } void tcp_server::create_event_fd() { event_fd = eventfd(0, 0); make_socket_non_blocking(event_fd); struct epoll_event event_fd_ev; memset(&event_fd_ev, 0, sizeof(epoll_event)); event_fd_ev.events = EPOLLIN | EPOLLET; event_fd_ev.data.fd = event_fd; epoll_ctl(epoll_fd, EPOLL_CTL_ADD, event_fd, &event_fd_ev); }
f4d419f116e9e9e615b9200b1a28bd86bacda144
[ "CMake", "C++" ]
11
C++
alex-700/TCP
c72a23b22c362cb1c4b2345012ab24744b6f479e
d2515033679ff877c9c3489dd3984976aa0e387e
refs/heads/master
<file_sep>import { Component, OnInit } from '@angular/core'; import { AddCategory } from '../../model/add-category'; import { CategoryServiceService } from '../../service/category-service.service'; import { Paper } from '../../model/paper'; import { PaperQuestionService } from '../../service/paper-question.service'; import { PaperQuestion } from '../../model/paper-question'; import { Question } from '../../model/question'; import { QuestionServiceService } from '../../Service/question-service.service'; import { FormGroup, FormBuilder, Validators } from '@angular/forms'; @Component({ selector: 'app-question-list', templateUrl: './question-list.component.html', styleUrls: ['./question-list.component.css'] }) export class QuestionListComponent implements OnInit { public getCatInfo: Array<AddCategory> = []; public getQuestionLits : Array<Question> =[]; public categorySelectvalue; submitted :boolean= false; public visible =false; question: Question; testBoolean :boolean; selectValueStore=[]; public paper : Paper; questionForm : FormGroup; constructor(private formBuilder : FormBuilder,private categorySer: CategoryServiceService, private questSer: QuestionServiceService,private paperQuestion : PaperQuestionService) { this.question = new Question(); this.paper = new Paper(); } ngOnInit() { this.categorySer.getCategoryInfo().subscribe(getResultList => { this.getCatInfo = getResultList; }) this.questionForm = this.formBuilder.group({ selectQuestion : ['',Validators.required] }) } check() { this.submitted=true; alert(this.submitted) } //Get All Question list base on category Id... getQuestionList(){ this.submitted=true; alert(this.submitted + ":" + this.questionForm.value.selectQuestion) debugger; if(this.questionForm.valid){ this.questSer.getCategoryInfo(this.questionForm.value.selectQuestion).subscribe(getList =>{ this.getQuestionLits=getList; if(this.getQuestionLits.length === 0){ this.visible=true alert("No Record Found !!!" 
) } alert("Total Question is :" + this.getQuestionLits.length) this.categorySelectvalue=this.questionForm.value.selectQuestion; }) } } change(x){ this.paper.paperCategory=this.questionForm.value.selectQuestion; let pq = new PaperQuestion(); pq.paperSelectQuestionId=x; this.paper.paperQuestionList.push(pq); } temp(){ this.paperQuestion.addPaperQuestion(this.paper).subscribe(f => { alert("List Length is :" +this.paper.paperQuestionList.length) }) } } <file_sep>import { Component, OnInit } from '@angular/core'; import { AddCategory } from '../../model/add-category'; import { CategoryType } from '../../model/category-type'; import { CategoryServiceService } from '../../service/category-service.service'; //import { QuestionServiceService } from '../../service/question-service.service'; import { Question } from '../../model/question'; import { Answer } from '../../model/answer'; import { QuestionServiceService } from '../../Service/question-service.service'; import { Form, NgForm } from '@angular/forms'; @Component({ selector: 'app-question', templateUrl: './question.component.html', styleUrls: ['./question.component.css'] }) export class QuestionComponent implements OnInit { public getCatInfo: Array<AddCategory> = []; public setCatType: Array<CategoryType> = []; public categoryName: String; public setDataCatType; public QuestionName; public answer: Array<String> = []; public x; public flag: boolean = false; public subcategory: Array<String> = []; question: Question; answerObject : Answer; public checkRadio : boolean =false; public checkCheck : boolean ; temp :Question; theCheckbox =false; constructor(private categorySer: CategoryServiceService,private questionSer :QuestionServiceService) { this.question = new Question(); this.answerObject = new Answer(); } ngOnInit() { this.categorySer.getCategoryInfo().subscribe(getResultList => { this.getCatInfo = getResultList; this.checkCheck=true; }) // this.subcategory = ['Single Question Single Answer', 'Single Question Multiple Answer']; } addfield() { let ans = new Answer(); this.question.options.push(ans); this.answer[this.question.options.length]; } saveFunction() { console.log("--------------------hhhh " +this.question) if( this.question.questionCategory == undefined && this.question.subcategory == undefined && this.question.questionText.length < 20) { alert("please Select Category And Question Type") return ; } else { this.questionSer.addQuestion(this.question).subscribe(getAllQuestionData =>{ this.temp=getAllQuestionData; alert(this.question.questionId+"Question Id"); alert(this.question.questionText+ "QuestionText"); alert(this.question.subcategory + "Question SubCategory"); console.log("------------->" +this.temp) }) } } setradiocorrect(ans) { alert("Correct Answer is :" + ans.answerCorrect); // ans.answerCorrect =!ans.answerCorrect; // console.log("check" + ans.answerCorrect) } setchkcorrect(ans){ ans.answerCorrect =!ans.answerCorrect; console.log(ans.answerCorrect); } toggleVisibility(e){ if(this.checkCheck == true){ this.checkCheck=false; }else this.checkCheck=true; } resetVal(getForm : NgForm){ getForm.reset() } } <file_sep>import { Component, OnInit, Inject } from '@angular/core'; import { MAT_DIALOG_DATA, MatDialogRef } from '@angular/material'; import { TakeTestComponent } from '../Test/take-test/take-test.component'; import { Dialog } from '../dialog'; @Component({ selector: 'app-quiz-submit-instruction', templateUrl: './quiz-submit-instruction.component.html', styleUrls: ['./quiz-submit-instruction.component.css'] }) export class 
QuizSubmitInstructionComponent implements OnInit { constructor(public dialogRef: MatDialogRef<TakeTestComponent>, @Inject(MAT_DIALOG_DATA)public d : Dialog) { } ngOnInit() { } } <file_sep>import { Component, OnInit } from '@angular/core'; import { Question } from '../../Model/question'; import { QuestionServiceService } from '../../Service/question-service.service'; import { PaperServiceService } from '../../Service/paper-service.service'; import { Paper } from '../../Model/paper'; import { Router } from '@angular/router'; import { debug } from 'util'; import { MatDialog } from '@angular/material'; import { QuizSubmitInstructionComponent } from '../../quiz-submit-instruction/quiz-submit-instruction.component'; @Component({ selector: 'app-take-test', templateUrl: './take-test.component.html', styleUrls: ['./take-test.component.css'] }) export class TakeTestComponent implements OnInit { public listOfTestImagesHide: boolean = false; public takeTestFlag: boolean = false; public questionObj: Question; public fetchPaperQuestion: Array<Question> = []; public getResponseAllQueston: Array<Paper> = []; public getLoginSession: string; public totalCorrectAnswer : number = 0 ; public startQuizBtnVal : boolean = false; public startQuizBtn : boolean = false; public instructionHideShowCard : boolean = true; p: number = 1; public tempCheckIsNumber: boolean = false; constructor(private router: Router, private questionSer: QuestionServiceService, private paperService: PaperServiceService,private dialog : MatDialog) { } ngOnInit() { } check() { this.listOfTestImagesHide = true; } question(getVal) { debugger; if (getVal != 'test') { if(getVal != null && getVal != 'test'){ localStorage.setItem("loginSession", null); this.questionSer.getCategoryWiaQuestionInfo(getVal).subscribe(getResults => { this.fetchPaperQuestion = getResults; this.listOfTestImagesHide = true; }) } else { alert("There is no question available !!!"); } } if(getVal == 'test'){ // localStorage.clear(); this.getLoginSession = localStorage.getItem("loginSession"); if(this.getLoginSession == null){ alert("go to login page"); this.router.navigate(['/login']); } else { this.listOfTestImagesHide = true; this.paperService.getPaperInfo().subscribe(getPaperResult => { this.getResponseAllQueston = getPaperResult; for (let data of this.getResponseAllQueston) { for (let ans of data.paperQuestionList) { console.log(ans.paperSelectQuestionId); this.questionSer.getQuestionIdBasefetchList(ans.paperSelectQuestionId).subscribe(getAllDataOfPaper => { this.fetchPaperQuestion.push(getAllDataOfPaper); }) } } }) } } } checkAnswer(ans){ if(ans === true){ this.totalCorrectAnswer= this.totalCorrectAnswer + 1; } } Result(){ this.dialogOpenFun(); alert("Total Result is :" + this.totalCorrectAnswer); } instruction(){ this.startQuizBtnVal = true; this.instructionHideShowCard = false; alert(); this.startQuizBtn=true; } onEvent(d){ if(d.left == 0 ){ this.dialogOpenFun(); } } dialogOpenFun(){ const dialogRef = this.dialog.open(QuizSubmitInstructionComponent, { maxWidth: '30vw', maxHeight: '30vh', height: '60%', width: '7 0%', disableClose : true }); } }
b05aabd4a1d738dd7e1865523459793a5052b5c2
[ "TypeScript" ]
4
TypeScript
cd-learning/Code-Quiz---Imp
f0d55fb2fb3892823c323e134126f7965011f003
2dd0eea8ec735faf126e9c7bb5d8c844b1fe140a
refs/heads/master
<repo_name>allouis/monads<file_sep>/lib/Just.js var Maybe = require('./Maybe'); function Just(val){ return { fmap: function(fn) { return Maybe(fn(val)); } } } module.exports = Just; <file_sep>/lib/Nothing.js function Nothing(){ return { fmap: function(){ return Nothing(); } } } module.exports = Nothing;
1922efb76b40fd1f621619ce6f87f102f9ad6fd8
[ "JavaScript" ]
2
JavaScript
allouis/monads
0e30ca427852c90e7ba2479bbd3bcce3e8f7a5d8
44c7f2ddafb3b6e67d74c50f472479fa4ef69b86
refs/heads/master
<file_sep>package main // Import our dependencies. We'll use the standard HTTP library as well as the gorilla router for this app import ( "encoding/json" "fmt" "log" "net/http" "os" "time" "github.com/auth0/go-jwt-middleware" "github.com/dgrijalva/jwt-go" "github.com/gorilla/handlers" "github.com/gorilla/mux" ) type Product struct { Id int Name string Slug string Description string } /* We will create our catalog of VR experiences and store them in a slice. */ var products = []Product{ Product{Id: 1, Name: "World of Authcraft", Slug: "world-of-authcraft", Description: "Battle bugs and protect yourself from invaders while you explore a scary world with no security"}, Product{Id: 2, Name: "Ocean Explorer", Slug: "ocean-explorer", Description: "Explore the depths of the sea in this one of a kind underwater experience"}, Product{Id: 3, Name: "Dinosaur Park", Slug: "dinosaur-park", Description: "Go back 65 million years in the past and ride a T-Rex"}, Product{Id: 4, Name: "Cars VR", Slug: "cars-vr", Description: "Get behind the wheel of the fastest cars in the world."}, Product{Id: 5, Name: "<NAME>", Slug: "robin-hood", Description: "Pick up the bow and arrow and master the art of archery"}, Product{Id: 6, Name: "Real World VR", Slug: "real-world-vr", Description: "Explore the seven wonders of the world in VR"}} var NotImplemented = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte("Not Implemented")) }) /* The status handler will be invoked when the user calls the /status route It will simply return a string with the message "API is up and running" */ var StatusHandler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte("API is up and running")) }) /* The products handler will be called when the user makes a GET request to the /products endpoint. This handler will return a list of products available for users to review */ var ProductsHandler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // Here we are converting the slice of products to JSON payload, _ := json.Marshal(products) w.Header().Set("Content-Type", "application/json") w.Write([]byte(payload)) }) /* The feedback handler will add either positive or negative feedback to the product We would normally save this data to the database - but for this demo, we'll fake it so that as long as the request is successful and we can match a product to our catalog of products we'll return an OK status. */ var AddFeedbackHandler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { var product Product vars := mux.Vars(r) slug := vars["slug"] for _, p := range products { if p.Slug == slug { product = p } } w.Header().Set("Content-Type", "application/json") if product.Slug != "" { payload, _ := json.Marshal(product) w.Write([]byte(payload)) } else { w.Write([]byte("Product Not Found")) } }) var jwtMiddleware = jwtmiddleware.New(jwtmiddleware.Options{ ValidationKeyGetter: func(token *jwt.Token) (interface{}, error) { return mySigningKey, nil }, SigningMethod: jwt.SigningMethodHS256, }) var Log string var Pass string func main() { fmt.Println("Логин") fmt.Scanf("%s\n", &Log) fmt.Println("Пароль") fmt.Scanf("%s\n", &Pass) // Here we are instantiating the gorilla/mux router r := mux.NewRouter() // ... //... 
r.Handle("/status", NotImplemented).Methods("GET") // Добавляем прослойку к products и feedback роутам, все остальные // роуты у нас публичные r.Handle("/products", jwtMiddleware.Handler(ProductsHandler)).Methods("GET") r.Handle("/products/{slug}/feedback", jwtMiddleware.Handler(AddFeedbackHandler)).Methods("POST") //... r.Handle("/get-token", GetTokenHandler).Methods("GET") // ... // On the default page we will simply serve our static index page. r.Handle("/", http.FileServer(http.Dir("./views/"))) // We will setup our server so we can serve static assest like images, css from the /static/{file} route r.PathPrefix("/static/").Handler(http.StripPrefix("/static/", http.FileServer(http.Dir("./static/")))) // Our application will run on port 8080. Here we declare the port and pass in our router. http.ListenAndServe(":3000", handlers.LoggingHandler(os.Stdout, r)) //////// // NEW CODE //////// // Our API is going to consist of three routes // /status - which we will call to make sure that our API is up and running // /products - which will retrieve a list of products that the user can leave feedback on // /products/{slug}/feedback - which will capture user feedback on products } // Глобальный секретный ключ var mySigningKey = []byte("secret") var GetTokenHandler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { token := jwt.New(jwt.SigningMethodHS256) claims := token.Claims.(jwt.MapClaims) t := time.Now() login := r.FormValue("login") password := r.FormValue("password") data := r.FormValue("data") dataanswer := data + "1" fmt.Println("User login = ", login, "; Server login = ", Log, "; \nUser password = ", <PASSWORD>, "; Server password = ", Pass) autorizationok := Log == login && Pass == password fmt.Println("autorizationok = ", autorizationok) claims["admin permissions?"] = "maybe" claims["login"] = &Log claims["password"] = &<PASSWORD> claims["Data answer is"] = dataanswer claims["Token request at"] = t claims["ATTENTION!"] = "Привет, Макс :)" claims["exp"] = time.Now().Add(time.Hour * 72).Unix() tokenString, err := token.SignedString(mySigningKey) if err != nil { log.Fatal(err) } tokenFprint := []byte(tokenString) if autorizationok { fmt.Fprint(w, fmt.Sprintf("Token request at [%s]\nUser:\nLogin: '%s'\nPassword: '%s'\nData answer is: %s\n", t.Format(time.RFC3339), login, password, dataanswer)) fmt.Fprint(w, fmt.Sprintf("Token: %s", tokenFprint)) } else { fmt.Fprint(w, " access denied ") } // w.Write([]byte(tokenString)) }) <file_sep>module github.com/ev/go-vr-auth go 1.14 require ( github.com/auth0/go-jwt-middleware v0.0.0-20201030150249-d783b5c46b39 github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/gorilla/handlers v1.5.1 github.com/gorilla/mux v1.8.0 )
19022956a35c159522ef338645c5c77147cbd807
[ "Go Module", "Go" ]
2
Go
ev-go/go-vr-auth
aa49c4b9ae8f1a165f7f42355fe4adb85913eacb
b98f46929b4b77fa3e8af5b7a43c469bfdeb8f83
refs/heads/master
<repo_name>maiconfriedel/app-ideas-bin2dec<file_sep>/src/utils/bin2dec.ts
/** @description Converts a binary string into a decimal number */
export function convertBinaryStringToDecimal(binaryString: string) {
  const decimalNumber: number = parseInt(binaryString, 2);

  return decimalNumber;
}

/** @description Converts a binary number array into a decimal number.
 * Index 0 of the array is treated as the least-significant bit. */
export function convertBinaryNumberArrayToDecimal(binaryArray: number[]) {
  // The explicit initial value of 0 also makes the function safe for empty arrays.
  const decimalNumber: number = binaryArray.reduce(
    (total, currentValue, index) => {
      return total + currentValue * Math.pow(2, index);
    },
    0
  );

  return decimalNumber;
}
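// Example usage (hypothetical values, not part of the original module):
//   convertBinaryStringToDecimal("1011")            // => 11
//   convertBinaryNumberArrayToDecimal([1, 1, 0, 1]) // => 11 (index 0 is the least-significant bit)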
a0985dc91e0b8d5a843edd69c9bdf84e09acc6e2
[ "TypeScript" ]
1
TypeScript
maiconfriedel/app-ideas-bin2dec
85810d2254ae6b34f2c1ca38604e67f9d6449938
15e7369a6dd67e263477199117ce5b34932720a6
refs/heads/master
<repo_name>mazeit/KYPC-api<file_sep>/api/v1/controllers/theirclient.js var config = rootRequire('./config'); // get our config file var connect = rootRequire('./db/queries'); // local postgres connection var helper = rootRequire('./api/v1/helper'); module.exports = { listAllClients: function listAllClients(req, res, next){ // console.log("get data") connect.db.any("SELECT * FROM theirclients") .then(function(data) { return res.status(200) .json({ status: 'success', data: data, message: 'Retrieved ALL Clients' }); }) .catch(function(err) { console.log("error data") return res.status(500) .json({ status: 'fail', err: err, message: 'Something went wrong !' }); }); }, getById: function getById(req, res, next){ // _params = req.params; // var id = _params.id ? _params.id : null; // connect.db.any('SELECT * FROM user_group WHERE _id = $1', id) // .then(function(group) { // if (group.length > 0) { // group = group[0]; // } else { // group = null // } // if (group && group != null) { // return res.status(200) // .json({ // status: 'success', // data: group, // message: 'User Group data fetched Successfully.' // }); // } else { // return res.status(401) // .json({ // status: 'fail', // err: "Unauthorized access.", // message: 'User not found.' // }); // } // }) // .catch(function(err) { // return res.status(500) // .json({ // status: 'fail', // err: err, // message: 'Something went wrong !' // }); // }); res.send("hi") }, updateById: function updateById(req, res, next){ // var _body = req.body; // var _params = req.params; // // validations // if (!_params.id) { // return res.send({ status: 0, message: 'Invalid parameters' }); // } // connect.db.any('SELECT * FROM user_group WHERE id = $1', _params.id) // .then(function(group) { // if (group.length > 0) { // group = group[0]; // } else { // group = null // } // if (group && group != null) { // _body.name = _body.name ? _body.name : group.name; // _body.discount = _body.discount ? _body.discount : user.discount; // _body.description = _body.description ? _body.description : user.description; // _body.role = _body.role ? _body.role : user.role; // connect.db.one('update user_group set name=$1, discount=$2, description=$3, role=$4 where id = $5 RETURNING * ', [_body.name, _body.discount, _body.description, _body.role , _params.id]) // .then(function(data) { // res.status(200) // .json({ // status: 'success', // data: data, // message: 'Updated user group successfully.' // }); // }) // .catch(function(err) { // return res.status(500) // .json({ // status: 'fail', // err: err, // message: 'Something went wrong !' // }); // }); // } else { // return res.status(401) // .json({ // status: 'fail', // err: "Unauthorized access.", // message: 'Incorrect User Group.' // }); // } // }) // .catch(function(err) { // // console.log(err); // return res.status(500) // .json({ // status: 'fail', // err: err, // message: 'Something went wrong !' // }); // }); res.send("hi") }, /** * Delete User API */ deleteClientById: function deleteClientById(req, res, next) { // var _params = req.params; // // validations // if (!_params.id) { // return res.send({ status: 0, message: 'Invalid parameters' }); // } // // delete; // connect.db.result('DELETE FROM user_group WHERE id = $1', _params.id) // .then(function(result) { // // rowCount = number of rows affected by the query // if (result.rowCount > 0) { // return res.status(200) // .json({ // status: 'success', // data: result, // message: 'User Group deleted Successfully.' 
// }); // } else { // return res.status(401) // .json({ // status: 'fail', // err: "Unauthorized access.", // message: 'User Group not found.' // }); // } // // console.log(result.rowCount); // print how many records were deleted; // }) // .catch(function(err) { // // console.log('ERROR:', error); // return res.status(500) // .json({ // status: 'fail', // err: err, // message: 'Something went wrong !' // }); // }); res.send("hi") }, create: function create(req, res, next) { var _body = req.body; // validations if (!_body.clientname) { return res.send({ status: 0, message: 'Invalid parameters' }); } connect.db.one('INSERT INTO theirclients (clientname,clienttype,clientsince,reasonrelation, transactionsnormalfrom, transactionsnormalto, transactionsamountavg, transactionsamountmax, originfonds, ownrisk, totalrisk, risklevel, lastqualified, lastverified, lastactualised, theirclientnotes ) VALUES($1, $2, $3, $4, $5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16) RETURNING *', [_body.clientname, _body.clienttype || '', _body.clientsince || '', _body.reasonrelation || 0, _body.transactionsnormalfrom || 0, _body.transactionsnormalto || 0, _body.transactionsamountavg || 0, _body.transactionsamountmax || 0, _body.originfonds || 0, _body.ownrisk || 0, _body.totalrisk || 0,_body.risklevel || 0, _body.lastqualified || '',_body.lastverified || '', _body.lastactualised || '', _body.theirclientnotes || '']) .then(function(data) { return res.status(200) .json({ status: 'success', data: data, message: 'Successfully created' }); }) .catch(function(err) { console.log(err); return res.status(500) .json({ status: 'fail', err: err, message: 'Something went wrong !' }); }); }, }<file_sep>/api/v1/index.js var express = require('express'); var jwtAuthentication = require('./policies/jwtAuthentication'); var userController = require('./controllers/user'); var groupController = require('./controllers/usergroup'); var entityController = require('./controllers/entity'); var clientController = require('./controllers/client'); var eventController = require('./controllers/event'); var subController = require('./controllers/subscription'); var osticketController = require('./controllers/osticket'); var theirClientController = require('./controllers/theirclient'); // var signalController = require('./controllers/signal'); // var signalLogController = require('./controllers/signallog'); // ROUTES FOR OUR API // ============================================================================= var router = express.Router(); // get an instance of the express Router // declare routes before login router.post('/login', userController.login); router.post('/signup', userController.signup); router.get('/verify', userController.verifyEmail); router.post('/forgot-password', userController.forgotPassword); // middleware router.all('*', jwtAuthentication); // declare routes after login router.get('/test', userController.testCtrl); router.get('/listUser/:username', userController.listUser); router.get('/listAllUsers', userController.listAllUsers); router.delete('/deleteUserByID/:username', userController.deleteUserByID); router.put('/updateUserByID/:username', userController.updateUserByID); router.post('/userprofile/create', userController.createProfile); router.post('/userprofile/upload/:username', userController.uploadImage); router.post('/users/setuserrole', userController.setUserRole); router.get('/usergroup/:id', groupController.getById); router.get('/listAllGroups', groupController.listAllGroups); router.delete('/usergroup/delete/:id', 
groupController.deleteUserGroupById);
router.put('/usergroup/update/:id', groupController.updateById);
router.post('/usergroup/create', groupController.create);

router.post('/ourclient/create', clientController.create);
router.get('/listAllClients', clientController.listAllClients);

router.post('/entity/create', entityController.create);
router.get('/listAllEntities', entityController.listAllEntities);

router.get('/events', eventController.listAllEvents);
router.get('/events/:id', eventController.getById);
router.post('/events', eventController.create);
router.post('/events/changeLevelById/:id', eventController.changeLevelById);
router.put('/events/:id', eventController.updateById);
router.delete('/events/:id', eventController.deleteById);

router.get('/subscriptions', subController.listAllSubscriptions);
router.get('/subscriptions/:id', subController.getById);
router.post('/subscriptions', subController.create);
router.put('/subscriptions/:id', subController.updateById);
router.delete('/subscriptions/:id', subController.deleteById);

router.post('/osticket/create', osticketController.create);
router.post('/theirclient/create', theirClientController.create);

// router.post('/signals', signalController.processSignal);
// router.get('/signallog', signalLogController.findRecent);

module.exports = router;
<file_sep>/api/v1/controllers/osticket.js
var config = rootRequire('./config'); // get our config file
var Client = require('node-rest-client').Client;

var client = new Client();

module.exports = {

    create: function create(req, res, next) {
        var _body = req.body;
        _body['ip'] = config.ip;

        var args = {
                data: _body,
                headers: {
                    "Content-Type": "application/json",
                    "X-API-Key": config.osticketapikey
                }
            },
            url = config.osticketserver + '/api/tickets.json';

        client.post(url, args, function (data, response) {
            // response body from osTicket, returned to the caller as text
            console.log(data);
            return res.status(200)
                .json({
                    status: 'success',
                    data: data.toString('ascii'),
                    message: 'Created new ticket successfully'
                });
        });
    },
}
<file_sep>/api/v1/helper/index.js
var TokenGenerator = require('uuid/v1');
var email = require("emailjs/email");
var nodemailer = require('nodemailer');
var smtpTransport = require('nodemailer-smtp-transport');

var transporter = nodemailer.createTransport(smtpTransport({
    host: '127.0.0.1',
    port: 25
}));

var server = email.server.connect({
    user: "ziara.testuser",
    password: "<PASSWORD>",
    host: "smtp.gmail.com",
    ssl: true
});

module.exports = {
    generateToken: function() {
        var token = TokenGenerator();
        return token;
    },
    sendMailTo: function(receiver, subject, text, next) {
        transporter.sendMail({
            from: '<EMAIL>',
            to: receiver,
            subject: subject,
            html: text,
            text: text
        });
        // next is optional; only invoke it when a callback was actually provided
        if (typeof next === 'function') {
            next();
        }
    }
};
<file_sep>/api/v1/controllers/event.js
var config = rootRequire('./config'); // get our config file
var connect = rootRequire('./db/queries'); // local postgres connection
var helper = rootRequire('./api/v1/helper');

module.exports = {

    listAllEvents: function listAllEvents(req, res, next){
        console.log("get data")
        connect.db.any("SELECT * FROM alerts ORDER BY created_at DESC")
            .then(function(data) {
                return res.status(200)
                    .json({
                        status: 'success',
                        data: data,
                        message: 'Retrieved ALL Events'
                    });
            })
            .catch(function(err) {
                console.log("error data")
                return res.status(500)
                    .json({
                        status: 'fail',
                        err: err,
                        message: 'Something went wrong !'
                    });
            });
    },

    getById: function getById(req, res, next){
        var _params = req.params,
            id = _params.id ? _params.id : null;
        connect.db.any('SELECT * FROM alerts WHERE id = $1', id)
            .then(function(event) {
                var alerts;
                if (event.length > 0) {
                    alerts = event[0];
                } else {
                    alerts = null;
                }
                if (alerts != null) {
                    return res.status(200)
                        .json({
                            status: 'success',
                            data: event,
                            message: 'Event fetched Successfully.'
                        });
                } else {
                    return res.status(401)
                        .json({
                            status: 'fail',
                            err: "Unauthorized access.",
                            message: 'Event not found.'
                        });
                }
            })
            .catch(function(err) {
                return res.status(500)
                    .json({
                        status: 'fail',
                        err: err,
                        message: 'Something went wrong !'
                    });
            });
    },

    updateById: function updateById(req, res, next){
        var _body = req.body,
            _params = req.params;
        // validations
        if (!_params.id) {
            return res.send({ status: 0, message: 'Invalid parameters' });
        }
        connect.db.any('SELECT * FROM alerts WHERE id = $1', _params.id)
            .then(function(event) {
                var alerts;
                if (event.length > 0) {
                    alerts = event[0];
                } else {
                    alerts = null;
                }
                if (alerts != null) {
                    _body.title = _body.title ? _body.title : alerts.title;
                    connect.db.one('update alerts set title=$1 where id = $2 RETURNING * ', [_body.title, _params.id])
                        .then(function(data) {
                            res.status(200)
                                .json({
                                    status: 'success',
                                    data: data,
                                    message: 'Updated alerts successfully.'
                                });
                        })
                        .catch(function(err) {
                            console.log(err)
                            return res.status(500)
                                .json({
                                    status: 'fail',
                                    err: err,
                                    message: 'Something went wrong !'
                                });
                        });
                } else {
                    return res.status(401)
                        .json({
                            status: 'fail',
                            err: "Unauthorized access.",
                            message: 'Incorrect event.'
                        });
                }
            })
            .catch(function(err) {
                console.log(err);
                return res.status(500)
                    .json({
                        status: 'fail',
                        err: err,
                        message: 'Something went wrong !'
                    });
            });
    },

    /**
     * Delete API
     */
    deleteById: function deleteById(req, res, next) {
        var _params = req.params;
        // validations
        if (!_params.id) {
            return res.send({ status: 0, message: 'Invalid parameters' });
        }
        // delete;
        connect.db.result('DELETE FROM alerts WHERE id = $1', _params.id)
            .then(function(result) {
                // rowCount = number of rows affected by the query
                if (result.rowCount > 0) {
                    return res.status(200)
                        .json({
                            status: 'success',
                            data: result,
                            message: 'event deleted Successfully.'
                        });
                } else {
                    return res.status(401)
                        .json({
                            status: 'fail',
                            err: "Unauthorized access.",
                            message: 'event not found.'
                        });
                }
                // console.log(result.rowCount); // print how many records were deleted;
            })
            .catch(function(err) {
                return res.status(500)
                    .json({
                        status: 'fail',
                        err: err,
                        message: 'Something went wrong !'
                    });
            });
    },

    /**
     * Change level API
     */
    changeLevelById: function changeLevelById(req, res, next) {
        var _params = req.params,
            _body = req.body,
            alerts;
        // validations
        if (!_params.id) {
            return res.send({ status: 0, message: 'Invalid parameters' });
        }
        console.log(_params.id)
        connect.db.result('SELECT * FROM alerts WHERE id = $1', _params.id)
            .then(function(event) {
                if (event.rows.length > 0) {
                    alerts = event.rows[0];
                } else {
                    alerts = null;
                }
                console.log('event', event)
                console.log('alerts', alerts)
                if (alerts != null) {
                    _body.level = _body.level ? _body.level : alerts.level;
                    connect.db.one('update alerts set level=$1 where id = $2 RETURNING * ', [_body.level, _params.id])
                        .then(function(data) {
                            res.status(200)
                                .json({
                                    status: 'success',
                                    data: data,
                                    message: 'Updated level of alerts successfully.'
                                });
                        })
                        .catch(function(err) {
                            console.log('ERROR:', err);
                            return res.status(500)
                                .json({
                                    status: 'fail',
                                    err: err,
                                    message: 'Something went wrong !'
                                });
                        });
                } else {
                    return res.status(401)
                        .json({
                            status: 'fail',
                            err: "Unauthorized access.",
                            message: 'Incorrect event.'
}); } }) .catch(function(err) { console.log('ERROR:', err); return res.status(500) .json({ status: 'fail', err: err, message: 'Something went wrong !' }); }); }, create: function create(req, res, next) { var _body = req.body; _body['created_at'] = new Date(); // validations if (!_body.title) { return res.send({ status: 0, message: 'Invalid parameters' }); } connect.db.one('INSERT INTO alerts (title, body, owner, level, created_at ) VALUES($1, $2, $3, $4, $5) RETURNING *', [_body.title, _body.body, _body.owner, _body.level, _body.created_at]) .then(function(data) { return res.status(200) .json({ status: 'success', data: data, message: 'Successfully created' }); }) .catch(function(err) { console.log(err); return res.status(500) .json({ status: 'fail', err: err, message: 'Something went wrong !' }); }); }, create_event: function create_event(message, io){ var body = message.message, owner = message.owner, level = "low", created_at = new Date(), title = message.title; console.log("message = ", message) connect.db.one('INSERT INTO alerts (title, body, owner,level, created_at ) VALUES($1, $2, $3, $4, $5) RETURNING *', [title, body, owner, level, created_at]) .then(function(data) { console.log("inserted successfully ", data) io.emit('broad-event', {message: body, title:title, created_at: created_at }); helper.sendMailTo('<EMAIL>', title, '<div>' + body + '</div>'); }) .catch(function(err) { console.log(err); }); }, }
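// Example request for the create endpoint above (a sketch with hypothetical values,
// relative to wherever the v1 router is mounted; created_at is set server-side):
//
//   POST /events
//   { "title": "Suspicious transaction", "body": "Amount above normal range", "owner": "compliance", "level": "low" }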
03ea2b6335797290fcf054675baec590a93eaa64
[ "JavaScript" ]
5
JavaScript
mazeit/KYPC-api
62bf7b5d89642cde333b9b8b5da520e5513a25c9
9f4eada44cb872fa68c3284f83284d5c24202490
refs/heads/master
<repo_name>agnieszkapohl/simplechattybot<file_sep>/Problems/Fahrenheit/task.py FC = 5 / 9 def fahrenheit_to_celsius(fahrenheit): return round((fahrenheit - 32) * FC, 3) <file_sep>/Problems/Minimal number/task.py x = int(input()) y = int(input()) # variables `x` and `y` are defined, so just print the minimum print(min(x, y))
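# Quick sanity checks for the two tasks above (hypothetical runs, not part of the exercises):
#   fahrenheit_to_celsius(212) -> 100.0, fahrenheit_to_celsius(32) -> 0.0
#   with inputs 3 and 5, the minimal-number program prints 3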
e1f21d627b6a4a7a53bce3716639c7c8b68f63d7
[ "Python" ]
2
Python
agnieszkapohl/simplechattybot
f4c5acf8e0dba88e1fdc3b335b020349a835570f
be827652e64127190aa243f9f47e34397166c070
refs/heads/master
<file_sep>//@author <NAME> package model.domain; import java.util.ArrayList; import model.strategyQuestion.YesNo; public class YesNoQuestion extends Question { public YesNoQuestion(String question, String correctStatement, ArrayList<String> statements, String category, String feedback) { super(question, correctStatement, statements, category, feedback); questionType = new YesNo(); } } <file_sep>//@author <NAME> package handler; import controller.Controller; import javafx.event.ActionEvent; import javafx.event.EventHandler; import javafx.scene.Scene; import javafx.scene.control.Alert; import javafx.scene.layout.BorderPane; import javafx.scene.layout.GridPane; import javafx.stage.Stage; import javafx.stage.Window; import view.panels.CategoryDetailPane; import view.panels.CategoryOverviewPane; public class CreateCategoryHandler implements EventHandler<ActionEvent> { Stage stage; Controller controller = Controller.getInstance(); CategoryOverviewPane categoryOverviewPane = CategoryOverviewPane.getInstance(); CategoryDetailPane categoryDetailPane; public CreateCategoryHandler() { } @Override public void handle(ActionEvent event) { try { this.categoryDetailPane = new CategoryDetailPane(); this.stage = new Stage(); BorderPane pane = new BorderPane(); pane.setCenter(categoryDetailPane); Scene scene = new Scene(pane); scene.getStylesheets().add(getClass().getResource("DetailPane.css").toExternalForm()); stage.setScene(scene); stage.show(); categoryDetailPane.setSaveAction( new SaveCategoryHandler(categoryDetailPane, stage)); categoryDetailPane.setCancelAction(new CancelCategoryHandler(stage, categoryDetailPane)); } catch (Exception e) { Scene scene = new Scene(new GridPane()); showAlert(Alert.AlertType.ERROR, scene.getWindow(), "Form Error!", e.getMessage()); } } private void showAlert(Alert.AlertType alertType, Window owner, String title, String message) { Alert alert = new Alert(alertType); alert.setTitle(title); alert.setHeaderText(null); alert.setContentText(message); alert.initOwner(owner); alert.show(); } }<file_sep>//@author <NAME> package model.domain; public class Category { private String title; private String description; private boolean mainCategory; public Category(String title, String description,boolean mainCategorie){ setTitle(title); setDescription(description); this.mainCategory =mainCategorie; } public Category(String title, String description){ setTitle(title); setDescription(description); this.mainCategory =true; } public String getTitle() { return title; } public String getDescription() { return description; } public Boolean getMainCategory(){ return this.mainCategory; } public void setTitle(String title) { if(title == null || title.isEmpty()){ throw new DomainException("Name can not be empty!"); } this.title = title; } public void setDescription(String description) { if(description == null || description.isEmpty()){ throw new DomainException("Description can not be empty!"); } this.description = description; } } <file_sep>//@author <NAME> package model.facade; import java.util.ArrayList; import java.util.Observable; import model.domain.Evaluation; import model.db.Categories; import model.db.Questions; import model.domain.Category; import model.domain.Question; import model.domain.Score; public class Service extends Observable{ private Categories categories; private Questions questions; private Score score; private Evaluation evaluation; public Service(){ this.categories = new Categories(); this.questions = new Questions(); this.evaluation = new Evaluation(); } public void 
addCategory(Category categorie){ this.categories.addCategorie(categorie); this.setChanged(); this.notifyObservers(this); } public void editCategory(String oldName,String oldDescription, Category categorie){ this.categories.EditCategorie(oldName,oldDescription,categorie); this.setChanged(); this.notifyObservers(this); } public void deleteCategorie(Category categorie){ this.categories.deleteCategorie(categorie); this.setChanged(); this.notifyObservers(this); } public ArrayList<Category> getCategories(){ return categories.getCategories(); } public void addQuestion(Question question){ this.questions.addQuestion(question); this.setChanged(); this.notifyObservers(this); } public void editQuestion(String oldQuestion, Question ques) { this.questions.EditQuestion(oldQuestion,ques); this.setChanged(); this.notifyObservers(this); } public void deleteQuestion(Question question){ this.questions.deleteQuestion(question); this.setChanged(); this.notifyObservers(this); } public ArrayList<Question> getQuestions(){ return questions.getQuestions(); } public void newTest() { score = new Score(this.getCategories(),this.getQuestions()); } public Score getScore(){ this.setChanged(); this.notifyObservers(this); return score; } public Evaluation getEvaluation(){ return evaluation; } } <file_sep>//@author <NAME> package model.strategyQuestion; public class YesNo implements QuestionType{ @Override public String getQuestionType() { return "yes/no question"; } } <file_sep>//@author <NAME> package view.panels; import java.io.IOException; import controller.Controller; import javafx.event.ActionEvent; import javafx.event.EventHandler; import javafx.geometry.HPos; import javafx.geometry.Insets; import javafx.geometry.Pos; import javafx.scene.control.Button; import javafx.scene.layout.Border; import javafx.scene.layout.BorderStroke; import javafx.scene.layout.BorderStrokeStyle; import javafx.scene.layout.BorderWidths; import javafx.scene.layout.CornerRadii; import javafx.scene.layout.FlowPane; import javafx.scene.layout.GridPane; import javafx.scene.paint.Color; import javafx.scene.text.Text; public class MessagePane extends GridPane { private Button testButton; private Text l; private Controller controller = Controller.getInstance(); private volatile static MessagePane uniqueInstance; public MessagePane() throws IOException { setBorder(new Border( new BorderStroke(Color.BLACK, BorderStrokeStyle.SOLID, CornerRadii.EMPTY, BorderWidths.DEFAULT))); this.setPadding(new Insets(5, 5, 5, 5)); this.setVgap(5); this.setHgap(5); this.setId("messagePane"); testButton = new Button("Evaluate"); testButton.setId("new"); add(testButton, 0, 1, 1, 1); l = new Text(); this.showEvaluation(); add(l, 0, 0, 1, 1); setHalignment(testButton, HPos.CENTER); } public static MessagePane getInstance() throws IOException { if (uniqueInstance == null) { synchronized (MessagePane.class) { if (uniqueInstance == null) { uniqueInstance = new MessagePane(); } } } return uniqueInstance; } public void setStartAction(EventHandler<ActionEvent> startAction) { testButton.setOnAction(startAction); } public void showEvaluation() throws IOException { if (controller.getEvaluation().getPropValue("test").equals("false")) { l.setText("You never did this Test!"); } if (controller.getEvaluation().getPropValue("test").equals("true")) { l.setText("You already did this Test!"); } if (controller.getQuestions().size() > 0 && controller.getQuestionNumber() == controller.getQuestions().size()) { if (controller.getEvaluation().getPropValue("feedbackType").equals("score")) { 
l.setText(controller.getScoreFeedback().toString());
				controller.getEvaluation().setProperty("feedbackType", "score");
			}
			if (controller.getEvaluation().getPropValue("feedbackType").equals("feedback")) {
				l.setText(controller.getScoreFeedback().toStringFeedback());
				controller.getEvaluation().setProperty("feedbackType", "feedback");
			}
		}
	}
}
<file_sep>//@author <NAME>
package model.strategyQuestion;

public interface QuestionType {

	public String getQuestionType();

}
<file_sep>//@author <NAME>
package handler;

import java.util.ArrayList;

import controller.Controller;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.layout.GridPane;
import javafx.stage.Stage;
import javafx.stage.Window;
import view.panels.QuestionDetailPane;
import view.panels.QuestionOverviewPane;

public class SaveQuestionHandler implements EventHandler<ActionEvent> {

	Stage stage;
	Controller controller = Controller.getInstance();
	QuestionOverviewPane questionOverviewPane = QuestionOverviewPane.getInstance();
	QuestionDetailPane questionDetailPane;

	public SaveQuestionHandler(QuestionDetailPane detailPane, Stage stage) {
		this.questionDetailPane = detailPane;
		this.stage = stage;
	}

	@Override
	public void handle(ActionEvent event) {
		try {
			String question = questionDetailPane.getQuestion();
			String answer = questionDetailPane.getStatement();
			ArrayList<String> statements = questionDetailPane.getStatements();
			String categorie = (String) questionDetailPane.getCategory();
			String feedback = questionDetailPane.getFeedback();

			if (statements.size() < 2) {
				throw new IllegalArgumentException("There have to be at least 2 statements!");
			}

			controller.addQuestion(question, answer, statements, categorie, feedback);
			stage.close();
			//questionOverviewPane.refreshTable();
		} catch (Exception e) {
			Scene scene = new Scene(new GridPane());
			showAlert(Alert.AlertType.ERROR, scene.getWindow(), "Form Error!", e.getMessage());
		}
	}

	private void showAlert(Alert.AlertType alertType, Window owner, String title, String message) {
		Alert alert = new Alert(alertType);
		alert.setTitle(title);
		alert.setHeaderText(null);
		alert.setContentText(message);
		alert.initOwner(owner);
		alert.show();
	}
}
<file_sep>//@author <NAME>
package handler;

import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.stage.Stage;
import view.panels.QuestionDetailPane;

public class CancelQuestionHandler implements EventHandler<ActionEvent> {

	Stage stage;
	QuestionDetailPane questionDetailPane;

	public CancelQuestionHandler(Stage stage, QuestionDetailPane questionDetailPane) {
		this.stage = stage;
		this.questionDetailPane = questionDetailPane;
	}

	@Override
	public void handle(ActionEvent event) {
		stage.close();
	}
}
<file_sep>//@author <NAME>
package model.db;

import java.util.ArrayList;

import model.domain.Question;

public class Questions {

	private ArrayList<Question> questions;

	public Questions() {
		questions = new ArrayList<>();
	}

	public void addQuestion(Question question) {
		if (question == null) {
			throw new DbException("Question can not be null!");
		} else {
			for (Question ques : this.questions) {
				if (ques.getQuestion().equals(question.getQuestion())) {
					throw new DbException("This Question already exists!");
				}
			}
		}
		this.questions.add(question);
	}

	public void EditQuestion(String oldQuestion, Question ques) {
		if (ques == null) {
			throw new DbException("The question can not be null!");
		} else {
			for (int i = 0; i < this.questions.size(); i++) {
				if (questions.get(i).getQuestion().equals(oldQuestion)) {
					questions.remove(i);
					questions.add(i, ques);
				}
			}
		}
	}

	public void deleteQuestion(Question question) {
		// remove the matching Question object itself, not its question text
		this.questions.removeIf(ques -> ques.getQuestion().equals(question.getQuestion()));
	}

	public Question getQuestion(String question) {
		Question result = null;
		for (Question ques : this.questions) {
			// compare string contents, not references
			if (ques.getQuestion().equals(question)) {
				result = ques;
			}
		}
		if (result == null) {
			throw new DbException("Question was not found!");
		} else {
			return result;
		}
	}

	public int getSizeQuestion() {
		return this.questions.size();
	}

	public ArrayList<Question> getQuestions() {
		return questions;
	}
}
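// Example usage of the classes above (a sketch with hypothetical values;
// YesNoQuestion is the concrete Question type shown earlier):
//
//   ArrayList<String> statements = new ArrayList<>(Arrays.asList("Yes", "No"));
//   Question q = new YesNoQuestion("Is the sky blue?", "Yes", statements, "General", "Correct!");
//   Questions questions = new Questions();
//   questions.addQuestion(q);
//   Question found = questions.getQuestion("Is the sky blue?");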
d4d28e5238bb0ce32a2a439e63e1ad8d4e86cc88
[ "Java" ]
10
Java
ucll-ooo-2017/Serhat_Erdogan_Augustus
4192ac5127dd664abfe03480b5cc20c09a46e877
b60a9035d0194ddf993e4b603a5be33915d84420
refs/heads/main
<repo_name>pbmstrk/zeroshot<file_sep>/setup.py
NAME = "zeroshot"
AUTHOR = "<NAME>"
VERSION = "0.1.0"

import os
from setuptools import setup, find_packages

PATH_ROOT = os.path.dirname(__file__)

def load_requirements(path_dir=PATH_ROOT, file_name="requirements.txt"):
    with open(os.path.join(path_dir, file_name), "r") as file:
        reqs = [ln.strip() for ln in file.readlines()]
    return reqs

setup(
    name=NAME,
    author=AUTHOR,
    version=VERSION,
    packages=find_packages(),
    install_requires=load_requirements(),
)
<file_sep>/README.md
<div align="center">
<h1> Zero-Shot Classification </h1>
</div>

## Installation

```bash
git clone https://github.com/pbmstrk/zeroshot.git
cd zeroshot
pip install .
```

## Building a Pipeline

```python
from transformers import AutoModel, AutoTokenizer
from zeroshot import ZeroShotPipeline

tokenizer = AutoTokenizer.from_pretrained("deepset/sentence_bert")
model = AutoModel.from_pretrained("deepset/sentence_bert")

pipeline = ZeroShotPipeline(tokenizer, model)
pipeline.add_labels(labels)
```

You can also optionally add a projection matrix:

```python
pipeline.add_projection_matrix(projection_matrix)
```

## Example

```python
import torch
from transformers import AutoTokenizer, AutoModel
from zeroshot import ZeroShotPipeline

tokenizer = AutoTokenizer.from_pretrained("deepset/sentence_bert")
model = AutoModel.from_pretrained("deepset/sentence_bert")

phrase = "Who are you voting for in 2020?"
labels = ['Politics', "Sports", "Fashion"]

pipeline = ZeroShotPipeline(tokenizer, model)
pipeline.add_labels(labels)

predictions = pipeline(phrase)

print(f"The phrase is about: {labels[torch.argmax(predictions)]}")
# The phrase is about: Politics
```
<file_sep>/zeroshot/tester/tester.py
import torch
from tqdm import tqdm


class Tester:
    def __init__(self, target_encoding):
        self.pipeline = None
        self.metrics = None
        self.target_encoding = target_encoding

    def test_batch(self, batch):
        inputs, targets = batch
        targets = torch.tensor(list(map(self.target_encoding.get, targets)))
        outputs = self.pipeline(list(inputs))
        _, pred = torch.max(outputs.data, 1)
        batch_correct = (pred.detach().cpu() == targets).sum()
        self.metrics["acc"] = self.metrics.get("acc", 0) + batch_correct

    def test(self, pipeline, dataloader):
        self.pipeline = pipeline
        self.metrics = {}
        # ZeroShotPipeline itself is not an nn.Module, so switch the wrapped model to eval mode
        self.pipeline.model.eval()
        for batch in tqdm(dataloader):
            self.test_batch(batch)
        return self.metrics
<file_sep>/docs/source/projection.rst
Projection Matrix
=================

get_projection_matrix
---------------------

.. autofunction:: zeroshot.model.get_projection_matrix
<file_sep>/zeroshot/pipeline/zeroshot_pipeline.py
import torch
import torch.nn as nn

from typing import Union, List, Dict, Optional

from ..utils import move_args_to_device
from ..model import ZeroShotTopicClassifier


class ZeroShotPipeline:
    r"""
    Pipeline for zero-shot classification. Uses semantic similarity to predict
    the most likely label for an input.

    Args:
        tokenizer: Tokenizer to convert strings into inputs ready for model
        model: Classifier to use for prediction.

    The pipeline is designed to accept tokenizers from Huggingface. In theory
    any tokenizer is supported, with the requirement that the tokenizer
    returns a dict which can be used as input to the model.
Example:: >>> tokenizer = AutoTokenizer.from_pretrained('deepset/sentence_bert') >>> model = AutoModel.from_pretrained('deepset/sentence_bert') >>> pipeline = ZeroShotPipeline(tokenizer, model) """ def __init__(self, tokenizer, model): super().__init__() self.tokenizer = tokenizer self.model = ZeroShotTopicClassifier(model) self.device = torch.device("cpu") def __call__(self, inputs: Union[str, List[str]], tokenizer_options: Optional[Dict] = None): """ Predict labels for inputs. Labels must be added to the model before making predictions. Args: inputs: sequence(s) to classify tokenizer_options: optional parameters that can be passed to the tokenizer. If None the tokenizer defaults to truncation and padding with a max length of 512. """ tokenizer_options = self._add_tokenizer_defaults(tokenizer_options) encoded_inputs = self.tokenizer(inputs, **tokenizer_options) return self.forward(**encoded_inputs) @move_args_to_device def forward(self, **kwargs): return self.model(**kwargs) def add_labels(self, labels: Union[str, List[str]], tokenizer_options: Optional[Dict] = None): """ Add labels to pipeline. This ensures that predictions for sequences can be made. Args: labels: labels on which to make predictions tokenizer_options: parameters to pass to the tokenizer to influence behaviour. """ tokenizer_options = self._add_tokenizer_defaults(tokenizer_options) encoded_labels = self.tokenizer(labels, **tokenizer_options) self.model.create_label_index(**encoded_labels) def add_projection_matrix(self, proj_mat: torch.Tensor): """ Add projection matrix to pipeline. The projection matrix is applied to both input encoding and label encoding before computing the similarity. Args: proj_mat: projection matrix to use during prediction. """ self.model.projection_matrix = proj_mat @staticmethod def _add_tokenizer_defaults(options): if options is None: options = {} options.setdefault("return_tensors", "pt") options.setdefault("padding", "longest") options.setdefault("truncation", True) options.setdefault("max_length", 512) return options def to(self, device): self.model.to(device) self.device = device<file_sep>/zeroshot/utils/download_utils.py import os import pathlib import shutil from pathlib import Path import requests from tqdm import tqdm def _process_resp(resp, filepath): chunk_size = 16 * 1024 total_size = int(resp.headers.get("Content-length", 0)) with open(filepath, "wb") as outfile: with tqdm(total=total_size, unit="B", unit_scale=True) as pbar: for data in resp.iter_content(chunk_size): if data: outfile.write(data) pbar.update(len(data)) def download_file(url, name, root=".data", filename=None, override=False): # File will be stored in root/name/filename # root and name are passed as parameters # filename is inferred from url if not isinstance(name, pathlib.PosixPath): name = Path(name) if not isinstance(root, pathlib.PosixPath): root = Path(root) # create directory if it doesn't exist if not os.path.exists(root / name): os.makedirs(root / name) if not filename: _, filename = os.path.split(url) filename = Path(filename) filepath = root / name / filename if not os.path.exists(filepath) or override: print(f"Downloading {filename} from {url}.\nSaving to:{filepath}") if "drive.google.com" not in url: resp = requests.get(url, stream=True) _process_resp(resp, filepath) else: # from https://github.com/pytorch/text/blob/master/torchtext/utils.py#L121-L129 confirm_token = None session = requests.Session() resp = session.get(url, stream=True) for k, v in resp.cookies.items(): if k.startswith("download_warning"): 
confirm_token = v

            if confirm_token:
                url = url + "&confirm=" + confirm_token
                resp = session.get(url, stream=True)

            _process_resp(resp, filepath)
    else:
        print(f"Found file at {os.path.join(root, name)}, skipping download.")
        return
    return filepath


def extract_from_file(zfile, drc):
    shutil.unpack_archive(str(zfile), drc)


def download_extract(
    url, name, root=".data", override=False, filename=None, extract_only=False
):
    extract_dir = os.path.join(root, name)
    if extract_only and filename:
        extract_from_file(os.path.join(root, name, filename), os.path.join(root, name))
    else:
        zfile = download_file(
            url, name, root=root, override=override, filename=filename
        )
        if zfile:
            extract_from_file(zfile, extract_dir)
        return zfile
<file_sep>/docs/source/pipeline.rst
Pipeline
========

ZeroShotPipeline
----------------

.. autoclass:: zeroshot.pipeline.ZeroShotPipeline
   :special-members: __call__
   :members:
<file_sep>/requirements.txt
torch
gensim
transformers
jsonlines
<file_sep>/zeroshot/data/__init__.py
from .dataset import ZeroShotTopicClassificationDataset
from .datamodule import DataModule
<file_sep>/zeroshot/__init__.py
from .pipeline import ZeroShotPipeline
<file_sep>/docs/source/data.rst
Data
====

ZeroShotTopicClassificationDataset
----------------------------------

.. autofunction:: zeroshot.data.ZeroShotTopicClassificationDataset
<file_sep>/zeroshot/tester/__init__.py
from .tester import Tester
<file_sep>/zeroshot/vectors/__init__.py
from .vec import GloVe, Word2Vec
<file_sep>/docs/source/index.rst
Zero-Shot Classification
========================

Zero-shot text classification by measuring similarities in an embedding space. The method used is described in this `blogpost <https://joeddav.github.io/blog/2020/05/29/ZSL.html>`_ (along with other approaches to zero-shot learning). Experiments are run on the dataset presented in this `paper <https://www.aclweb.org/anthology/D19-1404/>`__ and the models used are presented in this `paper <https://www.aclweb.org/anthology/D19-1410/>`__.

In short, BERT is used to encode the sentence, and this representation is compared to the encoding of the labels, see *Fig. 1*. The label with the highest similarity to the sentence encoding is selected.

.. figure:: _images/sim-1.png
   :height: 400
   :align: center

   *Fig. 1*: Overview of the architecture

Results
-------

The results obtained on the dataset presented `here <https://www.aclweb.org/anthology/D19-1404/>`__ are given in the table below.

.. csv-table:: Results

    **model**, **projection matrix**, **k**, **lambda**, **score**
    deepset/sentence_bert, \-, \-, \-, 37.743
    deepset/sentence_bert, Word2Vec, 5000, 5, 43.398
    deepset/sentence_bert, GloVe, 20000, 10, **47.740**

.. toctree::
   :hidden:
   :maxdepth: 2

   self

..
toctree::
   :hidden:
   :maxdepth: 2
   :caption: Package Documentation:

   starting
   data
   vec
   pipeline
   projection
<file_sep>/zeroshot/data/datamodule.py
from torch.utils.data import DataLoader


class DataModule:
    def __init__(self, train=None, val=None, test=None, collate_fn=None, batch_size=16):
        self.train = train
        self.val = val
        self.test = test
        self.collate_fn = collate_fn
        self.batch_size = batch_size

    def train_dataloader(self):
        if not self.train:
            raise ValueError("No training set was provided.")
        return DataLoader(
            self.train,
            batch_size=self.batch_size,
            shuffle=True,
            collate_fn=self.collate_fn,
        )

    def val_dataloader(self):
        if not self.val:
            raise ValueError("No validation set was provided.")
        return DataLoader(
            self.val, batch_size=self.batch_size, collate_fn=self.collate_fn
        )

    def test_dataloader(self):
        if not self.test:
            raise ValueError("No test set was provided.")
        return DataLoader(
            self.test, batch_size=self.batch_size, collate_fn=self.collate_fn
        )
<file_sep>/zeroshot/data/dataset.py
from collections import OrderedDict
from pathlib import Path

from ..utils import download_extract, parse_from_txt_file
from .base import TextDataset


def ZeroShotTopicClassificationDataset(return_subset="v0"):

    r"""
    Zero-Shot Topic Classification Dataset. A version of the Yahoo Answers
    dataset. Returns the train, dev, and test sets as well as the target labels.

    Reference: `Yin et al. (2019) <https://www.aclweb.org/anthology/D19-1404/>`_

    Args:
        return_subset: Which subset of the training set to return. One of 'v0' or 'v1'.
    Example::

        >>> train, test, dev, category_dict = ZeroShotTopicClassificationDataset()
    """

    URL = "https://drive.google.com/u/0/uc?id=1qGmyEVD19ruvLLz9J0QGV7rsZPFEz2Az&export=download"

    root = Path(".data")
    name = Path("zeroshot_data")
    filename = Path("topic.tar.gz")
    foldername = Path("BenchmarkingZeroShot")

    download_extract(URL, root=root, name=name, filename=filename)

    classes_file = Path("topic/classes.txt")

    category_dict = OrderedDict()
    with open(root / name / foldername / classes_file) as data:
        for i, line in enumerate(data):
            category_dict[line.strip()] = i
    label_map = {str(value): key for key, value in category_dict.items()}

    if return_subset == "v0":
        train_file = Path("topic/train_pu_half_v0.txt")
    elif return_subset == "v1":
        train_file = Path("topic/train_pu_half_v1.txt")
    dev_file = Path("topic/dev.txt")
    test_file = Path("topic/test.txt")

    train_data = parse_from_txt_file(root / name / foldername / train_file, label_map)
    dev_data = parse_from_txt_file(root / name / foldername / dev_file, label_map)
    test_data = parse_from_txt_file(root / name / foldername / test_file, label_map)

    return (
        TextDataset(train_data),
        TextDataset(dev_data),
        TextDataset(test_data),
        category_dict,
    )
<file_sep>/zeroshot/utils/parser.py
def parse_from_txt_file(f, label_map=None, sep="\t"):
    data = []
    with open(f) as file_obj:
        for line in file_obj:
            inter = line.split(sep)
            if label_map:
                label = label_map[inter[0]]
            else:
                label = inter[0]
            text = inter[1].strip()
            data.append([text, label])
    return data
<file_sep>/zeroshot/utils/__init__.py
from .download_utils import download_extract
from .gpu import move_args_to_device
from .parser import parse_from_txt_file
<file_sep>/zeroshot/model/__init__.py
from .projection_matrix import get_projection_matrix
from .model import ZeroShotTopicClassifier
<file_sep>/docs/source/vec.rst
Vectors
=======

GloVe
-----

.. autofunction:: zeroshot.vectors.GloVe

Word2Vec
--------

.. autofunction:: zeroshot.vectors.Word2Vec
<file_sep>/docs/source/starting.rst
Getting Started
===============

Installation
------------

.. code-block:: bash

   git clone https://github.com/pbmstrk/zeroshot.git
   cd zeroshot
   pip install .

Building a Pipeline
-------------------

.. code-block:: python

   from transformers import AutoModel, AutoTokenizer
   from zeroshot import ZeroShotPipeline

   tokenizer = AutoTokenizer.from_pretrained("deepset/sentence_bert")
   model = AutoModel.from_pretrained("deepset/sentence_bert")

   pipeline = ZeroShotPipeline(tokenizer, model)
   pipeline.add_labels(labels)

To improve performance, it is possible to include a projection matrix. For more info, see :ref:`here<projection:get_projection_matrix>`.

.. code-block:: python

   pipeline.add_projection_matrix(projection_matrix)

Example
-------

.. code-block:: python

   import torch
   from transformers import AutoTokenizer, AutoModel
   from zeroshot import ZeroShotPipeline

   tokenizer = AutoTokenizer.from_pretrained("deepset/sentence_bert")
   model = AutoModel.from_pretrained("deepset/sentence_bert")

   phrase = "Who are you voting for in 2020?"
   labels = ['Politics', "Sports", "Fashion"]

   pipeline = ZeroShotPipeline(tokenizer, model)
   pipeline.add_labels(labels)

   predictions = pipeline(phrase)

   print(f"The phrase is about: {labels[torch.argmax(predictions)]}")
   # The phrase is about: Politics
<file_sep>/zeroshot/vectors/vec.py
import os
from collections import OrderedDict

import gensim.downloader
import numpy as np
from tqdm import tqdm

from ..utils import download_extract


def extract_vectors(filepath):
    embedding_map = OrderedDict()
    with open(filepath) as embed_file:
        for line in tqdm(embed_file):
            values = line.split()
            word = values[0]
            try:
                coefs = np.asarray(values[1:], dtype="float32")
                embedding_map[word] = coefs
            except ValueError:
                continue
    return embedding_map


def GloVe(name, dim, root=".data"):
    r"""
    Load pre-trained GloVe word embeddings. Returns an instance of the Vectors class.

    Reference: `GloVe: Global Vectors for Word Representation
    <https://nlp.stanford.edu/projects/glove/>`_

    Args:
        name: Name of vectors to retrieve - one of 6B, 42B, 840B and twitter.27B
        dim: Dimension of word vectors.
        root: Name of the root directory in which to cache vectors.

    Returns:
        Instance of vectors class.

    Example::

        >>> glove_vectors = GloVe(name="6B", dim=300)
    """

    URLs = {
        "42B": "https://nlp.stanford.edu/data/glove.42B.300d.zip",
        "840B": "https://nlp.stanford.edu/data/glove.840B.300d.zip",
        "twitter.27B": "https://nlp.stanford.edu/data/glove.twitter.27B.zip",
        "6B": "https://nlp.stanford.edu/data/glove.6B.zip",
    }

    download_extract(URLs[name], name=name, root=root)
    filename = f"glove.{name}.{dim}d.txt"
    filepath = os.path.join(root, name, filename)
    vector_map = extract_vectors(filepath)
    return Vectors(vector_map)


def Word2Vec():
    r"""
    Load pre-trained Word2Vec embeddings using Gensim.

    Reference: `Word2Vec <https://code.google.com/archive/p/word2vec/>`_

    Example::

        >>> word2vec = Word2Vec()
    """
    return gensim.downloader.load("word2vec-google-news-300")


class Vectors:

    """
    Mimic the gensim API - creates a common interface
    """

    def __init__(self, vector_map):
        self.vector_map = vector_map

    @property
    def index2entity(self):
        return list(self.vector_map.keys())

    def __getitem__(self, idx):
        return self.vector_map[idx]
<file_sep>/zeroshot/pipeline/__init__.py
from .zeroshot_pipeline import ZeroShotPipeline
<file_sep>/zeroshot/model/model.py
import torch
import torch.nn as nn
from transformers import BertModel


class TextEncoder(nn.Module):
    def __init__(self, model):
        super().__init__()
        self.model = model

    def forward(self, input_ids, attention_mask, **kwargs):
        mask = attention_mask
        outputs = self.model(input_ids, attention_mask, **kwargs)[0]
        # mean-pool the token embeddings, ignoring padding positions
        mean_pool = (outputs * mask.unsqueeze(-1)).sum(1) / (mask.unsqueeze(-1).sum(1))
        return mean_pool


class Scorer(nn.Module):
    def __init__(self):
        super().__init__()
        self.score = nn.CosineSimilarity()

    def forward(self, text_encodings, label_encodings):
        return torch.stack(
            [
                self.score(tensor.unsqueeze(0), label_encodings)
                for tensor in text_encodings
            ]
        )


class ZeroShotTopicClassifier(nn.Module):
    def __init__(self, model):
        super().__init__()
        self.model = TextEncoder(model)
        self.scorer = Scorer()
        self._use_projection_matrix = False
        self.register_buffer("label_encodings", torch.Tensor())
        self.register_buffer("proj_mat", torch.Tensor())

    def forward(self, input_ids, **kwargs):
        text_encodings = self.model(input_ids, **kwargs)
        assert self.label_encodings.nelement()
        label_encodings = self.label_encodings
        if self._use_projection_matrix:
            text_encodings = text_encodings @ self.proj_mat
            label_encodings = label_encodings @ self.proj_mat
        scores = self.scorer(text_encodings, label_encodings)
        return scores

    @property
    def projection_matrix(self):
        if self._use_projection_matrix:
            return self.proj_mat
        return None

    @projection_matrix.setter
    def projection_matrix(self, value):
        if not isinstance(value, torch.Tensor) and value is not None:
            raise ValueError("Value must be a tensor or None")
        if value is None:
            self._use_projection_matrix = False
        else:
            self.register_buffer("proj_mat", value)
            self._use_projection_matrix = True

    def create_label_index(self, input_ids, **kwargs):
        self.register_buffer("label_encodings", self.model(input_ids, **kwargs))
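# Example: combining the pieces above with a projection matrix
# (a sketch; the k and lmbda values are taken from the results table and are otherwise hypothetical):
#
#   from transformers import AutoModel, AutoTokenizer
#   from zeroshot import ZeroShotPipeline
#   from zeroshot.model import get_projection_matrix
#   from zeroshot.vectors import GloVe
#
#   vectors = GloVe(name="6B", dim=300)
#   proj = get_projection_matrix("deepset/sentence_bert", vectors, k=20000, lmbda=10)
#
#   tokenizer = AutoTokenizer.from_pretrained("deepset/sentence_bert")
#   model = AutoModel.from_pretrained("deepset/sentence_bert")
#   pipeline = ZeroShotPipeline(tokenizer, model)
#   pipeline.add_labels(["Politics", "Sports", "Fashion"])
#   pipeline.add_projection_matrix(proj)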
52353f48afbd8137c90ea86ede2a0b0092df5d76
[ "Markdown", "Python", "Text", "reStructuredText" ]
25
Python
pbmstrk/zeroshot
ba8f56edbc21d2c78119be684efcde2030f4adfc
82d45b532edb6f1831c064fb9dc8c283972bc1fe