branch_name (stringclasses, 149 values) | text (stringlengths, 23-89.3M) | directory_id (stringlengths, 40) | languages (listlengths, 1-19) | num_files (int64, 1-11.8k) | repo_language (stringclasses, 38 values) | repo_name (stringlengths, 6-114) | revision_id (stringlengths, 40) | snapshot_id (stringlengths, 40) |
---|---|---|---|---|---|---|---|---|
refs/heads/master |
<file_sep>import React, { Component } from 'react';
import memoize from 'memoize-one';
export default class Footer extends Component {
static defaultProps = {
currentMediaObj: {}
};
computeAlbumArtStyle = memoize(currentMediaObj => ({
background: `url(${currentMediaObj.imageUrl})`
}));
handlePlayPause = () => {
this.props.onPlayPause(this.props.currentlyPlaying);
};
render() {
const { currentMediaObj, paused } = this.props;
const albumArtStyle = this.computeAlbumArtStyle(currentMediaObj);
return (
<div id="Footer">
<div className="trackInfo">
<div className="albumArt" style={albumArtStyle} />
<div className="trackTitle">{currentMediaObj.title}</div>
</div>
<div className="controls">
<div className="previous" onClick={this.props.onPlayPrevious}>
|◄
</div>
<div className="playPause" onClick={this.handlePlayPause}>
{paused ? '►' : '||'}
</div>
<div className="next" onClick={this.props.onPlayNext}>
►|
</div>
</div>
</div>
);
}
}
<file_sep>import React, { Component } from 'react';
import Header from './Header';
import Media from './Media';
import Footer from './Footer';
import './App.scss';
const apiUrl =
'https://s3-us-west-2.amazonaws.com/anchor-website/challenges/bsb.json';
export default class App extends Component {
constructor(props) {
super(props);
this.state = { mediaList: [], currentlyPlaying: 0, paused: true };
}
componentDidMount() {
fetch(apiUrl)
.then(res => res.json())
.then(res => {
this.setState({ mediaList: res.tracks });
});
}
handlePlayPause = index => {
this.setState(({ currentlyPlaying, paused }) => {
if (index !== undefined && currentlyPlaying !== index)
return {
currentlyPlaying: index,
paused: false
};
else
return {
paused: !paused
};
});
};
handlePlayPrevious = () => {
const { currentlyPlaying, mediaList } = this.state;
this.handlePlayPause(
currentlyPlaying === 0 ? mediaList.length - 1 : currentlyPlaying - 1
);
};
handlePlayNext = () => {
const { currentlyPlaying, mediaList } = this.state;
this.handlePlayPause(
currentlyPlaying === mediaList.length - 1 ? 0 : currentlyPlaying + 1
);
};
handleEnded = () => {
this.setState(({ currentlyPlaying }) => ({
currentlyPlaying: currentlyPlaying + 1
}));
};
render() {
const { mediaList, currentlyPlaying, paused } = this.state;
return (
<div id="App">
<Header />
<div className="mediaOuter">
{mediaList.map((mediaObj, i) => (
<Media
key={mediaObj.mediaUrl}
isCurrentIndex={currentlyPlaying === i}
index={i}
mediaObj={mediaObj}
onEnded={this.handleEnded}
onPlayPause={this.handlePlayPause}
paused={paused}
/>
))}
</div>
<Footer
currentlyPlaying={currentlyPlaying}
currentMediaObj={mediaList[currentlyPlaying]}
onPlayNext={this.handlePlayNext}
onPlayPause={this.handlePlayPause}
onPlayPrevious={this.handlePlayPrevious}
paused={paused}
/>
</div>
);
}
}
<file_sep>import React, { Component } from 'react';
import memoize from 'memoize-one';
export default class Audio extends Component {
computeOuterDivStyle = memoize(mediaObj => ({
background: `url(${mediaObj.imageUrl})`
}));
pauseOrPlayIfPropsChange = memoize((isCurrentIndex, paused) => {
if (this.audio)
isCurrentIndex && this.audio.paused && !paused
? this.audio.play()
: this.audio.pause();
});
render() {
const { isCurrentIndex, mediaObj, paused } = this.props;
this.pauseOrPlayIfPropsChange(isCurrentIndex, paused);
const outerDivStyle = this.computeOuterDivStyle(mediaObj);
return (
<div
className="audio"
onClick={this.props.onPlayPause}
style={outerDivStyle}
>
<div className="playPause">
{isCurrentIndex && !paused ? '||' : '►'}
</div>
<audio
src={mediaObj.mediaUrl}
ref={ele => {
this.audio = ele;
}}
onEnded={this.props.onEnded}
/>
</div>
);
}
}
| bff9a25273043fb33f41a982cfc852dd10c3d98c | ["JavaScript"] | 3 | JavaScript | jpfiorilla/anchor | 8faf6674e9055a6b4ea3a2c96cedc467df38990e | d9ff0808d4e37a0f712de0cd772c52f729bd9096 |
refs/heads/master |
<repo_name>labcomu/smart-traffic-prototype<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/service/registration/Sensor.java
package com.smarttrafficprototype.trafficmanager.service.registration;
public interface Sensor {
void registerSensingUnit(SensingUnitObserver sensingObserver);
void notifySensingUnit();
}
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/service/registration/IRSensor.java
package com.smarttrafficprototype.trafficmanager.service.registration;
import java.util.Random;
public class IRSensor implements Sensor {
private SensingUnitObserver sensingObserver;
public IRSensor() {
registerSensingUnit(sensingObserver);
}
@Override
public void registerSensingUnit(SensingUnitObserver sensingObserver) {
this.sensingObserver = sensingObserver;
}
@Override
public void notifySensingUnit() {
Integer countCars = new Random().nextInt(2);
sensingObserver.increaseDensity(countCars);
}
}
<file_sep>/traffic-controller/settings.gradle
pluginManagement {
repositories {
gradlePluginPortal()
}
}
// include ':circuitbreaker'
// project(':circuitbreaker').projectDir = new File(settingsDir, '../circuitbreaker')
rootProject.name = 'traffic-controller'
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/service/TrafficJunctionService.java
package com.smarttrafficprototype.trafficmanager.service;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.smarttrafficprototype.trafficmanager.service.registration.OutboundTrafficLine;
import com.smarttrafficprototype.trafficmanager.service.registration.TrafficJunction;
@Service
public class TrafficJunctionService {
@Autowired
private TrafficJunction trafficJunction;
public Logger logger = LoggerFactory.getLogger(getClass());
public Integer getOutboundDensityByTrafficJuncion(String junctionKey) {
Optional<OutboundTrafficLine> trafficLine = Optional.empty();
for (OutboundTrafficLine outboundTrafficLine : trafficJunction.getOutboundLines()) {
if (outboundTrafficLine.getOutboundTrafficJunction() != null
&& outboundTrafficLine.getOutboundTrafficJunction().getJunctionKey().equals(junctionKey)) {
trafficLine = Optional.of(outboundTrafficLine);
break;
}
}
OutboundTrafficLine outbound = trafficLine.orElseThrow(() -> new RuntimeException("There is no Traffic Line for the informed Key"));
Integer density = outbound.getSensingUnit().getResultDensity();
return density;
}
}
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/service/SensingUnitImpl.java
package com.smarttrafficprototype.trafficmanager.service;
import java.util.ArrayList;
import java.util.List;
import com.smarttrafficprototype.trafficmanager.service.registration.SensingUnitObserver;
import com.smarttrafficprototype.trafficmanager.service.registration.Sensor;
public class SensingUnitImpl implements SensingUnit, SensingUnitObserver {
private Integer density;
private List<Sensor> sensorArray = new ArrayList<>();
public SensingUnitImpl(List<Sensor> sensors) {
clearDensity();
this.sensorArray = sensors;
}
@Override
public Integer getResultDensity() {
return this.density;
}
@Override
public void increaseDensity(Integer countCars) {
this.density += countCars;
}
@Override
public void clearDensity() {
this.density = 0;
}
}
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/ExecutionStatus.java
package com.smarttrafficprototype.trafficmanager;
import java.util.Date;
public class ExecutionStatus {
private int id;
private boolean executionFailed;
private Date start;
private Date startAdjacentDensityCalculation;
private Date startLocalDensityCalculation;
private Date startTakeDecision;
private Date startColaboration;
private Classification classification;
private int timeInSeconds;
public ExecutionStatus(int id) {
this.executionFailed = false;
this.classification = Classification.COMPLETE;
this.timeInSeconds = 0;
this.start = new Date();
this.setId(id);
}
public boolean isExecutionFailed() {
return executionFailed;
}
public void setExecutionFailed(boolean executionFailed) {
this.executionFailed = executionFailed;
}
public long getStarting() {
return getStart().getTime();
}
public Classification getClassification() {
return classification;
}
public void setClassification(Classification classification) {
this.classification = classification;
}
public int getTimeInSeconds() {
return timeInSeconds;
}
public void setTimeInSeconds(int timeInSeconds) {
this.timeInSeconds = timeInSeconds;
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public Date getStart() {
return start;
}
public void setStart(Date start) {
this.start = start;
}
public Date getStartAdjacentDensityCalculation() {
return startAdjacentDensityCalculation;
}
public void setStartAdjacentDensityCalculation(Date startAdjacentDensityCalculation) {
this.startAdjacentDensityCalculation = startAdjacentDensityCalculation;
}
public Date getStartLocalDensityCalculation() {
return startLocalDensityCalculation;
}
public void setStartLocalDensityCalculation(Date startLocalDensityCalculation) {
this.startLocalDensityCalculation = startLocalDensityCalculation;
}
public Date getStartColaboration() {
return startColaboration;
}
public void setStartColaboration(Date startColaboration) {
this.startColaboration = startColaboration;
}
public Date getStartTakeDecision() {
return startTakeDecision;
}
public void setStartTakeDecision(Date startTakeDecision) {
this.startTakeDecision = startTakeDecision;
}
public void markLocalCalculation() {
setStartLocalDensityCalculation(new Date());
}
public void markAdjacentCalculation() {
setStartAdjacentDensityCalculation(new Date());
}
public void markStartColaboration() {
setStartColaboration(new Date());
}
public void markTakeDecision() {
setStartTakeDecision(new Date());
}
}
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/service/registration/TrafficLight.java
package com.smarttrafficprototype.trafficmanager.service.registration;
public class TrafficLight {
private TrafficLightSignal signal;
private InboundTrafficLine inboundTrafficLine;
public TrafficLight(TrafficLightSignal signal) {
this.signal = signal;
}
public TrafficLight() {
this.signal = TrafficLightSignal.RED;
}
public TrafficLightSignal getSignal() {
return signal;
}
public void setSignal(TrafficLightSignal signal) {
this.signal = signal;
}
public InboundTrafficLine getInboundTrafficLine() {
return inboundTrafficLine;
}
public void setInboundTrafficLine(InboundTrafficLine inboundTrafficLine) {
this.inboundTrafficLine = inboundTrafficLine;
}
public void turnGreen() {
this.signal = TrafficLightSignal.GREEN;
}
public void turnRed() {
this.signal = TrafficLightSignal.RED;
}
public void turnOrange() {
this.signal = TrafficLightSignal.ORANGE;
}
public boolean isRed() {
return this.signal.equals(TrafficLightSignal.RED);
}
public boolean isGreen() {
return this.signal.equals(TrafficLightSignal.GREEN);
}
public boolean isOrange() {
return this.signal.equals(TrafficLightSignal.ORANGE);
}
}
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/TrafficControllerApplication.java
package com.smarttrafficprototype.trafficmanager;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;
@SpringBootApplication(scanBasePackages= {"com.smarttrafficprototype", "com.microthingsexperiment"} )
@EnableScheduling
public class TrafficControllerApplication {
public static void main(String[] args) {
SpringApplication.run(TrafficControllerApplication.class, args);
}
}
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/service/request/RemoteRequestService.java
package com.smarttrafficprototype.trafficmanager.service.request;
public interface RemoteRequestService {
<T> T requestDensity(String host, String deviceId, String[] values);
default void requestSetup() {}
}
<file_sep>/README.md
# smart-traffic-prototype
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/ExecutionCyclesRepository.java
package com.smarttrafficprototype.trafficmanager;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
public class ExecutionCyclesRepository {
public Logger logger = LoggerFactory.getLogger(getClass());
private List<ExecutionCycle> cycles = new ArrayList<>();
public List<ExecutionCycle> getAll() {
cycles.sort((e1, e2) -> e1.getId()-e2.getId());
return cycles;
}
public void addExecution(long duration, ExecutionStatus execution) {
Date moment = new Date();
logger.info("#ID: " + execution.getId() + "; Moment:" + moment + "; Duration: " + duration + "; Classification: " + execution.getClassification());
cycles.add(new ExecutionCycle(execution.getId(),
execution.getStart(),
new Date(),
execution.getStartAdjacentDensityCalculation(),
execution.getStartLocalDensityCalculation(),
execution.getStartTakeDecision(),
execution.getStartColaboration(), duration, execution.getClassification()));
}
}
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/service/request/DeviceCBRequestService.java
package com.smarttrafficprototype.trafficmanager.service.request;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
import com.microthingsexperiment.circuitbreaker.CircuitBreakerManager;
import com.microthingsexperiment.circuitbreaker.ResponseWrapper;
@Component
@Profile("deviceCBRequest")
public class DeviceCBRequestService implements RemoteRequestService {
@Autowired
private CircuitBreakerManager<Integer> cbService;
private Logger logger = LoggerFactory.getLogger(getClass());
@SuppressWarnings("unchecked")
@Override
public Integer requestDensity(String deviceHost, String devicePort, String[] values) {
String deviceId = deviceHost + ":" + devicePort;
Integer result = Integer.MIN_VALUE;
StringBuilder urlBuilder = new StringBuilder("http://")
.append(deviceHost)
.append(":")
.append(devicePort)
.append("/trafficManager");
for (String value : values) {
urlBuilder.append("/").append(value);
}
String baseUrl = urlBuilder.toString();
try {
logger.info("Request Started: " + baseUrl);
ResponseWrapper<Integer> response = cbService.executeGetRequest(baseUrl, deviceId, Integer.class);
result = response.getResponse();
logger.info("Request Returned: " + baseUrl);
} catch (Exception ex) {
logger.info("Failure Requesting: " + baseUrl);
throw ex;
}
return result;
}
}
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/ExecutionCycle.java
package com.smarttrafficprototype.trafficmanager;
import java.util.Date;
public class ExecutionCycle {
private Integer id;
private Date start;
private Date end;
private Date startAdjacent;
private Date startLocal;
private Date startDecision;
private Date startCol;
private Classification classification;
private long duration;
public ExecutionCycle(int id, Date start, Date end, Date startAdjacent,
Date startLocal, Date startDecision, Date startCol, long duration,
Classification classification) {
setId(id);
setStart(start);
setEnd(end);
setDuration(duration);
setClassification(classification);
setStartAdjacent(startAdjacent);
setStartLocal(startLocal);
setStartDecision(startDecision);
setStartCol(startCol);
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Date getEnd() {
return end;
}
public void setEnd(Date moment) {
this.end = moment;
}
public Classification getClassification() {
return classification;
}
public void setClassification(Classification classification) {
this.classification = classification;
}
public long getDuration() {
return duration;
}
public void setDuration(long duration) {
this.duration = duration;
}
public Date getStart() {
return start;
}
public void setStart(Date start) {
this.start = start;
}
public Date getStartAdjacent() {
return startAdjacent;
}
public void setStartAdjacent(Date startAdjacent) {
this.startAdjacent = startAdjacent;
}
public Date getStartLocal() {
return startLocal;
}
public void setStartLocal(Date startLocal) {
this.startLocal = startLocal;
}
public Date getStartDecision() {
return startDecision;
}
public void setStartDecision(Date startDecision) {
this.startDecision = startDecision;
}
public Date getStartCol() {
return startCol;
}
public void setStartCol(Date startCol) {
this.startCol = startCol;
}
}
<file_sep>/traffic-controller/src/main/java/com/smarttrafficprototype/trafficmanager/service/request/DeviceRequestService.java
package com.smarttrafficprototype.trafficmanager.service.request;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestTemplate;
@Component
@Profile("deviceRequest")
public class DeviceRequestService implements RemoteRequestService {
private RestTemplate restTemplate;
@Value("${request.timeout}")
private int timeout;
private Logger logger = LoggerFactory.getLogger(getClass());
public DeviceRequestService(RestTemplateBuilder rtBuilder) {
this.restTemplate = rtBuilder.build();
}
@SuppressWarnings("unchecked")
@Override
public Integer requestDensity(String host, String deviceId, String[] values) {
Integer result = Integer.MIN_VALUE;
StringBuilder urlBuilder = new StringBuilder("http://")
.append(host)
.append(":")
.append(deviceId)
.append("/trafficManager")
;
for (String value : values) {
urlBuilder.append("/").append(value);
}
String baseUrl = urlBuilder.toString();
try {
logger.info("Request Started: "+baseUrl);
result = restTemplate.getForObject(baseUrl, Integer.class);
logger.info("Request Returned: "+baseUrl);
} catch (Exception ex) {
logger.info("Failure Requesting: "+baseUrl);
throw ex;
}
return result;
}
}
| 24954cf4374be751fae07b64b03325582f99f4a4 | ["Markdown", "Java", "Gradle"] | 14 | Java | labcomu/smart-traffic-prototype | 7c5806fea402d8cf806a1d7069c5b6a7f5880ff3 | 35f4be838bb5d5878762728a2d725de55ebe7017 |
refs/heads/master |
<repo_name>priyanshu-kun/sunhack<file_sep>/app/src/Pages/Host/RedirectUser/RedirectUser.jsx
import React, { useEffect } from 'react';
import axios from 'axios';
import "./Redirect.css";
import {setUser} from "../../../auth.slice"
import {useDispatch} from "react-redux"
import {useHistory} from "react-router-dom"
function RedirectUser(props) {
const dispatch = useDispatch()
const history = useHistory()
useEffect(() => {
// After requesting Github access, Github redirects back to your app with a code parameter
const url = window.location.href;
const hasCode = url.includes("?code=");
console.log(url.split("?code="))
// If Github API returns the code parameter
if (hasCode) {
const newUrl = url.split("?code=");
window.history.pushState({}, null, newUrl[0]);
const requestData = {
code: newUrl[1]
};
axios("https://github.com/login/oauth/access_token",{
method: "POST",
params: {
client_id: process.env.REACT_APP_GITHUB_CLIENT_ID,
client_secret: process.env.REACT_APP_GITHUB_CLIENT_SECRET,
code: requestData.code
}
})
.then(async res => {
const token = res.data.split("=")[1].split("&")[0]
const {data} = await axios("https://api.github.com/user",{
method: "GET",
headers: {"Authorization": `token ${token}`}
})
console.log(data)
const {avatar_url,name} = data;
const user = {avatar_url,name,token}
localStorage.setItem("user",JSON.stringify(user))
dispatch(setUser(user))
history.push("/Host")
// console.log(data)
}).catch(e => {
console.log(e.message)
})
}
}, []);
return (
<div className="redirect-div">
<div class="spinner"></div>
<p className="message">Please wait...</p>
</div>
);
}
export default RedirectUser;<file_sep>/app/src/Navbar/Navbar.jsx
import React, { useState } from 'react';
import { FiDownload,FiLogOut } from "react-icons/fi"
import { AiFillCloud } from "react-icons/ai"
import { FaChevronDown,FaCloudversify } from "react-icons/fa"
import { GiTorch } from "react-icons/gi"
import { Link } from "react-router-dom"
import "./Navbar.css"
import { useAuth0 } from '@auth0/auth0-react';
function Navbar({editorTitle,editorContent,saveState, handleToogle, handleDownloadCode, userData: { displayName, photoURL },saveStuff }) {
const [dropDown,setDropDown] = useState(false)
const {logout} = useAuth0()
function handleDropDown(e) {
setDropDown(!dropDown)
}
return (
<div className="navbar">
<div className="logo">
<div className="profile-picture">
<img src={photoURL} alt="profile" />
</div>
<p className="profile-name">{displayName}</p>
</div>
<div className="navigation">
<button onClick={handleDownloadCode}><FiDownload style={{ color: "#fff", fontSize: "1.3rem" }} /></button>
<button onClick={() => {
saveStuff()
}} ><AiFillCloud className={saveState ? "unsaved": "saved"} style={{ color: "#fff", fontSize: "1.5rem", transition: "all 0.2s ease" }} /></button>
<div className="navigation-btns">
<button onClick={handleToogle}>TOOGLE PREVIEW</button>
<div className="dropdown-background">
<FaChevronDown onClick={handleDropDown} style={{ fontSize: "1.2rem",color: "#fff", cursor: "pointer" }} />
<div className={`dropdown ${!dropDown && "hideDropdown"}`}>
<a style={{textDecoration: "none",color: "#fff"}} target="_blank" href="https://daringfireball.net/projects/markdown/basics"><GiTorch style={{fontSize: "1.2rem", marginRight: "5px"}} /> GUIDE</a>
<Link style={{textDecoration: "none",color: "#fff"}} target="_blank" to="/Host"><FaCloudversify style={{fontSize: "1.2rem", marginRight: "5px"}} /> HOST</Link>
<button style={{textDecoration: "none",color: "#fff",background: "orange"}} onClick={() => logout()}>Log Out</button>
</div>
</div>
</div>
</div>
</div>
);
}
{/* <button></button>
<button></button>
*/}
export default Navbar;<file_sep>/README.md
# App for teachers<file_sep>/app/src/Pages/Home/Home.jsx
import { useState, useEffect } from 'react';
import Navbar from '../../Navbar/Navbar';
import ReactMarkdown from "react-markdown"
import remarkGfm from 'remark-gfm'
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'
import { docco } from "react-syntax-highlighter/dist/esm/styles/hljs";
import downloadContent from '../../downloadContent';
import { userSelector, useSelector } from "react-redux"
import showdown from "showdown"
import db from "../../config/firebase.config"
import { getDoc, doc, setDoc } from "firebase/firestore"
import { useAuth0 } from "@auth0/auth0-react"
function Home() {
const { user, isAuthenticated, isLoading } = useAuth0()
const { name,picture,sub } = isAuthenticated ? user : {name: "",picture: "",sub: ""};
const [editorTitle, setEditorTitle] = useState("")
const [editorContent, setEditorContent] = useState("")
const [setContent, setSetContent] = useState("Nothing to preview🧐")
const [tooglePreview, setTooglePreview] = useState(true)
const [saveState, setSaveState] = useState(false)
const converter = new showdown.Converter()
// console.log("Github User: ",user)
function handleEditorTitle(e) {
setEditorTitle(e.target.value)
}
useEffect(() => {
if (setContent === "") {
setSetContent("Nothing to preview🧐")
}
}, [setContent])
async function handleEditorContent(e) {
setEditorContent(e.target.value)
setSetContent(e.target.value)
setSaveState(true)
}
function handleToogle(e) {
setTooglePreview(!tooglePreview)
}
function handleDownloadCode() {
if (editorTitle === "") {
return alert("Must use title before download content")
}
const html = converter.makeHtml(editorContent);
downloadContent(html, editorTitle, name)
}
async function saveStuff() {
const uid = sub
try {
const docRef = doc(db,"Documents",uid)
const payload = {editorContent,editorTitle}
await setDoc(docRef,payload)
setSaveState(false)
}
catch (e) {
console.log(e)
}
}
useEffect(() => {
(async () => {
const uid = sub;
if(uid === "") {
return;
}
const docRef = doc(db, "Documents", uid);
const docSnap = await getDoc(docRef);
console.log(docSnap)
if (docSnap.exists()) {
console.log("Document data:", docSnap.data());
setEditorContent(docSnap.data().editorContent)
setEditorTitle(docSnap.data().editorTitle)
setSetContent(docSnap.data().editorContent)
} else {
// doc.data() will be undefined in this case
console.log("No such document!");
}
})()
}, [sub])
return (
isLoading ? (
<div className="redirect-div">
<div class="spinner"></div>
<p className="message">Please wait...</p>
</div>
) : (
isAuthenticated ? (
<>
<Navbar editorTitle={editorTitle} editorContent={editorContent} saveState={saveState} saveStuff={saveStuff} userData={{ displayName: name, photoURL: picture }} handleToogle={handleToogle} handleDownloadCode={handleDownloadCode} />
<div className="content-area">
<div className={`editor ${!tooglePreview && "editor-width"}`}>
<textarea cols="30" rows="10" placeholder="TITLE" value={editorTitle} onChange={handleEditorTitle}></textarea>
<textarea cols="30" rows="10" placeholder="CONTENT" value={editorContent} onChange={handleEditorContent}></textarea>
</div>
{
tooglePreview && (
<div className="preview">
<ReactMarkdown
components={{
code({ node, inline, className, children, ...props }) {
const match = /language-(\w+)/.exec(className || '')
return !inline && match ? (
<SyntaxHighlighter
children={String(children).replace(/\n$/, '')}
style={docco}
language={match[1]}
PreTag="div"
{...props}
/>
) : (
<code className={className} {...props}>
{children}
</code>
)
}
}}
remarkPlugins={[[remarkGfm, { singleTilde: false }]]} className="markdown">{setContent}</ReactMarkdown>
</div>
)
}
</div>
</>
): (
<h1 style={{color: "#fff"}}>401 Unauthorized</h1>
)
)
);
}
export default Home;<file_sep>/app/src/Pages/Auth/Auth.jsx
import React from 'react';
import {useHistory} from "react-router-dom"
import "./Auth.css"
import { useAuth0 } from "@auth0/auth0-react"
function Auth() {
const {loginWithRedirect} = useAuth0()
const { isAuthenticated, isLoading } = useAuth0()
const history = useHistory()
return (
isLoading ? <div className="spinner"></div> : (
!isAuthenticated ? (
<div className="auth-overlay">
<div className="auth-card">
<h1 style={{opacity: "0.6",fontSize: "1.6rem",color: "#000"}}>Create an account 👩🏫</h1>
<button className="loginBtn" onClick={() => loginWithRedirect()}>Log In with Auth0 </button>
</div>
</div>
): (
history.push("/Home")
)
)
);
}
export default Auth;<file_sep>/app/src/api/index.js
import axios from "axios"
export async function createRepoContent({userLogin,token,Content,name}) {
console.log(userLogin,token)
const headers = {
"Authorization": `token ${token}`,
"Accept": "application/vnd.github.v3+json",
}
const res = await axios(
{
method: "PUT",
url: `https://api.github.com/repos/${userLogin}/${name}/contents/index.html`,
headers,
data: {
message: "initial commit",
content: Content
}
}
)
return res
// http PUT https://api.github.com/repos/lee-dohm/test-repo/contents/hello.txt \
// "Authorization: token REDACTED-TOKEN" \
// message="my commit message" \
// committer:="{ \"name\": \"<NAME>\", \"email\": \"1038121+<EMAIL>\" }" \
// content="bXkgbmV3IGZpbGUgY29udGVudHM="
}
export async function createARepo({name,description,token}) {
const headers = {
"Authorization": `token ${token}`,
"Accept": "application/vnd.github.v3+json",
}
const {data} = await axios(
{
method: "POST",
url: "https://api.github.com/user/repos",
data: {name,description,auto_init: true},
headers
}
)
return data
// console.log(res)
}
| e4daacaf315000b575d1d6400a51e7b037c17ace | ["JavaScript", "Markdown"] | 6 | JavaScript | priyanshu-kun/sunhack | 554784f610538abc5ac986ea527aa614d4c04d78 | 279d8ee4f6c283a8209314cc2709f040fe935c4f |
refs/heads/main |
<file_sep>#!/bin/bash
# rtmp key
key="your_key"
# rtmp ip
ip="your_livego"
# resolution
res="1280x720"
# framerate
fps="30"
# bitrate
bt="1000000"
v4l2-ctl --set-ctrl video_bitrate=$bt
ffmpeg -loglevel error -re -f video4linux2 -input_format h264 -video_size $res -framerate $fps -i /dev/video0 -vcodec copy -an -strict -2 -f flv rtmp://$ip:1935/live/$key
| f0113056f33f7140539f7a5fd79f8a73cdc8db63 | ["Shell"] | 1 | Shell | nwgat/livego-scripts | d1e74d66c66c719dfbd017ecc2cfd7d2fac9d1c6 | 2722ea571b8e271e7e58d53b34da8f5917a4c439 |
refs/heads/master |
<file_sep>-- phpMyAdmin SQL Dump
-- version 4.8.5
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1:3306
-- Generation Time: Sep 01, 2019 at 03:50 PM
-- Server version: 5.7.26
-- PHP Version: 7.2.18
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `quizdbase`
--
-- --------------------------------------------------------
--
-- Table structure for table `answers`
--
DROP TABLE IF EXISTS `answers`;
CREATE TABLE IF NOT EXISTS `answers` (
`aid` int(255) NOT NULL,
`answer` varchar(255) DEFAULT NULL,
`ans_id` int(255) DEFAULT NULL,
PRIMARY KEY (`aid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
--
-- Dumping data for table `answers`
--
INSERT INTO `answers` (`aid`, `answer`, `ans_id`) VALUES
(1, '<NAME>', 1),
(2, '<NAME>', 1),
(3, '<NAME>', 1),
(4, '<NAME>', 1),
(5, 'The operating system', 2),
(6, 'The motherboard', 2),
(7, 'The platform', 2),
(8, 'Application software', 2),
(9, 'Internal hard disk', 3),
(10, 'Solid state disks', 3),
(11, 'External hard disk', 3),
(12, 'Mouse', 3),
(13, '5', 4),
(14, '6', 4),
(15, '4', 4),
(16, '8', 4),
(17, 'Program', 5),
(18, 'Operating system', 5),
(19, 'Data', 5),
(20, 'Software', 5);
-- --------------------------------------------------------
--
-- Table structure for table `questions`
--
DROP TABLE IF EXISTS `questions`;
CREATE TABLE IF NOT EXISTS `questions` (
`qid` int(255) NOT NULL,
`question` varchar(255) DEFAULT NULL,
`ans_id` int(255) DEFAULT NULL,
PRIMARY KEY (`qid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
--
-- Dumping data for table `questions`
--
INSERT INTO `questions` (`qid`, `question`, `ans_id`) VALUES
(1, 'Who is father of modern computer?', 1),
(2, '......controls the way in which the computer system functions and provides a means by which users can interact with the computer.', 5),
(3, 'The most widely used computer device is.', 9),
(4, 'How many generations of computers we have?', 13),
(5, '.......are software which is used to do particular task.', 17);
-- --------------------------------------------------------
--
-- Table structure for table `user`
--
DROP TABLE IF EXISTS `user`;
CREATE TABLE IF NOT EXISTS `user` (
`uid` int(255) NOT NULL,
`username` varchar(255) DEFAULT NULL,
`totalques` int(255) DEFAULT NULL,
`answerscorrect` int(255) DEFAULT NULL,
PRIMARY KEY (`uid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `users`
--
DROP TABLE IF EXISTS `users`;
CREATE TABLE IF NOT EXISTS `users` (
`id` int(255) NOT NULL AUTO_INCREMENT,
`username` varchar(255) NOT NULL,
`email` varchar(255) NOT NULL,
`password` varchar(255) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=33 DEFAULT CHARSET=latin1;
--
-- Dumping data for table `users`
--
INSERT INTO `users` (`id`, `username`, `email`, `password`) VALUES
(1, 'dinu', '<EMAIL>', '<PASSWORD>'),
(2, 'admin', '<EMAIL>', '<PASSWORD>'),
(3, 'admin', '<EMAIL>', '<PASSWORD>'),
(4, 'admin', '<EMAIL>', '<PASSWORD>'),
(5, 'dd', '<EMAIL>', '<PASSWORD>'),
(6, 'budd', '<EMAIL>', '<PASSWORD>'),
(7, 'gt', '<EMAIL>', '<PASSWORD>'),
(8, 'bidu', '<EMAIL>', '<PASSWORD>'),
(9, 'cat', '<EMAIL>', '<PASSWORD>'),
(10, 'hhl', '<EMAIL>', '<PASSWORD>7129e7'),
(11, 'uu', '<EMAIL>', '<PASSWORD>'),
(12, 'cccc', '<EMAIL>', '<PASSWORD>'),
(13, 'CCCCC', '<EMAIL>', '<PASSWORD>'),
(14, 'ad', '<EMAIL>', '<PASSWORD>'),
(15, 'ft', '<EMAIL>', '<PASSWORD>'),
(16, 'gg', '<EMAIL>', '<PASSWORD>'),
(17, 'dddd', '<EMAIL>', '<PASSWORD>'),
(18, 'll', '<EMAIL>', '<PASSWORD>'),
(19, 'vv', '<EMAIL>', '<PASSWORD>'),
(20, 'vve', '<EMAIL>', '<PASSWORD>'),
(21, 'w', '<EMAIL>', '<PASSWORD>'),
(22, 'ff', '<EMAIL>', '8fa14cdd754f91cc<PASSWORD>c9<PASSWORD>'),
(23, 'ffs', '<EMAIL>', '<PASSWORD>'),
(24, 'wd', '<EMAIL>', '<PASSWORD>'),
(25, 'x', '<EMAIL>', '<PASSWORD>'),
(26, 'eeeee', '<EMAIL>', '08a4415e9d594ff960030b921d42b91e'),
(27, 'eeeeee', '<EMAIL>', '08a4415e9d594ff<PASSWORD>'),
(28, 'yyyy', '<EMAIL>', '2fb1c5cf58867b5bbc9a1b145a86f3a0'),
(29, 'admine', '<EMAIL>', '08a4415e9d594ff960030b921d42b91e'),
(30, 't', '<EMAIL>', 'e358efa489f58062f10dd7316b65649e'),
(31, 'ttt', '<EMAIL>', 'accc9105df5383111407fd5b41255e23'),
(32, 'xxxx', '<EMAIL>', '<PASSWORD>87d91c818ee6e9ec29f8c1');
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep><?php
session_start();
if (!isset($_SESSION['username'])) {
$_SESSION['msg'] = "You must log in first";
header('location: login.php');
}
if (isset($_GET['logout'])) {
session_destroy();
unset($_SESSION['username']);
header("location: login.php");
}
?>
<html>
<head>
<title>Home</title>
</head>
<body>
<?php if (isset($_SESSION['success'])) : ?>
<div class="error success" >
<h3>
<?php
echo $_SESSION['success'];
unset($_SESSION['success']);
?>
</h3>
</div>
<?php endif ?>
<div class="container">
<div class="login-box" style="max-width:550px; float:none; margin: 50px auto; ">
<div class="row">
<div class="col-md-6 login-left">
<form>
<div class="card-body">
<img src="qq.png"/>
</div>
</form>
</div>
</div>
<h2 class="text-center text-light card-header">Quiz World!</h2>
<div class="card-body">
<a href="home.php"><input type='submit' name='submit' value='Start' class='start' /></a>
</div>
</div>
</div>
</body>
</html>
| 9262542b83c557a1b952c65fab269c005517d15e | ["SQL", "PHP"] | 2 | SQL | ThiliniDBandara/OnlineQuiz | 478640fa1acf94ace9e00f2f98948e5e8932701c | fab3808d1fcbd393be8cf65c360afe65565b9a6b |
refs/heads/master |
<repo_name>S-stu/1<file_sep>/HARDWARE/ADS1292/ads1292.c
//-----------------------------------------------------------------
// Description:
// ADS1292 driver
// Author: Lingzhi Electronics (凌智电子)
// Start date: 2018-08-04
// Finish date: 2018-08-04
// Last modified:
// Current version: V1.0
// Version history:
// - V1.0: (2018-08-04) ADS1292 driver
// Debug tools: Lingzhi STM32F429 + Cyclone IV electronic system design board, LZE_ST_LINK2
// Notes:
//
//-----------------------------------------------------------------
//-----------------------------------------------------------------
// Header includes
//-----------------------------------------------------------------
#include "ads1292.h"
#include "spi.h"
#include "delay.h"
#include "usart.h"
//-----------------------------------------------------------------
//-----------------------------------------------------------------
// void ADS1292_Init(void)
//-----------------------------------------------------------------
//
// Function: ADS1292 initialization
// Parameters: none
// Return value: none
// Notes: none
//
//-----------------------------------------------------------------
void ADS1292_Init(void)
{
GPIO_InitTypeDef GPIO_InitStructure;
RCC_AHB1PeriphClockCmd(RCC_AHB1Periph_GPIOE, ENABLE); // enable the GPIOE clock
// ADS1292_DRDY -> PE9
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_9; // ADS1292_DRDY -> PE9
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_IN; // plain input mode
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_100MHz; // 100 MHz
GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_UP; // pull-up
GPIO_Init(GPIOE, &GPIO_InitStructure); // initialize PE9
// ADS1292_START -> PE7
// ADS1292_PWDN -> PE8 power-down or system reset; active low
// ADS1292_CS -> PE10 chip select
// ADS1292_GPIO1 -> PE11
// ADS1292_GPIO2 -> PE12
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_7 | GPIO_Pin_8 | GPIO_Pin_10 |
GPIO_Pin_11 | GPIO_Pin_12;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_OUT; // plain output mode
GPIO_InitStructure.GPIO_OType = GPIO_OType_PP; // push-pull output
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_100MHz; // 100 MHz
GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_UP; // pull-up
GPIO_Init(GPIOE, &GPIO_InitStructure); // initialize PE7, PE8, PE10, PE11, PE12
// ADS1292_DRDY -> PE9 (HAL equivalent, kept commented out for reference)
// GPIO_InitStruct.Pin = GPIO_PIN_9; // configure ADS1292_DRDY
// GPIO_InitStruct.Mode = GPIO_MODE_INPUT; // input
// GPIO_InitStruct.Pull = GPIO_PULLUP; // pull-up
// GPIO_InitStruct.Speed = GPIO_SPEED_HIGH; // high speed
// HAL_GPIO_Init(GPIOE, &GPIO_InitStruct); // initialize
SPI1_Init(); // SPI initialization
}
//-----------------------------------------------------------------
// void ADS1292_PowerOnInit(void)
//-----------------------------------------------------------------
//
// Function: ADS1292 power-on reset
// Parameters: none
// Return value: none
// Notes: none
//
//-----------------------------------------------------------------
void ADS1292_PowerOnInit(void)
{
u8 device_id;
ADS1292_START = 1;
ADS1292_CS = 1;
ADS1292_PWDN = 0; // enter power-down mode
delay_ms(700);
ADS1292_PWDN = 1; // leave power-down mode
delay_ms(700); // wait for the chip to stabilize
ADS1292_PWDN = 0; // issue a reset pulse
delay_us(10);
ADS1292_PWDN = 1;
delay_ms(700); // wait for stabilization; the ADS1292R is now ready for use
ADS1292_START = 0;
ADS1292_CS = 0;
SPI1_ReadWriteByte(SDATAC); // send the stop-continuous-read command
delay_us(10);
ADS1292_CS = 1;
// Read the chip ID
/*device_id = ADS1292_Read_Reg(RREG | ID);
while(device_id != 0x73)
{
printf("ERROR ID:%02x\r\n",device_id);
device_id = ADS1292_Read_Reg(RREG | ID);
delay_ms(700);
}*/
delay_us(10);
ADS1292_Write_Reg(WREG | CONFIG2, 0XE0); // use the internal reference voltage
delay_ms(10); // wait for the internal reference to stabilize
ADS1292_Write_Reg(WREG | CONFIG1, 0X01); // set the conversion rate to 250 SPS
delay_us(10);
ADS1292_Write_Reg(WREG | LOFF, 0XF0); // this register configures lead-off detection
delay_us(10);
ADS1292_Write_Reg(WREG | CH1SET, 0X60); // gain 12, connected to the electrodes
delay_us(10);
ADS1292_Write_Reg(WREG | CH2SET, 0X00); // gain 6, connected to the electrodes
delay_us(10);
ADS1292_Write_Reg(WREG | RLD_SENS, 0xEF);
delay_us(10);
ADS1292_Write_Reg(WREG | LOFF_SENS,0x0F);
delay_us(10);
ADS1292_Write_Reg(WREG | LOFF_STAT,0x00);
delay_us(10);
ADS1292_Write_Reg(WREG | RESP1, 0xEA); // enable respiration detection (ADS1292R only)
delay_us(10);
ADS1292_Write_Reg(WREG | RESP2, 0x03);
delay_us(10);
ADS1292_Write_Reg(WREG | GPIO, 0x0C);
delay_us(10);
}
//-----------------------------------------------------------------
// void ADS1292_Write_Reg(u8 com, u8 data)
//-----------------------------------------------------------------
//
// Function: write to an internal ADS1292 register
// Parameters: addr (command opcode combined with the register address), data (byte to write)
// Return value: none
// Notes: none
//
//-----------------------------------------------------------------
void ADS1292_Write_Reg(u8 addr, u8 data)
{
ADS1292_CS = 0; // pull chip select low
SPI1_ReadWriteByte(addr); // command opcode combined with the register address
delay_us(10);
SPI1_ReadWriteByte(0x00); // number of registers to write minus 1 (0x00 = one register)
delay_us(10);
SPI1_ReadWriteByte(data); // data byte to write
delay_us(10);
ADS1292_CS = 1; // set chip select high
}
//-----------------------------------------------------------------
// u8 ADS1292_Read_Reg(u8 addr)
//-----------------------------------------------------------------
//
// Function: read an internal ADS1292 register
// Parameters: addr (command opcode combined with the register address)
// Return value: the register value that was read
// Notes: none
//
//-----------------------------------------------------------------
u8 ADS1292_Read_Reg(u8 addr)
{
u8 Rxdata;
ADS1292_CS = 0;
SPI1_ReadWriteByte(addr); // command opcode combined with the register address
delay_us(10);
SPI1_ReadWriteByte(0x00); // number of registers to read minus 1 (0x00 = one register)
delay_us(10);
Rxdata = SPI1_ReadByte(); // byte read back from the register
delay_us(10);
ADS1292_CS = 1;
return Rxdata;
}
//-----------------------------------------------------------------
// u8 ADS1292_Read_Data(u8 addr)
//-----------------------------------------------------------------
//
// Function: read one data frame from the ADS1292
// Parameters: data (pointer to a 9-byte receive buffer)
// Return value: none
// Notes: none
//
//-----------------------------------------------------------------
void ADS1292_Read_Data(u8 *data)
{
u8 i;
ADS1292_CS = 0;
delay_us(10);
SPI1_ReadWriteByte(RDATAC); // send the start-continuous-read command
delay_us(10);
ADS1292_CS = 1;
ADS1292_START = 1; // start the conversion
while (GPIO_ReadInputDataBit(GPIOE, GPIO_Pin_9) == 1); // wait for the DRDY signal to go low
ADS1292_CS = 0;
for (i = 0; i < 9; i++) // read 9 bytes back to back
{
*data = SPI1_ReadByte();
data++;
}
ADS1292_START = 0; // stop the conversion
SPI1_ReadWriteByte(SDATAC); // send the stop-continuous-read command
delay_us(10);
ADS1292_CS = 1;
}
//-----------------------------------------------------------------
// End Of File
//-----------------------------------------------------------------
<file_sep>/HARDWARE/EXTI/exti.c
//-----------------------------------------------------------------
// Description:
// External interrupt (EXTI) driver
// Author: Lingzhi Electronics (凌智电子)
// Start date: 2018-08-04
// Finish date: 2018-08-04
// Last modified:
// Current version: V1.0
// Version history:
// - V1.0: (2018-08-04) External interrupt initialization; runs the corresponding work when the interrupt fires
// Debug tools: Lingzhi STM32F429 + Cyclone IV electronic system design board, LZE_ST_LINK2
// Notes:
//
//-----------------------------------------------------------------
//-----------------------------------------------------------------
// Header includes
//-----------------------------------------------------------------
#include "exti.h"
#include "spi.h"
#include "delay.h"
#include "sys.h"
//-----------------------------------------------------------------
extern u8 flog;
extern u32 ch1_data;
extern u32 ch2_data;
extern u16 point_cnt;
//-----------------------------------------------------------------
// void EXTI_Init(void)
//-----------------------------------------------------------------
//
// Function: external interrupt initialization
// Parameters: none
// Return value: none
// Notes: none
//
//-----------------------------------------------------------------
void EXTIX_Init(void)
{
NVIC_InitTypeDef NVIC_InitStructure;
EXTI_InitTypeDef EXTI_InitStructure;
RCC_APB2PeriphClockCmd(RCC_APB2Periph_SYSCFG, ENABLE); // enable the SYSCFG clock
SYSCFG_EXTILineConfig(EXTI_PortSourceGPIOE, EXTI_PinSource9); // map PE9 to EXTI line 9
/* Configure EXTI_Line9 */
EXTI_InitStructure.EXTI_Line = EXTI_Line9; // LINE9
EXTI_InitStructure.EXTI_Mode = EXTI_Mode_Interrupt; // interrupt mode
EXTI_InitStructure.EXTI_Trigger = EXTI_Trigger_Falling; // falling-edge trigger
EXTI_InitStructure.EXTI_LineCmd = ENABLE; // enable LINE9
EXTI_Init(&EXTI_InitStructure); // apply the configuration
NVIC_InitStructure.NVIC_IRQChannel = EXTI9_5_IRQn; // external interrupt lines 9 to 5
NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 0x00; // preemption priority 0
NVIC_InitStructure.NVIC_IRQChannelSubPriority = 0x02; // sub-priority 2
NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE; // enable the external interrupt channel
NVIC_Init(&NVIC_InitStructure); // apply the configuration
}
//-----------------------------------------------------------------
// void EXTI9_5_IRQHandler(void)
//-----------------------------------------------------------------
//
// Function: interrupt service routine for EXTI lines 9 to 5; calls the shared interrupt handling callback
// Parameters: none
// Return value: none
// Notes: none
//
//-----------------------------------------------------------------
void EXTI9_5_IRQHandler(void)
{
GPIO_EXTI_Callback(GPIO_Pin_9);
EXTI_ClearITPendingBit(EXTI_Line9); // clear the interrupt pending bit on LINE9
}
void GPIO_EXTI_Callback(uint16_t GPIO_Pin)
{
if(GPIO_Pin==GPIO_Pin_9)
{
u8 j;
u8 read_data[9];
for (j = 0; j < 9; j++) // read 9 bytes back to back
{
read_data[j] = SPI1_ReadWriteByte(0xFF);
}
ch1_data=0;
ch2_data=0;
ch1_data |= (uint32_t)read_data[3] << 16;
ch1_data |= (uint32_t)read_data[4] << 8;
ch1_data |= (uint32_t)read_data[5] << 0;
ch2_data |= (uint32_t)read_data[6] << 16;
ch2_data |= (uint32_t)read_data[7] << 8;
ch2_data |= (uint32_t)read_data[8] << 0;
point_cnt++;
flog=1;
}
}
| 4c5bcbcec87bb469d15a688033f756048170e0db | ["C"] | 2 | C | S-stu/1 | 65572251b595a8f1276a4eafb7d38bd25087a4cd | b3f1020b0984a1de19510825fbe31aeb3ffdc4c3 |
refs/heads/master |
<repo_name>ChrisPupo22/Sprint-Challenge--Java-Fundamentals-piggybank<file_sep>/src/piggybank/DollarFromAbstract.java
package piggybank;
public class DollarFromAbstract extends AbstractMoney {
public DollarFromAbstract(int amount) {
super(amount);
value = 1.00;
}
@Override
public double getValue() {
return value;
}
}<file_sep>/src/piggybank/NickelFromAbstract.java
package piggybank;
public class NickelFromAbstract extends AbstractMoney {
public NickelFromAbstract(int amount) {
super(amount);
value = 0.05;
}
@Override
public double getValue() {
return value;
}
// overload of getTotal() taking an explicit amount (not an override, so no @Override annotation)
public double getTotal(int amount) {
return amount * value;
}
}<file_sep>/src/piggybank/AbstractMoney.java
package piggybank;
public abstract class AbstractMoney {
protected int amount;
protected double value;
public AbstractMoney() {
}
public AbstractMoney(int amount) {
this.amount = amount;
}
public int getAmount() {
return amount;
}
public String getName() {
// there is no name field on this class; derive a display name from the concrete subclass
return getClass().getSimpleName();
}
public abstract double getValue();
public double getTotal() {
return amount * value;
}
@Override
public String toString() {
return "Abstract Money{" +
"Amount:'" + amount + '\'' +
", Value: " + value +
'}';
}
}
| 6ce5ec6bb3cfabd57ccec2b70764886e712f87c0 | ["Java"] | 3 | Java | ChrisPupo22/Sprint-Challenge--Java-Fundamentals-piggybank | d44c08763ecd4ef89472c27254866205fa4940e8 | ec4bb0a8c5b4dd619e3076796c86d9f8eb45c769 |
refs/heads/master |
<repo_name>Jeremie1707/sandbox<file_sep>/04-Front-End/02-CSS-components/04-Activity-feed/README.md
## Background & Objectives
Build [an activity feed](http://lewagon.github.io/html-css-challenges/13-activity-feed/) similar to Dribbble's.
1. Create your `avatar` CSS component.
2. Implement your tabs and your notification design in `tabs.css` and `notification.css`.
But first, **read all the instructions**!
## Tab design
Tabs are pretty easy to design. The HTML looks like this:
```html
<div class="tabs">
<a href="#" class="tab active">Traveling</a>
<a href="#" class="tab">Hosting</a>
</div>
```
Once that is done:
- Make `.tabs` a flexbox
- Add some `padding` on each `.tab`
- You don't even need `space-between` or `align-items` here because the tabs already have the same `height`
- You can also design the **active** and **hover** states of tabs using `.tab.active` & `.tab:hover`. You will probably need to play with the `opacity` and the `border-bottom` 😬 (see the sketch after this list)
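Putting those steps together, here's one possible `tabs.css` sketch; the exact padding, colors and border values are placeholders, so pick your own:

```css
/* components/tabs.css */
.tabs {
  display: flex;
  border-bottom: 1px solid #EEEEEE;
}

.tab {
  padding: 16px 24px;
  text-decoration: none;
  color: black;
  opacity: 0.6;
}

.tab:hover {
  opacity: 1;
}

.tab.active {
  opacity: 1;
  border-bottom: 2px solid #E74C3C;
}
```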
## Notification design
For the `.notification` design, **go back to the slides!** This is a classic use case for flexbox here, with the body of the notification pushing the other items thanks to a `flex-grow`.
Once that is done, it's just a matter of fine-tuning your `margin`, `padding`, and playing with fonts and colors.
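As a rough starting point, a `notification.css` sketch could look like the block below. The `.notification-body` class is an assumption about your own markup: it stands for whatever element wraps the text of the notification between the avatar and the timestamp.

```css
/* components/notification.css */
.notification {
  display: flex;
  align-items: center;
  padding: 16px;
  border-bottom: 1px solid #F0F0F0;
}

.notification-body {
  flex-grow: 1; /* pushes the timestamp to the far right */
  margin-left: 16px;
}
```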
So what are you waiting for? Time to make a cool activity feed! 🚀🚀
## [Extra tip] User Images
For user images in your activity feed you can use a placeholder service that we have built to get any Kitt user's github image using their GitHub nickname. Just use this URL: `https://kitt.lewagon.com/placeholder/users/<user.github_nickname>`, and try it with a few different github handles.
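For example, an avatar in your feed could be as simple as the line below (replace `github_nickname` with any real GitHub handle):

```html
<img class="avatar" src="https://kitt.lewagon.com/placeholder/users/github_nickname" alt="avatar">
```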
## [Extra tip] First & last child selectors
You can select first (or last notification) with these selectors:
```css
.notification:first-child {
/* CSS code for the first element with class="notification" */
}
.notification:last-child {
/* CSS code for the last element with class="notification" */
}
```
It can be useful to get rid of `border-bottom` on the last notification of the feed for instance!
NB: don't forget to **hard refresh** your browser (`cmd + shift + r`) to clear your browser's cache if your page doesn't seem to display your current code!
<file_sep>/04-Front-End/01-HTML-and-CSS/01-Profile-content/Rakefile
require 'rake'
require 'rake/testtask'
require 'minitest/autorun'
require 'minitest/pride'
task :default do
describe "index.html" do
it "should have your own profile HTML code" do
(File.open("profile/index.html","r").readlines.size > 15).must_equal true
end
end
end
<file_sep>/04-Front-End/07-JavaScript-Plugins/02-Geocoder/README.md
## Background & Objectives
In this exercise, we'll practise our AJAX skills. Let's start simple with a `GET` request. Here we'll use the [MapBox Geocoding API](https://www.mapbox.com/search/). We want to build a tool where we can input an address, hit a button, and get the **GPS Coordinates** back! For the cherry on top, we'll display the map as well.
<div class="text-center">
<img src="https://raw.githubusercontent.com/lewagon/fullstack-images/master/frontend/mapbox_ajax_geocoder.gif" alt="MapBox Geocoding demo" width="100%">
</div>
## Specs
You can launch your local server with:
```bash
rake webpack
```
### Geocoding
First, you will need to create an account with MapBox and get an API key (it's free to sign up!) Then, read the [MapBox Geocoding API documentation](https://www.mapbox.com/api-documentation/#geocoding). It boils down to doing an HTTP `GET` request with an address as a query string parameter.
```js
'https://api.mapbox.com/geocoding/v5/mapbox.places/Los%20Angeles.json?access_token=YOUR-API-KEY'
```
NOTE: The request to the MapBox API will require your API key as one of the parameters in your request. You can find your key on your [account page](https://www.mapbox.com/account/) once you have created an account and signed in.
Go ahead and add a form to your HTML page. It should contain an `input` of type `"text"` where a user can type an address in, and an `input` of type `"submit"` to display a button.
Once that's done, use the `submit` event to catch the moment the form is posted by the user. That's when you'll want to trigger the AJAX query to the MapBox Geocoding service using `fetch` (go back to yesterday's lecture slides).
As always when you fetch data from an API, start by `console.log()`ing what you get back from MapBox. It's a massive JSON! Once you've got that, figure out where the GPS coordinates are buried and display them on screen.
HINT: Mapbox returns coordinates with longitude first, and latitude second!
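Here is a rough sketch of that flow. The element selectors and the exact path into the JSON (`features[0].center`) are assumptions to adapt to your own markup and to what your `console.log` actually shows:

```js
const form = document.querySelector('form');
const input = document.querySelector('input[type="text"]');
const result = document.querySelector('#result'); // some element to display the coordinates in

form.addEventListener('submit', (event) => {
  event.preventDefault(); // stay on the page instead of reloading it
  const address = encodeURIComponent(input.value);
  const url = `https://api.mapbox.com/geocoding/v5/mapbox.places/${address}.json?access_token=YOUR-API-KEY`;
  fetch(url)
    .then(response => response.json())
    .then((data) => {
      console.log(data); // inspect the whole payload first!
      const [ lng, lat ] = data.features[0].center; // longitude first, latitude second
      result.innerText = `Latitude: ${lat} / Longitude: ${lng}`;
    });
});
```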
### [OPTIONAL] Displaying a map
To display a MapBox Map with a marker at the specified address, we'll use a second API, the [MapBox JavaScript API](https://www.mapbox.com/mapbox-gl-js/api/).
To use it, add this line in the `head` of your HTML file, so you can use MapBox's stylesheet for your map:
```html
<link href='https://api.tiles.mapbox.com/mapbox-gl-js/v0.50.0/mapbox-gl.css' rel='stylesheet' />
```
To add a map, you'll need an empty supporting HTML element. For instance:
```html
<div id="map" style="height: 300px; width: 600px"></div>
```
To easily build the map and add a marker to it, we'll use [npm's mapbox-gl package](https://yarnpkg.com/en/package/mapbox-gl).
You already have a `package.json` so you just need to `yarn add mapbox-gl` to download it locally in `02-Geocoder/node_modules`.
To display a map in your `#map` with the `mapbox-gl` package you can use these lines:
```js
import mapboxgl from 'mapbox-gl';
mapboxgl.accessToken = 'yourApiKey';
const map = new mapboxgl.Map({
container: 'map',
style: 'mapbox://styles/mapbox/streets-v9',
center: [ -0.077, 51.533 ],
zoom: 12
});
```
To add a marker to the map, if the variable `map` holds the `mapboxgl` object, you can run:
```js
new mapboxgl.Marker()
.setLngLat([ -0.077, 51.533 ])
.addTo(map);
```
Happy geocoding! 🌎 🌍 🌏
<file_sep>/02-OOP/05-Food-Delivery-Day-One/01-Food-Delivery/app/repositories/meal_repository.rb
require 'csv'
require_relative '../models/meal'
class MealRepository
def initialize(csv_path)
@csv_path = csv_path
@meals = []
@next_id = 1
load_csv
end
def load_csv
csv_options = { headers: :first_row, header_converters: :symbol }
CSV.foreach(@csv_path, csv_options) do |row|
# byebug
row[:id] = row[:id].to_i
row[:price] = row[:price].to_i
@meals << Meal.new(row)
@next_id = row[:id]
end
@next_id = @meals.last.id + 1 unless @meals.empty?
end
def save
end
end
<file_sep>/04-Front-End/02-CSS-components/05-Open-component-challenge/README.md
## Background & Objectives
OK now that you've reproduced the designs we've given to you let's see how you manage an open component challenge 😎.
You will have to find a component that you like on Dribbble and try to reproduce it.
When designing a new component you always have to follow the same flow:
### Step 1: Find inspiration 🤔
Find which component you want to design (login form, tabs, navbar, etc.), here's a [Dribbble bucket](https://dribbble.com/arthur-littm/buckets/1030911-Open-Component-Challenge) we've made for this challenge. Find a shot that you like.
### Step 2: Draw the HTML structure ✏️
Now that you know what you're going to design you have to draw the structure of the HTML. The classic beginner mistake is to skip this step and dive directly into the code ⚠️. Trying to do the CSS with bad HTML structure is a nightmare 😱.
For example if you've decided to draw a component like this one, you should have the following HTML structure drawn **before any code is attempted**.
<div class="text-center">
<img src="https://raw.githubusercontent.com/lewagon/fullstack-images/master/frontend/open-component-challenge-structure.png" alt="">
</div>
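Purely as an illustration, a drawn structure like the one in the image above could translate into HTML along these lines (every class name here is hypothetical; yours will depend on the shot you picked):

```html
<div class="card">
  <img class="card-image" src="image.png" alt="">
  <div class="card-body">
    <h2 class="card-title">Title</h2>
    <p class="card-description">A short description</p>
    <a href="#" class="btn">Action</a>
  </div>
</div>
```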
Once you've drawn your HTML structure feel free to ask one of the TAs for their opinion about it, to be sure you're going in the right direction.
### Step 3: Code the HTML 💻
### Step 4: Code the CSS 💅
## Specs
Code the HTML in the `index.html` file.
For the CSS as usual add a new file in the `components` folder (or more if you're doing a complicated component which is using other components eg: `avatars`, `buttons`, ...).
<file_sep>/02-OOP/02-OO-Advanced/Optional-01-Web-Browser/spec/browser_spec.rb
require "browser"
describe "Browser" do
let (:browser) { Browser.new }
it "should implement a fetch_content method" do
expect(browser).to respond_to :fetch_content
end
describe "#fetch_content" do
let(:content) { browser.fetch_content("http://www.motherfuckingwebsite.com/") }
it 'should return a String' do
expect(content).to be_kind_of(String)
end
it "should return a string for http://www.motherfuckingwebsite.com/" do
expect(content).to match /This is a motherfucking website/
end
it "should not contain HTML tags" do
expect(content).not_to match /<body>/
end
end
end
<file_sep>/02-OOP/01-OO-Basics/Optional-01-Richest-Student/README.md
## Background & Objectives
Congrats on reaching the first optional exercise of the first OOP day. We will now work on an exercise to mix sorting and comparable methods with objects.
Suppose you have students (defined by the class `Student`) who each have an amount of money (in bills of five, ten and twenty euros). We want to be able to compare them based on their wealth.
### Specs
- When initializing a `Student`, you should pass 4 arguments representing the student name and the number of bills they own (fives, tens and twenties)
- Implement a `wealth` instance method on `Student`
- Implement [Comparable](http://www.ruby-doc.org/core-2.5.3/Comparable.html) on this method so that it's possible to compare 2 students, and to sort an `Array` of students. You can read more about the spaceship operator `<=>` and sorting objects [here](http://stackoverflow.com/a/28014514). A possible skeleton is sketched below.
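Here is one possible skeleton, just to show the shape of the solution; the exercise's specs remain the source of truth:

```ruby
class Student
  include Comparable

  attr_reader :name

  def initialize(name, fives, tens, twenties)
    @name = name
    @fives = fives
    @tens = tens
    @twenties = twenties
  end

  def wealth
    @fives * 5 + @tens * 10 + @twenties * 20
  end

  # Comparable only needs `<=>`; it then gives you <, >, ==, between?, sort, max, etc.
  def <=>(other)
    wealth <=> other.wealth
  end
end

# [Student.new("anna", 1, 0, 2), Student.new("bob", 0, 3, 0)].max would return the richest student.
```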
<file_sep>/04-Front-End/09-WX-MP-Frontend/01-WX-MP-Frontend-02/README.md
## Background & objectives
The goal of this challenge is to get more practice with WeChat's template language.
## Specs
### 1. Setup one more page in your app
Use the setup file `app.json` to once again add a new route inside the `pages:[]` array:
```
"pages/stories/stories"
```
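After that change, the `pages` array in `app.json` could look something like this (the first route below is an assumption standing in for whatever pages your app already registers):

```json
{
  "pages": [
    "pages/landing/landing",
    "pages/stories/stories"
  ]
}
```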
### 2. Create a small header to begin
Let's re-use the same [Banner Component](https://uikit.lewagon.com/documentation#banners) as on our landing page, and turn it into a nice header! We can use `inline CSS` again to customize its size... e.g. a 100px height.
### 3. Then a card for our stories...
We'll want a card component to display our FMC stories: each card will host **content** and **author**.
Once again we can save time using [Le Wagon's card component](https://uikit.lewagon.com/documentation#card_product) (but no need for a product image).
Just one story is enough, we're just templating here.
### 4. Data binding introduction
WXML is much more than HTML: it's a **templating language** allowing us to [inject javascript variables and even loop through our local data](https://developers.weixin.qq.com/miniprogram/en/dev/framework/view/wxml/data.html)! The magic happens when you use this syntax: `{{variable or operation}}`.
We call it the **mustache syntax** 👨🦰 and it connects JS and WXML both ways...
**From JS to WXML ➡️**
- Store ‘Who is here?’ inside a `text` variable in the `stories.js` page data
```js
//stories.js
Page({
data: {
text: 'Who is here?'
}
})
```
- Display the text inside your `stories.wxml` page
```html
<!-- stories.wxml -->
<view>{{text}}</view>
```
Any string stored inside `text` will dynamically show in your view 🤓
**From WXML to JS ⬅️**
- Create a button with the "bindtap" attribute and a function name as a value
```html
<!-- stories.wxml -->
<button bindtap="clickMe">{{text}}</button>
```
- Define this new function in your Page object:
```js
//stories.js
Page({
clickMe: function() {
this.setData({ text: "Hello World" })
}
})
```
Well done! Now you have a button firing a "bindtap" event to the logical layer (the javascript file), and a function that re-sets the local data to another string... immediately rendered in the view.
⚠️ Do not break the Page object! **Each key-value pair is chained with a comma.** Notice how the Page object contains different keys provided by the framework: `onReady`, `onShow` etc. We call them [lifecycle functions](https://developers.weixin.qq.com/miniprogram/en/dev/framework/app-service/page.html).
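One more trick you will want for the stories page: the same mustache syntax also handles list rendering with `wx:for`. A minimal sketch, assuming the stories live in a `stories` array with `content` and `author` keys (both names are assumptions):

```html
<!-- stories.wxml -->
<view wx:for="{{stories}}" wx:key="index" class="card">
  <view>{{item.content}}</view>
  <view>{{item.author}}</view>
</view>
```

```js
//stories.js
Page({
  data: {
    stories: [
      { content: 'Who is here?', author: 'Le Wagon' }
    ]
  }
})
```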
### Bonus action:
Transform your `<navigator>` from the landing page into a `<button>` element, calling this function on **bindtap**:
```js
//landing.js
goToStoriesPage: function() {
wx.navigateTo({
url: '/pages/stories/stories'
})
}
```<file_sep>/04-Front-End/09-WX-MP-Frontend/01-WX-MP-Frontend-OPT-01/README.md
## Background & objectives
Each time you restart your application, your post data disappears! To retain the information, we need to save it in our user's **phone cache**.
## Specs
- When a user submits a new FMC story, save the stories into the phone cache. Check out the [setStorage API documentation](https://developers.weixin.qq.com/miniprogram/en/dev/api/data.html#wxsetstoragesynckeydata) to find out how.
- Inspect the phone cache using the console ("debugger") **"Storage"** tab!
- When the app launches or when the page shows, get the stories from the cache storage and load them into local data. Check out the [getStorage API documentation](https://developers.weixin.qq.com/miniprogram/en/dev/api/data.html#wxgetstoragesynckey). _Tip: Tencent provides a use case of cache storage directly in the `onLaunch` of your App ;)_
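Here is a rough sketch of how the two storage calls could fit together (page names, keys and the data shape are assumptions based on the previous challenges, so adapt them to your own app):

```js
// post.js (sketch) – save to the phone cache when a story is submitted
Page({
  formSubmit: function (event) {
    const stories = wx.getStorageSync('stories') || []; // returns '' when nothing is cached yet
    stories.push({
      name: event.detail.value.name,
      content: event.detail.value.content
    });
    wx.setStorageSync('stories', stories);
  }
});

// stories.js (sketch) – reload from the cache every time the page shows
Page({
  onShow: function () {
    this.setData({ stories: wx.getStorageSync('stories') || [] });
  }
});
```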
<file_sep>/04-Front-End/05-DOM-and-Events/03-My-First-Event-Listener/lib/listener.js
// TODO: React to a click on the button!
const button = document.querySelector('button');
const audio = new Audio('sound.mp3');

button.addEventListener('click', () => {
  button.classList.add('disabled');
  button.innerText = 'Bingo!';
  audio.play();
});
<file_sep>/02-OOP/01-OO-Basics/02-Getters-and-Setters/lib/vending_machine.rb
class VendingMachine
# TODO: add relevant getter/setter to this class to make the scenarios work properly.
def initialize(snack_price_cents, snacks)
@amount_cents = 0
@snacks = snacks
@snack_price_cents = snack_price_cents
end
def insert_coin(value_cents)
# TODO: what happens to @snacks, @amount_cents and @snack_price_cents
# when the user inserts a coin?
end
def buy_snack
# TODO: what happens to @snacks, @amount_cents and @snack_price_cents
# when the user pushes a button to buy a Snack?
end
end
<file_sep>/02-OOP/01-OO-Basics/03-Basic-OOP/spec/orange_tree_spec.rb
require "orange_tree"
describe OrangeTree do
let(:orange_tree) { OrangeTree.new }
it "OrangeTree constructor (initialize method) should not take any parameters" do
initialize_parameters_count = OrangeTree.allocate.method(:initialize).arity
expect(initialize_parameters_count).to eq 0
end
it "should have an age" do
expect(orange_tree).to respond_to :age
expect(orange_tree.age).to be_a Integer
end
it "should be 0 years old when created" do
expect(orange_tree.age).to eq 0
end
it "should have a height" do
expect(orange_tree).to respond_to :height
expect(orange_tree.height).to be_a Integer
end
it "should measure 0 meters when 0 years old" do
expect(orange_tree.height).to eq 0
end
it "should have fruits" do
expect(orange_tree).to respond_to :fruits
expect(orange_tree.fruits).to be_a Integer
end
it "should have 0 fruits when 0 years old" do
expect(orange_tree.fruits).to eq 0
end
it "should let us check whether the tree is dead or alive" do
expect(orange_tree).to respond_to(:dead?)
expect(orange_tree.dead?).to eq(false)
end
it "should have an `one_year_passes!` method to simulate a year passing" do
expect(orange_tree).to respond_to :one_year_passes!
end
it "should age each year. After 8 years, it should be 8 years old" do
8.times { orange_tree.one_year_passes! }
expect(orange_tree.age).to eq(8)
end
it "should always live until 50 years old" do
expect(orange_tree.dead?).to eq false
50.times do
orange_tree.one_year_passes!
expect(orange_tree.dead?).to eq false
end
end
it "should measure 10 meters when 10 years old" do
10.times do
orange_tree.one_year_passes!
end
expect(orange_tree.height).to eq 10
end
it "should still measure 10 meters when 20 years old" do
20.times do
orange_tree.one_year_passes!
end
expect(orange_tree.height).to eq 10
end
it "should not be able to live more than 100 years old" do
100.times do
orange_tree.one_year_passes!
end
expect(orange_tree.dead?).to eq true
end
1.upto(5) do |i|
it "should not produce fruits at #{i} year#{i > 1 ? 's' : ''} old" do
i.times do
orange_tree.one_year_passes!
end
expect(orange_tree.fruits).to eq 0
end
end
6.upto(9) do |i|
it "should produce 100 fruits at #{i} years old" do
i.times do
orange_tree.one_year_passes!
end
expect(orange_tree.fruits).to eq 100
end
end
10.upto(14) do |i|
it "should produce 200 fruits at #{i} years old" do
i.times do
orange_tree.one_year_passes!
end
expect(orange_tree.fruits).to eq 200
end
end
it "should stop producing fruits when reaching 15 years old" do
15.times do
orange_tree.one_year_passes!
end
expect(orange_tree.fruits).to eq 0
end
it "should have a `pick_a_fruit!` method to simulate people picking a single fruit from the tree" do
expect(orange_tree).to respond_to :pick_a_fruit!
end
it "should let people pick an orange" do
10.times do
orange_tree.one_year_passes!
end
orange_tree.pick_a_fruit!
expect(orange_tree.fruits).to eq 199
end
it "should not let people pick fruits if there are no fruits remaining" do
10.times do
orange_tree.one_year_passes!
end
# There should be 200 fruits
# Pick all fruits
200.times { orange_tree.pick_a_fruit! }
# Let's try to pick one more.
orange_tree.pick_a_fruit!
expect(orange_tree.fruits).to eq 0
end
it "from 50 years old, probability of dying should increase until 100 years old" do
50.times do
orange_tree.one_year_passes!
end
found_dead = false
50.times do
orange_tree.one_year_passes!
found_dead = found_dead || orange_tree.dead?
end
expect(found_dead).to eq true
end
end
<file_sep>/02-OOP/02-OO-Advanced/Optional-03-Tracking-Subclasses/README.md
## Background & Objectives
Let's play a bit with inheritance. In Ruby, you can call `ancestors` on a Class to
get a list of all the parents. Look at [this question](http://stackoverflow.com/questions/19045195/understanding-ruby-class-and-ancestors-methods) and the recommended answer on
Stack Overflow to get a sense of the class hierarchy.
## Specs
For this exercise, we want to do the opposite. We have a `Mother` class that should be able to call
its descendants. Say you have two classes `Daughter` and `Son` with class method `phone`, then:
```ruby
Mother.phone_kids
# => should call Daughter.phone and Son.phone
```
The specs provide you with two child classes of `Mother`, so you just have to implement
the class method `phone_kids`. You may need to use the [`inherited`](http://www.ruby-doc.org/core-2.5.3/Class.html#method-i-inherited) method of `Class`.
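As a hint of how the `inherited` hook fires, here is a generic sketch (deliberately not the `phone_kids` solution):

```ruby
class Mother
  def self.inherited(child_class)
    super
    puts "#{child_class} just inherited from #{self}"
    # This is the place where you could keep track of `child_class` for later use.
  end
end

class Daughter < Mother; end
# => prints "Daughter just inherited from Mother" as soon as the class is defined
```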
<file_sep>/04-JavaScript-basics/Optional-02-Split-The-Bill/lib/split_the_bill.js
const splitTheBill = (group) => {
// TODO: implement the function and return an Object
let totalSpending = 0;
let numberMember = 0;
Object.keys(group).forEach((member) => {
totalSpending += group[member];
numberMember += 1;
});
const spendingAverage = totalSpending / numberMember;
Object.keys(group).forEach((member) => {
group[member] -= spendingAverage;
});
return group;
};
module.exports = splitTheBill; // Do not remove this line.
<file_sep>/04-Front-End/04-JavaScript-basics/04-Unordered-list/lib/list_generator.js
// Returns the proper <li> HTML tag with its content (as a string)
const listItem = content => `<li class="list-group-item">${content}</li>`;

// Returns the proper <ul> markup (as a string)
const unorderedList = (items) => {
  const listItems = items.map(item => listItem(item)).join('');
  return `<ul class="list-group">${listItems}</ul>`;
};
console.log(unorderedList(['milk', 'butter', 'bread']));
// <ul class="list-group">
// <li class="list-group-item">milk</li>
// <li class="list-group-item">butter</li>
// <li class="list-group-item">bread</li>
// </ul>
module.exports = { listItem, unorderedList }; // Do not remove.
<file_sep>/02-OOP/01-OO-Basics/01-Define-your-own-class/README.md
## Background & Objectives
Grasp the basic concepts of object-oriented programming while writing your very first class.
## Specs
Before you create your class, create a file in the `lib` directory to test out your class as you go along (create new instances, call instance methods, and print the results to the terminal). Let's call this file `interface.rb`.
Next, choose something from the real world that you would like to model. Restaurants, vehicles, users, games, recipes.. *It's your call!*
Once chosen, create a file in the `lib` directory which has the name of your object.
For instance, if you chose to model Restaurants, create the `restaurant.rb` file:
```bash
touch lib/restaurant.rb
```
Now you can safely delete the `.gitkeep` file. This file is only there so that git sees the otherwise empty folder.
## Convention
**Pay attention** to your class file and your class name. Remember, `lower_snake_case(.rb)` for file name,
`UpperCamelCase` for class name in the class definition. **Both must be singular!** Remember, the class is the structure that allows you to create lots of different restaurants (with `.new`).
### What are the inner properties of your objects?
What are the characteristics of a restaurant? Of a user? Of a game?
Choose some characteristics of your class that you want to model. They will be your **instance variables**, sometimes named **properties**.
### Define the constructor
`initialize` is the instance method called when calling `new` on your class. For instance:
```ruby
# lib/car.rb
class Car
def initialize(model, brand, kilometers)
@model = model
@brand = brand
@kilometers = kilometers
end
end
```
Now define the `initialize` method on the class you chose!
To test it you may want to create a `lib/interface.rb` file and call the `.new` constructor on your class with the relevant arguments, e.g:
```ruby
# lib/interface.rb
require_relative "car"
second_hand_panda = Car.new("Panda 4x4", "Renault", 30_000)
new_testarossa = Car.new("Testarossa", "Ferrari", 0)
```
### Define an instance method
Time to add some **behavior** to your class with an **instance method**.
Here's an example of how we might want to use a `start` instance method on a `Car` class:
```ruby
# lib/interface.rb
require_relative "car"
car = Car.new("T", "Ford", 0)
car.start
```
<file_sep>/02-OOP/01-OO-Basics/02-Getters-and-Setters/spec/vending_machine_spec.rb
require "vending_machine"
describe "VendingMachine" do
let(:vending_machine) { VendingMachine.new(250, 10) }
let(:empty_vending_machine) { VendingMachine.new(250, 0) }
context "Buying Scenario" do
describe "#amount_cents (getter)" do
it "should return the amount of money that has been inserted into the machine" do
expect(vending_machine.amount_cents).to eq(0)
end
end
describe "#snacks (getter)" do
it "should return how many snacks are left in the machine" do
expect(vending_machine.snacks).to eq(10)
expect(empty_vending_machine.snacks).to eq(0)
end
end
describe "#snack_price_cents (getter)" do
it "should return the unit price of a snack" do
expect(vending_machine.snack_price_cents).to eq(250)
end
end
describe "#insert_coin (instance method)" do
it "should update the vending machine account balance (in cents)" do
expect(vending_machine.amount_cents).to eq(0)
vending_machine.insert_coin(100)
expect(vending_machine.amount_cents).to eq(100)
end
end
describe "#buy_snack (instance method)" do
it "should let you buy a snack if you inserted enough money (happy path)" do
vending_machine.insert_coin(200)
vending_machine.insert_coin(100) # We inserted 3 euros, snack is 2.5 euros
vending_machine.buy_snack
expect(vending_machine.snacks).to eq(9)
expect(vending_machine.amount_cents).to eq(50)
end
it "should not let you buy a snack if you didn't insert enough money (error path)" do
vending_machine.insert_coin(100)
vending_machine.buy_snack
expect(vending_machine.snacks).to eq(10)
expect(vending_machine.amount_cents).to eq(100)
end
it "should not let you buy a snack if there aren't any snacks left! (error path)" do
empty_vending_machine.insert_coin(200)
empty_vending_machine.insert_coin(51)
empty_vending_machine.buy_snack
expect(empty_vending_machine.snacks).to eq(0)
expect(empty_vending_machine.amount_cents).to eq(251)
end
end
end
context "Refilling Scenario" do
describe "#snacks=" do
it "should let a technician refill a machine" do
vending_machine.snacks = vending_machine.snacks + 10
expect(vending_machine.snacks).to eq(20)
end
end
end
end
<file_sep>/04-Front-End/09-WX-MP-Frontend/01-WX-MP-Frontend-04/README.md
## Background & objectives
In this challenge, you will learn how to **use the global storage** to send data from page to page.
You will also discover new components such as a **form** and a **tab bar** for navigation!
## Specs
### 1. From Global to Local
Each Page (e.g `stories.js`) has its own separate *local* data to render in its view.
Fortunately, the whole App (`app.js`) shares a *global* data storage we can access everywhere. That's where you want to save anything to re-use. Example: your user information!
Although the [documentation here](https://developers.weixin.qq.com/miniprogram/en/dev/framework/app-service/app.html) says very little about it, note that WXML can't directly access the *global* data storage. You'll have to go through the *local* data first!
```js
//app.js
App({
globalData: {
userInfo: { nickName: "salmon", gender: 1 }
}
})
// setting globalData
```
```js
//page.js
let app = getApp() // register the app instance
Page({
data: { userInfo: app.globalData.userInfo }
})
// take from app.globalData and set the local data
```
```html
<!-- index.wxml -->
<view>Hello {{userInfo.nickName}}</view>
<!-- display our local data -->
```
Following this logic, you want to:
- Use `app.js` to add a **globalData** storage with our `stories` array
- At the top of `stories.js`, use getApp() to get an instance of your App
- Inside your `stories.js` **onShow function**, reset your **local data** so it mirrors the globalData storage
```js
Page({
...
data: {
stories: []
},
onShow: function () {
this.setData({
stories: app.globalData.stories
})
},
...
})
```
**Check the view again.**
If all goes well, your **WX:FOR** loop still renders all your stories!
### 2. Let's post new stories!
You want to **add one new route** to your app to let users POST a new FMC story.
- Edit your `app.json` file to add one new "post" page.
- Still in `app.json`, create a new 'tabBar'! We'll use it to navigate between our 2 main tabs: **stories** on the left, **post** on the right. Check the [settings documentation](https://developers.weixin.qq.com/miniprogram/en/dev/framework/config.html) to implement the tabBar object correctly.
**Test it now!** In your simulator, you should now be able to switch tabs back and forth. Take the time to [set a nice icon](https://www.iconfont.cn/)!
**Let's design a nice form now** 🎨
- In your `post.wxml` view page, use the [form](https://developers.weixin.qq.com/miniprogram/en/dev/component/form.html) component and its **bindsubmit** attribute.
- We'll need two [inputs](https://developers.weixin.qq.com/miniprogram/en/dev/component/input.html) (name and content), and of course one button to submit the form!
Do you know how to **receive this form data** inside the logical layer `post.js`? It's via the `bindsubmit` of your form!
Create a function called **formSubmit**. This function receives an `event` argument with all your form data 😉
```js
Page({
...
formSubmit: function (event) {
console.log(event.detail.value.name)
console.log(event.detail.value.content)
}
...
})
```
Go ahead and **implement this function**:
- taking the values of your form inputs
- saving them inside your App's `globalData`*
- switching the user back to the stories page thanks to the [`wx.switchTab`](https://developers.weixin.qq.com/miniprogram/en/dev/api/ui-navigate.html#wxswitchtabobject) API
*👉 *tip: remember how we've used `getApp()` to get an instance of the App inside our page!*
**Test and test again!** use console.log at every step! 🤓
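To give you an idea of the shape of the final handler, here is a hedged sketch (it assumes `app.globalData.stories` already exists and that your stories tab lives at `/pages/stories/stories`):

```js
// post.js (sketch)
const app = getApp();

Page({
  formSubmit: function (event) {
    app.globalData.stories.push({
      name: event.detail.value.name,
      content: event.detail.value.content
    });
    wx.switchTab({ url: '/pages/stories/stories' }); // back to the stories tab
  }
});
```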
<file_sep>/04-Front-End/07-JavaScript-Plugins/01-Bugfix/README.md
## Background & Objectives
A simple challenge to start the day! This morning you saw how to work with multiple files in a frontend project. You'll find two JavaScript files in the `lib` folder. The code is currently broken, and your job is to fix it.
## Specs
Launch your local server with:
```bash
rake webpack
```
Open [localhost:8080](http://localhost:8080) in your browser, open your console and read the error message.
Found what's missing? Go ahead and fix it! When it's done, you should see a pretty familiar logo displayed in your browser ;)
<file_sep>/02-OOP/01-OO-Basics/Optional-02-Method-Missing/README.md
## Background & Objectives
This is a very advanced exercise. We will now play with the internals of Ruby and the way it handles `NoMethodError`.
### Specs
We want to create a `UberHash` class which will be able to store anything. A classic hash works like this:
```ruby
classic_hash = Hash.new
classic_hash[:color] = "red"
classic_hash[:color]
# => "red"
```
But we want to do this instead:
```ruby
uber_hash = UberHash.new
uber_hash.color
# => nil
uber_hash.color = "red"
uber_hash.color
# => "red"
```
You might think that you just have to put an `attr_accessor :color` on `UberHash`, and that's it, but wait! We want to store **any** possible property like that.
If you try to call an instance method that wasn't defined in the class, Ruby calls a built-in `method_missing` method that raises a `NoMethodError`.
To prevent the `NoMethodError` from being raised, you can define **your own** `method_missing` instance method in your class, and implement it to do whatever you want!
You can read [this article](http://rubylearning.com/satishtalim/ruby_method_missing.html) to get a sense of how and when Ruby calls `method_missing` ([docs here](https://ruby-doc.org/core-2.5.3/BasicObject.html#method-i-method_missing)).
You may also want to read how Ruby lets you dynamically [get](https://ruby-doc.org/core-2.5.3/Object.html#method-i-instance_variable_get) or [set](https://ruby-doc.org/core-2.5.3/Object.html#method-i-instance_variable_set) an instance variable.
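To make the mechanism concrete, here is a generic sketch of `method_missing` (a toy class, not the `UberHash` solution):

```ruby
class Politician
  def method_missing(method_name, *args)
    puts "You asked for #{method_name} with #{args.inspect}. I'll get back to you!"
  end

  # Good practice: keep `respond_to?` consistent with `method_missing`
  def respond_to_missing?(method_name, include_private = false)
    true
  end
end

Politician.new.lower_taxes(2024)
# => You asked for lower_taxes with [2024]. I'll get back to you!
```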
Good luck!
<file_sep>/04-Front-End/01-HTML-and-CSS/03-Finishing-profile-design/README.md
## Setup
Again, if you haven't copied the files from the previous exercise yet, let's copy the profile folder from the previous exercise into the current directory of this challenge:
```bash
cp -r ../02-Fonts-and-colors/profile .
```
## Background & Objectives
Play with the box model (`margin/border/padding/width/height`) by splitting your profile information into different `<div>` tags. Then, use advanced CSS selectors (id, class, grouping, descendant selectors) to fine-tune your page with a more subtle design.
Don't forget to **hard refresh** your browser (`cmd + shift + r`) to clear your browser's cache if your page doesn't seem to display your current code!
## Specs
### Step 1: Box Model
Here is [your objective](https://lewagon.github.io/html-css-challenges/03-box-model-and-selectors/).
- You should start with the following structure for your page
```html
<div class="container">
<div class="card"></div>
<div class="card"></div>
</div>
```
- The `<div class="container">` is here to center the page content using left & right margins to prevent it from taking up 100% of the window (which would be ugly).
- The `<div class="card">` is used to group information into a nice white box.
- Add cool touches to your cards using CSS properties like `background`, `border`, `border-radius` and `box-shadow`. Keep it simple, with white background and subtle radius and shadows (as you saw in the lecture).
### Step 2: Selectors
Any time you want to **name** an element of your page, ask yourself:
- Should I use a `class` or an `id`? Is it unique or re-usable?
- What name should I pick for my class? Respect the `component-shape` convention
- Should I split this design into several classes instead of one big class?
Here is an example of **bad** CSS code:
```css
#home-page-first-image {
border-radius: 50%;
}
.home-card {
text-align: center;
background: white;
padding: 30px;
border: 1px solid lightgrey;
}
```
And here is the **good version** of it:
```css
.img-circle {
border-radius: 50%;
}
.text-center {
text-align: center;
}
.card-white {
background: white;
padding: 30px;
border: 1px solid lightgrey;
}
```
- Making an image circle and centering texts are **very common design tasks**. They deserve their own re-usable class, not to be mixed in other classes or ids!
- Don't repeat yourself and try to use **generic class names**. Consider each CSS class as a re-usable design that you can apply everywhere on your website. Getting this mindset is the main difficulty for CSS beginners.
## Further suggestions & resources
### Container
Here is the div centering technique for the main container:
```css
.container {
width: 500px; /* This sets the width */
margin: 0 auto; /* This automatically sets left/right margins */
}
```
### Inline list
To design your lists of icons, you'll have to change the `block` behavior of list items by **inlining them**. Here is the corresponding CSS rules.
```css
.list-inline > li {
display: inline-block;
padding: 0px 20px;
}
```
Even inline, a list `<ul>` has some `padding-left` and bullet points that you will also have to kill to have a nicer looking list.
```css
.list-inline {
list-style: none;
padding-left: 0px;
}
```
From this exercise onwards, **it is critical that you use your developer tool (`inspect`)** to play with your CSS in the browser and test things out before you write the final code.
## Finished?
Once you've finished you can push this exercise and copy the content to the next exercise folder with this command:
```bash
# Push to Github
git add .
git commit -m "Added div to my profile page"
git push origin master
# Copy folder into next exercise folder
cp -r profile ../04-Responsive-profile
```
<file_sep>/04-Front-End/01-HTML-and-CSS/02-Fonts-and-colors/README.md
## Setup
We want to continue building our profile page. If you haven't already, let's copy our previous profile into this challenge's folder and add a CSS file to it:
```bash
cp -r ../01-Profile-content/profile . # don't forget the trailing dot!
cd profile
touch style.css
```
## Background & Objectives
Add simple CSS rules to design **fonts** and **colors** for your profile page.
Don't forget to **hard refresh** your browser (`cmd + shift + r`) to clear your browser's cache if your page doesn't seem to display your current code!
## Specs
[This is an example](https://lewagon.github.io/html-css-challenges/02-fonts-colors-new/) of what you need to create. Here is a list of CSS rules to write:
### Body
Pick a nice `background-color`, `font-family`, `color`, `font-size` and `line-height` for the `<body>`. **Setting these font rules at the body-level will apply them on all basic text tags** (`<p>`, `<li>`, etc.).
### Headers
- Choose a nice `color` and `font-family` for headers (`<h1>`, `<h2>`, `<h3>`)
- Choose harmonious `font-size` and `line-height` for headers
- Hint: **small headers** are more elegant. If you check out any website ([Medium](https://medium.com/), [Airbnb](https://www.airbnb.com), etc.), you'll see that the `font-size` of their headers is quite small.
### Links
- Change the `color` & `text-decoration` of all the links
- Add hover effects to links using the pseudo-class `a:hover`.
## Further suggestions & resources
- Find inspiration on [Coolors](http://coolors.co/) or [Color hunt](http://colorhunt.co/) to choose an awesome color scheme.
- Pick your fonts on [Google fonts](https://www.google.com/fonts)
- On Google fonts, **Open Sans** is the standard choice for the `body`. **Raleway**, **Varela**, **Poppins**, **Roboto** are a few good ones you could use for `h1`, `h2`, `h3`.
For instance, if you wanted to use Open-Sans and Poppins (with different font-weights), you could add this at the top of your `style.css`:
```css
@import url("http://fonts.googleapis.com/css?family=Open+Sans:400,300,700|Poppins:300,400,500,700");
```
Then you can apply it in the rest of your CSS:
```css
body {
font-family: 'Open Sans', sans-serif;
font-weight: 300; /* if you want the light version */
}
```
**Feel free to choose other fonts on Google fonts and be creative 😎🌈!**
## Finished?
Once you've finished you can push this exercise and copy the content to the next exercise folder with this command:
```bash
# Push to Github
git add .
git commit -m "Added fonts & colors to my profile page"
git push origin master
# Copy folder into next exercise folder
cp -r profile ../03-Finishing-profile-design
```
<file_sep>/02-OOP/05-Food-Delivery-Day-One/Optional-01-Medium-pocket-part-two/README.md
## Background and Objectives
You made your way through the first part of the Food Delivery challenge, kudos!
In this challenge, we are going to extend the Medium Pocket challenge (Cookbook Day One's optional challenge), with an `Author` model. We are going to model the following relation between `Post` and `Author`:

We want to extend the user actions to:
```
1. List posts
2. Save post for later
3. Read post
4. Mark post as read
5. List authors # NEW
6. List author's posts # NEW
7. See author info # NEW
8. Exit
```
As you probably imagine, the info about authors will be scraped when a post is saved. Let's go ahead and think through our architecture with that in mind.
## Specs
Start by navigating to Cookbook Day One's Medium Pocket optional challenge and downloading the solution. This will be your starting point for coding this extended version.
### Models
First of all, we need to code an `Author` model with instance variables we can deduce from the schema.
**Author-posts relationship**
The relation we want to model between posts and authors is the following:
```
An author can write several posts
A post is written by an author
```
This means that in the code:
- We need to add a `@posts` instance variable in `Author` (and expose it to reading)
- The `@posts` is an array of **`Post` instances**
- We need to update the `@author` instance variable in `Post` to no longer store the name, but **the `Author` instance instead** (and expose it to reading)
- We need to add `@id`s in both models to persist the relationship in our csv files
- The relationship is carried by the children (the posts here), meaning we need an `author_id` column in our `posts.csv`
To associate a post to its author, consider coding the following:
```ruby
# lib/models/author
class Author
# [...]
def add_post(post)
@posts << post
post.author = self # <-- what do you need in your Post class to write this?
end
end
```
Test your relationship in `irb`, fix bugs and move on to the repositories.
### Repositories
The `Repository` should become a `PostRepository`, and we need to add the `id` and `author_id` to the CSV mechanism.
**As a reminder, it's the repository's role to give ids to the instances whenever it adds them.**
Consider renaming `find(index)` into `find_by_index(index)`. We'll keep the `find` denomination for the method that takes an `id` as a parameter.
We need to code an `AuthorRepository` and link it to an `authors.csv` file. Nothing exotic in this repo for now.
In the `PostRepository`, we need to update the `load_csv` method now that a post is related to an author object.
On a given post, when we read the `author_id` from the csv, we need to find the corresponding author instance to associate it to our post object. This means:
- The `PostRepository` must be instantiated after the `AuthorRepository`
- The `PostRepository` should have access to an `@author_repo` instance
- The `AuthorRepository` should have a `find(id)` method that returns the right `Author` instance from its `@authors` array
Make sure your repositories work well before moving on to the applicative brick of our software.
### Controllers
Start by renaming `Controller` to `PostsController`. We want to add user stories related to authors, and they will be served by an `AuthorsController`.
Before coding any new user stories, try running the existing ones. You'll find a series of bugs to fix due to all the changes we made.
**Fix the save post feature**
The hardest part of the challenge is just around the corner. We said we wanted to scrape details about the post's author when saving it to our Pocket app.
We already know how to scrape a post. We need to update our scraping script to get the author's **Medium nickname** on the post's page.
Thanks to the Medium nickname, you should be able to open the author's page and scrape the information detailed in the schema.
Once you have all the data needed to create a post, go ahead and instantiate a `Post.new`. Same thing for the `Author.new`. Let's pause a minute before adding them to our repositories.
For simplicity's sake, let's assume our user is smart and won't try to save a post that is already saved. However, if you save a 2nd post by the same author, **you don't want to duplicate the author in your repo**! Consider the following steps in your `PostsController#create` action:
```ruby
def create
# 1. Ask user for a post path (view)
# 2. Scrape post (?)
# 3. Scrape author (?)
# 4. Find corresponding author (author repo)
# 5. If we don't have it, save it and get it back with its id (author repo)
# 6. Associate post to its author (model)
# 7. Save post (post repo)
end
```
Code everything in the `PostsController` until the action works fine. Then you might consider extracting the scraping part into a service object. A fine candidate would be:
```ruby
# lib/services/medium_scraper.rb
class MediumScraper
def initialize(post_path)
@post_path = post_path
end
def call
# Scrape post
# [...]
post = Post.new(post_attributes)
# Scrape author
# [...]
author = Author.new(author_attributes)
return { post: post, author: author }
end
end
```
Make sure it still works, and move on to the next user story.
**List authors**
Phew, the hardest part is behind us. This one is an easy one. We want to list authors, so let's code an `AuthorsController`.
It will need to access the author repo and an authors view. The view must display authors' names and their number of unread posts in an indexed list:
```
1. <NAME> (1 unread)
2. <NAME>
```
**List an author's posts**
Our work on the models should make this one pretty easy. We can access all posts from an author thanks to the `posts` getter. Write the pseudo-code to break down the problem into small steps, test, and move on to the last feature.
**See author's details**
As always, start by breaking down the action into small steps in pseudo-code, then translate it line by line into Ruby and test regularly.
Our user must see the following in the terminal when choosing Boris:
```
1. <NAME> (3 unread)
2. <NAME> (1 unread)
3. Le Wagon Montréal
Index?
> 1
<NAME> (@papillard)
CEO at Le Wagon. Bringing tech skills to creative people and entrepreneurs.
71 following - 753 followers
```
Happy modeling!
<file_sep>/02-OOP/05-Food-Delivery-Day-One/01-Food-Delivery/app.rb
# TODO: require relevant files to bootstrap the app.
# Then you can test your program with:
# ruby app.rb
require_relative 'app/repositories/meal_repository'
meal_repository = MealRepository.new("data/meal_repository.csv")
<file_sep>/04-Front-End/09-WX-MP-Frontend/01-WX-MP-Frontend-OPT-02/README.md
## Background & objectives
The benefit of using WeChat is all its user data... Why not take advantage of it by allowing our users to **log in**?
## Specs
### 1. Get the golden data
Change your **landing page button** to turn it into a login button!
- add an **open-type** of `getUserInfo` and a **bindgetuserinfo** handler.
- You'll need the [documentation](https://developers.weixin.qq.com/miniprogram/en/dev/component/button.html)...
Create the `getUserInfo` function and console.log its event parameter 😉
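A minimal sketch of what this could look like (the handler name and markup are assumptions; check the documentation above for the exact attributes):

```html
<!-- landing.wxml (sketch) -->
<button open-type="getUserInfo" bindgetuserinfo="getUserInfo">Login with WeChat</button>
```

```js
// landing.js (sketch)
Page({
  getUserInfo: function (event) {
    console.log(event.detail.userInfo); // nickName, avatarUrl, gender...
  }
});
```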
### 2. Use it!
**Persist** this user data globally, using your App's globalData or the cache storage.
**Improve the user experience!** 🏗️
- Display the avatar of your user somewhere in the app
- Pre-fill the name input, or even hide it?
<file_sep>/02-OOP/02-OO-Advanced/03-Instance-vs-Class/lib/restaurant.rb
class Restaurant
# TODO: add relevant accessors if necessary
def initialize(city, name)
# TODO: implement constructor with relevant instance variables
end
# TODO: implement .filter_by_city and #rate methods
end
<file_sep>/04-Front-End/README.md
The Front-end module has 7 days + 1 project-day to start preparing your final projects.
## `01 HTML & CSS`
You'll cover a lot of HTML & CSS concepts in the next few days, such as:
- HTML skeleton and common tags (in `<head>` and `<body>`)
- CSS simple and advanced selectors
- Web-design resources for fonts & colors (google fonts, fontawesome, etc.)
The objective of this day is to build [your profile page](https://papillard.github.io/my-profile/) and put it online using [Github Pages](https://pages.github.com/).
## `02 CSS Components`
We will go a bit further and make you code your own CSS component library. The objectives are:
- Improve the way you organize and store components
- Re-code your [own UI Kit](https://uikit.lewagon.com)
**Take-away**: Once you've finished, you'll be able to take HTML/CSS code directly from [Le Wagon UI Kit](https://uikit.lewagon.com).
## `03 Bootstrap, Grids & Layouts`
We will first have a look at the CSS framework Bootstrap and everything it gives us. Then we will explore the different ways to structure your webpages by looking at the Bootstrap grid, flexboxes for simple layouts, and CSS grid for more complex ones. The objectives at the end of the day are:
- For rich components, we've now built our own CSS library
- For standard ones, let's not re-invent the wheel! We'll be using built-in Bootstrap classes
- Using the Bootstrap grid for functional responsive layouts
- Using flexboxes for horizontal or vertical alignment of components
- Using CSS grid to build mosaics of cards
## `04 JavaScript Basics`
First day of the JavaScript submodule! JavaScript is THE programming language that is understood by browsers. It's still a programming language, like Ruby, with types, variables, functions, etc. But the syntax in JavaScript is different from Ruby's, so you need to familiarise yourself with it.
For today's challenges, we'll leave the browser and go back to the terminal to play with algorithmics in JavaScript!
Then, instead of the live-code it's Pitch Night! You will have 5 minutes to pitch your idea in front of the class. Have a look at `Front-End/Bootstrap/Pitch-Preparation` if you want a head-start on preparing.
## `05 DOM & Events`
Now that we've picked up the syntax, let's go back to the browser and use JavaScript as a frontend language. On this day, we will introduce the DOM – how the browser processes the HTML webpage – and how we can use JavaScript to interact with it.
We will also have a look at the different user events we can listen to with JavaScript in order to build highly dynamic interfaces.
## `06 HTTP & AJAX`
Let's rehearse the basics of the Web with HTTP requests / response cycles.
We will discover how to fetch content dynamically using AJAX and JavaScript without having to reload the page.
## `07 JavaScript Plugins`
In this lecture, we will download external JavaScript packages with **[Yarn](https://yarnpkg.com/en/)** and use them in our interfaces. We'll also see how to organise our code in several files in a frontend app.
## `08 Product Design Sprint`
This is a special day. **No coding 😱, just prototyping and Product design 🖌🖍📝**. The objective is to:
- Prototype your project with your team
- Make a demo of your project in front of the class as if you had coded it.
<file_sep>/02-OOP/02-OO-Advanced/02-Dessert-inheritance/spec/bicycle_spec.rb
require "bicycle"
describe "#quiz" do
it "should understand #super behavior" do
expect(quiz).to eq true
end
end
<file_sep>/04-Front-End/01-HTML-and-CSS/05-Push-on-Github-Pages/README.md
## Setup
We don't want to keep your profile within `fullstack-challenges` because we want to version it with `git` as a separate project. So let's copy your profile as an independent folder and jump into it:
```bash
cd ~/code/<user.github_nickname>/fullstack-challenges/04-Front-End/01-HTML-and-CSS/04-Responsive-profile
cp -r profile ~/code/<user.github_nickname>
cd ~/code/<user.github_nickname>/profile
```
## Create the Github repo
Let's install the `hub` gem to create a Github repo from your terminal without opening your browser (very useful when you are lazy 😉)
```bash
gem install hub
```
Now you can **init**(ialize) a git repository, commit your changes, and create the associated Github repo:
```bash
git init
git add .
git commit -m "my profile page"
hub create # this creates the associated repo on Github!
```
To open the Github repo from your browser you can run:
```bash
hub browse
```
## Github Pages
[Github Pages](https://pages.github.com/) is a sub-service of Github that makes it easy to deploy any **static website** in 10 seconds (static == not a Rails app). It is based on a "magic" branch, called `gh-pages`. When Github detects this branch, it puts your website online. Awesome right? Let's create this magic branch and push it. ✨🌿✨
```bash
git checkout -b gh-pages
git push origin gh-pages # we push the gh-pages branch, not master!
```
Now you can build the URL `http://<user.github_nickname>.github.io/profile/` (this is the URL built automatically by Github) and have a look at your masterpiece online! Share the link on Slack with your buddies.
From now and until the end of the day, you can keep working in your `~/code/<user.github_nickname>/profile` directory AND on the `gh-pages` branch. This means any updates of your profile can be pushed on `http://<user.github_nickname>.github.io/profile/` through usual git commands:
```bash
git add .
git commit -m "make my profile prettier"
git push origin gh-pages
```
## Submit your code to Kitt
As you weren't working in `fullstack-challenges`, your work won't be submitted on Kitt. If you want to mark this challenge as complete, you can do the following:
```bash
cd ~/code/<user.github_nickname>/fullstack-challenges/04-Front-End/01-HTML-and-CSS/05-Push-on-Github-Pages
cp -r ~/code/<user.github_nickname>/profile .
rm -rf profile/.git
git add .
git commit -m "Submitting my work to Kitt"
git push origin master
```
<file_sep>/04-JavaScript-basics/01-Even-or-odd/README.md
## Background & Objectives
Today is your first day of JavaScript. The goal of this first day is for you to realize that it's a programming language, like Ruby. It has variables, functions, conditions, loops, etc.
Today we won't need the browser. Instead, we'll use [Node.js](https://nodejs.org/en/) to execute some JavaScript directly in our terminal.
Make sure that the following command returns a version greater than `10`:
```bash
node -v
```
If not, Node might not be installed on your system. Please have a look at the Setup section in the lecture slides.
## Install Eslint Sublime Linter
Before starting, take the time to install **Eslint Sublime Linter** in Sublime Text:
1. Open Sublime Text
```bash
stt
```
2. In **Sublime Text**, open **Package Control**'s menu:
```bash
# macOS
cmd shift p
# Ubuntu
ctrl shift p
```
3. Type `install package` and select `Package Control: Install Package`
4. Type `SublimeLinter-eslint` and select it
5. Restart Sublime Text
It will instantly highlight your syntax errors / style offenses in Sublime Text. Picking up the JavaScript syntax after Ruby may be tricky, so this should help you **a lot**.
## Specs
Let's start with a very simple algorithm. Open the `lib/even_or_odd.js` file. Implement the `evenOrOdd` function which takes one parameter `number` (of type `Number`) and returns a `String`:
- `"even"` if the number is even (0, 2, 4, etc.)
- `"odd"` if the number is odd (1, 3, 5, etc.)
**⚠️ Warning**: In JavaScript, you need to **explicitly** write the `return` keyword, otherwise [the function will return `undefined`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/return#Syntax)! The only exception to this rule is when you use a one-liner arrow function with [implicit return](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions#Function_body).
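For example (generic functions, not the exercise solution):

```js
// Implicit return: a one-liner arrow function without braces returns its expression
const double = number => number * 2;

// As soon as you open a block with braces, you must `return` explicitly
const triple = (number) => {
  return number * 3; // without this `return`, the function would return undefined
};
```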
_Hint: remember the Ruby modulo operator? It also exists in JavaScript and might be useful!_
Run `rake` to check your style and the correctness of your function!
Once the first exercise is all green (style + tests), **please commit and push** 🙏
## About the testing setup
Open the `Rakefile` in Sublime Text. You will find two tasks:
- `eslint`, a [JavaScript linter](http://eslint.org/), the equivalent of Rubocop in the Ruby world.
- `mocha`, a [JavaScript testing framework](https://mochajs.org), the equivalent of Rspec in the Ruby world.
These two commands are run from the `node_modules` folder, which was created by running `yarn install` in the `04-FrontEnd` folder (`cd ../..`), reading the `package.json` file (open it!).
<file_sep>/04-Front-End/06-HTTP-and-AJAX/03-AJAX-Autocomplete/lib/index.js
// TODO: Autocomplete the input with AJAX calls.
const input = document.getElementById("search");
const results = document.getElementById("results");

const callApi = () => {
  fetch(`https://wagon-dictionary.herokuapp.com/autocomplete/${input.value}`)
    .then(response => response.json())
    .then((data) => {
      data.words.forEach((word) => {
        results.insertAdjacentHTML("beforeend", `<li>${word}</li>`);
      });
    });
};

const displaySuggestions = () => {
  results.innerHTML = ""; // clear previous suggestions before appending new ones
  callApi();
};

input.addEventListener("keyup", displaySuggestions);
<file_sep>/02-OOP/02-OO-Advanced/02-Dessert-inheritance/lib/dessert.rb
class Dessert
def initialize(name, calories)
end
def healthy?
end
def delicious?
end
end
class JellyBean < Dessert
def initialize(name, calories, flavor)
end
end
<file_sep>/04-Front-End/02-CSS-components/06-Take-away/README.md
## Your CSS future is bright!
Congratulations on today's work; it was a very important day in your CSS career.
From now on:
- You can use the [Le Wagon UI Kit](https://uikit.lewagon.com/) without recoding simple components every time 😊
- Remember, always **take time to think about your HTML structure** (wrapper, container, inside components, etc.). It's **90% of the work**. Once you've done that, you just need to re-use components and make some small CSS adjustments.
## [Optional] Enhanced profile on Github Pages
For those who have time, let's improve your online profile page (the one you pushed on Github Pages yesterday). You can now add some awesome components to your page to make it look slick 😍. If you're looking for personal website design inspiration, have a look at [One Page Love](https://onepagelove.com/) or [Dribbble](https://dribbble.com/search?q=personal).
## [Optional] Flexbox Froggy
For those who want to practice their flexbox skills, here's a cool [froggy game](http://flexboxfroggy.com/). We love it, because we are French 🐸
<file_sep>/04-Front-End/09-WX-MP-Frontend/01-WX-MP-Frontend-03/README.md
## Background & objectives
The goal of this challenge is to practice advanced WXML attributes to make dynamic views.
## Specs
We want to show more than one story in the **stories page** without repeating the same WXML markup.
### 1. Start from the data!
- In `stories.js`, locate the data key at the top. Add a `stories` array inside.
```js
//stories.js
data: {
stories: []
}
```
- Each story will be a **new object** stored inside the `stories` array.
- Each story will have a **content** and **name**.
```js
{ content: "Building a mini program is fun!!! FMC.", name: "Yinghui" },
```
You want to create 2-5 stories. After saving this local data successfully, you can preview it in the **AppData** tab of the console ("debugger"). Anything stored here is available in our WXML template!
### 2. Render a list
We can now render our list of stories using the [`WX:FOR`](https://developers.weixin.qq.com/miniprogram/en/dev/framework/view/wxml/list.html) control attribute.
In your **stories.wxml** page:
- Wrap your card component in a `<view>` or `<block>` element (they are equivalent, just containers)
- Use the `wx:for` control attribute to repeat this container element for each item of your stories array
- Use `wx:for-item` to specify the variable name of the current element of the array
- Use some `{{mustache}}` syntax inside your card, to display the **content** and **name** keys of your stories!
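Putting those attributes together, a minimal sketch could look like this (the markup inside the loop is up to you):

```html
<!-- stories.wxml (sketch) -->
<view wx:for="{{stories}}" wx:for-item="story" class="card">
  <text>{{story.content}}</text>
  <text>by {{story.name}}</text>
</view>
```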
### 3. With conditions
What if we had **no stories to show**? An empty page wouldn't be so nice! 😱
We can use the [`WX:IF`](https://developers.weixin.qq.com/miniprogram/en/dev/framework/view/wxml/conditional.html) control attribute to anticipate this case.
- Make a card with a welcome message: "*There's nothing here yet!*"
- Render this card **if** the `stories` array is empty!
👉 Tip: an empty array has a length equal to 0
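For instance (a sketch, adapt the class names to your own card):

```html
<!-- stories.wxml (sketch) -->
<view class="card" wx:if="{{stories.length === 0}}">
  <text>There's nothing here yet!</text>
</view>
```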
<file_sep>/04-Front-End/09-WX-MP-Frontend/01-WX-MP-Frontend-01/README.md
## Your take away
This day is split into **four challenges**. By completing all of the challenges you should be able to ship your first WeChat Mini Program today: _"F*** My Code"._
Have you heard of [Fmylife.com](https://fmylife.com)? Visitors share personal stories of some funny sh*t they have been through. Every message ends with "FML". We're going to make a copycat, but about your new life as programmers 😆
After each demonstration by your lead teacher, you will write your own solution to the challenge!
**Morning challenges**
1. Create a Landing Page to greet users
2. Create a Stories Page to display our FMC stories
3. Improve the Stories page with advanced WXML
**Afternoon challenges**
4. Create a Post Page and use global data
5. Optional: Using the cache
6. Optional: Login users
---
### Challenge 1: Create a Landing Page to greet users
## Background & objectives
The goal of this first challenge is to understand the basic structure of an app, and in particular the view layer written in WXML/WXSS.
## Specs
### 1. Setup your app
Create a new WeChat Mini Program and name it FMC. Using its setup file `app.json`, customize the **navigation bar** with a better name and style.
### 2. Use the Pages array
Use the setup file `app.json` to add a new route inside the array `pages:[]`:
```
"pages/landing/landing"
```
⚠️ Warning: don't forget to separate each route by a comma! If you break the array `pages:[]`, your app is down...
When you save this file, WeChat's IDE will generate a new page folder for you. All files inside are a boilerplate for you to use! 👏
### 3. Build a quick landing page
This window will greet your users and explain the concept behind the FMC app. You want to make a great first impression!
Save time by transforming the [Banner Component from Le Wagon UI](https://uikit.lewagon.com/documentation#banner) into a quick landing page.
**Tip**: WeChat Mini Programs CAN’T load a CSS `background-image` with a `local file` (inside your directory)...
It’s a restriction of the framework, to *keep your app lightweight.* Instead, you must load a `remote file` 🌏, and it must come through HTTPs!
Let's use some `inline CSS` through the style="" attribute:
```
<view class="banner" style="height: 100vh; background-image: linear-gradient(rgba(0,0,0,0.4),rgba(0,0,0,0.4)), url(https://www.lewagon.com/api/v1/cities/shenzhen/cover?width=1200);">
</view>
```
<file_sep>/02-OOP/01-OO-Basics/04-Public-Interface/spec/bank_account_spec.rb
require "bank_account"
describe BankAccount do
let(:account) { BankAccount.new("<NAME>", "FR14-2004-1010-0505-0001-3M02-606", 200, "yoko") }
describe 'Getters' do
it 'has a way to access the account name' do
expect(account.name).to eq '<NAME>'
end
it 'has a way to access the account balance' do
expect(account.balance).to be_a Integer
end
end
describe '#add_transaction (private)' do
it 'add transactions to your account' do
account.send(:add_transaction, 200)
expect(account.instance_variable_get(:@transactions).length).to eq 2
end
it 'update its balance when a new transaction is made' do
account.send(:add_transaction, -100)
expect(account.balance).to eq 100
end
end
describe '#to_s' do
it 'returns a string' do
expect(account.to_s).to be_a String
end
it 'returns data about the account' do
expect(account.to_s).to match /#{account.name}/
end
it 'returns hidden iban' do
expect(account.to_s).to match /FR14\*+606/
end
end
describe '#withdraw' do
it 'changes the account balance' do
account.withdraw(50)
expect(account.balance).to eq 150
end
it 'returns a message containing the withdrawn amount' do
expect(account.withdraw(150)).to match /150/
end
end
describe '#deposit' do
it 'changes the account balance' do
account.deposit(200)
expect(account.balance).to eq 400
end
it 'returns a message containing the deposit amount' do
expect(account.deposit(150)).to match /150/
end
end
describe '#transactions_history' do
it 'verifies the given password' do
expect(account.transactions_history(password: '<PASSWORD>i')).to eq 'wrong password'
end
it 'is not happy if you do not provide a password' do
expect(account.transactions_history()).to eq 'no password given'
end
it 'displays all your transactions if given the correct password' do
account.instance_variable_set(:@transactions, [300])
expect(account.transactions_history(password: '<PASSWORD>')).to match /300/
end
end
end
<file_sep>/02-OOP/02-OO-Advanced/02-Dessert-inheritance/README.md
## Background & Objectives
So why do we need inheritance? Because we don't want to have the same logic repeated in multiple places in our code! If multiple classes share some of the same behaviour then it may be time to introduce inheritance.
e.g. You want to code a generic `Parser` with the basic features (read a file, store its content, etc.). After a while, you decide you want to create more specific parsers like `XmlParser` or a `JsonParser` to handle specific formats. By making these new classes **children** of the `Parser` class, it means you don't have to re-write all the basic methods created in Parser, and you only need to create the methods that are **specific** to your Xml or Json needs. So inheritance keeps things DRY!
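As a rough sketch of that example (class and method names are illustrative, JSON flavour only):

```ruby
require "json"

class Parser
  def initialize(filepath)
    @filepath = filepath
  end

  # Generic behaviour shared by every parser
  def read
    File.read(@filepath)
  end
end

class JsonParser < Parser
  # Only the JSON-specific part lives here; `read` is inherited from Parser
  def parse
    JSON.parse(read)
  end
end
```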
Read more about inheritance in [ruby learning](http://rubylearning.com/satishtalim/ruby_inheritance.html).
## Specs
#### Dessert inheritance
Complete the class `Dessert`
- Add getters and setters for `name` and `calories`
- The instance method `Dessert#healthy?` should return `true` if a dessert has less than 200 calories
- `Dessert#delicious?` should return `true` for all desserts 😊
Complete `JellyBean` which inherits from `Dessert`
- Add a getter for `flavor`
- Modify `delicious?` to return false if the flavor is `"black licorice"` (but still true for everything else).
#### Super bicycle
- In `bicycle.rb`, replace all the `"?"` in the `#quiz` method with the correct integer.
#### (Optional)
- Do you know what the `super` keyword does? If you do, use it to rewrite `JellyBean#initialize` in `dessert.rb`.
## Key learning points
- Why do we make classes inherit from others? What's the point?
- What's the keyword to extend the behavior of an inherited method?
- Let's assume we have `class Bike < Vehicle` and we defined `Vehicle#drive`. If we implement `Bike#drive` which method will apply to `Bike` objects, `Vehicle#drive` or `Bike#drive`?
- Can you find any examples of object inheritance in Rails?
- Digression: About `nil?` and inheritance. In the ruby doc, look for the ruby implementation of the `nil?` method in the `NilClass`, and in the `Object` class which is the superclass of all ruby objects. Now try to figure out what happens exactly when you call `an_example_object.nil?`. If you find out the answer with so few clues, we'll buy you a beer 😊
<file_sep>/02-OOP/02-OO-Advanced/01-Dog-inheritance/lib/dog.rb
class Dog
def bark
puts "woof woof"
end
end
class GermanShepherd
end
<file_sep>/02-OOP/04-Cookbook-Day-Two/01-Cookbook-Advanced/README.md
⚠️ There's **no `rake`** for this exercise. Sorry 😉
So now we want to enhance our cookbook by finding recipes online. We will use
[🇫🇷 Marmiton](http://www.marmiton.org) or [🇬🇧 LetsCookFrench](http://www.letscookfrench.com/recipes/find-recipe.aspx), because their markup structure is pretty clean (making them good candidates for parsing). If you want to choose another recipe website, please go ahead! It just needs to have a **search** feature where the search keywords are passed in the [query string](https://en.wikipedia.org/wiki/Query_string).
## Setup
First, let's copy paste your Cookbook's code in today's challenge `lib` folder:
```bash
# make sure you're in the right directory
cd ~/code/<user.github_nickname>/fullstack-challenges/02-OOP/04-Cookbook-Day-Two/01-Cookbook-Advanced
# copy your code from Cookbook Day 1
cp -r ../../03-Cookbook-Day-One/02-Cookbook/lib .
```
You can also take the solution from the livecode as a starting point for today (ask your teacher to share it in Slack).
Before starting, run your pasted cookbook to make sure that day one's user actions (list / create / destroy) are working!
```bash
ruby lib/app.rb
```
## 1 - Import recipes from the web
You can scrape from any recipe website that you know, but good ones are [LetsCookFrench](http://www.letscookfrench.com/recipes/find-recipe.aspx) and [Marmiton](http://www.marmiton.org/) for the french speakers. Here's how this feature should work:
```
-- My CookBook --
What do you want to do?
1. List all recipes
2. Add a recipe
3. Delete a recipe
4. Import recipes from LetsCookFrench
5. Exit
> 4
What ingredient would you like a recipe for?
> strawberry
Looking for "strawberry" on LetsCookFrench...
1. Strawberry shortcake
2. Strawberry slushie
3. Strawberry martini
[...] display only the first 5 results
Which recipe would you like to import? (enter index)
> 2
Importing "Strawberry slushie"...
```
### Pseudo-code
For this new **user action** (hence new _route_), we need to:
1. Ask a user for a keyword to search
2. Make an HTTP request to the recipe's website with our keyword
3. Parse the HTML document to extract the first 5 recipes suggested and store them in an Array
4. Display them in an indexed list
5. Ask the user which recipe they want to import (ask for an index)
6. Add it to the `Cookbook`
### Analyze the page markup
First, let's have a look at how we'll retrieve information from the Web.
You can download an HTML document on your computer with the `curl` command. Get the following HTML page saved as a `.html` file in your working directory by running one of these two commands in the terminal:
```bash
curl --silent 'https://www.marmiton.org/recettes/recherche.aspx?aqt=fraise' > fraise.html
curl --silent 'http://www.letscookfrench.com/recipes/find-recipe.aspx?aqt=strawberry' > strawberry.html
```
👆 **This step is really important**!
The reason why we keep the page on our hard drive is that we need to run Ruby scripts over it hundreds of times to test our code. It's much faster to open the file on disk rather than making a network call to Marmiton/LetsCookFrench every time (that would probably also get us blacklisted).
### Parsing with Nokogiri
Nokogiri is a cool and famous gem used to parse HTML documents (it does other stuff too!). Here is how you can use it to parse a document once you know the CSS selectors of the elements you are interested in. CSS selectors will be explained later, but the gist of it is that you can select all elements with a given `class` attribute by creating the query `.class`.
For instance, if you want to find all elements with the `student` class in the following HTML, you will use the query `".student"`
```html
<ul>
<li class="student">John</li>
<li>Paul</li>
<li class="student">Ringo</li>
</ul>
```
You can use the following boilerplate code to start:
```ruby
require 'nokogiri'
file = 'fraise.html' # or 'strawberry.html'
doc = Nokogiri::HTML(File.open(file), nil, 'utf-8')
# Up to you to find the relevant CSS query.
```
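Continuing from that boilerplate, selecting and printing elements looks like this (the `.student` selector refers to the toy HTML above; the selectors on the real recipe page will be different and are yours to find):

```ruby
doc.search(".student").take(5).each do |element| # keep only the first 5 matches
  puts element.text.strip
end
```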
You can work in a temporary file -- `parsing.rb` for instance -- to find the right selectors and the Ruby code to get all the data you want to extract from the HTML. You can start by just displaying the extracted information with `puts`. Once you've found all the selectors you need, go on and code the action in your cookbook.
**Resource**: here's a [good starting point for Nokogiri](https://www.sitepoint.com/nokogiri-fundamentals-extract-html-web/).
### Get response HTML data using `open-uri`
Time to use your parsing code on a live URL with different queries (not just `[fraise|strawberry]`). Use the [open-uri](http://www.ruby-doc.org/stdlib/libdoc/open-uri/rdoc/OpenURI.html) library to get the HTML response from a given URI:
```ruby
require 'nokogiri'
require 'open-uri'
url = "http://the_url_here"
doc = Nokogiri::HTML(open(url), nil, 'utf-8')
# Rest of the code
```
### `Controller` / `View` / `Router`
Once you have this parsing logic, it's time to add this new user action to your `Controller`. Use the pseudo-code above as a guide for this new method. For your first attempt, you can copy-paste the working parsing code into your controller.
Think about the **class** that should be used to hold information parsed from the web. What is it?
Try it live by running your Cookbook!
## 2 - Add a `@prep_time` property to `Recipe`
This new property should be (see the sketch after this list):
- Stored in the CSV
- Parsed from the web when importing a recipe
- Printed when listing the recipes
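A minimal sketch of the model change, assuming your `Recipe` currently takes a name and a description (adapt it to your own attributes and CSV columns):
```ruby
class Recipe
  attr_reader :name, :description, :prep_time

  def initialize(name, description, prep_time = nil)
    @name = name
    @description = description
    @prep_time = prep_time # e.g. in minutes, read from the CSV or parsed from the web
  end
end
```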
## 3 - (User Action) Mark a recipe as done
Once you're done with the "Search", try to add a feature to mark a recipe as done:
```
-- Here are all your recipes --
1. [X] Crumpets (15 min)
2. [ ] Beans & Bacon breakfast (45 min)
3. [X] Plum pudding (90 min)
4. [X] Apple pie (60 min)
5. [ ] Christmas crumble (30 min)
```
## 4 - Add a `@difficulty` property to `Recipe`
Again, this new property should be stored in the CSV file and displayed when listing recipes.
Try modifying the web-import feature so that you can import recipes of a given difficulty (you might want to make this argument optional, so the old import behaviour still works).
## 5 - (Optional) Service
Try to extract the **parsing** logic out of the controller and put it into a [**Service Object**](http://brewhouse.io/blog/2014/04/30/gourmet-service-objects.html):
```ruby
class ScrapeLetsCookFrenchService # or ScrapeMarmitonService
def initialize(keyword)
@keyword = keyword
end
def call
# TODO: return a list of `Recipes` built from scraping the web.
end
end
```
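Once extracted, the controller action only has to build the service and call it. A minimal usage sketch, reusing the class name from the skeleton above (`keyword` being whatever you asked the user for):
```ruby
recipes = ScrapeLetsCookFrenchService.new(keyword).call
# `recipes` is then ready to be added to your Cookbook.
```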
<file_sep>/04-Front-End/07-JavaScript-Plugins/Optional-01-AJAX-wagon-race/README.md
## Objectives & Background
Let's take our Wagon Race and make it a little bit more interesting by using an API to register the games we play and store the results.
**Before anything else:**
Go to the exercise folder
```bash
bundle install
```
**For the API:**
* You can read the [API documentation](https://github.com/lewagon/fullstack-challenges/blob/master/04-Front-End/07-JavaScript-Plugins/Optional-01-AJAX-wagon-race/API.md)
* Start the API by calling the `bundle exec rake api` command in the exercise folder
* The API will be accessible on http://localhost:4567
NB: If you have trouble launching the API, try to run `bundle exec rake db_reset`.
**For YOUR javascript code:**
* Use the files located in the `public` folder
* Put your JS code in `public/js/game.js`
* Put your CSS in `public/css/main.css`
* Put your HTML in `public/index.html`
**Access the game**
Once you've started the API with the `bundle exec rake api` command, the game will be accessible on http://localhost:4567/index.html.
## Specs
**Start by reading the API documentation!**
Your application will work like this:
1. When the page loads, ask the API to create a new Game Session
2. Once the Game Session is created, your page will display a button `Start New Game!`
3. When clicked, the button disappears and is replaced by a form where you can type Player1 and Player2 names
4. On form submission you will ask the API to create a new Game based on the players' names
5. When you get a response, hide the form, display the board (what you did in the last exercise) and listen for users' input
6. When the game ends, send a request to the API to _finish_ the game, provide the winner and show the time taken to win (in seconds)
7. When you get the API response, display the information on a score board and provide a "Play Again!" button
<file_sep>/04-JavaScript-basics/Optional-01-Age-In-Days/lib/age_in_days.js
const ageInDays = (day, month, year) => {
  // Difference between now and the birth date, in whole days.
  // 86400000 = number of milliseconds in a day.
  const birthDate = new Date(year, month - 1, day);
  return Math.round((Date.now() - birthDate) / 86400000);
};
module.exports = ageInDays; // Do not remove. We need this for the spec to know about your method.
<file_sep>/04-Front-End/04-JavaScript-basics/02-Sum-of-negative/lib/sum_of_negative.js
const sumOfNegative = (numbers) => {
// TODO: You are getting a `numbers` array. Return the sum of **negative** numbers only.
// iterate over an array
// assign and Increment a variable sum
let sumOfNumber = 0;
numbers.forEach((number) => {
if (number < 0) {
sumOfNumber += number;
}
});
return sumOfNumber;
};
console.log(sumOfNegative([-4, 5, -2, 9]));
module.exports = sumOfNegative; // Do not remove.
<file_sep>/04-Front-End/05-DOM-and-Events/04-Wagon-Race/lib/wagon_race.js
// TODO: write your code here
let i = 1; // position of the wagon in #player2_race (advanced with ArrowRight)
let j = 1; // position of the wagon in #player1_race (advanced with KeyA)

document.addEventListener("keyup", (event) => {
  // Only move the wagons while nobody has reached the last cell yet.
  if (i < 10 && j < 10) {
    if (event.code === "ArrowRight") {
      i += 1;
      const inactive = document.querySelector(`#player2_race td.num${i - 1}`);
      inactive.classList.remove('active');
      const active = document.querySelector(`#player2_race td.num${i}`);
      active.classList.add('active');
    } else if (event.code === "KeyA") {
      j += 1;
      const inactive = document.querySelector(`#player1_race td.num${j - 1}`);
      inactive.classList.remove('active');
      const active = document.querySelector(`#player1_race td.num${j}`);
      active.classList.add('active');
    }
  }
  // Announce the winner of the track that reached the 10th cell.
  if (i === 10) {
    alert(`Player 2 wins! Play again?`);
    window.location.reload();
  } else if (j === 10) {
    alert(`Player 1 wins! Play again?`);
    window.location.reload();
  }
});
<file_sep>/04-Front-End/08-Product-Design-Sprint/01-Schedule/README.md
## Guidelines
### Setup
Before you start:
- Sign up to [Whimsical](https://whimsical.co)
- Sign up to [Figma](https://www.figma.com/) and install the desktop app from [this page](https://www.figma.com/downloads/)
### Morning
- **9:00am to 10:30am**: Lecture
- **10:30am to 11:15am** (45 min) 👉 define your product pitch and primary persona
- **11:15am - 12:00pm** (45 min) 👉 write down your core user journey and translate it into a user flow with [Whimsical](https://whimsical.co)
- **12:00pm - 1pm** (60 min) 👉 build your UI kit and start working on your wireframe
For your product pitch, persona and user journey, make a copy of our <a href="https://docs.google.com/spreadsheets/d/1OGCJLizpcROt0WwbGV_bCOxBktHO9XPdNHkXcZIyX1o/edit?usp=sharing" target="_blank">UX Research spreadsheet</a> and complete the different templates.
### Afternoon
- **2pm to 2:30pm**: Learn to integrate a visual identity into your wireframe (see video tutorial below 👇)
- **2:30pm to 5:30pm**: finish your prototype and start adding the visual identity **only if you have time**. It's more strategic to have a kickass prototype in black and white than a poor mockup full of pictures and colors that no one understands 😬.
### Resources for visual identity
#### Figma Tutorial
Tutorial in 🇬🇧
<iframe src="https://player.vimeo.com/video/315676081" width="640" height="360" frameborder="0" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>
Tutorial in 🇫🇷
<iframe src="https://player.vimeo.com/video/301271712" width="640" height="360" frameborder="0" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>
#### Design resources
- **Fonts**: download and install `.ttf` fonts from [Google fonts](https://fonts.google.com/) or [getthefont](https://www.getthefont.com/)
- **Colors**: find them on [Color Hunt](https://colorhunt.co/) or use a color picker like [Colorzilla](http://www.colorzilla.com/)
- **Pictures**: you can use [unsplash](https://unsplash.com/)
- **Icons**: download and use [Nucleo app](https://nucleoapp.com/) for utility icons
### Demo
When you are done, copy / paste your prototype URL on your product's page in Kitt. To get the prototype URL, first play your prototype and then click on the `Share Prototype` button like below:
<figure style="width: 100%">
<img alt="Screen Shot 2018-10-28 at 13.36.39.png" src="https://wagon-rc3.s3.eu-west-1.amazonaws.com/p23uzJ9h6DUHZzG1E1dNyAWA">
</figure>
At 5:30pm, you will pitch your product by showcasing your prototype.
Good luck 🚀🚀🚀!
<file_sep>/04-Front-End/04-JavaScript-basics/03-Valid-email/lib/valid_email.js
const valid = (email) => {
  // TODO: return true if the `email` string has the right pattern!
  // The dot in the local part is escaped so it matches a literal "." (e.g. first.last@example.com).
  return /([\w]+\.[\w]+|[\w]+)@[\w]+\.[a-z]{2,3}/.test(email);
};
module.exports = valid; // Do not remove.
<file_sep>/02-OOP/01-OO-Basics/04-Public-Interface/bonus_formatter.rb
RSpec::Support.require_rspec_core 'formatters/base_text_formatter'
class BonusFormatter < RSpec::Core::Formatters::DocumentationFormatter
RSpec::Core::Formatters.register self, :dump_summary
def dump_summary(notification)
output.puts BONUS if notification.failure_count.zero?
super notification
end
end
BONUS = "
___________________________________
|#######====================#######|
|#(1)*UNITED STATES OF AMERICA*(1)#|
|#** /===\\ ******** **#|
|*# {G} | (\") | #*|
|#* ****** | /v\\ | O N E *#|
|#(1) \\===/ (1)#|
|##=========ONE DOLLAR===========##|
------------------------------------
"
|
ae27150ffcffd77635c46a84abf8677d53bcbea3
|
[
"Markdown",
"JavaScript",
"Ruby"
] | 46 |
Markdown
|
Jeremie1707/sandbox
|
005f95d7c357c62756a3c70aafc05851abc7da4a
|
ac08cb0a96e0043102f4d45aaa7e5af536de198c
|
refs/heads/master
|
<repo_name>huvii174/project-2<file_sep>/project2/src/ProjectManager.java
import UserData.*;
import UserData.Task.STATUS;
import java.sql.SQLOutput;
import java.util.ArrayList;
import java.io.File;
import java.util.Objects;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.util.Scanner;
public class ProjectManager {
private ArrayList<User> userList;
private String userFile="user.txt";
private final String projectFile="project.txt";
private final String taskFile = "task.txt";
private ArrayList<Project> listProject;
private ArrayList<Task> listTask;
private User user;
private Scanner scn;
public ProjectManager() {
this.userList = new ArrayList<>();
this.listProject = new ArrayList<>();
this.listTask = new ArrayList<>();
scn = new Scanner(System.in);
}
public void run() {
System.out.println("Loading...");
try {
loadUser();
loadProject();
loadTask();
} catch (IOException e) {
System.out.println("Can't load. Something wrong with your file.");
return;
}
System.out.println("Loading complete.");
if (!login()) return;
while (true) {
System.out.println("Choose: ");
System.out.println("1. Project list");
System.out.println("2. Task joined");
System.out.println("3. User list");
System.out.println("4. Employee list");
System.out.println("5. Close");
boolean isBreak = false;
int choose;
try {
choose = Integer.parseInt(scn.nextLine());
} catch (Exception e) {
continue;
}
if (choose > 5 || choose < 1) {
System.out.println("Invalid.");
continue;
}
switch (choose) {
case 1: projectListDisplay(); break;
case 2: taskJoinedDisplay(); break;
case 3: userListDisplay(); break;
case 4: employeeListDisplay(); break;
case 5:
System.out.println("closing...");
isBreak = true;
break;
}
if (isBreak) break;
}
}
private boolean login() {
return true;
}
private void projectListDisplay() {
if (this.user.getType() == User.TYPE.CEO) showAllProjectAndTaskCount();
else showProjectAndTaskCountFromId();
while (true) {
System.out.println("1. Create new project (CEO only)");
System.out.println("2. Show task");
System.out.println("3. CSV project info");
System.out.println("4. Cancel");
boolean isBreak = false;
int choose;
try {
choose = Integer.parseInt(scn.nextLine());
} catch (Exception e) {
continue;
}
if (choose > 4 || choose < 1) {
System.out.println("Invalid.");
continue;
}
switch (choose) {
                case 1:
                    try { createNewProject(); } catch (IOException e) { System.out.println("Could not save the new project."); }
                    break;
case 2: showTaskInProject(); break;
case 3: printProjectToCSV(); break;
case 4: isBreak = true; break;
}
if (isBreak) break;
}
}
    private void showAllProjectAndTaskCount() {
        System.out.println("Project: ");
        for (int i = 0; i < this.listProject.size(); i++) {
            System.out.println(this.listProject.get(i).toString());
        }
        int avl = 0;
        for (int i = 0; i < this.listTask.size(); i++) {
            if (this.listTask.get(i).status != STATUS.CLOSED) {
                avl++;
            }
        }
        System.out.println("Task available: " + avl);
        int jn = 0;
        for (int i = 0; i < this.listTask.size(); i++) {
            if (this.listTask.get(i).status == STATUS.CLOSED) {
                jn++;
            }
        }
        System.out.println("Task closed: " + jn);
    }
private void showProjectAndTaskCountFromId() {
System.out.println("Project joined: ");
for (int i = 0;i< this.listProject.size();i++)
{
if(this.user.getId() == this.listProject.get(i).mnID )
{
System.out.print(this.listProject.get(i).name + " " + this.listProject.get(i).prjID);
System.out.println();
}
else
{
for(int j = 0;j<this.listProject.get(i).user.size();j++)
{
if (this.user.getId() == this.listProject.get(i).user.get(j).getId() )
{
System.out.print(this.listProject.get(i).name + " " + this.listProject.get(i).prjID);
System.out.println();
}
}
}
}
System.out.println("Task joined: ");
int count = 0;
for (int i = 0;i< this.listTask.size();i++)
{
for(int j = 0;j<this.listTask.get(i).developer.size();j++)
{
if (this.user.getId() == this.listTask.get(i).developer.get(j).getId() )
{
count ++;
}
}
}
for (int i = 0;i< this.listTask.size();i++)
{
for(int j = 0;j<this.listTask.get(i).tester.size();j++)
{
if (this.user.getId() == this.listTask.get(i).tester.get(j).getId() )
{
count ++;
}
}
}
System.out.print(count);
}
    private void createNewProject() throws IOException {
        System.out.println("Name of project ");
        String prjName = scn.nextLine();
        System.out.println("ID of project ");
        String prjID = scn.nextLine();
        System.out.println("Manager of project ");
        String mnID = scn.nextLine();
        // The Project constructor expects task and employee lists as well.
        Project newProject = new Project(prjName, prjID, mnID, new ArrayList<>(), new ArrayList<>());
        this.listProject.add(newProject);
        // Append the new project to the project file.
        BufferedWriter buffer = new BufferedWriter(new FileWriter(this.projectFile, true));
        newProject.saveinFile(buffer);
        buffer.close();
    }
private void showTaskInProject() {
}
private void printProjectToCSV() {
}
private void taskJoinedDisplay() {
if (this.user.getType() == User.TYPE.CEO) {
System.out.println("You have no permision.");
return;
}
}
    private void userListDisplay() {
        if (this.user.getType() != User.TYPE.CEO) {
            System.out.println("Only the CEO has permission for this option.");
            return;
        }
        for (int i = 0; i < this.userList.size(); i++) {
            System.out.println(this.userList.get(i).toString());
        }
    }
    private void employeeListDisplay() {
        // Both conditions must fail before denying access, so use && instead of ||.
        if (this.user.getType() != User.TYPE.MANAGER && this.user.getType() != User.TYPE.CEO) {
            System.out.println("You have no permission");
            System.out.println("Only Manager & CEO");
            return;
        }
        for (int i = 0; i < this.userList.size(); i++) {
            System.out.println(this.userList.get(i).toString());
        }
    }
private void loadProject() throws IOException {
FileInputStream fis = new FileInputStream(this.projectFile);
InputStreamReader isr = new InputStreamReader(fis);
BufferedReader bfr = new BufferedReader(isr);
ArrayList<Task> task = new ArrayList<>();
ArrayList<User> emp = new ArrayList<>();
String line = bfr.readLine();
while(line != null){
String prjID = line;
line = bfr.readLine();
String pjName = line;
line = bfr.readLine();
String mnID = line;
line = bfr.readLine();
String tsk = line;
if(tsk.equals("Task: "))
{
int i = 0;
                while (line != null && !line.equals("Employees "))
{
String []array = line.split("/");
Date start = new Date();
if (array.length == 3 )
{
start.day = array[0];
start.month = array[1];
start.year = array[2];
}
line = bfr.readLine();
String []arr = line.split("/");
Date end = new Date();
                    if (arr.length == 3) {
                        end.day = arr[0];
                        end.month = arr[1];
                        end.year = arr[2];
                    }
line = bfr.readLine();
String stt = line;
Task.STATUS stt1 = null ;
if(stt.equals("Created")) stt1 = Task.STATUS.CREATED;
if(stt.equals("Assigned")) stt1 = Task.STATUS.ASSIGNED;
if(stt.equals("Started")) stt1 = Task.STATUS.STARTED;
if(stt.equals("Done")) stt1 = Task.STATUS.DONE;
if(stt.equals("Completed")) stt1 = Task.STATUS.COMPLETED;
if(stt.equals("Reject")) stt1 = Task.STATUS.REJECT;
if(stt.equals("Closed")) stt1 = Task.STATUS.CLOSED;
line = bfr.readLine();
                    String[] dev = line.split(",");
                    ArrayList<Developer> id = new ArrayList<>();
                    for (int j = 0; j < dev.length; j++) {
                        // Only the id is known here; build a Developer per id instead of indexing into an empty list.
                        id.add(new Developer(dev[j], "", "", ""));
                    }
                    line = bfr.readLine();
                    String[] tst = line.split(",");
                    ArrayList<Tester> id1 = new ArrayList<>();
                    for (int j = 0; j < tst.length; j++) {
                        id1.add(new Tester(tst[j], "", "", ""));
                    }
Task temp = new Task(prjID,id,id1,start,end,stt1);
task.add(temp);
i++;
line = bfr.readLine();
}
if (line.equals("Employees "))
{
String []us = line.split(",");
if (us.length == 5)
{
User.TYPE tmp = null;
if(us[0].equals("Tester"))
{
tmp = User.TYPE.TESTER;
Tester t = new Tester(us[5],us[1],us[2],us[3]);
emp.add(t);
}
if (us[0].equals("Developer"))
{
tmp = User.TYPE.DEVELOPER;
Developer d = new Developer(us[5],us[1],us[2],us[3]);
emp.add(d);
}
}
}
}
Project ttp = new Project(pjName,prjID,mnID,task,emp);
this.listProject.add(ttp);
line = bfr.readLine();
}
}
private void loadTask() throws IOException{
FileInputStream fis = new FileInputStream(this.taskFile);
InputStreamReader isr = new InputStreamReader(fis);
BufferedReader bfr = new BufferedReader(isr);
this.listTask = new ArrayList<>();
String line = bfr.readLine();
int i = 0;
while(line != null)
{
String prjID = line;
line = bfr.readLine();
String []array = line.split(" ");
Date start = new Date();
            if (array.length == 4) {
                start.hour = array[0];
                start.day = array[1];
                start.month = array[2];
                start.year = array[3];
            }
line = bfr.readLine();
String []arr = line.split(" ");
Date end = new Date();
            if (arr.length == 4) {
                end.hour = arr[0];
                end.day = arr[1];
                end.month = arr[2];
                end.year = arr[3];
            }
line = bfr.readLine();
String stt = line;
Task.STATUS stt1 = null ;
if(stt.equals("Created")) stt1 = Task.STATUS.CREATED;
if(stt.equals("Assigned")) stt1 = Task.STATUS.ASSIGNED;
if(stt.equals("Started")) stt1 = Task.STATUS.STARTED;
if(stt.equals("Done")) stt1 = Task.STATUS.DONE;
if(stt.equals("Completed")) stt1 = Task.STATUS.COMPLETED;
if(stt.equals("Reject")) stt1 = Task.STATUS.REJECT;
if(stt.equals("Closed")) stt1 = Task.STATUS.CLOSED;
line = bfr.readLine();
            String[] dev = line.split(",");
            ArrayList<Developer> id = new ArrayList<>();
            for (int j = 0; j < dev.length; j++) {
                // Only the id is known here; build a Developer per id instead of indexing into an empty list.
                id.add(new Developer(dev[j], "", "", ""));
            }
            line = bfr.readLine();
            String[] tst = line.split(",");
            ArrayList<Tester> id1 = new ArrayList<>();
            for (int j = 0; j < tst.length; j++) {
                id1.add(new Tester(tst[j], "", "", ""));
            }
Task temp = new Task(prjID,id,id1,start,end,stt1);
this.listTask.add(temp);
i++;
line = bfr.readLine();
}
}
private void saveUser() throws IOException {
FileWriter writer = new FileWriter(this.userFile);
BufferedWriter buffer = new BufferedWriter(writer);
for(int i= 0;i<this.userList.size();i++)
{
this.userList.get(i).saveInFile(buffer);
}
buffer.close();
}
private void loadUser () throws IOException {
FileInputStream fis = new FileInputStream(this.userFile);
InputStreamReader isr = new InputStreamReader(fis);
BufferedReader bfr = new BufferedReader(isr);
String line = bfr.readLine();
while(line != null){
String []array = line.split(",");
if(array.length == 5)
{
if(array[0].equals("CEO"))
{
CEO ceo = new CEO(array[4],array[1],array[2],array[3]);
this.userList.add(ceo);
}
if(array[0].equals("Developer"))
{
Developer dev = new Developer(array[4],array[1],array[2],array[3]);
this.userList.add(dev);
}
if(array[0].equals("Manager"))
{
Manager mn = new Manager(array[4],array[1],array[2],array[3]);
this.userList.add(mn);
}
if(array[0].equals("Tester"))
{
Tester tst = new Tester(array[4],array[1],array[2],array[3]);
this.userList.add(tst);
}
}
line = bfr.readLine();
}
fis.close();
isr.close();
bfr.close();
}
}
<file_sep>/project-2-master/project2/src/UserData/Tester.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package UserData;
import java.io.File;
/**
*
* @author nghun
*/
public class Tester extends User {
public Tester() {
super();
this.type = TYPE.TESTER;
}
    public Tester(String id, String userName, String password, String fullName) {
        super(id, userName, password, fullName);
        this.type = TYPE.TESTER;
    }
}
<file_sep>/project2/src/UserData/User.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package UserData;
import java.io.File;
import java.util.Date;
import java.util.Objects;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.util.UUID;
/**
*
* @author nghun
*/
public class User {
public enum TYPE {
MANAGER, DEVELOPER, TESTER, CEO
}
protected String userName;
protected String password;
protected String fullName;
public String getId() {
return id;
}
protected String id;
public TYPE type;
public User() {
}
public User(String userName, String password, String fullName) {
this.userName = userName;
this.password = <PASSWORD>;
this.fullName = fullName;
this.id= UUID.randomUUID().toString();
}
public User(String id, String userName, String password, String fullName) {
this.userName = userName;
this.password = <PASSWORD>;
this.fullName = fullName;
this.id = id;
}
public void setPassword(String password) {
this.password = this.MD5(password);
}
public String getUserName() {
return userName;
}
public String getPassword() {
return password;
}
public String getFullName() {
return fullName;
}
public void setId(String id) {
this.id = id;
}
public TYPE getType() {
return type;
}
private String MD5(String md5) {
try {
java.security.MessageDigest md = java.security.MessageDigest.getInstance("MD5");
byte[] array = md.digest(md5.getBytes());
StringBuffer sb = new StringBuffer();
for (int i = 0; i < array.length; ++i) {
sb.append(Integer.toHexString((array[i] & 0xFF) | 0x100).substring(1,3));
}
return sb.toString();
} catch (java.security.NoSuchAlgorithmException e) {
}
return null;
}
public boolean checkPassword(String password) {
if (this.MD5(password).equals(this.password)) return true;
return false;
}
public void saveInFile(BufferedWriter buffer) throws IOException {
        // The type label must match what loadUser() expects when reading the file back.
        if (this.type == TYPE.CEO) buffer.write("CEO,");
        if (this.type == TYPE.DEVELOPER) buffer.write("Developer,");
        if (this.type == TYPE.MANAGER) buffer.write("Manager,");
        if (this.type == TYPE.TESTER) buffer.write("Tester,");
        buffer.write(this.getUserName());
        buffer.write(",");
        buffer.write(this.getPassword());
        buffer.write(",");
        buffer.write(this.getFullName());
        buffer.write(",");
        buffer.write(this.getId());
        buffer.newLine();
        // Do not close the buffer here; callers such as saveUser() and Project.saveinFile() keep writing and close it themselves.
}
@Override
public String toString() {
return "User{" + "userName=" + userName + ", fullName=" + fullName + ", id=" + id + '}';
}
}
<file_sep>/project-2-master/project2/src/UserData/Project.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package UserData;
import java.io.BufferedWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import UserData.Task;
/**
*
* @author nghun
*/
public class Project {
public String name;
public String prjID;
public String mnID;
public ArrayList<Task> task;
public ArrayList<User> user;
public Project(String name, String prjID, String mnID, ArrayList<Task> task, ArrayList<User> user) {
this.name = name;
this.prjID = prjID;
this.mnID = mnID;
this.task = task;
this.user = user;
}
public void saveinFile(BufferedWriter buffer) throws IOException {
buffer.write(this.prjID);
buffer.newLine();
buffer.write(this.name);
buffer.newLine();
buffer.write(this.mnID);
buffer.newLine();
buffer.write("Task: ");
buffer.newLine();
for(int i = 0 ; i< this.task.size();i++)
{
this.task.get(i).saveInFile(buffer);
buffer.newLine();
}
buffer.newLine();
buffer.write("Employees ");
buffer.newLine();
for(int i = 0 ; i< this.user.size();i++)
{
this.user.get(i).saveInFile(buffer);
}
buffer.newLine();
}
@Override
public String toString() {
return "Project{" + "name=" + name + ", prjID=" + prjID + ", mnID=" + mnID + '}';
}
}
|
d6c22e2fc0d8bc45b04613a60c8e32b1b3fb737b
|
[
"Java"
] | 4 |
Java
|
huvii174/project-2
|
df1f2e36ab7095429bf33b052af2992a95ac592f
|
86a623991717dbd531289a5fcfcdcffb05a37689
|
refs/heads/master
|
<repo_name>BykovIlya/web_course_library<file_sep>/public/javascripts/listOfBooks.js
var table = document.getElementById("m_table");
var currentCellIndex = -1;
var flagOnClick = false;
var flagFilter = false;
var realRows = [];
var secondFlagFilter = false;
table.onclick = (e) => {
if (e.target.tagName != 'TH') {
return;
}
if (e.target.cellIndex != currentCellIndex || flagOnClick === true) {
tableSort(e.target.cellIndex, e.target.getAttribute('datatype'),true);
currentCellIndex = e.target.cellIndex;
flagOnClick = false;
} else {
tableSort(e.target.cellIndex, e.target.getAttribute('datatype'),false);
currentCellIndex = e.target.cellIndex;
flagOnClick = true;
}
};
function tableSort(columns, type, flag) {
var mbody = document.getElementById("mbody");
var rows = [].slice.call(mbody.rows);
var comparator;
    switch (type) {
        case "title_name":
            comparator = (rowA, rowB) => {
                // localeCompare returns a number, as Array.prototype.sort expects.
                const result = rowA.cells[columns].textContent.localeCompare(rowB.cells[columns].textContent);
                return flag ? result : -result;
            };
            break;
        case "title_instock":
            comparator = (rowA, rowB) => {
                const dateA = (rowA.cells[columns].textContent !== "В наличии") ?
                    new Date(String(rowA.cells[columns].textContent).replace(/(\d+)-(\d+)-(\d+)/, '$2/$1/$3')) : new Date(null);
                const dateB = (rowB.cells[columns].textContent !== "В наличии") ?
                    new Date(String(rowB.cells[columns].textContent).replace(/(\d+)-(\d+)-(\d+)/, '$2/$1/$3')) : new Date(null);
                // Subtracting dates yields the numeric comparison value sort expects.
                return flag ? dateA - dateB : dateB - dateA;
            };
            break;
        default:
            comparator = (rowA, rowB) => {
                const result = rowA.cells[columns].innerHTML.localeCompare(rowB.cells[columns].innerHTML);
                return flag ? result : -result;
            };
    }
rows.sort(comparator);
table.removeChild(mbody);
for (let i = 0; i < rows.length; i++) {
mbody.appendChild(rows[i]);
}
table.appendChild(mbody);
}
_filter.onclick = () => {
if (flagFilter === false && secondFlagFilter === false) {
realRows = deleteStock();
document.getElementById("_filter").innerHTML="Показать 'Возврат'";
flagFilter = true;
} else if (flagFilter === true && secondFlagFilter === false) {
deleteSecondStock(realRows);
document.getElementById("_filter").innerHTML="Вернуть";
secondFlagFilter = true;
}else {
goBack(realRows);
document.getElementById("_filter").innerHTML="Показать 'В наличии'";
flagFilter = false;
secondFlagFilter = false;
}
}
function deleteStock() {
var mbody = document.getElementById("mbody");
var rows = [].slice.call(mbody.rows);
var deletedRows = [];
for (let i = 0; i < rows.length; i++) {
if (rows[i].cells[3].innerHTML.localeCompare("В наличии") == 0) {
deletedRows.push(rows[i])
}
}
table.removeChild(mbody);
mbody.innerText = "";
for (let i = 0; i < deletedRows.length; i++) {
mbody.appendChild(deletedRows[i]);
}
table.appendChild(mbody);
return rows;
}
function deleteSecondStock(rows) {
var mbody = document.getElementById("mbody");
var deletedRows = [];
for (let i = 0; i < rows.length; i++) {
if (rows[i].cells[3].innerHTML.localeCompare("В наличии") != 0) {
deletedRows.push(rows[i])
}
}
table.removeChild(mbody);
mbody.innerText = "";
for (let i = 0; i < deletedRows.length; i++) {
mbody.appendChild(deletedRows[i]);
}
table.appendChild(mbody);
}
function goBack(rows) {
var mbody = document.getElementById("mbody");
table.removeChild(mbody);
mbody.innerText = "";
for (let i = 0; i < rows.length; i++) {
mbody.appendChild(rows[i]);
}
table.appendChild(mbody);
}
_save.onclick = () => {
addBook();
};
function addBook() {
var name = document.getElementById("newBookName");
var author = document.getElementById("newBookAuthor");
var year = document.getElementById("newBookYear");
if (name.value === "" || author.value === "" || year.value === "") {
alert("Ошибка! Не все поля заполнены");
return;
}
var newBook = {
id: null,
master: null,
name: name.value,
author: author.value,
date: year.value,
in_stock: true,
return_date: null
}
addRequest("get", newBook, cb=>{});
// getRequest("get", )
setTimeout('document.location.href="/listOfBooks";',200)
}
function addRequest(method, body, callback) {
const xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function() {
if (this.readyState === 4 && this.status === 200) {
callback(JSON.parse(this.responseText));
}
if(this.readyState===4 && this.status === 400) {
alert(JSON.parse(this.responseText).message);
}
};
    // The API expects the parameters in the URL, so the request is sent without a body.
    var url = "/addBook/name=" + body.name + "&author=" + body.author + "&date=" + body.date;
    xhttp.open(method, url, true);
    xhttp.send();
}<file_sep>/README.md
# web_course
Lab assignments in web programming. 5th semester
## Lab assignment No. 2
## Library
### Installation `npm install`
### Run `npm run start`
Link to the report: [here](https://github.com/BykovIlya/web_course_library/blob/master/lr2_report.odt)
## Project tasks
1) Design the web application interface.
2) Define styles for displaying the web application.
3) Build a web server based on express.
4) Create web page templates.
5) Set up the routes.
6) Create a JSON store.
7) Handle REST requests.
## Main project pages
### Book list

### Book card

|
a5e4a6f496afb0f66aa734f4c474c3f4fddb4f7f
|
[
"JavaScript",
"Markdown"
] | 2 |
JavaScript
|
BykovIlya/web_course_library
|
d9339edb6cadc394958173c4aefc21d450f61017
|
1cda87526c82d91f2b8076c41fba3d8f92257c5b
|
refs/heads/master
|
<repo_name>Balessss/EasyAlexaLampController<file_sep>/EasyAlexaLampController.ino
#include <Arduino.h>
#include "fauxmoESP.h"
#ifdef ESP32
#include <WiFi.h>
#else
#include <ESP8266WiFi.h>
#endif
#define WIFI_SSID "" //your wifi ssid
#define WIFI_PASS "" //your wifi password
#define LAMP "" //name of device - the name you will use to call from alexa
#define RELAY_PIN 13 //arduino pin where the relay is connected
fauxmoESP fauxmo;
bool stato = false;
void wifiSetup() {
WiFi.mode(WIFI_STA);
WiFi.begin(WIFI_SSID, WIFI_PASS);
while (WiFi.status() != WL_CONNECTED) {
delay(100);
}
}
void setup() {
    Serial.begin(115200); // start the serial port before printing anything
    Serial.println();
wifiSetup();
pinMode(RELAY_PIN, OUTPUT);
digitalWrite(RELAY_PIN, LOW);
fauxmo.createServer(true);
fauxmo.setPort(80);
fauxmo.enable(true);
fauxmo.addDevice(LAMP);
fauxmo.onSetState([](unsigned char device_id, const char * device_name, bool state, unsigned char value) {
if ( (strcmp(device_name, LAMP) == 0) ) {
stato = true;
}
});
}
void loop() {
fauxmo.handle();
static unsigned long last = millis();
/*if (millis() - last > 5000) {
last = millis();
}*/
if(stato)
{
digitalWrite(RELAY_PIN, HIGH);
delay(250);
digitalWrite(RELAY_PIN, LOW);
stato = false;
}
}
|
403c85432f7d79fd4e05a26b2f8fd6ee260b1941
|
[
"C++"
] | 1 |
C++
|
Balessss/EasyAlexaLampController
|
835bdb755dca2c076bb3075715df75925948e66e
|
297c00df2e6199a4a4ca80d875900c3ed2e592fd
|
refs/heads/main
|
<repo_name>lofidewanto/gwt-overloading<file_sep>/src/main/java/com/github/lofi/client/AppEntryPoint.java
package com.github.lofi.client;
import com.google.gwt.core.client.EntryPoint;
public class AppEntryPoint implements EntryPoint {
@Override
public void onModuleLoad() {
// Do nothing, just to load the Java classes
Calculator calculator = new Calculator();
calculator.setValue(10);
calculator.setValue(10F);
CalculatorNoExport calculatorNoExport = new CalculatorNoExport();
calculatorNoExport.setValue(10);
calculatorNoExport.setValue(10F);
}
}
<file_sep>/README.md
# gwt-overloading
- Overloading a Java method only works if you don't export the method / class to JS, as in [CalculatorNoExport.java](https://github.com/lofidewanto/gwt-overloading/blob/main/src/main/java/com/github/lofi/client/CalculatorNoExport.java).
- If you want to export it, you cannot use Java method overloading, since JS doesn't support method / function overloading; see [Calculator.java](https://github.com/lofidewanto/gwt-overloading/blob/main/src/main/java/com/github/lofi/client/Calculator.java). Adding @JsIgnore to one of the overloads makes the class transpilable, but that method won't be available in JS (a hedged sketch of this pattern follows the list).
- Here is where CalculatorNoExport and Calculator are used: [AppEntryPoint.java](https://github.com/lofidewanto/gwt-overloading/blob/main/src/main/java/com/github/lofi/client/AppEntryPoint.java)
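A minimal, hypothetical sketch of that pattern (not the repository's actual `Calculator.java`), using the standard `jsinterop.annotations` API:

```java
import jsinterop.annotations.JsIgnore;
import jsinterop.annotations.JsType;

// The whole class is exported to JavaScript. JS has no overloading,
// so only one setValue can be part of the exported API.
@JsType
public class Calculator {

    private float value;

    // Visible from JavaScript.
    public void setValue(float value) {
        this.value = value;
    }

    // Excluded from the JS API with @JsIgnore, but still callable from Java,
    // e.g. from AppEntryPoint.onModuleLoad().
    @JsIgnore
    public void setValue(int value) {
        this.value = value;
    }
}
```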
|
b8d39e2f0cbd78e999148598ad8d4a5f3c3e60a7
|
[
"Markdown",
"Java"
] | 2 |
Java
|
lofidewanto/gwt-overloading
|
5e7a70977860d1eb473cfcd58fa01e1e4013242d
|
ceb062b3d0444135986f0fb1600b4bdba4c73a66
|
refs/heads/master
|
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package br.estacio.aula.tdd.aula3;
import br.estacio.aula.tdd.model.Programa;
import org.junit.Test;
import org.junit.Assert;
/**
*
* @author Marcelo
*/
public class TesteCondicional {
@Test
public void testeVerdadeiroVerdadeiro(){
double a=2;
double b=0;
double x=1;
Programa p = new Programa();
double valorEsperado = p.exemplo(a, b, x);
        Assert.assertEquals(1.5d, valorEsperado, 1); // assertEquals(expected, actual, delta)
}
@Test
public void testeVerdadeiroFalso(){
double a=2;
double b=1;
double x=1;
Programa p = new Programa();
double valorEsperado = p.exemplo(a, b, x);
        Assert.assertEquals(2, valorEsperado, 1); // assertEquals(expected, actual, delta)
}
}
|
49e31cccaa9f6dcd401aa3e3b511681520b7fcbf
|
[
"Java"
] | 1 |
Java
|
igorprates/TesteDeSoftware-2017-1
|
a1d1145d42a55038f9e626f2935226f9ee84cfee
|
28ec3c7982843888d7539ce4bb4f6b13a7fcf407
|
refs/heads/master
|
<file_sep># hh-quatysvscoringavirus
Repository of the Quatys VS CoringaVirus team
<file_sep>const express = require('express');
const app = express();
const helloWorld = require('./helloWorld')
app.use('/teste', helloWorld);
module.exports = app;<file_sep>-- MySQL Workbench Synchronization
-- Generated: 2020-05-23 18:36
-- Model: New Model
-- Version: 1.0
-- Project: Name of the project
-- Author: okita
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES';
ALTER TABLE `simoo`.`product`
ADD COLUMN `sugested_price` FLOAT(11) NOT NULL AFTER `delete_date`,
ADD COLUMN `minimum_price` FLOAT(11) NULL DEFAULT NULL AFTER `sugested_price`;
ALTER TABLE `simoo`.`consumer_buy_product`
ADD COLUMN `price` FLOAT(11) NOT NULL AFTER `date`;
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
<file_sep>-- MySQL Workbench Synchronization
-- Generated: 2020-05-23 18:15
-- Model: New Model
-- Version: 1.0
-- Project: Name of the project
-- Author: okita
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES';
CREATE TABLE IF NOT EXISTS `simoo`.`product` (
`idproduct` INT(11) NOT NULL AUTO_INCREMENT,
`name` VARCHAR(45) NOT NULL,
`merchant_idmerchant` INT(11) NOT NULL,
`register_date` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
`delete_date` TIMESTAMP NULL DEFAULT NULL,
PRIMARY KEY (`idproduct`),
INDEX `fk_product_merchant_idx` (`merchant_idmerchant` ASC),
CONSTRAINT `fk_product_merchant`
FOREIGN KEY (`merchant_idmerchant`)
REFERENCES `simoo`.`merchant` (`idmerchant`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB
DEFAULT CHARACTER SET = utf8;
CREATE TABLE IF NOT EXISTS `simoo`.`consumer_buy_product` (
`consumer_idconsumer` INT(11) NOT NULL,
`product_idproduct` INT(11) NOT NULL,
`date` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`consumer_idconsumer`, `product_idproduct`),
INDEX `fk_consumer_has_product_product1_idx` (`product_idproduct` ASC),
INDEX `fk_consumer_has_product_consumer1_idx` (`consumer_idconsumer` ASC),
CONSTRAINT `fk_consumer_has_product_consumer1`
FOREIGN KEY (`consumer_idconsumer`)
REFERENCES `simoo`.`consumer` (`idconsumer`)
ON DELETE NO ACTION
ON UPDATE NO ACTION,
CONSTRAINT `fk_consumer_has_product_product1`
FOREIGN KEY (`product_idproduct`)
REFERENCES `simoo`.`product` (`idproduct`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB
DEFAULT CHARACTER SET = utf8;
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
<file_sep>import React, { Component } from "react" //always import
import './Signup.css';
import Logo from '../images/logo.png'
import {Button} from '@material-ui/core';
// Create the component
class Signup extends Component {
  // Constructor: needed here because the component keeps state and binds handlers
  constructor(props){
    super(props);
    // Local state, used to send the form data to the API
    this.state = {name: '', email: ''};
    // Bind the handlers used in the JSX below
    this.handleChangeName = this.handleChangeName.bind(this);
    this.handleChangeEmail = this.handleChangeEmail.bind(this);
    this.submitInfo = this.submitInfo.bind(this);
  }
//handle on change name
handleChangeName(event) {
console.log(event.target.value);
this.setState({name: event.target.value});
}
  // handle on change email
handleChangeEmail(event) {
this.setState({email: event.target.value});
}
//api call
async submitInfo(name, email) {
//try catch to get errors
try {
await fetch( 'http://34.95.183.232/hack', {
method: 'POST',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({
name: name,
email: email,
})
}).then(response => response.json())
} catch {
console.log("ERROR")
}
}
render() {
return (
<div class="container">
<div class="logoDiv">
<div class="imgDiv">
<img src = {Logo}/>
</div>
</div>
<div class="signupDiv">
<div class="signupContent">
<h2>Crie sua conta com a gente!</h2>
<form>
<ul>
<li>
<p>Nome: </p>
<label>
<input type="text" value={this.state.name} onChange={this.handleChangeName}/>
</label>
</li>
<li>
<p>Email:</p>
<label>
<input type="text" value={this.state.email} onChange={this.handleChangeEmail}/>
</label>
</li>
<li>
<p>Senha:</p>
<label>
<input type="password" name="name" />
</label>
</li>
<li>
<p>Confirmar Senha:</p>
<label>
<input type="password" name="name" />
</label>
</li>
            <li className="signupBtn">
<Button variant="outlined" color="secondary" onClick={() => this.submitInfo(this.state.name, this.state.email)}>
Registrar-se
</Button>
</li>
</ul>
</form>
</div>
</div>
</div>
);
}
}
//export component
export default Signup;<file_sep>-- MySQL Workbench Synchronization
-- Generated: 2020-05-23 17:55
-- Model: New Model
-- Version: 1.0
-- Project: Name of the project
-- Author: okita
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES';
CREATE TABLE IF NOT EXISTS `simoo`.`merchant` (
`idmerchant` INT(11) NOT NULL AUTO_INCREMENT,
`name` VARCHAR(45) NOT NULL,
`email` VARCHAR(45) NOT NULL,
`register_date` TIMESTAMP NULL DEFAULT NULL,
PRIMARY KEY (`idmerchant`))
ENGINE = InnoDB
DEFAULT CHARACTER SET = utf8;
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
|
a0377755895ab84f1010a74bc6decaea113b33d2
|
[
"Markdown",
"SQL",
"JavaScript"
] | 6 |
Markdown
|
Alexandre-Petrachini/hh-quatysvscoringavirus
|
40d231377dfacd0df8e86d8c951c77aeceb3a14d
|
396822978368df1cc0df94ea78999eeb5d81be63
|
refs/heads/master
|
<repo_name>jamesaspence/laravel-core-model<file_sep>/tests/TestCase.php
<?php
namespace Laracore\Tests;
class TestCase extends \PHPUnit_Framework_TestCase
{
/**
* Marks a method as a test stub.
*/
protected function stub()
{
$this->markTestIncomplete('Incomplete Test.');
}
/**
* {@inheritdoc}
*/
public function tearDown()
{
\Mockery::close();
return parent::tearDown();
}
}<file_sep>/tests/Stub/ModelStubWithScopes.php
<?php
namespace Laracore\Tests\Stub;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Query\Builder;
class ModelStubWithScopes extends Model
{
protected static function boot()
{
parent::boot();
static::addGlobalScope('test', function (Builder $builder) {
$builder->where('test', '>', 200);
});
static::addGlobalScope('test2', function (Builder $builder) {
$builder->where('test2', '<', 100);
});
}
}<file_sep>/lib/Exception/RelationInterfaceExceptionNotSetException.php
<?php
namespace Laracore\Exception;
class RelationInterfaceExceptionNotSetException extends \Exception
{
//
}<file_sep>/readme.md
# Laravel Core Model (Laracore)
A repository, factory, and criteria layer for Eloquent models,
providing a convenient repository interface that still allows
fully-featured, eloquent usage. Allows convenient testing and
dependency injection without sacrificing features or versatility.
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [Requirements](#requirements)
- [Installation](#installation)
- [Usage](#usage)
- ["Magic" Methods](#magic-methods)
- [Relations](#relations)
- [Dependency Injection](#dependency-injection)
- [Model Factories](#model-factories)
- [Inheritance](#inheritance)
- [Future Plans](#future-plans)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Requirements
- Laravel 5.3+
## Installation
composer require jamesaspence/laravel-core-model
## Usage
Laracore (Laravel Core Model) is very simple to use.
It works exactly the same as the Eloquent models
you're already used to. This means you can use all
the model queries you're already used to.
```php
//First we instantiate the repository
$repository = new ModelRepository();
//We pass in the reference to the model class
//so we can query it
$repository->setModel(User::class);
//Now we use the repository to find a model
$user = $repository->find(1);
//Let's save some new attributes
$repository->fill($user, [
'name' => '<NAME>'
]);
//Finally, let's save!!
$repository->save($model);
```
More advanced queries are allowed as well,
including custom queries. Assuming the same
repository as before:
```php
//Let's query based on email AND name
$user = $repository
->query()
->where('name', '=', '<NAME>')
->where('email', '=', '<EMAIL>')
->first();
```
The repository's methods map to the model's
methods, allowing, in the above example, a
return of a query builder. This means we don't
lose any of the features we've come to love from
Eloquent.
### "Magic" Methods
Laracore's repositories support the calling of
magic methods, such as local scope queries. For
example, consider the following code:
```php
$model = User::active()->get();
```
You do not need to define a custom repository
with this method hardcoded.
```php
$repository = new ModelRepository(User::class);
$model = $repository->active()
->get();
```
Instead, we can call our scope queries and other
magic methods directly on the repository. The
repository will delegate them on to the model
class.
Our magic method handling also listens for a model
instance being the first argument of a magic method
called via this repository. If the first argument is
an instance of a model, it will instead call the method
on the model instance itself! See the below example:
```php
//This
$model = new User();
$repository->doThing($model, $stuff, $things);
//Is equivalent to this
$model->doThing($stuff, $things);
```
This is meant to catch missed repository methods that we would
want implemented. If this causes issues, feel free to reach out
via the issues on this repository!
### Relations
Laracore also allows retrieval of relations.
```php
$user = $repository
->with(['tokens', 'profile'])
->find(1);
//Let's also load a relation with an existing model.
$repository->load($existingUser, 'comments');
```
`ModelRepository` classes have a `RelationRepository`
set which allows even more advanced relation settings,
such as `sync` and `associate`.
```php
//You can also pass in the class definition into the constructor.
$profileRepository = new ModelRepository(Profile::class);
$profile = $profileRepository->newModel(['stuff' => 'things']);
//$repository is still set for User::class here
$user = $repository->find(1);
//Assuming a BelongsTo relation named profile()
//on User, let's associate it!
$repository
->getRelationRepository()
->associateRelation($user, 'profile', $profile);
//Dont forget to save!
$repository->save($user);
//Assuming comment IDs...
$commentIds = [1, 2, 3];
//Let's sync them to a comments relation!
$repository
->getRelationRepository()
->sync($user, 'comments', $commentIds);
```
All relation methods should be represented as well,
allowing versatile use.
### Dependency Injection
One of the best aspects of this library is the
ability to dependency inject your database access,
rather than using static methods.
```php
// Rather than doing this... bad!!
public function badControllerMethod()
{
$user = User::find(1);
}
//We can do this! Good!
public function goodControllerMethod(ModelRepository $repository)
{
$repository->setModel(User::class);
$user = $repository->find(1);
}
```
This allows easy dependency injection, which in
turn makes it very easy to isolate dependencies
for testing.
### Model Factories
Want to create models without using `new Model` all over your code? `ModelFactory` is here to help!
```php
$factory = new ModelFactory();
//We need to pass in a ModelRepository
//to be able to save
$factory->setRepository(new ModelRepository(User::class));
$user = $factory->make([
'name' => '<NAME>'
]);
```
This will save the model with the attributes specified.
You can also use the `ModelFactory` to save `BelongsTo`
relations:
```php
$user = $factory->make([
'name' => '<NAME>'
], [
'profile' => $profile
]);
```
### Inheritance
Another nice feature is the ability to extend
these classes at will. You can continue to use
`ModelRepository` on its own, but if you prefer,
you can extend the repositories and factories yourself.
Here, we'll extend `ModelRepository` so we don't have to
set the model every time. We'll also make it so default
criteria are set on the repository.
```php
class UserRepository extends ModelRepository
{
/**
* {@inheritdoc}
*/
public function getDefaultModel()
{
//We just need to pass in our default model
return User::class;
}
}
```
Then, we can use this without setting a model!
No `setModel` required!
```php
public function controllerMethod(UserRepository $repository)
{
$user = $repository->find(1);
}
```
This will perform a query along the following lines (if using MySQL):
```
SELECT * FROM `users` WHERE `users`.`id` = ? LIMIT 1
```
with the bound parameter of 1.
## Future Plans
Short term, the plan is to keep this library compatible with major
versions of Laravel > 5. That means testing for new versions and
adding new methods that exist in newer versions.
I would love to add non-eloquent support to this repository.
The plan is to add both raw query as well as Doctrine repositories,
but that isn't coming quite yet.
Long-term plans are a little more unclear. After non-eloquent support,
I will probably decide on my next feature to implement. If you have any
ideas, I would love to hear them!<file_sep>/tests/RelationRepositoryTest.php
<?php
namespace Laracore\Tests;
use Illuminate\Database\Eloquent\Relations\BelongsTo;
use Illuminate\Database\Eloquent\Relations\Relation;
use Laracore\Repository\Relation\RelationRepository;
use Illuminate\Database\Eloquent\Model;
use Mockery\Mock;
class RelationRepositoryTest extends TestCase
{
/**
* @var RelationRepository|Mock
*/
protected $relationRepository;
public function setUp()
{
parent::setUp();
$repository = \Mockery::mock(RelationRepository::class)->makePartial();
$this->relationRepository = $repository;
}
public function createMockModel()
{
return \Mockery::mock(Model::class);
}
public function createMockRelation()
{
return \Mockery::mock(BelongsTo::class);
}
public function testSetRelation()
{
$model = $this->createMockModel();
$model->shouldReceive('setRelation')
->once()
->andReturnSelf();
$result = $this->relationRepository->setRelation($model, 'stuff', 'things');
$this->assertInstanceOf(Model::class, $result);
}
public function testSetRelations()
{
$model = $this->createMockModel();
$relations = [
'relation' => $this->createMockModel()
];
$model->shouldReceive('setRelations')
->with($relations)
->once();
$this->relationRepository->setRelations($model, $relations);
}
public function testSetTouchedRelations()
{
$model = $this->createMockModel();
$relations = [
'relation'
];
$model->shouldReceive('setTouchedRelations')
->with($relations)
->once()
->andReturnSelf();
$result = $this->relationRepository->setTouchedRelations($model, $relations);
$this->assertInstanceOf(Model::class, $result);
}
public function testAssociateRelation()
{
$model = $this->createMockModel();
$relation = 'relation';
$value = $this->createMockModel();
$mockRelation = \Mockery::mock(BelongsTo::class);
$mockRelation
->shouldReceive('associate')
->with($value)
->once()
->andReturn($model);
$model
->shouldReceive($relation)
->once()
->andReturn($mockRelation);
$this->relationRepository->associateRelation($model, $relation, $value);
}
public function testAssociateMany()
{
$model = $this->createMockModel();
$relations = [
'relation' => $this->createMockModel(),
'otherRelation' => $this->createMockModel()
];
foreach ($relations as $relation => $value) {
$this
->relationRepository
->shouldReceive('associateRelation')
->with($model, $relation, $value)
->andReturn($model);
};
$this->relationRepository->associateMany($model, $relations);
}
public function testDissociateRelation()
{
$relation = 'relation';
$mockRelation = $this->createMockRelation();
$mockRelation
->shouldReceive('dissociate')
->once();
$model = $this->createMockModel();
$model
->shouldReceive($relation)
->once()
->andReturn($mockRelation);
$this->relationRepository->dissociateRelation($model, $relation);
}
public function testAttachRelation()
{
$relation = 'relation';
$modelId = 1;
$tableAttributes = ['stuff' => 'things'];
$mockRelation = $this->createMockRelation();
$mockRelation
->shouldReceive('attach')
->with($modelId, $tableAttributes)
->once();
$model = $this->createMockModel();
$model
->shouldReceive($relation)
->once()
->andReturn($mockRelation);
$this->relationRepository->attachRelation($model, $relation, $modelId, $tableAttributes);
}
public function testDetachRelation()
{
$relation = 'relation';
$modelId = 1;
$mockRelation = $this->createMockRelation();
$mockRelation
->shouldReceive('detach')
->with($modelId)
->once();
$model = $this->createMockModel();
$model
->shouldReceive($relation)
->once()
->andReturn($mockRelation);
$this->relationRepository->detachRelation($model, $relation, $modelId);
}
public function testUpdateExistingPivot()
{
$relation = 'relation';
$id = 1;
$tableAttributes = ['stuff' => 'things'];
$mockRelation = $this->createMockRelation();
$mockRelation
->shouldReceive('updateExistingPivot')
->with($id, $tableAttributes)
->once();
$model = $this->createMockModel();
$model
->shouldReceive($relation)
->once()
->andReturn($mockRelation);
$this->relationRepository->updateExistingPivot($model, $relation, $id, $tableAttributes);
}
public function testSync()
{
$relation = 'relation';
$ids = [1, 2];
$mockRelation = $this
->createMockRelation()
->shouldReceive('sync')
->with($ids)
->once()
->getMock();
$model = $this
->createMockModel()
->shouldReceive($relation)
->once()
->andReturn($mockRelation)
->getMock();
$this->relationRepository->sync($model, $relation, $ids);
}
public function testSaveMany()
{
$relation = 'relation';
$value = $this->createMockModel();
$mockRelation = $this
->createMockRelation()
->shouldReceive('saveMany')
->with($value)
->once()
->getMock();
$model = $this
->createMockModel()
->shouldReceive($relation)
->once()
->andReturn($mockRelation)
->getMock();
$this->relationRepository->saveMany($model, $relation, $value);
}
public function testSave()
{
$relation = 'relation';
$value = $this->createMockModel();
$tableAttributes = ['stuff' => 'things'];
$mockRelation = $this
->createMockRelation()
->shouldReceive('save')
->with($value, $tableAttributes)
->once()
->getMock();
$model = $this
->createMockModel()
->shouldReceive($relation)
->once()
->andReturn($mockRelation)
->getMock();
$this->relationRepository->save($model, $relation, $value, $tableAttributes);
}
public function testRelationsIsA()
{
$relation = \Mockery::mock(Relation::class);
$expected = Relation::class;
$this->assertTrue($this->relationRepository->relationsIsA($relation, $expected));
//Test for failure too
$this->assertFalse($this->relationRepository->relationsIsA($relation, BelongsTo::class));
}
}<file_sep>/tests/Stub/ModelRepositoryWithDefaultModel.php
<?php
namespace Laracore\Tests\Stub;
use Laracore\Repository\ModelRepository;
class ModelRepositoryWithDefaultModel extends ModelRepository
{
public function getDefaultModel()
{
return ModelStub::class;
}
}<file_sep>/lib/Repository/Relation/RelationInterface.php
<?php
namespace Laracore\Repository\Relation;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\Relation;
interface RelationInterface
{
/**
* Sets a relation on a model.
*
* @param Model $model
* @param $relation
* @param $value
* @return Model
*/
public function setRelation(Model $model, $relation, $value);
/**
* Sets relations on a model.
*
* @param Model $model
* @param $relations
* @return Model
*/
public function setRelations(Model $model, array $relations);
/**
* Sets touched relations on a model.
*
* @param Model $model
* @param array $touches
* @return mixed
*/
public function setTouchedRelations(Model $model, array $touches = []);
/**
* Associates a model with a relation.
*
* @param Model $model
* @param $relation
* @param Model $value
* @return Model
*/
public function associateRelation(Model $model, $relation, Model $value);
/**
* Associates many relations with a model.
*
* @param Model $model
* @param array $relations
* @return Model
*/
public function associateMany(Model $model, $relations);
/**
* Dissociates a model with a relation.
*
* @param Model $model
* @param $relation
* @return Model
*/
public function dissociateRelation(Model $model, $relation);
/**
* Attaches a relation based on id.
*
* @param Model $model
* @param $relation
* @param $modelId
* @param array $tableAttributes
* @return Model
*/
public function attachRelation(Model $model, $relation, $modelId, $tableAttributes = []);
/**
* Detaches a relation based on id.
*
* @param Model $model
* @param $relation
* @param null $modelId
* @return Model
*/
public function detachRelation(Model $model, $relation, $modelId = null);
/**
* Updates an existing pivot on a relation.
*
* @param Model $model
* @param $relation
* @param $id
* @param array $tableAttributes
* @return Model
*/
public function updateExistingPivot(Model $model, $relation, $id, $tableAttributes = []);
/**
* Syncs the relations on a model with ids.
*
* @param Model $model
* @param $relation
* @param array $ids
* @return Model
*/
public function sync(Model $model, $relation, $ids = []);
/**
* Saves many relations.
*
* @param Model $model
* @param $relation
* @param $value
* @return Relation
*/
public function saveMany(Model $model, $relation, $value);
/**
* Saves a relation.
*
* @param Model $model
* @param $relation
* @param Model $value
* @param array $tableAttributes
* @return Relation
*/
public function save(Model $model, $relation, Model $value, $tableAttributes = []);
/**
* Determines if a relation is of an expected type.
*
* @param Relation $relation
* @param $expected
* @return boolean
*/
public function relationsIsA(Relation $relation, $expected);
}<file_sep>/lib/Factory/ModelFactory.php
<?php
namespace Laracore\Factory;
use Laracore\Exception\RelationNotBelongsToException;
use Illuminate\Database\Eloquent\Model;
use Laracore\Exception\NoRepositoryToInstantiateException;
use Laracore\Repository\ModelRepository;
use Laracore\Repository\RepositoryInterface;
class ModelFactory implements FactoryInterface
{
/**
* Sets whether or not to use mass assignment during creation of models.
*
* @var bool
*/
protected $massAssign = false;
/**
* @var RepositoryInterface
*/
protected $repository;
/**
* Retrieves a repository.
* Fails if the repository cannot be instantiated.
*
* @return ModelRepository
* @throws NoRepositoryToInstantiateException
*/
public function getRepository()
{
if (!isset($this->repository)) {
$this->setRepository($this->instantiateRepository());
}
return $this->repository;
}
/**
* {@inheritdoc}
*/
public function setRepository(RepositoryInterface $repository)
{
$this->repository = $repository;
}
/**
* {@inheritdoc}
*/
public function setModel($className)
{
$this->getRepository()->setModel($className);
}
/**
* {@inheritdoc}
*/
public function make(array $attributes = [], array $associatedRelations = [])
{
$repository = $this->getRepository();
$model = $repository->newModel();
if ($this->massAssign) {
$repository->fill($model, $attributes);
} else {
foreach ($attributes as $key => $value) {
$repository->setAttribute($model, $key, $value);
}
}
$this->addAssociatedRelations($model, $associatedRelations);
return $this->getRepository()->save($model);
}
/**
* {@inheritdoc}
*/
public function instantiateRepository()
{
throw new NoRepositoryToInstantiateException('Can\'t instantiate repository for ModelFactory. Make sure to set repository via setRepository method.');
}
/**
* {@inheritdoc}
*/
public function addAssociatedRelations(Model $model, array $associatedRelations, $save = false)
{
$relationRepository = $this->getRepository()->getRelationRepository();
foreach ($associatedRelations as $relation => $value) {
if (!$relationRepository->relationIsBelongsTo($model, $relation)) {
throw new RelationNotBelongsToException('Only BelongsTo relations can be associated via addAssociatedRelations');
}
$relationRepository->associateRelation($model, $relation, $value);
}
if ($save) {
$this->getRepository()->save($model);
}
return $model;
}
}<file_sep>/lib/Factory/FactoryInterface.php
<?php
namespace Laracore\Factory;
use Illuminate\Database\Eloquent\Model;
use Laracore\Exception\NoRepositoryToInstantiateException;
use Laracore\Exception\RelationNotBelongsToException;
use Laracore\Repository\RepositoryInterface;
interface FactoryInterface
{
/**
* Retrieves a repository.
* Fails if the repository cannot be instantiated.
*
* @return RepositoryInterface
* @throws NoRepositoryToInstantiateException
*/
public function getRepository();
/**
* Sets the repository on the factory.
*
* @param RepositoryInterface $repository
*/
public function setRepository(RepositoryInterface $repository);
/**
* Sets the model on the repository.
*
* @param $className
*/
public function setModel($className);
/**
* Makes a new model with attributes
*
* @param array $attributes
* @param array $associatedRelations
* @return Model
*/
public function make(array $attributes = [], array $associatedRelations = []);
/**
* Adds the associated relations on a model.
* Will save and return if $save is set to true.
* This method should only be used for BelongsTo relations!!
*
* @param Model $model
* @param array $associatedRelations
* @param bool $save
* @return Model
* @throws RelationNotBelongsToException
*/
public function addAssociatedRelations(Model $model, array $associatedRelations, $save = false);
/**
* Instantiates the repository.
*
* @return RepositoryInterface
* @throws NoRepositoryToInstantiateException
*/
public function instantiateRepository();
}<file_sep>/tests/Stub/ModelStub.php
<?php
namespace Laracore\Tests\Stub;
use Illuminate\Database\Eloquent\Model;
class ModelStub extends Model
{
public $guarded = ['id'];
public function save(array $options = [])
{
return true;
}
}<file_sep>/tests/ModelFactoryTest.php
<?php
namespace Laracore\Tests;
use Laracore\Repository\ModelRepository;
use Laracore\Repository\RepositoryInterface;
use Laracore\Factory\ModelFactory;
use Mockery\Mock;
use Illuminate\Database\Eloquent\Model;
class ModelFactoryTest extends TestCase
{
/**
* @var ModelFactory|Mock
*/
private $factory;
public function setUp()
{
parent::setUp();
$factory = \Mockery::mock(ModelFactory::class)->makePartial();
$this->factory = $factory;
}
public function testSetAndGetRepository()
{
$repository = \Mockery::mock(RepositoryInterface::class);
$this->factory->setRepository($repository);
$this->assertEquals($this->factory->getRepository(), $repository);
}
/**
* @expectedException \Laracore\Exception\NoRepositoryToInstantiateException
*/
public function testInstantiateRepository()
{
$this->factory->instantiateRepository();
}
public function testMake()
{
$attributes = [
'stuff' => 'things'
];
$associatedRelations = [
'relation' => \Mockery::mock(Model::class)
];
$model = \Mockery::mock(Model::class);
$repository = \Mockery::mock(ModelRepository::class);
$repository
->shouldReceive('newModel')
->once()
->andReturn($model);
$repository
->shouldReceive('setAttribute')
->times(count($attributes));
$repository
->shouldReceive('save')
->with($model)
->once();
$this
->factory
->shouldReceive('getRepository')
->andReturn($repository);
$this
->factory
->shouldReceive('addAssociatedRelations')
->with($model, $associatedRelations)
->once();
$this->factory->make($attributes, $associatedRelations);
}
/**
* @expectedException \Laracore\Exception\RelationNotBelongsToException
*/
public function testAddAssociatedRelations()
{
$model = \Mockery::mock(Model::class);
$repository = \Mockery::mock(RepositoryInterface::class);
$repository->shouldReceive('getRelationRepository')
->andReturnSelf();
$repository->shouldReceive('relationIsBelongsTo')
->andReturn(false);
$this->factory->setRepository($repository);
$this->factory->addAssociatedRelations($model, ['stuff' => 'things'], false);
}
}<file_sep>/lib/Exception/RelationNotBelongsToException.php
<?php
namespace Laracore\Exception;
class RelationNotBelongsToException extends \Exception
{
//
}<file_sep>/lib/Exception/NoRepositoryToInstantiateException.php
<?php
namespace Laracore\Exception;
class NoRepositoryToInstantiateException extends \Exception
{
//
}<file_sep>/tests/ModelRepositoryTest.php
<?php
namespace Laracore\Tests;
use Illuminate\Database\Eloquent\Builder;
use Illuminate\Database\Eloquent\Model;
use Laracore\Exception\ModelClassNotSetException;
use Laracore\Repository\ModelRepository;
use Laracore\Repository\Relation\RelationInterface;
use Laracore\Tests\Stub\ModelRepositoryWithDefaultModel;
use Laracore\Tests\Stub\ModelStubWithScopes;
use Mockery\Mock;
use Mockery\MockInterface;
use Laracore\Tests\Stub\ModelStub;
class ModelRepositoryTest extends TestCase
{
/**
* @var ModelRepository|Mock
*/
private $repository;
/**
* {@inheritdoc}
*/
public function setUp()
{
parent::setUp();
$repository = \Mockery::mock(ModelRepository::class)->makePartial();
$this->repository = $repository;
}
/**
* Sets up the newModel mock on the repository.
*
* @param MockInterface $model
* @return ModelRepository|Mock
*/
public function setUpNewModelMock(MockInterface $model)
{
$this->repository->shouldReceive('newModel')->andReturn($model)->byDefault();
return $this->repository;
}
public function createMockModel()
{
$model = \Mockery::mock(Model::class);
return $model;
}
public function testSetModelAndGetModel()
{
$className = 'Test';
$this->repository->setModel($className);
$this->assertEquals($this->repository->getModel(), $className);
}
public function testConstructorSetsModelAndRelationInterface()
{
$relationMock = \Mockery::mock(RelationInterface::class);
$repository = new ModelRepository(ModelStub::class, $relationMock);
$this->assertEquals(ModelStub::class, $repository->getModel());
$this->assertEquals($relationMock, $repository->getRelationRepository());
}
public function testGetModelReturnsDefaultModel()
{
$repository = new ModelRepositoryWithDefaultModel();
$this->assertEquals($repository->getModel(), ModelStub::class);
}
/**
* @expectedException \Laracore\Exception\ModelClassNotSetException
*/
public function testGetModelThrowsExceptionWithNoDefaultSet()
{
$this->repository->getModel();
}
public function testSetAndGetRelationRepository()
{
/** @var RelationInterface $repository */
$repository = \Mockery::mock(RelationInterface::class);
$this->repository->setRelationRepository($repository);
$this->assertEquals($repository, $this->repository->getRelationRepository());
}
/**
* @expectedException \Laracore\Exception\RelationInterfaceExceptionNotSetException
*/
public function testGetRelationRepositoryNoRepositorySet()
{
$this->repository->getRelationRepository();
}
public function testNewModel()
{
$this->repository->setModel(ModelStub::class);
$model = $this->repository->newModel([
'stuff' => 'things'
]);
$this->assertInstanceOf(ModelStub::class, $model);
$data = $model->getAttributes();
$this->assertArrayHasKey('stuff', $data);
$this->assertTrue($data['stuff'] == 'things');
}
public function testLoad()
{
$relations = ['stuff'];
$model = $this->createMockModel();
$model->shouldReceive('load')->once()->with($relations);
$result = $this->repository->load($model, $relations);
$this->assertInstanceOf(Model::class, $result);
}
public function testFind()
{
$id = 1;
$model = $this->createMockModel();
$model->shouldReceive('with')->once()->andReturnSelf();
$model->shouldReceive('find')->with($id)->once()->andReturnSelf();
$this->setUpNewModelMock($model);
$result = $this->repository->find($id);
$this->assertInstanceOf(Model::class, $result);
}
public function testFindOrFail()
{
$id = 1;
$model = $this->createMockModel();
$model->shouldReceive('with')->once()->andReturnSelf();
$model->shouldReceive('findOrFail')->with($id)->once()->andReturnSelf();
$this->setUpNewModelMock($model);
$result = $this->repository->findOrFail($id);
$this->assertInstanceOf(Model::class, $result);
}
public function testFindOrNew()
{
$id = 1;
$columns = ['unique', 'columns'];
$model = $this->createMockModel();
$model->shouldReceive('findOrNew')
->with($id, $columns)
->once()
->andReturnSelf();
$this->setUpNewModelMock($model);
$result = $this->repository->findOrNew($id, $columns);
$this->assertInstanceOf(Model::class, $result);
}
public function testCreate()
{
$data = ['stuff' => 'things'];
$builder = \Mockery::mock(Builder::class);
$model = $this->createMockModel();
$model
->shouldReceive('query')
->once()
->andReturn($builder);
$builder
->shouldReceive('create')
->with($data)
->once()
->andReturn($model);
$this->setUpNewModelMock($model);
$this->repository->create($data);
}
public function testForceCreate()
{
$data = ['stuff' => 'things'];
$builder = \Mockery::mock(Builder::class);
$model = $this->createMockModel();
$model
->shouldReceive('query')
->once()
->andReturn($builder);
$builder
->shouldReceive('forceCreate')
->with($data)
->once()
->andReturn($model);
$this->setUpNewModelMock($model);
$this->repository->forceCreate($data);
}
public function testFirstOrCreate()
{
$attributes = ['stuff' => 'things'];
$with = ['relation.subRelation'];
$model = $this->createMockModel();
$model
->shouldReceive('firstOrCreate')
->with($attributes)
->once()
->andReturnSelf();
$this->repository
->shouldReceive('load')
->with($model, $with)
->once()
->andReturn($model);
$this->setUpNewModelMock($model);
$this->repository->firstOrCreate($attributes, $with);
}
public function testFirstOrNew()
{
$attributes = ['stuff' => 'things'];
$model = $this->createMockModel();
$model
->shouldReceive('firstOrNew')
->with($attributes)
->andReturnSelf();
$this->setUpNewModelMock($model);
$this->repository->firstOrNew($attributes);
}
public function testAll()
{
$columns = ['columns'];
$model = $this->createMockModel();
$model
->shouldReceive('all')
->with($columns)
->once();
$this->setUpNewModelMock($model);
$this->repository->all($columns);
}
public function testWith()
{
$with = ['relation.subRelation'];
$model = $this->createMockModel();
$model
->shouldReceive('with')
->with($with)
->once();
$this->setUpNewModelMock($model);
$this->repository->with($with);
}
public function testQuery()
{
$model = $this->createMockModel();
$model
->shouldReceive('query')
->once();
$this->setUpNewModelMock($model);
$this->repository->query();
}
public function testSave()
{
$model = $this->createMockModel();
$options = [
'stuff' => 'things'
];
$model
->shouldReceive('save')
->with($options)
->once();
$this->repository->save($model, $options);
}
public function testFill()
{
$model = $this->createMockModel();
$model
->shouldReceive('fill')
->once();
$this->repository->fill($model);
}
public function testFillAndSave()
{
$model = $this->createMockModel();
$model
->shouldReceive('fill')
->once();
$this->repository
->shouldReceive('save')
->once()
->andReturn($model);
$this->repository->fillAndSave($model);
}
public function testSelect()
{
$columns = ['column'];
$model = $this->createMockModel();
$model
->shouldReceive('select')
->with($columns)
->once();
$this->setUpNewModelMock($model);
$this->repository->select($columns);
}
public function testUpdate()
{
$model = $this->createMockModel();
$updatedValues = ['updated' => 'value'];
$this
->repository
->shouldReceive('fillAndSave')
->with($model, $updatedValues)
->once();
$this->repository->update($model, $updatedValues);
}
public function testDelete()
{
$model = $this->createMockModel();
$model
->shouldReceive('delete')
->once();
$this->repository->delete($model);
}
public function testPaginate()
{
$with = ['relation.subRelation'];
$perPage = 20;
$model = $this->createMockModel();
$model
->shouldReceive('with')
->with($with)
->once()
->andReturnSelf();
$model
->shouldReceive('paginate')
->with($perPage)
->once();
$this->setUpNewModelMock($model);
$this->repository->paginate($perPage, $with);
}
public function testWhereFirst()
{
$column = 'column';
$operator = '';
$value = 'value';
$with = ['relation.subRelation'];
$model = $this->createMockModel();
$model
->shouldReceive('with')
->with($with)
->once()
->andReturnSelf();
$model
->shouldReceive('where')
->with($column, $operator, $value)
->once()
->andReturnSelf();
$model
->shouldReceive('first')
->once();
$this->setUpNewModelMock($model);
$this->repository->whereFirst($column, $operator, $value, $with);
}
public function testWhereGet()
{
$column = 'column';
$operator = '';
$value = 'value';
$with = ['relation.subRelation'];
$model = $this->createMockModel();
$model
->shouldReceive('with')
->with($with)
->once()
->andReturnSelf();
$model
->shouldReceive('where')
->with($column, $operator, $value)
->once()
->andReturnSelf();
$model
->shouldReceive('get')
->once();
$this->setUpNewModelMock($model);
$this->repository->whereGet($column, $operator, $value, $with);
}
public function testWithoutGlobalScopes()
{
$methodName = 'withoutGlobalScopes';
$model = \Mockery::mock(ModelStubWithScopes::class);
$firstArgument = 'test';
$secondArgument = 'test2';
$thirdArgument = [$firstArgument, $secondArgument];
$model->shouldReceive($methodName)
->with(null)
->once();
$model->shouldReceive($methodName)
->with($firstArgument)
->once();
$model->shouldReceive($methodName)
->with($secondArgument)
->once();
$model->shouldReceive($methodName)
->with($thirdArgument)
->once();
$this->setUpNewModelMock($model);
$this->repository->withoutGlobalScopes();
$this->repository->withoutGlobalScopes($firstArgument);
$this->repository->withoutGlobalScopes($secondArgument);
$this->repository->withoutGlobalScopes($thirdArgument);
}
}<file_sep>/lib/Repository/RepositoryInterface.php
<?php
namespace Laracore\Repository;
use Illuminate\Database\Eloquent\Collection;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Builder;
use Illuminate\Pagination\LengthAwarePaginator;
use Laracore\Repository\Relation\RelationInterface;
interface RepositoryInterface
{
/**
* Sets the model.
* @param $model
*/
public function setModel($model);
/**
* Retrieves the default model class.
* Used in the constructor if the model class is not set properly.
*
     * @return string|null
*/
public function getDefaultModel();
/**
* Retrieves the class name of the model this repository is meant to represent.
     * @return string
*/
public function getModel();
/**
* Sets an attribute on the model.
*
* @param Model $model
* @param $key
* @param $value
* @return Model
*/
public function setAttribute(Model $model, $key, $value);
/**
* Finds a model by its ID.
* @param $id
* @param array $with
* @return Model
*/
public function find($id, $with = []);
/**
* Finds a model, or fails and throws an exception.
* @param $id
* @param array $with
* @return Model
*/
public function findOrFail($id, $with = []);
/**
* Finds a model, or creates a new one.
* @param $id
* @param array $columns
* @return \Illuminate\Support\Collection|static
*/
public function findOrNew($id, array $columns = ['*']);
/**
* Creates a new model.
* Delegates from the model to the query builder.
* @see Builder::create()
*
* @param array $attributes
* @return Model
*/
public function create(array $attributes = []);
/**
* Force creates a new model.
* @see Builder::forceCreate()
*
* @param array $attributes
* @return mixed
*/
public function forceCreate(array $attributes = []);
/**
* Finds the first instance, or creates a new model (immediately saving it)
* @param array $attributes
* @param array $with
* @return Model
*/
public function firstOrCreate(array $attributes, $with = []);
/**
* Finds the first instance, or creates a new model (without saving it)
* @param array $attributes
* @return Model
*/
public function firstOrNew(array $attributes);
/**
* Retrieves all records from a database.
*
* @param array $columns
* @return \Illuminate\Database\Eloquent\Collection|static[]
*/
public function all($columns = ['*']);
/**
* Instantiates a new model, and returns it.
*
* @param array $attrs
* @return Model
*/
public function newModel(array $attrs = []);
/**
* @param array $with
* @return Builder
*/
public function with($with = []);
/**
* Creates a query builder instance, and returns it.
*
* @return Builder
*/
public function query();
/**
* Saves a model.
*
* @param Model $model
* @param array $options
* @return Model
*/
public function save(Model $model, array $options = []);
/**
* Fills a model with attributes.
*
* @param Model $model
* @param array $attributes
* @return Model
*/
public function fill(Model $model, array $attributes = []);
/**
* Fills a model, then saves it.
*
* @param Model $model
* @param array $attributes
* @return Model
*/
public function fillAndSave(Model $model, array $attributes = []);
/**
* Retrieves the relation repository.
*
* @return RelationInterface
*/
public function getRelationRepository();
/**
* Sets the relation repository.
*
* @param RelationInterface $repository
* @return mixed
*/
public function setRelationRepository(RelationInterface $repository);
/**
* Creates a query builder for select.
*
* @param string $columns
* @return Builder
*/
public function select($columns = '*');
/**
* Updates a model.
*
* @param Model $model
* @param array $updatedValues
* @return Model
*/
public function update(Model $model, array $updatedValues);
/**
* Updates or creates a model based on conditions.
* @see Builder::updateOrCreate()
*
* @param array $attributes
* @param array $values
* @return Model
*/
public function updateOrCreate(array $attributes, array $values = []);
/**
* Deletes a model.
*
* @param Model $model
*/
public function delete(Model $model);
/**
* Deletes the models based on id.
* @see Model::destroy()
*
* @param array|int $ids
* @return mixed
*/
public function destroy($ids);
/**
* Retrieves paginated results.
*
* @param int $perPage
* @param mixed $with
* @return LengthAwarePaginator
*/
public function paginate($perPage = 10, $with = []);
/**
* Retrieves the first result based on a single-column search.
*
* @param $column
* @param $operator
* @param $value
* @param mixed $with
     * @return Model|null
*/
public function whereFirst($column, $operator, $value, $with = []);
/**
* Retrieves a collection of results based on a single-column search.
*
* @param $column
* @param $operator
* @param $value
* @param mixed $with
* @return Collection
*/
public function whereGet($column, $operator, $value, $with = []);
/**
* Loads relations on a model.
*
* @param Model $model
* @param array $relations
* @return Model
*/
public function load(Model $model, $relations = []);
/**
* Builds a query with soft-deleted models.
*
* @return Builder
*/
public function withTrashed();
/**
* Starts a query without global scopes.
* @see Model::newQueryWithoutScope()
* @see Model::newQueryWithoutScopes()
*
* @param mixed $scopes
* @return Builder
*/
    public function withoutGlobalScopes($scopes = null);
}<file_sep>/lib/Repository/Relation/RelationRepository.php
<?php
namespace Laracore\Repository\Relation;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\BelongsTo;
use Illuminate\Database\Eloquent\Relations\BelongsToMany;
use Illuminate\Database\Eloquent\Relations\HasMany;
use Illuminate\Database\Eloquent\Relations\HasOne;
use Illuminate\Database\Eloquent\Relations\Relation;
class RelationRepository implements RelationInterface
{
/**
* {@inheritdoc}
*/
public function setRelation(Model $model, $relation, $value)
{
$model->setRelation($relation, $value);
return $model;
}
/**
* {@inheritdoc}
*/
public function setRelations(Model $model, array $relations)
{
$model->setRelations($relations);
return $model;
}
/**
* {@inheritdoc}
*/
public function setTouchedRelations(Model $model, array $touches = [])
{
$model->setTouchedRelations($touches);
return $model;
}
/**
* {@inheritdoc}
*/
public function associateRelation(Model $model, $relation, Model $value)
{
$model->$relation()->associate($value);
return $model;
}
/**
* {@inheritdoc}
*/
public function associateMany(Model $model, $relations)
{
foreach ($relations as $relation => $value) {
$model = $this->associateRelation($model, $relation, $value);
}
return $model;
}
/**
* {@inheritdoc}
*/
public function dissociateRelation(Model $model, $relation)
{
$model->$relation()->dissociate();
return $model;
}
/**
* {@inheritdoc}
*/
public function attachRelation(Model $model, $relation, $modelId, $tableAttributes = [])
{
$model->$relation()->attach($modelId, $tableAttributes);
return $model;
}
/**
* {@inheritdoc}
*/
public function detachRelation(Model $model, $relation, $modelId = null)
{
$model->$relation()->detach($modelId);
return $model;
}
/**
* {@inheritdoc}
*/
public function updateExistingPivot(Model $model, $relation, $id, $tableAttributes = [])
{
$model->$relation()->updateExistingPivot($id, $tableAttributes);
return $model;
}
/**
* {@inheritdoc}
*/
public function sync(Model $model, $relation, $ids = [])
{
$model->$relation()->sync($ids);
return $model;
}
/**
* {@inheritdoc}
*/
public function saveMany(Model $model, $relation, $value)
{
$model->$relation()->saveMany($value);
return $model;
}
/**
* {@inheritdoc}
*/
public function save(Model $model, $relation, Model $value, $tableAttributes = [])
{
$model->$relation()->save($value, $tableAttributes);
return $model;
}
/**
* {@inheritdoc}
*/
public function relationsIsA(Relation $relation, $expected)
{
return is_a($relation, $expected);
}
/**
* Checks if a relation is a BelongsTo relation.
*
* @param Model $model
* @param string $relationKey - the key of the relation on the model.
* @return bool
*/
public function relationIsBelongsTo(Model $model, $relationKey)
{
return $this->relationsIsA($model->$relationKey(), BelongsTo::class);
}
/**
     * Checks if a relation is a BelongsToMany relation.
*
* @param Model $model
* @param string $relationKey - the key of the relation on the model.
* @return bool
*/
public function relationIsBelongsToMany(Model $model, $relationKey)
{
return $this->relationsIsA($model->$relationKey(), BelongsToMany::class);
}
/**
     * Checks if a relation is a HasOne relation.
*
* @param Model $model
* @param string $relationKey - the key of the relation on the model.
* @return bool
*/
public function relationIsHasOne(Model $model, $relationKey)
{
return $this->relationsIsA($model->$relationKey(), HasOne::class);
}
/**
     * Checks if a relation is a HasMany relation.
*
* @param Model $model
* @param string $relationKey - the key of the relation on the model.
* @return bool
*/
public function relationIsHasMany(Model $model, $relationKey)
{
return $this->relationsIsA($model->$relationKey(), HasMany::class);
}
}<file_sep>/lib/Exception/ModelClassNotSetException.php
<?php
namespace Laracore\Exception;
class ModelClassNotSetException extends \Exception
{
//
}<file_sep>/lib/Repository/ModelRepository.php
<?php
namespace Laracore\Repository;
use Illuminate\Database\Eloquent\Model;
use Laracore\Exception\ModelClassNotSetException;
use Laracore\Exception\RelationInterfaceExceptionNotSetException;
use Laracore\Repository\Relation\RelationInterface;
use Laracore\Repository\Relation\RelationRepository;
class ModelRepository implements RepositoryInterface
{
/**
     * @var string
*/
protected $className;
/**
* @var RelationInterface
*/
protected $relationRepository;
public function __construct($model = null, RelationInterface $repository = null)
{
$model = (is_null($model) ? $this->getDefaultModel() : $model);
$this->setModel($model);
if (is_null($repository)) {
$repository = new RelationRepository();
}
$this->setRelationRepository($repository);
}
/**
* {@inheritdoc}
*/
public function setModel($model)
{
if (!is_null($model)) {
$this->className = $model;
}
}
/**
* {@inheritdoc}
*/
public function getDefaultModel()
{
return null;
}
/**
* {@inheritdoc}
*/
public function getModel()
{
if (is_null($this->className)) {
throw new ModelClassNotSetException('A model class must be set on this ModelRepository instance.');
}
return $this->className;
}
/**
* {@inheritdoc}
*/
public function setAttribute(Model $model, $key, $value)
{
$model->$key = $value;
return $model;
}
/**
* {@inheritdoc}
*/
public function find($id, $with = [])
{
return $this
->newModel()
->with($with)
->find($id);
}
/**
* {@inheritdoc}
*/
public function findOrFail($id, $with = [])
{
return $this
->newModel()
->with($with)
->findOrFail($id);
}
/**
* {@inheritdoc}
*/
public function findOrNew($id, array $columns = ['*'])
{
return $this
->newModel()
->findOrNew($id, $columns);
}
/**
* {@inheritdoc}
*/
public function create(array $attributes = [])
{
return $this
->query()
->create($attributes);
}
/**
* {@inheritdoc}
*/
public function forceCreate(array $attributes = [])
{
return $this
->query()
->forceCreate($attributes);
}
/**
* {@inheritdoc}
*/
public function firstOrCreate(array $attributes, $with = [])
{
$model = $this
->newModel()
->firstOrCreate($attributes);
$this->load($model, $with);
return $model;
}
/**
* {@inheritdoc}
*/
public function firstOrNew(array $attributes)
{
return $this
->newModel()
->firstOrNew($attributes);
}
/**
* {@inheritdoc}
*/
public function all($columns = ['*'])
{
return $this
->newModel()
->all($columns);
}
/**
* {@inheritdoc}
*/
public function newModel(array $attrs = [])
{
$className = $this->getModel();
return new $className($attrs);
}
/**
* {@inheritdoc}
*/
public function with($with = [])
{
return $this
->newModel()
->with($with);
}
/**
* {@inheritdoc}
*/
public function query()
{
return $this
->newModel()
->query();
}
/**
* {@inheritdoc}
*/
public function save(Model $model, array $options = [])
{
$model->save($options);
return $model;
}
/**
* {@inheritdoc}
*/
public function fill(Model $model, array $attributes = [])
{
$model->fill($attributes);
return $model;
}
/**
* {@inheritdoc}
*/
public function fillAndSave(Model $model, array $attributes = [])
{
$model = $this->fill($model, $attributes);
return $this->save($model);
}
/**
* {@inheritdoc}
*/
public function getRelationRepository()
{
if (!isset($this->relationRepository)) {
throw new RelationInterfaceExceptionNotSetException;
}
return $this->relationRepository;
}
/**
* {@inheritdoc}
*/
public function setRelationRepository(RelationInterface $repository)
{
$this->relationRepository = $repository;
}
/**
* {@inheritdoc}
*/
public function select($columns = '*')
{
return $this
->newModel()
->select($columns);
}
/**
* {@inheritdoc}
*/
public function update(Model $model, array $updatedValues)
{
return $this->fillAndSave($model, $updatedValues);
}
/**
* {@inheritdoc}
*/
public function delete(Model $model)
{
$model->delete();
}
/**
* {@inheritdoc}
*/
public function paginate($perPage = 10, $with = [])
{
return $this
->newModel()
->with($with)
->paginate($perPage);
}
/**
* {@inheritdoc}
*/
public function whereFirst($column, $operator, $value, $with = [])
{
return $this
->newModel()
->with($with)
->where($column, $operator, $value)
->first();
}
/**
* {@inheritdoc}
*/
public function whereGet($column, $operator, $value, $with = [])
{
return $this
->newModel()
->with($with)
->where($column, $operator, $value)
->get();
}
/**
* {@inheritdoc}
*/
public function load(Model $model, $relations = [])
{
$model->load($relations);
return $model;
}
/**
* {@inheritdoc}
*/
public function withoutGlobalScopes($scopes = null)
{
return $this->newModel()->withoutGlobalScopes($scopes);
}
/**
* {@inheritdoc}
*/
public function updateOrCreate(array $attributes, array $values = [])
{
return $this->newModel()->updateOrCreate($attributes, $values);
}
/**
* {@inheritdoc}
*/
public function destroy($ids)
{
$className = $this->getModel();
return $className::destroy($ids);
}
/**
* {@inheritdoc}
*/
public function withTrashed()
{
return $this->newModel()->withTrashed();
}
/**
* Our default method caller.
* Delegates our method calls off to the model class itself,
* ensuring that custom functions (like query scopes) are
* supported.
*
* @param $name
* @param $arguments
* @return mixed
*/
public function __call($name, $arguments)
{
/*
* If our first argument is an instance of a model, we
* invoke our method on the model instance, passing in the
* remaining arguments.
* Likewise, if we have a singular argument and it's an
         * instance of a model, we invoke our method on that instance.
*/
        if (is_array($arguments) && !empty($arguments) && $arguments[0] instanceof Model) {
/** @var Model $model */
$model = $arguments[0];
unset($arguments[0]);
return $model->$name(...$arguments);
} elseif ($arguments instanceof Model) {
/** @var Model $model */
$model = $arguments;
return $model->$name();
}
return $this->newModel()->$name(...$arguments);
}
}
|
ffa36f478baa9ee60cd33a9f5baf7707ef58a79d
|
[
"Markdown",
"PHP"
] | 18 |
PHP
|
jamesaspence/laravel-core-model
|
7530b150385aad10cc59d8dfeb60de5b8cbc1997
|
d7e3cb93e96b1bae78f4937b4f236ef86bd2b42b
|
refs/heads/main
|
<file_sep>#include <windows.h>
int main()
{
    HANDLE hMutex = CreateMutex(NULL, FALSE, L"MutexName");
    if (GetLastError() == ERROR_ALREADY_EXISTS)
    {
        CloseHandle(hMutex);
        return 0;
    }
// your code
ReleaseMutex(hMutex);
CloseHandle(hMutex);
return 0;
}
<file_sep># Single instance applications
Some applications let the user run as many instances as they like; others allow only a single instance to run at a time.
## Using a mutex
The term mutex comes from the words "mutually exclusive." A mutex is a synchronization object typically used to ensure that two or more threads do not access shared memory at the same time.
Using a mutex for this purpose is relatively straightforward. In this context, the mutex is used in the main() function as follows:
* Attempt to create the mutex.
* If the mutex does not already exist, this is the first instance of the application.
* If the mutex already exists, terminate the second instance by returning from main().
* Release the mutex before returning. This only happens when the application closes.
* Close the mutex handle (a HANDLE, i.e. a void*).
The following code is the simplest main() that can be written given the above steps:
```cpp
#include <windows.h>

int main(int argc, char* argv[])
{
    // HANDLE is a typedef for void*, so void* could be used instead.
    HANDLE hMutexHandle = CreateMutex(NULL,         // Security attributes. NULL means a default descriptor and a non-inheritable handle.
                                      TRUE,         // bInitialOwner: TRUE requests immediate ownership of the mutex for the calling thread,
                                                    // FALSE does not. If the mutex already exists, the ownership request is ignored.
                                      L"MutexName"  // Name shared by all instances of the application.
                                      );
    // CreateMutex returns a handle (NULL on failure). If a mutex with this name already exists,
    // a handle to the existing mutex is returned and GetLastError() reports ERROR_ALREADY_EXISTS.
    if (GetLastError() == ERROR_ALREADY_EXISTS)
    {
        CloseHandle(hMutexHandle); // Close the handle to the existing mutex before exiting.
        return 0;
    }
    // Your Code
    ReleaseMutex(hMutexHandle); // Releases ownership of the mutex object.
    CloseHandle(hMutexHandle);  // Close the handle with CloseHandle(); never use delete on a HANDLE.
    return 0;
}
```
* [CreateMutex Function](https://docs.microsoft.com/en-us/windows/win32/api/synchapi/nf-synchapi-createmutexa)
* [GetLastError Function](https://docs.microsoft.com/en-us/windows/win32/api/errhandlingapi/nf-errhandlingapi-getlasterror).
|
57cf0fc013ea1fd62e2ba626207c35e1afacd703
|
[
"Markdown",
"C++"
] | 2 |
C++
|
amirrezatav/Single-instance-applications
|
efec7d3041753e864b845d45cc83038ac67ca1f7
|
0958727fdb776843f44f0b3d4d33600669a163a3
|
refs/heads/master
|
<repo_name>bastolatanuja/lab2<file_sep>/lab exercise 2/question no 1.py
# Check whether 5 is in the list of the first natural numbers. Hint: list => [1, 2, 3, 4, 5]
numbers = [1, 2, 3, 4, 5]
if 5 in numbers:
    print("5 is in the list of natural numbers")
else:
    print("5 is not in the list of natural numbers")
# lab2!
[Untitled Session01183](https://user-images.githubusercontent.com/78782475/112866865-0076fb80-90da-11eb-9d7c-54f59c56cffe.jpg)
|
a1ba682227374222a20ff543eda9a5a434feabf7
|
[
"Markdown",
"Python"
] | 2 |
Python
|
bastolatanuja/lab2
|
79e9377d468fda0958b2b04363919098a8cb6b31
|
e8d595aea11dc9b5969eac242cfacf9ef045387c
|
refs/heads/main
|
<file_sep>package com.example.task101final;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.PopupMenu;
import com.google.firebase.auth.FirebaseAuth;
public class UserData extends AppCompatActivity implements PopupMenu.OnMenuItemClickListener{
Button logoutbutton;
FirebaseAuth mFirebaseAuth;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_user_data);
logoutbutton = findViewById(R.id.logout);
logoutbutton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
FirebaseAuth.getInstance().signOut();
Intent intoMain= new Intent(UserData.this,MainActivity.class);
startActivity(intoMain);
}
});
}
public void showMenu(View view)
{
PopupMenu popupMenu = new PopupMenu(this,view);
popupMenu.setOnMenuItemClickListener(this);
popupMenu.inflate(R.menu.all_list_menu);
popupMenu.show();
}
@Override
public boolean onMenuItemClick(MenuItem item) {
switch (item.getItemId())
{
case R.id.item1:
Intent intent = new Intent(UserData.this,MainActivity2.class);
startActivity(intent);
return true;
case R.id.item2:
Intent intent1 = new Intent(UserData.this,UserData.class);
startActivity(intent1);
return true;
case R.id.item3:
Intent intent2 = new Intent(UserData.this,MainActivity4.class);
startActivity(intent2);
return true;
default:
return false;
}
}
}<file_sep>package com.example.task101final;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Bitmap;
import android.location.Address;
import android.location.LocationListener;
import android.location.LocationManager;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.text.TextUtils;
import android.view.View;
import android.widget.Button;
import android.widget.CalendarView;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;
import com.google.android.gms.common.api.Status;
import com.google.android.libraries.places.api.Places;
import com.google.android.libraries.places.api.model.Place;
import com.google.android.libraries.places.widget.Autocomplete;
import com.google.android.libraries.places.widget.AutocompleteActivity;
import com.google.android.libraries.places.widget.model.AutocompleteActivityMode;
import java.util.Arrays;
import java.util.List;
public class mylist extends AppCompatActivity {
EditText title, description, time, quantity, location;
CalendarView calendarView;
ImageView image;
Button saveNote;
String date;
List<Address> addresses;
double lat,log;
String name;
LocationManager locationManager;
LocationListener locationListener;
int PICK_IMAGE_REQUEST = 200;
Uri imageFilePath;
Bitmap imageToStore;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_myList);
image = findViewById(R.id.newimage);
title = findViewById(R.id.newtitle);
description = findViewById(R.id.count);
calendarView = findViewById(R.id.calendarView1);
time = findViewById(R.id.time1);
quantity = findViewById(R.id.quantity1);
location = findViewById(R.id.location1);
saveNote = findViewById(R.id.saveNote1);
Places.initialize(getApplicationContext(),"AIzaSyAgTdbeuP-pygzosek_pdimeGBTr1GfETg");
location.setFocusable(false);
location.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
List<Place.Field> fieldList = Arrays.asList(Place.Field.ADDRESS,Place.Field.LAT_LNG,Place.Field.NAME);
Intent intent1 = new Autocomplete.IntentBuilder(AutocompleteActivityMode.OVERLAY,fieldList).build(mylist.this);
startActivityForResult(intent1,100);
}
});
        // Register the date listener outside the click handler so the selected date
        // is already captured by the time the user presses save.
        calendarView.setOnDateChangeListener(new CalendarView.OnDateChangeListener() {
            @Override
            public void onSelectedDayChange(@NonNull CalendarView view, int year, int month, int dayOfMonth) {
                date = dayOfMonth + "/" + (month + 1) + "/" + year;
            }
        });
        saveNote.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
if (TextUtils.isEmpty(title.getText().toString())) {
Toast.makeText(mylist.this, "Title Field is Empty", Toast.LENGTH_SHORT).show();
} else if (TextUtils.isEmpty(description.getText().toString())) {
Toast.makeText(mylist.this, "Description Field is Empty", Toast.LENGTH_SHORT).show();
} else if (date == null) {
Toast.makeText(mylist.this, "Date Field is Empty", Toast.LENGTH_SHORT).show();
} else if (TextUtils.isEmpty(time.getText().toString())) {
Toast.makeText(mylist.this, "Time Field is Empty", Toast.LENGTH_SHORT).show();
} else if (TextUtils.isEmpty(quantity.getText().toString())) {
Toast.makeText(mylist.this, "Quantity Field is Empty", Toast.LENGTH_SHORT).show();
} else if (TextUtils.isEmpty(location.getText().toString())) {
Toast.makeText(mylist.this, "Location Field is Empty", Toast.LENGTH_SHORT).show();
} else {
DatabaseClass1 db = new DatabaseClass1(mylist.this);
db.addNotes(imageToStore, title.getText().toString(), description.getText().toString(), date, time.getText().toString(), quantity.getText().toString(), location.getText().toString());
Intent intent = new Intent(mylist.this, MainActivity4.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent);
finish();
}
}
});
}
public void chooseImage1(View objectView) {
confirmDialog();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
try {
            if (requestCode == PICK_IMAGE_REQUEST && resultCode == RESULT_OK && data != null && data.getData() != null)
{
imageFilePath = data.getData();
imageToStore = MediaStore.Images.Media.getBitmap(getContentResolver(), imageFilePath);
image.setImageBitmap(imageToStore);
}
        } catch (Exception e) {
            e.printStackTrace(); // Log the failure instead of silently ignoring it.
        }
if (requestCode == 100 && resultCode == RESULT_OK)
{
Place place = Autocomplete.getPlaceFromIntent(data);
location.setText(place.getAddress());
lat = place.getLatLng().latitude;
log = place.getLatLng().longitude;
name = place.getName();
}
else if (resultCode == AutocompleteActivity.RESULT_ERROR)
{
Status status = Autocomplete.getStatusFromIntent(data);
Toast.makeText(getApplicationContext(),status.getStatusMessage(),Toast.LENGTH_SHORT).show();
}
}
void confirmDialog() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage("Allow the app to access photos,media and files on your device?");
builder.setPositiveButton("ALLOW", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Intent objectIntent = new Intent();
objectIntent.setType("image/*");
objectIntent.setAction(Intent.ACTION_GET_CONTENT);
startActivityForResult(objectIntent, PICK_IMAGE_REQUEST);
}
});
builder.setNegativeButton("DENY", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
}
});
builder.create().show();
}
}
<file_sep>package com.example.task101final;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.view.MenuItem;
import android.view.View;
import android.widget.PopupMenu;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
public class MainActivity2 extends AppCompatActivity implements PopupMenu.OnMenuItemClickListener{
RecyclerView recyclerView;
RecyclerViewAdapter adapter;
List<Model> notesList;
DatabaseClass databaseClass;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main2);
recyclerView = findViewById(R.id.recyclerView);
notesList = new ArrayList<>();
databaseClass = new DatabaseClass(this);
fetchAllNotesFromDatabase();
recyclerView.setLayoutManager(new LinearLayoutManager(this));
adapter = new RecyclerViewAdapter(this, MainActivity2.this,notesList);
recyclerView.setAdapter(adapter);
}
public void showMenu(View view)
{
PopupMenu popupMenu = new PopupMenu(this,view);
popupMenu.setOnMenuItemClickListener(this);
popupMenu.inflate(R.menu.all_list_menu);
popupMenu.show();
}
@Override
public boolean onMenuItemClick(MenuItem item) {
switch (item.getItemId())
{
case R.id.item1:
Intent intent = new Intent(MainActivity2.this,MainActivity2.class);
startActivity(intent);
return true;
case R.id.item2:
Intent intent1 = new Intent(MainActivity2.this,UserData.class);
startActivity(intent1);
return true;
case R.id.item3:
Intent intent2 = new Intent(MainActivity2.this,MainActivity4.class);
startActivity(intent2);
return true;
case R.id.item4:
Intent intent4 = new Intent(MainActivity2.this, AddingCart.class);
startActivity(intent4);
return true;
default:
return false;
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == 1){
recreate();
}
}
void fetchAllNotesFromDatabase()
{
Cursor cursor = databaseClass.readAllData();
if (cursor.getCount() == 0)
{
Toast.makeText(this, "No data to show", Toast.LENGTH_SHORT).show();
}
else
{
while(cursor.moveToNext())
{
byte [] imageBytes = cursor.getBlob(1);
Bitmap objectBitmap = BitmapFactory.decodeByteArray(imageBytes,0,imageBytes.length);
notesList.add(new Model(cursor.getString(0),objectBitmap,cursor.getString(2), cursor.getString(3),cursor.getString(4),cursor.getString(5),cursor.getString(6),cursor.getString(7)));
Toast.makeText(this," values in Database",Toast.LENGTH_SHORT).show();
}
}
}
public void addNewNote(View view) {
Intent intent1 = new Intent(MainActivity2.this, DiscoverItems.class);
startActivity(intent1);
}
}<file_sep>package com.example.task101final;
public class PaypalClientIdConfigClass {
public static final String PAYPAL_CLIENT_ID = "Aeg6wPM3aNxufQS0n8Qu0PbBErLBBZYCL4iSDtacPFu4o38AI07wDC1PZpsKwGH5qJ8Al8AKdUnHo-m1";
}
<file_sep>package com.example.task101final;
import android.app.Activity;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import java.util.List;
public class RecyclerViewAdapter3 extends RecyclerView.Adapter<RecyclerViewAdapter3.MyViewHolder> {
Context context;
Activity activity;
List<Model3> myCart;
public RecyclerViewAdapter3(Context context, Activity activity, List<Model3> myCart) {
this.context = context;
this.activity = activity;
this.myCart = myCart;
}
@NonNull
@Override
public RecyclerViewAdapter3.MyViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.recycler_view_layout_cart, parent, false);
return new MyViewHolder(view);
}
@Override
public void onBindViewHolder(@NonNull RecyclerViewAdapter3.MyViewHolder holder, int position) {
holder.id.setText(myCart.get(position).getId());
holder.title.setText(myCart.get(position).getTitle());
}
@Override
public int getItemCount() {
return myCart.size();
}
public class MyViewHolder extends RecyclerView.ViewHolder {
TextView id, title;
RelativeLayout layout;
public MyViewHolder(@NonNull View itemView) {
super(itemView);
title = itemView.findViewById(R.id.title_cart);
id = itemView.findViewById(R.id.counts);
layout = itemView.findViewById(R.id.note_layout2);
}
}
}
|
45a05edcd874e46554d6e54ef0c6256d2da511d1
|
[
"Java"
] | 5 |
Java
|
avneetsag/Task10.1
|
ebd93797fb0aa332ddd068d61d873dea73c5cd76
|
4c160443f4f483798450436e1db8be01e2014a00
|
refs/heads/master
|
<file_sep>
# Ridge Regression {#ridge_regression}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
> We should provide an example in Stan.
## Introduction
```{r, eval=FALSE}
# Settings
library(R2OpenBUGS)
bugslocation <- "C:/Program Files/OpenBUGS323/OpenBugs.exe" # location of OpenBUGS
bugsworkingdir <- file.path(getwd(), "BUGS") # Bugs working directory
#-------------------------------------------------------------------------------
# Simulate fake data
#-------------------------------------------------------------------------------
library(MASS)
n <- 50 # sample size
b0 <- 1.2
b <- rnorm(5, 0, 2)
Sigma <- matrix(c(10,3,3,2,1,
3,2,3,2,1,
3,3,5,3,2,
2,2,3,10,3,
1,1,2,3,15),5,5)
Sigma
x <- mvrnorm(n = n, rep(0, 5), Sigma)
simresid <- rnorm(n, 0, sd=3) # residuals
x.z <- x
for(i in 1:ncol(x)) x.z[,i] <- (x[,i]-mean(x[,i]))/sd(x[,i])
y <- b0 + x.z%*%b + simresid # calculate y, i.e. the data
#-------------------------------------------------------------------------------
# Function to generate initial values
#-------------------------------------------------------------------------------
inits <- function() {
list(b0=runif(1, -2, 2),
b=runif(5, -2, 2),
sigma=runif(1, 0.1, 2))
}
#-------------------------------------------------------------------------------
# Run OpenBUGS
#-------------------------------------------------------------------------------
parameters <- c("b0", "b", "sigma")
lambda <- c(1, 2, 10, 25, 50, 100, 500, 1000, 10000)
bs <- matrix(ncol=length(lambda), nrow=length(b))
bse <- matrix(ncol=length(lambda), nrow=length(b))
for(j in 1:length(lambda)){
datax <- list(y=as.numeric(y), x=x, n=n, mb=rep(0, 5), lambda=lambda[j])
fit <- bugs(datax, inits, parameters, model.file="ridge_regression.txt",
n.thin=1, n.chains=2, n.burnin=5000, n.iter=10000,
debug=FALSE, OpenBUGS.pgm = bugslocation,
working.directory=bugsworkingdir)
bs[,j] <- fit$mean$b
bse[,j] <- fit$sd$b
}
range(bs)
plot(1:length(lambda), seq(-2, 1, length=length(lambda)), type="n")
colkey <- rainbow(length(b))
for(j in 1:nrow(bs)){
lines(1:length(lambda), bs[j,], col=colkey[j], lwd=2)
lines(1:length(lambda), bs[j,]-2*bse[j,], col=colkey[j], lty=3)
lines(1:length(lambda), bs[j,]+2*bse[j,], col=colkey[j], lty=3)
}
abline(h=0)
round(fit$summary,2)
#-------------------------------------------------------------------------------
# Run WinBUGS
#-------------------------------------------------------------------------------
library(R2WinBUGS)
bugsdir <- "C:/Users/fk/WinBUGS14" #
mod <- bugs(datax, inits= inits, parameters,
model.file="normlinreg.txt", n.chains=2, n.iter=1000,
n.burnin=500, n.thin=1, debug=TRUE,
bugs.directory=bugsdir, program="WinBUGS", working.directory=bugsworkingdir)
#-------------------------------------------------------------------------------
# Test convergence and make inference
#-------------------------------------------------------------------------------
library(blmeco)
# Make Figure 12.2
par(mfrow=c(3,1))
historyplot(fit, "beta0")
historyplot(fit, "beta1")
historyplot(fit, "sigmaRes")
# Parameter estimates
print(fit$summary, 3)
# Make predictions for covariate values between 10 and 30
newdat <- data.frame(x=seq(10, 30, length=100))
Xmat <- model.matrix(~x, data=newdat)
predmat <- matrix(ncol=fit$n.sim, nrow=nrow(newdat))
for(i in 1:fit$n.sim) predmat[,i] <- Xmat%*%c(fit$sims.list$beta0[i], fit$sims.list$beta1[i])
newdat$lower.bugs <- apply(predmat, 1, quantile, prob=0.025)
newdat$upper.bugs <- apply(predmat, 1, quantile, prob=0.975)
plot(y~x, pch=16, las=1, cex.lab=1.4, cex.axis=1.2, type="n", main="")
polygon(c(newdat$x, rev(newdat$x)), c(newdat$lower.bugs, rev(newdat$upper.bugs)), col=grey(0.7), border=NA)
abline(c(fit$mean$beta0, fit$mean$beta1), lwd=2)
box()
points(x,y)
```
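
The note at the top of this chapter asks for a Stan version of this model. Below is a minimal sketch of how the ridge model could be written with `rstan`, passing the Stan code as a string via `model_code`. It treats the penalty `lambda` as a fixed prior precision of the standardized slopes (prior standard deviation `1/sqrt(lambda)`), mirroring the BUGS formulation above; the objects `n`, `x.z` and `y` are the ones simulated earlier in this chapter, and `lambda = 10` is an arbitrary example value.

```{r, eval=FALSE}
library(rstan)

ridge_code <- "
data {
  int<lower=0> n;
  int<lower=0> p;
  matrix[n, p] x;
  vector[n] y;
  real<lower=0> lambda;
}
parameters {
  real b0;
  vector[p] b;
  real<lower=0> sigma;
}
model {
  b0 ~ normal(0, 5);
  b ~ normal(0, 1 / sqrt(lambda));  // ridge prior: larger lambda shrinks the slopes towards 0
  sigma ~ cauchy(0, 5);
  y ~ normal(b0 + x * b, sigma);
}
"

datax <- list(n=n, p=ncol(x.z), x=x.z, y=as.numeric(y), lambda=10)
fit.stan <- stan(model_code=ridge_code, data=datax)
print(fit.stan, pars=c("b0", "b", "sigma"))
```

To reproduce the shrinkage plot above, the same model can simply be refitted over the vector of `lambda` values.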
<file_sep>
# Reproducible research {#reproducibleresearch}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Summary
## Further reading
- [Rmarkdown](https://bookdown.org/yihui/rmarkdown/): The first official book authored by the core R Markdown developers that provides a comprehensive and accurate reference to the R Markdown ecosystem. With R Markdown, you can easily create reproducible data analysis reports, presentations, dashboards, interactive applications, books, dissertations, websites, and journal articles, while enjoying the simplicity of Markdown and the great power of R and other languages.
- [Bookdown by <NAME>](https://bookdown.org/yihui/bookdown/): A guide to authoring books with R Markdown, including how to generate figures and tables, and insert cross-references, citations, HTML widgets, and Shiny apps in R Markdown. The book can be exported to HTML, PDF, and e-books (e.g. EPUB). The book style is customizable. You can easily write and preview the book in RStudio IDE or other editors, and host the book wherever you want (e.g. bookdown.org). Our book is written using bookdown. <file_sep># (PART) BAYESIAN DATA ANALYSIS {-}
# Introduction to PART II {#PART-II}
<a href="" target="_blank"><img src="images/part_II.jpg" width="410" style="display: block; margin: auto;" /></a>
------
## Further reading {-}
A really good introductory book to Bayesian data analyses is [@McElreath2016]. This book starts with a thorough introduction to applying the Bayes theorem for drawing inference from data. In addition, it carefully discusses what can and what cannot be concluded from statistical results. We like this very much.
We like looking up statistical methods in papers and books written by <NAME> [e.g. @Gelman2014] and <NAME> (e.g. [@Hastie2009, @Efron2016]) because both explain complicated things in a concise and understandable way.
<file_sep># MCMC using Stan {#stan}
## Background
Markov chain Monte Carlo (MCMC) simulation techniques were developed in the mid-1950s by physicists (Metropolis et al., 1953). Later, statisticians discovered MCMC (Hastings, 1970; Geman & Geman, 1984; Tanner & Wong, 1987; Gelfand et al., 1990; Gelfand & Smith, 1990). MCMC methods make it possible to obtain posterior distributions for parameters and latent variables (unobserved variables) of complex models. In parallel, personal computer capacities increased in the 1990s and user-friendly software such as the different programs based on the programming language BUGS (Spiegelhalter et al., 2003) came out. These developments boosted the use of Bayesian data analyses, particularly in genetics and ecology.
## Install `rstan`
In this book we use the program [Stan](http://mc-stan.org) to draw random samples from the joint posterior distribution of the model parameters given a model, the data, prior distributions, and initial values. To do so, it uses the “no-U-turn sampler,” which is a type of Hamiltonian Monte Carlo simulation [@Hoffman2014; @Betancourt2013_b], and optimization-based point estimation. These algorithms are more efficient than the ones implemented in BUGS programs and they can handle larger data sets. Stan works particularly well for hierarchical models [@Betancourt2013]. Stan runs on Windows, Mac, and Linux and can be used via the R interface `rstan`. Stan is automatically installed when the R package `rstan` is installed. For [installing rstan](https://github.com/stan-dev/rstan/wiki/RStan-Getting-Started), it is advised to follow closely the system-specific instructions.
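
A minimal sketch of the installation steps in R (assuming a working C++ toolchain, as described in the system-specific instructions linked above):

```r
# Install rstan from CRAN, then load it and set the commonly recommended options.
install.packages("rstan", dependencies = TRUE)
library(rstan)
options(mc.cores = parallel::detectCores())  # run the Markov chains in parallel
rstan_options(auto_write = TRUE)             # avoid recompiling unchanged Stan models
```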
## Writing a Stan model {#firststanmod}
The statistical model is written in the Stan language and saved in a text file. The Stan language is rather strict, forcing the user to write unambiguous models. Stan is very well documented and the [Stan Documentation](http://mc-stan.org/users/documentation/index.html) contains a comprehensive Language Manual, a Wiki documentation and various tutorials.
We here provide a normal regression with one predictor variable as a worked example. The entire Stan model is as follows (saved as `linreg.stan`):
```stan
data {
int<lower=0> n;
vector[n] y;
vector[n] x;
}
parameters {
vector[2] beta;
real<lower=0> sigma;
}
model {
//priors
beta ~ normal(0,5);
sigma ~ cauchy(0,5);
// likelihood
y ~ normal(beta[1] + beta[2] * x, sigma);
}
```
A Stan model consists of different named blocks. These blocks are (from first to last): data, transformed data, parameters, transformed parameters, model, and generated quantities. The blocks must appear in this order. The model block is mandatory; all other blocks are optional.
In the *data* block, the type, dimension, and name of every variable have to be declared. Optionally, the range of possible values can be specified. For example, `vector[n] y;` means that y is a vector (type real) of length n, and `int<lower=0> n;` means that n is an integer with nonnegative values (the bounds, here 0, are included). Note that the restriction to a possible range of values is not strictly necessary, but it helps to specify the correct model and it improves speed. We also see that each line needs to be closed by a semicolon. In the parameters block, all model parameters have to be defined. The coefficients of the linear predictor constitute a vector of length 2, `vector[2] beta;`. Alternatively, `real beta[2];` could be used. The sigma parameter is a one-number parameter that has to be positive, therefore `real<lower=0> sigma;`.
The *model* block contains the model specification. Stan functions can handle vectors, so we do not have to loop over all observations as is typical for BUGS. Here, we use a [Cauchy distribution](#cauchydistri) as a prior distribution for sigma. This distribution can have negative values, but because we defined the lower limit of sigma to be 0 in the parameters block, the prior distribution actually used in the model is a truncated Cauchy distribution (truncated at zero). In Chapter \@ref(choosepriors) we explain how to choose prior distributions.
Further characteristics of the Stan language that are good to know include: The variance parameter for the normal distribution is specified as the standard deviation (like in R but different from BUGS, where the precision is used). If no prior is specified, Stan uses a uniform prior over the range of possible values as specified in the parameter block. Variable names must not contain periods, for example, `x.z` would not be allowed, but `x_z` is allowed. To comment out a line, use double forward-slashes `//`.
## Run Stan from R
We fit the model to simulated data. The data are passed to Stan as a named list (alternatively, a character vector with the names of the data objects can be used). In our case, `x`, `y`, and `n` are objects that exist in the R console.
The function `stan()` starts Stan and returns an object containing MCMCs for every model parameter. We have to specify the name of the file that contains the model specification, the data, the number of chains, and the number of iterations per chain we would like to have. The first half of the iterations of each chain is declared as the warm-up. During the warm-up, Stan is not simulating a Markov chain, because in every step the algorithm is adapted. After the warm-up the algorithm is fixed and Stan simulates Markov chains.
```r
library(rstan)
# Simulate fake data
n <- 50 # sample size
sigma <- 5 # standard deviation of the residuals
b0 <- 2 # intercept
b1 <- 0.7 # slope
x <- runif(n, 10, 30) # random numbers of the covariate
simresid <- rnorm(n, 0, sd=sigma) # residuals
y <- b0 + b1*x + simresid # calculate y, i.e. the data
# Bundle data into a list
datax <- list(n=length(y), y=y, x=x)
# Run STAN
fit <- stan(file = "stanmodels/linreg.stan", data=datax, verbose = FALSE)
```
```
##
## SAMPLING FOR MODEL 'anon_model' NOW (CHAIN 1).
## Chain 1:
## Chain 1: Gradient evaluation took 2.5e-05 seconds
## Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.25 seconds.
## Chain 1: Adjust your expectations accordingly!
## Chain 1:
## Chain 1:
## Chain 1: Iteration: 1 / 2000 [ 0%] (Warmup)
## Chain 1: Iteration: 200 / 2000 [ 10%] (Warmup)
## Chain 1: Iteration: 400 / 2000 [ 20%] (Warmup)
## Chain 1: Iteration: 600 / 2000 [ 30%] (Warmup)
## Chain 1: Iteration: 800 / 2000 [ 40%] (Warmup)
## Chain 1: Iteration: 1000 / 2000 [ 50%] (Warmup)
## Chain 1: Iteration: 1001 / 2000 [ 50%] (Sampling)
## Chain 1: Iteration: 1200 / 2000 [ 60%] (Sampling)
## Chain 1: Iteration: 1400 / 2000 [ 70%] (Sampling)
## Chain 1: Iteration: 1600 / 2000 [ 80%] (Sampling)
## Chain 1: Iteration: 1800 / 2000 [ 90%] (Sampling)
## Chain 1: Iteration: 2000 / 2000 [100%] (Sampling)
## Chain 1:
## Chain 1: Elapsed Time: 0.055 seconds (Warm-up)
## Chain 1: 0.043 seconds (Sampling)
## Chain 1: 0.098 seconds (Total)
## Chain 1:
##
## SAMPLING FOR MODEL 'anon_model' NOW (CHAIN 2).
## Chain 2:
## Chain 2: Gradient evaluation took 5e-06 seconds
## Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.05 seconds.
## Chain 2: Adjust your expectations accordingly!
## Chain 2:
## Chain 2:
## Chain 2: Iteration: 1 / 2000 [ 0%] (Warmup)
## Chain 2: Iteration: 200 / 2000 [ 10%] (Warmup)
## Chain 2: Iteration: 400 / 2000 [ 20%] (Warmup)
## Chain 2: Iteration: 600 / 2000 [ 30%] (Warmup)
## Chain 2: Iteration: 800 / 2000 [ 40%] (Warmup)
## Chain 2: Iteration: 1000 / 2000 [ 50%] (Warmup)
## Chain 2: Iteration: 1001 / 2000 [ 50%] (Sampling)
## Chain 2: Iteration: 1200 / 2000 [ 60%] (Sampling)
## Chain 2: Iteration: 1400 / 2000 [ 70%] (Sampling)
## Chain 2: Iteration: 1600 / 2000 [ 80%] (Sampling)
## Chain 2: Iteration: 1800 / 2000 [ 90%] (Sampling)
## Chain 2: Iteration: 2000 / 2000 [100%] (Sampling)
## Chain 2:
## Chain 2: Elapsed Time: 0.049 seconds (Warm-up)
## Chain 2: 0.043 seconds (Sampling)
## Chain 2: 0.092 seconds (Total)
## Chain 2:
##
## SAMPLING FOR MODEL 'anon_model' NOW (CHAIN 3).
## Chain 3:
## Chain 3: Gradient evaluation took 5e-06 seconds
## Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.05 seconds.
## Chain 3: Adjust your expectations accordingly!
## Chain 3:
## Chain 3:
## Chain 3: Iteration: 1 / 2000 [ 0%] (Warmup)
## Chain 3: Iteration: 200 / 2000 [ 10%] (Warmup)
## Chain 3: Iteration: 400 / 2000 [ 20%] (Warmup)
## Chain 3: Iteration: 600 / 2000 [ 30%] (Warmup)
## Chain 3: Iteration: 800 / 2000 [ 40%] (Warmup)
## Chain 3: Iteration: 1000 / 2000 [ 50%] (Warmup)
## Chain 3: Iteration: 1001 / 2000 [ 50%] (Sampling)
## Chain 3: Iteration: 1200 / 2000 [ 60%] (Sampling)
## Chain 3: Iteration: 1400 / 2000 [ 70%] (Sampling)
## Chain 3: Iteration: 1600 / 2000 [ 80%] (Sampling)
## Chain 3: Iteration: 1800 / 2000 [ 90%] (Sampling)
## Chain 3: Iteration: 2000 / 2000 [100%] (Sampling)
## Chain 3:
## Chain 3: Elapsed Time: 0.049 seconds (Warm-up)
## Chain 3: 0.048 seconds (Sampling)
## Chain 3: 0.097 seconds (Total)
## Chain 3:
##
## SAMPLING FOR MODEL 'anon_model' NOW (CHAIN 4).
## Chain 4:
## Chain 4: Gradient evaluation took 6e-06 seconds
## Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.06 seconds.
## Chain 4: Adjust your expectations accordingly!
## Chain 4:
## Chain 4:
## Chain 4: Iteration: 1 / 2000 [ 0%] (Warmup)
## Chain 4: Iteration: 200 / 2000 [ 10%] (Warmup)
## Chain 4: Iteration: 400 / 2000 [ 20%] (Warmup)
## Chain 4: Iteration: 600 / 2000 [ 30%] (Warmup)
## Chain 4: Iteration: 800 / 2000 [ 40%] (Warmup)
## Chain 4: Iteration: 1000 / 2000 [ 50%] (Warmup)
## Chain 4: Iteration: 1001 / 2000 [ 50%] (Sampling)
## Chain 4: Iteration: 1200 / 2000 [ 60%] (Sampling)
## Chain 4: Iteration: 1400 / 2000 [ 70%] (Sampling)
## Chain 4: Iteration: 1600 / 2000 [ 80%] (Sampling)
## Chain 4: Iteration: 1800 / 2000 [ 90%] (Sampling)
## Chain 4: Iteration: 2000 / 2000 [100%] (Sampling)
## Chain 4:
## Chain 4: Elapsed Time: 0.051 seconds (Warm-up)
## Chain 4: 0.046 seconds (Sampling)
## Chain 4: 0.097 seconds (Total)
## Chain 4:
```
## Further reading {-}
- [Stan homepage](http://mc-stan.org): It contains the documentation for Stan and a lot of tutorials.
# Structural equation models {#SEM}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
> We should provide an example in Stan.
## Introduction
```{r, eval=FALSE}
#------------------------------------------------------------------------------------------------------
# General settings
#------------------------------------------------------------------------------------------------------
library(MASS)
library(rjags)
library(MCMCpack)
#------------------------------------------------------------------------------------------------------
# Simulation
#------------------------------------------------------------------------------------------------------
n <- 100
heffM <- 0.6 # effect of H on M
heffCS <- 0.0 # effect of H on Clutch size
meffCS <- 0.6 # effect of M on Clutch size
SigmaM <- matrix(c(0.1,0.04,0.04,0.1),2,2)
meffm1 <- 0.6
meffm2 <- 0.7
SigmaH <- matrix(c(0.1,0.04,0.04,0.1),2,2)
meffh1 <- 0.6
meffh2 <- -0.7
# Latent variables
H <- rnorm(n, 0, 1)
M <- rnorm(n, heffM * H, 0.1)
# Clutch size
CS <- rnorm(n, heffCS * H + meffCS * M, 0.1)
# Indicators
eM <- cbind(meffm1 * M, meffm2 * M)
datM <- matrix(NA, ncol = 2, nrow = n)
eH <- cbind(meffh1 * H, meffh2 * H)
datH <- matrix(NA, ncol = 2, nrow = n)
for(i in 1:n) {
datM[i,] <- mvrnorm(1, eM[i,], SigmaM)
datH[i,] <- mvrnorm(1, eH[i,], SigmaH)
}
#------------------------------------------------------------------------------
# JAGS Model
#------------------------------------------------------------------------------
dat <- list(datM = datM, datH = datH, n = n, CS = CS, #H = H, M = M,
S3 = matrix(c(1,0,0,1),nrow=2)/1)
# Function to create initial values
inits <- function() {
list(
meffh = runif(2, 0, 0.1),
meffm = runif(2, 0, 0.1),
heffM = runif(1, 0, 0.1),
heffCS = runif(1, 0, 0.1),
meffCS = runif(1, 0, 0.1),
tauCS = runif(1, 0.1, 0.3),
tauMH = runif(1, 0.1, 0.3),
tauH = rwish(3,matrix(c(.02,0,0,.04),nrow=2)),
tauM = rwish(3,matrix(c(.02,0,0,.04),nrow=2))
# M = as.numeric(rep(0, n))
)
}
t.n.thin <- 50
t.n.chains <- 2
t.n.burnin <- 20000
t.n.iter <- 50000
# Run JAGS
jagres <- jags.model('JAGS/BUGSmod1.R',data = dat, n.chains = t.n.chains, inits = inits, n.adapt = t.n.burnin)
params <- c("meffh", "meffm", "heffM", "heffCS", "meffCS")
mod <- coda.samples(jagres, params, n.iter=t.n.iter, thin=t.n.thin)
res <- round(data.frame(summary(mod)$quantiles[, c(3, 1, 5)]), 3)
res$TRUEVALUE <- c(heffCS, heffM, meffCS, meffh1, meffh2, meffm1, meffm2)
res
# Traceplots
post <- data.frame(rbind(mod[[1]], mod[[2]]))
names(post) <- dimnames(mod[[1]])[[2]]
par(mfrow = c(3,3))
param <- c("meffh[1]", "meffh[2]", "meffm[1]", "meffm[2]", "heffM", "heffCS", "meffCS")
traceplot(mod[, match(param, names(post))])
```
# Assessing Model Assumptions {#residualanalysis}
## Model Assumptions
Every statistical model makes assumptions. We try to build models that reflect the data-generating process as realistically as possible. However, a model is never the truth. Yet, all inferences drawn from a model, such as estimates of effect size or derived quantities with credible intervals, are based on the assumption that the model is true. If a model captures the data-generating process poorly, for example, because it misses important structures (predictors, interactions, polynomials), inferences drawn from the model are probably biased and results become unreliable. In a (hypothetical) model that captures all important structures of the data-generating process, the stochastic part, the difference between the observation and the fitted value (the residuals), should only show random variation. Analyzing residuals is therefore a very important part of the data analysis process.
Residual analysis can be very exciting, because the residuals show what remains unexplained by the present model. Residuals can sometimes show surprising patterns and, thereby, provide deeper insight into the system. However, at this step of the analysis it is important not to forget the original research questions that motivated the study. Because these questions have been asked without knowledge of the data, they protect against data dredging. Of course, residual analysis may raise interesting new questions. Nonetheless, these new questions have emerged from patterns in the data, which might just be random, not systematic, patterns. The search for a model with good fit should be guided by thinking about the process that generated the data, not by trial and error (i.e., do not try all possible variable combinations until the residuals look good; that is data dredging). All changes done to the model should be scientifically justified. Usually, model complexity increases, rather than decreases, during the analysis.
## Independent and Identically Distributed
Usually, we model an outcome variable as independent and identically distributed (iid) given the model parameters. This means that all observations with the same predictor values behave like independent random numbers from the identical distribution. As a consequence, residuals should look iid. Independent means that:
- The residuals do not correlate with other variables (those that are included in the model as well as any other variable not included in the model).
- The residuals are not grouped (i.e., the means of any set of residuals should all be equal).
- The residuals are not autocorrelated (i.e., no temporal or spatial autocorrelation exist; Sections \@ref(tempautocorrelation) and \@ref(spatialautocorrelation)).
Identically distributed means that:
- All residuals come from the same distribution.
In the case of a linear model with normal error distribution (Chapter \@ref(lm)) the residuals are assumed to come from the same normal distribution. Particularly:
- The residual variance is homogeneous (homoscedasticity), that is, it does not depend on any predictor variable, and it does not change with the fitted value.
- The mean of the residuals is zero over the whole range of predictor values. When numeric predictors (covariates) are present, this implies that the relationship between x and y can be adequately described by a straight line.
Residual analysis is mainly done graphically. R makes it very easy to plot residuals to look at the different aspects just listed. As a first example, we use the coal tit example from Chapter \@ref(lm):
> A part from the book is still missing here.
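As a quick orientation, the basic residual plots can be produced directly from a fitted model object; a minimal sketch, assuming a hypothetical linear model `mod` fitted to a data frame `dat` with outcome `y` and covariate `x`:

```r
mod <- lm(y ~ x, data = dat)      # hypothetical model fit
par(mfrow = c(2, 2))
plot(mod)                         # Tukey-Anscombe, QQ, scale-location and leverage plots
par(mfrow = c(1, 1))
plot(dat$x, resid(mod), xlab = "x", ylab = "Residuals")  # residuals vs. predictor
abline(h = 0, lty = 2)
```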
## The QQ-Plot {#qqplot}
xxx
## Temporal Autocorrelation {#tempautocorrelation}
## Spatial Autocorrelation {#spatialautocorrelation}
## Heteroscedasticity {#Heteroscedasticity}
<file_sep>
# Further topics {#furthertopics}
This is a collection of short introductions or links with commented R code that cover other topics that might be useful for ecologists.
## Bioacoustic analyses
Bioacoustic analyses are nicely covered in a blog by [<NAME>](https://marce10.github.io).
## Python {#python}
Like R, Python is a high-level programming language that is used by many ecologists. The [reticulate](https://rstudio.github.io/reticulate/index.html) package provides a comprehensive set of tools for interoperability between Python and R.
# Daily nest survival {#dailynestsurv}
<!-- todo: explain why we need to start with first at day of detection, give literatur to describe the known fate model (e.g. King's book) -->
## Background
Analysis of nest survival is important for understanding the mechanisms of population dynamics. The life-span of a nest could be used as a measure of nest survival. However, this measure is very often biased towards nests that survived longer, because such nests are detected by ornithologists with higher probability [@Mayfield1975]. In order not to overestimate nest survival, daily nest survival conditional on survival to the previous day can be estimated.
## Models for estimating daily nest survival
What model is best used depends on the type of data available. Data may look:
1. Regular (e.g. daily) nest controls, all nests monitored from their first egg onward
2. Regular nest controls, nests found during the course of the study at different stages and nestling ages
3. Irregular nest controls, all nests monitored from their first egg onward
4. Irregular nest controls, nests found during the course of the study at different stages and nestling ages
Table: (\#tab:nestsurvmod) Models useful for estimating daily nest survival. Data numbers correspond to the descriptions above.
Model | Data | Software, R-code |
:-------|:------------------|:------------------|
Binomial or Bernoulli model | 1, (3) | `glm`, `glmer`,... |
Cox proportional hazard model | 1,2,3,4 | `brm`, soon: `stan_cox` |
Known fate model | 1, 2 | Stan code below |
Known fate model | 3, 4 | Stan code below |
Logistic exposure model | 1,2,3,4 | `glm`, `glmer` using a link function that depends on exposure time |
@Shaffer2004 explains how to adapt the link function in a Bernoulli model to account for having found the nests at different nest ages (exposure time). <NAME> explains how to implement the logistic exposure model in R [here](https://rpubs.com/bbolker/logregexp).
## Known fate model
A natural model that allows estimating daily nest survival is the known-fate survival model. It is a Markov model that models the state of a nest $i$ at day $t$ (whether it is alive, $y_{it}=1$ or not $y_{it}=0$) as a Bernoulli variable dependent on the state of the nest the day before.
$$ y_{it} \sim Bernoulli(y_{it-1}S_{it})$$
The daily nest survival $S_{it}$ can be linearly related to predictor variables that are measured on the nest or on the day level.
$$logit(S_{it}) = \textbf{X} \beta$$
It is also possible to add random effects if needed.
## The Stan model {#dailynestsurvstan}
The following Stan model code is saved as `daily_nest_survival.stan`.
```{r engine='cat', engine.opts=list(file="stanmodels/daily_nest_survival.stan",lang="stan")}
data {
int<lower=0> Nnests; // number of nests
int<lower=0> last[Nnests]; // day of last observation (alive or dead)
int<lower=0> first[Nnests]; // day of first observation (alive or dead)
int<lower=0> maxage; // maximum of last
int<lower=0> y[Nnests, maxage]; // indicator of alive nests
real cover[Nnests]; // a covariate of the nest
real age[maxage]; // a covariate of the date
}
parameters {
vector[3] b; // coef of linear pred for S
}
model {
real S[Nnests, maxage-1]; // survival probability
for(i in 1:Nnests){
for(t in first[i]:(last[i]-1)){
S[i,t] = inv_logit(b[1] + b[2]*cover[i] + b[3]*age[t]);
}
}
// priors
b[1]~normal(0,5);
b[2]~normal(0,3);
b[3]~normal(0,3);
// likelihood
for (i in 1:Nnests) {
for(t in (first[i]+1):last[i]){
y[i,t]~bernoulli(y[i,t-1]*S[i,t-1]);
}
}
}
```
## Prepare data and run Stan
Data is from @Grendelmeier2018.
```{r, echo=TRUE}
load("RData/nest_surv_data.rda")
str(datax)
datax$y[is.na(datax$y)] <- 0 # Stan does not allow for NA's in the outcome
```
```{r, echo=TRUE, cache=TRUE, results=FALSE}
# Run STAN
library(rstan)
mod <- stan(file = "stanmodels/daily_nest_survival.stan", data=datax,
chains=5, iter=2500, control=list(adapt_delta=0.9), verbose = FALSE)
```
## Check convergence
We love exploring the performance of the Markov chains by using the function `launch_shinystan` from the package `shinystan`.
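Besides `launch_shinystan`, a few quick numerical and graphical checks can be done directly with `rstan` functions; a minimal sketch, assuming the fitted object `mod` from above:

```r
library(rstan)
check_hmc_diagnostics(mod)                  # divergent transitions, tree depth, energy
round(summary(mod)$summary[, c("n_eff", "Rhat")], 2)  # effective sample sizes and R-hat
traceplot(mod, pars = "b")                  # trace plots of the coefficients
# library(shinystan); launch_shinystan(mod) # interactive exploration
```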
## Look at results
It looks like cover does not affect daily nest survival, but daily nest survival decreases with the age of the nestlings.
```{r printmodel, echo=TRUE}
#launch_shinystan(mod)
print(mod)
```
```{r effplots, echo=TRUE, fig.cap="Estimated daily nest survival probability in relation to nest age. Dotted lines are 95% uncertainty intervals of the regression line."}
# effect plot
bsim <- as.data.frame(mod)
nsim <- nrow(bsim)
newdat <- data.frame(age=seq(1, datax$maxage, length=100))
newdat$age.z <- (newdat$age-mean(1:datax$maxage))/sd((1:datax$maxage))
Xmat <- model.matrix(~age.z, data=newdat)
fitmat <- matrix(ncol=nsim, nrow=nrow(newdat))
for(i in 1:nsim) fitmat[,i] <- plogis(Xmat%*%as.numeric(bsim[i,c(1,3)]))
newdat$fit <- apply(fitmat, 1, median)
newdat$lwr <- apply(fitmat, 1, quantile, prob=0.025)
newdat$upr <- apply(fitmat, 1, quantile, prob=0.975)
plot(newdat$age, newdat$fit, ylim=c(0.8,1), type="l",
las=1, ylab="Daily nest survival", xlab="Age [d]")
lines(newdat$age, newdat$lwr, lty=3)
lines(newdat$age, newdat$upr, lty=3)
```
## Known fate model for irregular nest controls
When nests are controlled only irregularly, it may happen that a nest is found predated or dead only after a longer break in the controls. In such cases, we know that the nest was predated or died due to other causes sometime between the last control at which the nest was still alive and the control at which it was found dead. We then need to tell the model that the nest could have died at any time during the interval in which we were not controlling.
To do so, we create a variable that indicates the time (e.g. day since the first egg) when the nest was last seen alive (`lastlive`). A second variable indicates the time of the last check, which is either equal to `lastlive` when the nest survived until the last check, or larger than `lastlive` when a nest failure has been recorded. A last variable, `gap`, measures the length of the time interval in which the nest failure occurred. A `gap` of zero means that the nest was still alive at the last control, a `gap` of 1 means that the nest failure occurred during the first day after `lastlive`, and a `gap` of 2 means that the nest failure occurred either on the first or the second day after `lastlive`.
```{r, echo=TRUE, cache=TRUE, results=FALSE}
# time when nest was last observed alive
lastlive <- apply(datax$y, 1, function(x) max(c(1:length(x))[x==1]))
# time when nest was last checked (alive or dead)
lastcheck <- lastlive+1
# here, we turn the above data into a format that can be used for
# irregular nest controls. WOULD BE NICE TO HAVE A REAL DATA EXAMPLE!
# when nest was observed alive at the last check, then lastcheck equals lastlive
lastcheck[lastlive==datax$last] <- datax$last[lastlive==datax$last]
datax1 <- list(Nnests=datax$Nnests,
lastlive = lastlive,
lastcheck= lastcheck,
first=datax$first,
cover=datax$cover,
age=datax$age,
maxage=datax$maxage)
# time between last seen alive and first seen dead (= lastcheck)
datax1$gap <- datax1$lastcheck-datax1$lastlive
```
In the Stan model code, we specify the likelihood for each gap separately.
```{r engine='cat', engine.opts=list(file="stanmodels/daily_nest_survival_irreg.stan",lang="stan")}
data {
int<lower=0> Nnests; // number of nests
int<lower=0> lastlive[Nnests]; // day of last observation (alive)
int<lower=0> lastcheck[Nnests]; // day of observed death or, if alive, last day of study
int<lower=0> first[Nnests]; // day of first observation (alive or dead)
int<lower=0> maxage; // maximum of last
real cover[Nnests]; // a covariate of the nest
real age[maxage]; // a covariate of the date
int<lower=0> gap[Nnests]; // obsdead - lastlive
}
parameters {
vector[3] b; // coef of linear pred for S
}
model {
real S[Nnests, maxage-1]; // survival probability
for(i in 1:Nnests){
for(t in first[i]:(lastcheck[i]-1)){
S[i,t] = inv_logit(b[1] + b[2]*cover[i] + b[3]*age[t]);
}
}
// priors
b[1]~normal(0,1.5);
b[2]~normal(0,3);
b[3]~normal(0,3);
// likelihood
for (i in 1:Nnests) {
for(t in (first[i]+1):lastlive[i]){
1~bernoulli(S[i,t-1]);
}
if(gap[i]==1){
target += log(1-S[i,lastlive[i]]); //
}
if(gap[i]==2){
target += log((1-S[i,lastlive[i]]) + S[i,lastlive[i]]*(1-S[i,lastlive[i]+1])); //
}
if(gap[i]==3){
target += log((1-S[i,lastlive[i]]) + S[i,lastlive[i]]*(1-S[i,lastlive[i]+1]) +
prod(S[i,lastlive[i]:(lastlive[i]+1)])*(1-S[i,lastlive[i]+2])); //
}
if(gap[i]==4){
target += log((1-S[i,lastlive[i]]) + S[i,lastlive[i]]*(1-S[i,lastlive[i]+1]) +
prod(S[i,lastlive[i]:(lastlive[i]+1)])*(1-S[i,lastlive[i]+2]) +
prod(S[i,lastlive[i]:(lastlive[i]+2)])*(1-S[i,lastlive[i]+3])); //
}
}
}
```
```{r, echo=TRUE, cache=TRUE, results=FALSE}
# Run STAN
mod1 <- stan(file = "stanmodels/daily_nest_survival_irreg.stan", data=datax1,
chains=5, iter=2500, control=list(adapt_delta=0.9), verbose = FALSE)
```
## Further reading {-}
Helpful links:
- https://deepai.org/publication/bayesian-survival-analysis-using-the-rstanarm-r-package [@Brilleman.2020]
- https://www.hammerlab.org/2017/06/26/introducing-survivalstan/
# Ridge Regression {#ridge_regression}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
> We should provide an example in Stan.
## Introduction
```r
# Settings
library(R2OpenBUGS)
bugslocation <- "C:/Program Files/OpenBUGS323/OpenBugs.exe" # location of OpenBUGS
bugsworkingdir <- file.path(getwd(), "BUGS") # Bugs working directory
#-------------------------------------------------------------------------------
# Simulate fake data
#-------------------------------------------------------------------------------
library(MASS)
n <- 50 # sample size
b0 <- 1.2
b <- rnorm(5, 0, 2)
Sigma <- matrix(c(10,3,3,2,1,
3,2,3,2,1,
3,3,5,3,2,
2,2,3,10,3,
1,1,2,3,15),5,5)
Sigma
x <- mvrnorm(n = n, rep(0, 5), Sigma)
simresid <- rnorm(n, 0, sd=3) # residuals
x.z <- x
for(i in 1:ncol(x)) x.z[,i] <- (x[,i]-mean(x[,i]))/sd(x[,i])
y <- b0 + x.z%*%b + simresid # calculate y, i.e. the data
#-------------------------------------------------------------------------------
# Function to generate initial values
#-------------------------------------------------------------------------------
inits <- function() {
list(b0=runif(1, -2, 2),
b=runif(5, -2, 2),
sigma=runif(1, 0.1, 2))
}
#-------------------------------------------------------------------------------
# Run OpenBUGS
#-------------------------------------------------------------------------------
parameters <- c("b0", "b", "sigma")
lambda <- c(1, 2, 10, 25, 50, 100, 500, 1000, 10000)
bs <- matrix(ncol=length(lambda), nrow=length(b))
bse <- matrix(ncol=length(lambda), nrow=length(b))
for(j in 1:length(lambda)){
datax <- list(y=as.numeric(y), x=x, n=n, mb=rep(0, 5), lambda=lambda[j])
fit <- bugs(datax, inits, parameters, model.file="ridge_regression.txt",
n.thin=1, n.chains=2, n.burnin=5000, n.iter=10000,
debug=FALSE, OpenBUGS.pgm = bugslocation,
working.directory=bugsworkingdir)
bs[,j] <- fit$mean$b
bse[,j] <- fit$sd$b
}
range(bs)
plot(1:length(lambda), seq(-2, 1, length=length(lambda)), type="n")
colkey <- rainbow(length(b))
for(j in 1:nrow(bs)){
lines(1:length(lambda), bs[j,], col=colkey[j], lwd=2)
lines(1:length(lambda), bs[j,]-2*bse[j,], col=colkey[j], lty=3)
lines(1:length(lambda), bs[j,]+2*bse[j,], col=colkey[j], lty=3)
}
abline(h=0)
round(fit$summary,2)
#-------------------------------------------------------------------------------
# Run WinBUGS
#-------------------------------------------------------------------------------
library(R2WinBUGS)
bugsdir <- "C:/Users/fk/WinBUGS14" #
mod <- bugs(datax, inits= inits, parameters,
model.file="normlinreg.txt", n.chains=2, n.iter=1000,
n.burnin=500, n.thin=1, debug=TRUE,
bugs.directory=bugsdir, program="WinBUGS", working.directory=bugsworkingdir)
#-------------------------------------------------------------------------------
# Test convergence and make inference
#-------------------------------------------------------------------------------
library(blmeco)
# Make Figure 12.2
par(mfrow=c(3,1))
historyplot(fit, "beta0")
historyplot(fit, "beta1")
historyplot(fit, "sigmaRes")
# Parameter estimates
print(fit$summary, 3)
# Make predictions for covariate values between 10 and 30
newdat <- data.frame(x=seq(10, 30, length=100))
Xmat <- model.matrix(~x, data=newdat)
predmat <- matrix(ncol=fit$n.sim, nrow=nrow(newdat))
for(i in 1:fit$n.sim) predmat[,i] <- Xmat%*%c(fit$sims.list$beta0[i], fit$sims.list$beta1[i])
newdat$lower.bugs <- apply(predmat, 1, quantile, prob=0.025)
newdat$upper.bugs <- apply(predmat, 1, quantile, prob=0.975)
plot(y~x, pch=16, las=1, cex.lab=1.4, cex.axis=1.2, type="n", main="")
polygon(c(newdat$x, rev(newdat$x)), c(newdat$lower.bugs, rev(newdat$upper.bugs)), col=grey(0.7), border=NA)
abline(c(fit$mean$beta0, fit$mean$beta1), lwd=2)
box()
points(x,y)
```
# Probability distributions {#distributions}
## Introduction
In Bayesian statistics, probability distributions are used for two fundamentally different purposes. First, they are used to describe distributions of data. These distributions are also called data distributions. Second, probability distributions are used to express information or knowledge about parameters. Such distributions are called prior or posterior distributions. The data distributions are part of descriptive statistics, whereas prior and posterior distributions are part of inferential statistics. The usage of probability distributions for describing data does not differ between frequentist and Bayesian statistics. Classically, the data distribution is known as the "model assumption". Specific to Bayesian statistics is the formal expression of statistical uncertainty (or "information" or "knowledge") by prior and posterior distributions. We here introduce some of the most frequently used probability distributions and show how they are used in statistics.
Probability distributions are grouped into discrete and continuous distributions. Discrete distributions define for any discrete value the probability that exactly this value occurs. They are usually used as data distributions for discrete data such as counts. The function that describes a discrete distribution is called a probability function (their values are probabilities, i.e. a number between 0 and 1). Continuous distributions describe how continuous values are distributed. They are used as data distributions for continuous measurements such as body size and also as prior or posterior distributions for parameters such as the mean body size. Most parameters are measured on a continuous scale. The function that describes continuous distributions is called density function. Its values are non-negative and the area under the density function equals one. The area under a density function corresponds to probabilities. For example, the area under the density function above the value 2 corresponds to the proportion of data with values above 2 if the density function describes data, or it corresponds to the probability that the parameter takes on a value bigger than 2 if the density function is a posterior distribution.
## Discrete distributions
### Bernoulli distribution
Bernoulli distributed data take on the exact values 0 or 1. The value 1 occurs with probability $p$.
$x \sim Bernoulli(p)$
The probability function is $p(x) = p^x(1-p)^{1-x}$.
The expected value is $E(x) = p$ and the variance is $Var(x) = p(1-p)$.
The flipping experiment of a fair coin produces Bernoulli distributed data with $p=0.5$ if head is taken as one and tail is taken as zero. The Bernoulli distribution is usually used as a data model for binary data such as whether a nest box is used or not, whether a seed germinated or not, whether a species occurs or not in a plot etc.
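In base R there are no separate Bernoulli functions; the binomial functions with `size = 1` can be used instead. A small sketch:

```r
dbinom(1, size = 1, prob = 0.5)    # probability of a one (head) when p = 0.5
rbinom(10, size = 1, prob = 0.5)   # simulate 10 coin flips
```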
### Binomial distribution
The binomial distribution describes the number of ones among a predefined number of Bernoulli trials. For example, the number of heads among 20 coin flips, the number of used nest boxes among the 50 nest boxes of the study area, or the number of seed that germinated among the 10 seeds in the pot. Binomially distributed data are counts with an upper limit ($n$).
$x \sim binomial(p,n)$
The probability function is $p(x) = {n\choose x} p^x(1-p)^{(n-x)}$.
The expected value is $E(x) = np$ and the variance is $Var(x) = np(1-p)$.
### Poisson distribution
The Poisson distribution describes the distribution of counts without upper boundary, i.e., when we know how many times something happened but we do not know how many times it did not happen. A typical Poisson distributed variable is the number of raindrops in equally-sized grid cells on the floor, if we can assume that every rain drop falls down completely independent of the other raindrops and at a completely random point.
$x \sim Poisson(\lambda)$
The probability function is $p(x) = \frac{1}{x!}\lambda^xexp(-\lambda)$. It is implemented in the R-function `dpois`.
The expected values is $E(x) = \lambda$ and the variance is $Var(x) = \lambda$.
An important property of the Poisson distribution is that it has only one parameter $\lambda$. As a consequence, it does not allow for any combination of means and variances. In fact, they are assumed to be the same. In the real world, most count data do not behave like rain drops, that means variances of count data are in most real world examples not equal to the mean as assumed by the Poisson distribution. Therefore, when using the Poisson distribution as a data model, it is important to check for overdispersion.
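A first informal check of the Poisson assumption is to compare the mean and the variance of the counts; a small sketch with simulated data:

```r
x <- rpois(1000, lambda = 3)          # simulated Poisson counts
c(mean = mean(x), variance = var(x))  # for Poisson data both are close to lambda
dpois(2, lambda = 3)                  # probability of observing a count of 2
```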
Further, note that not all variables measured as an integer number are count data! For example, the number of days an animal spends in a specific area before moving away looks like a count. However, it is a continuous measurement. The duration an animal spends in a specific areas could also be measured in hours or minutes. The Poisson model assumes that the counts are all events that happened. However, an emigration of an animal is just one event, independent of how long it stayed.
### Negative-binomial distribution
The negative-binomial distribution represents the number of zeros which occur in a sequence of Bernoulli trials before a target number of ones is reached. It is hard to see this situation in, e.g., the number of individuals counted on plots. Therefore, we were reluctant to introduce this distribution in our old book [@KornerNievergelt2015]. However, the negative-binomial distribution often fits much better to count data than the Poisson model because it has two parameters and therefore allows for fitting both the mean and the variance to the data. Therefore, we started using the negative-binomial distribution as a data model more often.
$x \sim negative-binomial(p,n)$
Its probability function is rather complex:
$p(x) = \frac{\Gamma(x+n)}{\Gamma(n) x!} p^n (1-p)^x$ with $\Gamma$ being the Gamma-function. Luckily, the negative-binomial probability function is implemented in the R-function `dnbinom`.
The expected value of the negative-binomial distribution is $E(x) = n\frac{(1-p)}{p}$ and the variance is $Var(x) = n\frac{(1-p)}{p^2}$.
We like to specify the distribution using the mean and the scale parameter, $x \sim negative-binomial(\mu,\theta)$, because in practice we often specify a linear predictor for the logarithm of the mean $\mu$.
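In R, this parameterization is available via the `mu` and `size` arguments of `dnbinom` and `rnbinom`, where `size` corresponds to the scale parameter $\theta$; a small sketch:

```r
x <- rnbinom(1000, mu = 3, size = 1.5)  # mean 3, scale (theta) 1.5
c(mean = mean(x), variance = var(x))    # variance = mu + mu^2/theta = 9, larger than the mean
dnbinom(0, mu = 3, size = 1.5)          # probability of a zero count
```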
## Continuous distributions
### Beta distribution
The beta distribution is restricted to the range [0,1]. It describes the knowledge about a probability parameter. Therefore, it is usually used as a prior or posterior distribution for probabilities. The beta distribution is sometimes also used as a data model for continuous proportions. However, it is difficult to get a good fit of such models, because measured proportions often take on values of zero or one, which is not allowed in most (but not all) beta distributions; thus this distribution does not describe the variance of measured proportions correctly. For describing knowledge about a proportion parameter, however, it is a very convenient distribution with two parameters.
$x \sim beta(a,b)$
Its density function is $p(x) = \frac{\Gamma(a+b)}{\Gamma(a)\Gamma(b)}x^{a-1}(1-x)^{b-1}$. The R-function `dbeta` does the rather complicated calculations for us.
The expected value of a beta distribution is $E(x) = \frac{a}{(a+b)}$ and the variance is $Var(x) = \frac{ab}{(a+b)^2(a+b+1)}$. The $beta(1,1)$ distribution is equal to the $uniform(0,1)$ distribution. The higher the sum of $a$ and $b$, the more narrow is the distribution (Figure \@ref(fig:betadist)).
<div class="figure">
<img src="1.3-distributions_files/figure-html/betadist-1.png" alt="Beta distributions with different parameter values." width="672" />
<p class="caption">(\#fig:betadist)Beta distributions with different parameter values.</p>
</div>
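Because the beta distribution is the conjugate prior for a binomial proportion, posterior knowledge about a probability is easily summarized; a small sketch assuming 7 successes in 10 hypothetical trials and a flat $beta(1,1)$ prior:

```r
a <- 1 + 7                         # prior a = 1 plus number of successes
b <- 1 + 3                         # prior b = 1 plus number of failures
qbeta(c(0.025, 0.5, 0.975), a, b)  # median and 95% interval of the posterior
a / (a + b)                        # posterior mean
```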
### Normal distribution {#normdist}
The normal, or Gaussian, distribution has long been widely used in statistics. It describes the distribution of measurements that vary because of a sum of random errors. Based on the central limit theorem, sample averages are approximately normally distributed (Chapter \@ref(basics)).
$x \sim normal(\mu, \sigma^2)$
The density function is $p(x) = \frac{1}{\sqrt{2\pi}\sigma}exp(-\frac{1}{2\sigma^2}(x -\mu)^2)$ and it is implemented in the R-function `dnorm`.
The expected value is $E(x) = \mu$ and the variance is $Var(x) = \sigma^2$.
The variance parameter can be specified as a variance, a standard deviation, or a precision. Different software (or authors) have different habits, e.g., R and Stan use the standard deviation $\sigma$, whereas BUGS (WinBUGS, OpenBUGS or JAGS) uses the precision, which is the inverse of the variance, $\tau = \frac{1}{\sigma^2}$.
The normal distribution is used as a data model for measurements that scatter symmetrically around a mean, such as body size (in m), food consumption (in g), or body temperature (°C).
The normal distribution also serves as prior distribution for parameters that can take on negative or positive values. The larger the variance, the flatter (less informative) is the distribution.
The standard normal distribution is a normal distribution with a mean of zero and a variance of one, $z \sim normal(0, 1)$. The standard normal distribution is also called the z-distribution. Or, a z-variable is a variable with a mean of zero and a standard deviation of one.
```r
x <- seq(-3, 3, length=100)
y <- dnorm(x) # density function of a standard normal distribution
plot(x,y, type="l", lwd=2, col="#d95f0e", las=1, ylab="normal density of x")
segments(0, dnorm(1), 1, dnorm(1), lwd=2)
segments(0, dnorm(0), 0, 0)
text(0.5, 0.23, expression(sigma))
```
<div class="figure">
<img src="1.3-distributions_files/figure-html/normdistplot-1.png" alt="Standard normal distribution" width="672" />
<p class="caption">(\#fig:normdistplot)Standard normal distribution</p>
</div>
Plus minus one times the standard deviation ($\sigma$) from the mean includes around 68% of the area under the curve (corresponding to around 68% of the data points in case the normal distribution is used as a data model, or 68% of the prior or posterior mass if the normal distribution is used to describe the knowledge about a parameter). Plus minus two times the standard deviation includes around 95% of the area under the curve.
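These proportions can be verified with the cumulative distribution function `pnorm`; a small sketch:

```r
pnorm(1) - pnorm(-1)   # about 0.68 of the mass within one standard deviation
pnorm(2) - pnorm(-2)   # about 0.95 within two standard deviations
```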
### Gamma distribution
The gamma distribution is a continuous probability distribution for strictly positive values (zero is not included). The shape of the gamma distribution is right skewed with a long upper tail, whereas most of the mass is centered around a usually small value. It has two parameters, the shape $\alpha$ and the inverse scale $\beta$.
$x \sim gamma(\alpha,\beta)$
Its density function is $p(x) = \frac{\beta^{\alpha}}{\Gamma(\alpha)} x^{(\alpha-1)} exp(-\beta x)$, or `dgamma` in R. The expected value is $E(x) = \frac{\alpha}{\beta}$ and the variance is $Var(x) = \frac{\alpha}{\beta^2}$.
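A small sketch of the gamma density and its moments, using hypothetical parameter values:

```r
a <- 2; b <- 0.5                              # shape and inverse scale (rate)
curve(dgamma(x, shape = a, rate = b), 0, 20, ylab = "density", las = 1)
c(expectation = a / b, variance = a / b^2)    # 4 and 8
```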
The gamma distribution is becoming more and more popular as a data model for durations (time to event) or other highly right skewed continuous measurements that do not have values of zero.
The gamma distribution is a conjugate prior distribution for the mean of a Poisson distribution and for the precision parameter of a normal distribution. However, in hierarchical models with normally distributed random effects, it is not recommended to use the gamma distribution as a prior distribution for the among-group variance [@Gelman.2006]. The Cauchy or folded t-distribution seem to have less influence on the posterior distributions of the variance parameters.
### Cauchy distribution {#cauchydistri}
The Cauchy distribution is a symmetric distribution with much heavier tails compared to the normal distribution.
$x \sim Cauchy(a,b)$
Its probability density function is $p(x) = \frac{1}{\pi b[1+(\frac{x-a}{b})^2]}$. The mean and the variance of the Cauchy distribution are not defined. The median is $a$.
The part of the Cauchy distribution for positive values, i.e., half of the Cauchy distribution, is often used as a prior distribution for variance parameters.
### t-distribution
The t-distribution is the marginal posterior distribution of the mean of a sample with unknown variance when conjugate prior distributions are used to obtain the posterior distribution. The t-distribution has three parameters: the degrees of freedom $v$, the location $\mu$, and the scale $\sigma$.
$x \sim t(v, \mu, \sigma)$
Its density function is $p(x) = \frac{\Gamma((v+1)/2)}{\Gamma(v/2)\sqrt{v\pi}\sigma}(1+\frac{1}{v}(\frac{x-\mu}{\sigma})^2)^{-(v+1)/2}$. Its expected value is $E(x) = \mu$ for $v>1$ and the variance is $Var(x) = \frac{v}{v-2}\sigma ^2$ for $v>2$.
The t-distribution is sometimes used as data model. Because of its heavier tails compared to the normal model, the model parameters are less influenced by measurement errors when a t-distribution is used instead of a normal distribution. This is called "robust statistics".
Similar to the Cauchy distribution, the folded t-distribution, i.e., the positive part of the t-distribution, can serve as a prior distribution for variance parameters.
### F-distribution
The F-distribution is not important in Bayesian statistics.
Ratios of sample variances drawn from populations with equal variances follow an F-distribution. The density function of the F-distribution is even more complicated than the one of the t-distribution! We do not copy it here. Further, we have not yet met any Bayesian example where the F-distribution is used (which does not mean that there is none). It is used in frequentist analyses in order to compare variances, e.g. within ANOVAs. If two variances only differ because of natural variation in the data (nullhypothesis), then $\frac{Var(X_1)}{Var(X_2)}\sim F_{df_1,df_2}$.
<div class="figure">
<img src="1.3-distributions_files/figure-html/unnamed-chunk-1-1.png" alt="Different density functions of the F statistics" width="672" />
<p class="caption">(\#fig:unnamed-chunk-1)Different density functions of the F statistics</p>
</div>
# Additional basic material {#addbasics}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Correlations among categorical variables
### Chisquare test
When testing for a correlation between two categorical variables, the nullhypothesis is "there is no correlation". The data can be displayed in cross-tables.
```r
# Example: correlation between birthday preference and car ownership
load("RData/datacourse.RData")
table(dat$birthday, dat$car)
```
```
##
## N Y
## flowers 6 1
## wine 9 6
```
If the nullhypothesis is true, we expect the distribution of the data in each column of the cross-table to be similar to the distribution of the row-sums, and the distribution of the data in each row to be similar to the distribution of the column-sums. The chisquare test statistic $\chi^2$ measures the deviation of the data from this expected distribution.
For calculating the chisquare test statistic $\chi^2$, we first have to obtain the expected value for each cell of the cross-table, $E_{ij} = \frac{\text{row sum}_i \times \text{column sum}_j}{\text{total}}$.
$\chi^2$ measures the difference between the observed $O_{ij}$ and expected $E_{ij}$ values as:
$\chi^2=\sum_{i=1}^{m}\sum_{j=1}^{k}\frac{(O_{ij}-E_{ij})^2}{E_{ij}}$ where $m$ is the number of rows and $k$ is the number of columns.
The $\chi^2$-distribution has 1 parameter, the degrees of freedom $v$ = $(m-1)(k-1)$.
<div class="figure">
<img src="1.4-additional_basic_material_files/figure-html/chisqdist-1.png" alt="Two examples of Chisquare distributions." width="672" />
<p class="caption">(\#fig:chisqdist)Two examples of Chisquare distributions.</p>
</div>
R calculates the $\chi^2$ value for a specific cross-table and also gives the p-value, i.e., the probability of obtaining the observed or a larger $\chi^2$ value given the nullhypothesis is true, by comparing the observed $\chi^2$ with the corresponding chisquare distribution.
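The expected values and the test statistic can also be computed by hand, which makes the definition above concrete; a minimal sketch for the cross-table used here (note that `chisq.test()` applies Yates' continuity correction to 2x2 tables by default, so its output differs slightly):

```r
obs <- table(dat$birthday, dat$car)                       # observed counts
expected <- outer(rowSums(obs), colSums(obs)) / sum(obs)  # E_ij = rowsum*colsum/total
chi2 <- sum((obs - expected)^2 / expected)                # chisquare test statistic
pchisq(chi2, df = (nrow(obs) - 1) * (ncol(obs) - 1), lower.tail = FALSE)  # p-value
```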
```r
chisq.test(table(dat$birthday, dat$car))
```
```
##
## Pearson's Chi-squared test with Yates' continuity correction
##
## data: table(dat$birthday, dat$car)
## X-squared = 0.51084, df = 1, p-value = 0.4748
```
The warning (that is suppressed in the rmarkdown version, but that you will see if you run the code on your own computer) is given because in our example some cells have counts less than 5. In such cases, Fisher's exact test should be preferred. This test calculates the p-value analytically using probability theory, whereas the chisquare test relies on the assumption that the $\chi^2$ value follows a chisquare distribution. The latter assumption holds better for larger sample sizes.
```r
fisher.test(table(dat$birthday, dat$car))
```
```
##
## Fisher's Exact Test for Count Data
##
## data: table(dat$birthday, dat$car)
## p-value = 0.3501
## alternative hypothesis: true odds ratio is not equal to 1
## 95 percent confidence interval:
## 0.3153576 213.8457248
## sample estimates:
## odds ratio
## 3.778328
```
### Correlations among categorical variables using Bayesian methods
For a Bayesian analysis of cross-table data, a data model has to be found. There are several possibilities that could be used:
* a so-called log-linear model (Poisson model) for the counts in each cell of the cross-table.
* a binomial or a multinomial model for obtaining estimates of the proportions of data in each cell
These models provide possibilities to explore the patterns in the data in more details than a chisquare test.
```r
# We arrange the data into a cross-table in a data-frame
# format. That is, the counts in each cell of the
# cross-table become a variable and the row and column names
# are also given in separate variables
datagg <- aggregate(dat$name_fictive, list(birthday=dat$birthday, car=dat$car),
length, drop=FALSE)
datagg$x[is.na(datagg$x)] <- 0
names(datagg) <- c("birthday", "car", "count")
datagg
```
```
## birthday car count
## 1 flowers N 6
## 2 wine N 9
## 3 flowers Y 1
## 4 wine Y 6
```
```r
# log-linear model
library(arm)
nsim <- 5000
mod <- glm(count~birthday+car + birthday:car,
data=datagg, family=poisson)
bsim <- sim(mod, n.sim=nsim)
round(t(apply(bsim@coef, 2, quantile,
prob=c(0.025, 0.5, 0.975))),2)
```
```
## 2.5% 50% 97.5%
## (Intercept) 1.00 1.79 2.58
## birthdaywine -0.64 0.41 1.48
## carY -3.94 -1.79 0.29
## birthdaywine:carY -0.94 1.41 3.76
```
The interaction parameter measures the strength of the correlation. To quantitatively understand what a parameter value of 1.39 means, we have to look at the interpretation of all parameter values. We do that here quickly without a thorough explanation, because we already explained the Poisson model in chapter 8 of [@KornerNievergelt2015].
The intercept 1.79 corresponds to the logarithm of the count in the cell "flowers" and "N" (number of students who prefer flowers as a birthday present and who do not have a car), i.e., $exp(\beta_0)$ = 6. The exponent of the second parameter corresponds to the multiplicative difference between the counts in the cells "flowers and N" and "wine and N", i.e., count in the cell "wine and N" = $exp(\beta_0)exp(\beta_1)$ = exp(1.79)exp(0.41) = 9. The third parameter measures the multiplicative difference in the counts between the cells "flowers and N" and "flowers and Y", i.e., count in the cell "flowers and Y" = $exp(\beta_0)exp(\beta_2)$ = exp(1.79)exp(-1.79) = 1. Thus, the third parameter is the difference in the logarithm of the counts between the car owners and the car-free students for those who prefer flowers. The interaction parameter is the difference of this difference between the students who prefer wine and those who prefer flowers. This is difficult to intuitively understand. Here is another try to formulate it: The interaction parameter measures the difference in the logarithm of the counts in the cross-table between the row-differences between the columns. Maybe it becomes clear, when we extract the count in the cell "wine and Y" from the model parameters: $exp(\beta_0)exp(\beta_1)exp(\beta_2)exp(\beta_3)$ = exp(1.79)exp(0.41)exp(-1.79)exp(1.39) = 6.
Alternatively, we could estimate the proportions of students preferring flowers over wine within each group of car owners and car-free students using a binomial model. For an explanation of the binomial model, see chapter 8 of [@KornerNievergelt2015].
```r
# binomial model
tab <- table(dat$car,dat$birthday)
mod <- glm(tab~rownames(tab), family=binomial)
bsim <- sim(mod, n.sim=nsim)
```
<div class="figure">
<img src="1.4-additional_basic_material_files/figure-html/unnamed-chunk-7-1.png" alt="Estimated proportion of students that prefer flowers over wine as a birthday present among the car-free students (N) and the car owners (Y). Given are the median of the posterior distribution (circle). The bar extends between the 2.5% and 97.5% quantiles of the posterior distribution." width="672" />
<p class="caption">(\#fig:unnamed-chunk-7)Estimated proportion of students that prefer flowers over wine as a birthday present among the car-free students (N) and the car owners (Y). Given are the median of the posterior distribution (circle). The bar extends between the 2.5% and 97.5% quantiles of the posterior distribution.</p>
</div>
## 3 methods for getting the posterior distribution
* analytically
* approximation
* Monte Carlo simulation
### Monte Carlo simulation (parametric bootstrap)
Monte Carlo integration: numerical solution of $\int_{-1}^{1.5} F(x) dx$
<img src="1.4-additional_basic_material_files/figure-html/unnamed-chunk-8-1.png" width="672" />
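A minimal sketch of such a Monte Carlo integration, assuming for illustration that $F(x)$ is the standard normal density: sample uniformly over the integration range and average the function values.

```r
set.seed(1)
a <- -1; b <- 1.5            # integration limits
x <- runif(100000, a, b)     # random points in [a, b]
(b - a) * mean(dnorm(x))     # Monte Carlo estimate of the integral
pnorm(b) - pnorm(a)          # exact value for comparison, about 0.77
```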
`sim` solves a mathematical problem by simulation.
How `sim` simulates to obtain the marginal distribution of $\mu$:
<img src="1.4-additional_basic_material_files/figure-html/unnamed-chunk-9-1.png" width="672" />
### Grid approximation
$p(\theta|y) = \frac{p(y|\theta)p(\theta)}{p(y)}$
For example, one coin flip (Bernoulli model)
data: y=0 (a tail)
likelihood: $p(y|\theta)=\theta^y(1-\theta)^{(1-y)}$
<img src="1.4-additional_basic_material_files/figure-html/unnamed-chunk-10-1.png" width="672" />
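A minimal sketch of the grid approximation for this single coin flip, assuming a flat prior for $\theta$:

```r
theta <- seq(0, 1, length = 100)        # grid of parameter values
prior <- rep(1, length(theta))          # flat prior
lik <- theta^0 * (1 - theta)^(1 - 0)    # likelihood for y = 0 (a tail)
post <- lik * prior / sum(lik * prior)  # posterior, normalized over the grid
plot(theta, post, type = "l", xlab = expression(theta), ylab = "posterior (grid)")
```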
### Markov chain Monte Carlo simulations
* Markov chain Monte Carlo simulation (BUGS, Jags)
* Hamiltonian Monte Carlo simulation (Stan)
<img src="1.4-additional_basic_material_files/figure-html/unnamed-chunk-11-1.png" width="672" />
## Analysis of variance ANOVA
The aim of an ANOVA is to compare means of groups. In a frequentist analysis, this is done by comparing the between-group with the within-group variance. The result of a Bayesian analysis is the joint posterior distribution of the group means.
<div class="figure">
<img src="1.4-additional_basic_material_files/figure-html/unnamed-chunk-12-1.png" alt="Number of stats courses students have taken before starting a PhD in relation to their feeling about statistics." width="672" />
<p class="caption">(\#fig:unnamed-chunk-12)Number of stats courses students have taken before starting a PhD in relation to their feeling about statistics.</p>
</div>
In the frequentist ANOVA, the following three sum of squared distances (SS) are used to calculate the total, the between- and within-group variances:
Total sum of squares = SST = $\sum_1^n{(y_i-\bar{y})^2}$
Within-group SS = SSW = $\sum_1^n{(y_i-\bar{y_g})^2}$: unexplained variance
Between-group SS = SSB = $\sum_1^g{n_g(\bar{y_g}-\bar{y})^2}$: explained variance
The between-group and within-group SS sum to the total sum of squares: SST=SSB+SSW. Attention: this equation holds in general only for a simple one-way ANOVA (just one grouping factor). If the data are grouped according to more than one factor (such as in a two- or three-way ANOVA), then there is a single solution to the equation only when the data are completely balanced, i.e. when there are the same number of observations in all combinations of factor levels. For non-balanced data with more than one grouping factor, there are different ways of calculating the SSBs, and the result of the F-test described below depends on the order of the predictors in the model.
<div class="figure">
<img src="1.4-additional_basic_material_files/figure-html/unnamed-chunk-13-1.png" alt="Visualisation of the total, between-group and within-group sum of squares. Points are observations; long horizontal line is the overall mean; short horizontal lines are group specific means." width="672" />
<p class="caption">(\#fig:unnamed-chunk-13)Visualisation of the total, between-group and within-group sum of squares. Points are observations; long horizontal line is the overall mean; short horizontal lines are group specific means.</p>
</div>
In order to make SSB and SSW comparable, we have to divide them by their degrees of freedom. For the within-group SS, SSW, the degrees of freedom are the number of observations minus the number of groups ($g$), because $g$ means have been estimated from the data. If the $g$ means are fixed and $n-g$ data points are known, then the last $g$ data points are defined, i.e., they cannot be chosen freely. For the between-group SS, SSB, the degrees of freedom are the number of groups minus 1 (the minus 1 stands for the overall mean).
* MSB = SSB/df_between, MSW = SSW/df_within
It can be shown (by mathematicians) that, given the nullhypothesis that the means of all groups are equal ($m_1 = m_2 = m_3$), the mean squared error between groups (MSB) is expected to equal the mean squared error within groups (MSW). Therefore, the ratio MSB/MSW is expected to follow an F-distribution given the nullhypothesis is true.
* MSB/MSW ~ F(df_between, df_within)
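For reference, this frequentist F-test can be obtained in R with `anova()`; a small sketch using the same data frame `dat` and model as in the Bayesian analysis below:

```r
mod_freq <- lm(log(nrcourses + 1) ~ statsfeeling, data = dat)  # same model as below
anova(mod_freq)  # F = MSB/MSW with the corresponding p-value
```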
The Bayesian analysis for comparing group means consists of calculating the posterior distribution for each group mean and then drawing inference from these posterior distributions.
A Bayesian one-way ANOVA involves the following steps:
1. Decide on a data model: Here, we assume that the measurements are normally distributed around the group means. In this example, we transform the outcome variable in order to better meet the normal assumption. Note: the frequentist ANOVA makes exactly the same assumptions. We can write the data model: $y_i\sim Norm(\mu_i,\sigma)$ with $\mu_i= \beta_0 + \beta_1I(group=2) +\beta_2I(group=3)$, where the $I()$-function is an indicator function taking on 1 if the expression is true and 0 otherwise. This model has 4 parameters: $\beta_0$, $\beta_1$, $\beta_2$ and $\sigma$.
```r
# fit a normal model with 3 different means
mod <- lm(log(nrcourses+1)~statsfeeling, data=dat)
```
2. Choose a prior distribution for each model parameter: In this example, we choose flat prior distributions for each parameter. By using these priors, the result should not be markedly affected by the prior distributions but should almost exclusively reflect the information in the data. We choose so-called improper prior distributions. These are completely flat distributions that give all parameter values the same probability. Such distributions are called improper because the area under the curve does not sum to 1 and therefore they cannot be considered proper probability distributions. However, they can still be used to solve the Bayes theorem.
3. Solve the Bayes theorem: The solution of the Bayes theorem for the above priors and model is implemented in the function sim of the package arm.
```r
# calculate numerically the posterior distributions of the model
# parameters using flat prior distributions
nsim <- 5000
set.seed(346346)
bsim <- sim(mod, n.sim=nsim)
```
4. Display the joint posterior distributions of the group means
```r
# calculate group means from the model parameters
newdat <- data.frame(statsfeeling=levels(factor(dat$statsfeeling)))
X <- model.matrix(~statsfeeling, data=newdat)
fitmat <- matrix(ncol=nsim, nrow=nrow(newdat))
for(i in 1:nsim) fitmat[,i] <- X%*%bsim@coef[i,]
hist(fitmat[1,], freq=FALSE, breaks=seq(-2.5, 4.2, by=0.1), main=NA, xlab="Group mean of log(number of courses +1)", las=1, ylim=c(0, 2.2))
hist(fitmat[2,], freq=FALSE, breaks=seq(-2.5, 4.2, by=0.1), main=NA, xlab="", las=1, add=TRUE, col=rgb(0,0,1,0.5))
hist(fitmat[3,], freq=FALSE, breaks=seq(-2.5, 4.2, by=0.1), main=NA, xlab="", las=1, add=TRUE, col=rgb(1,0,0,0.5))
legend(2,2, fill=c("white",rgb(0,0,1,0.5), rgb(1,0,0,0.5)), legend=levels(factor(dat$statsfeeling)))
```
<div class="figure">
<img src="1.4-additional_basic_material_files/figure-html/unnamed-chunk-16-1.png" alt="Posterior distributions of the mean number of stats courses PhD students visited before starting the PhD grouped according to their feelings about statistics." width="672" />
<p class="caption">(\#fig:unnamed-chunk-16)Posterior distributions of the mean number of stats courses PhD students visited before starting the PhD grouped according to their feelings about statistics.</p>
</div>
Based on the posterior distributions of the group means, we can extract derived quantities depending on our interest and questions. Here, for example, we could extract the posterior probability of the hypothesis that students with a positive feeling about statistics have a better education in statistics than those with a neutral or negative feeling about statistics.
```r
# P(mean(positive)>mean(neutral))
mean(fitmat[3,]>fitmat[2,])
```
```
## [1] 0.8754
```
```r
# P(mean(positive)>mean(negative))
mean(fitmat[3,]>fitmat[1,])
```
```
## [1] 0.9798
```
## Summary
---
title: "Bayesian Data Analysis in Ecology with R and Stan"
author: "<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>"
date: "2023-08-09"
site: bookdown::bookdown_site
documentclass: book
bibliography: [references/References_fk.bib, references/References_new.bib, references/References_svf.bib]
link-citations: yes
github-repo: TobiasRoth/BDAEcology
cover-image: images/cover.jpg
description: "This GitHub-book is collection of updates and additional material to the book Bayesian Data Analysis in Ecology Using Linear Models with R, BUGS, and STAN."
---
# Preface {-}
<img src="images/cover.jpg" width="655" style="display: block; margin: auto;" />
## Why this book? {-}
In 2015, we wrote a statistics book for Master/PhD level Bayesian data analyses in ecology [@KornerNievergelt2015]. You can order it [here](https://www.elsevier.com/books/bayesian-data-analysis-in-ecology-using-linear-models-with-r-bugs-and-stan/korner-nievergelt/978-0-12-801370-0). People seemed to like it (e.g. [@Harju2016]). Since then, two parallel processes have been happening. First, we learn more and become more confident in what we do, or what we do not do, and why we do what we do. Second, several really clever people have developed software that broadens the spectrum of ecological models that can now easily be applied by ecologists used to working with R. With this e-book, we open the possibility to add new or substantially revised material. Most of the time, it should be in a state in which it can be printed and used together with the book as a handout for our stats courses.
## About this book {-}
We do not copy text from the book into the e-book. Therefore, we refer to the book [@KornerNievergelt2015] for reading about the basic theory on doing Bayesian data analyses using linear models. However, Chapters 1 to 17 of this dynamic e-book correspond to the book chapters. In each chapter, we may provide updated R-codes and/or additional material. The following chapters contain completely new material that we think may be useful for ecologists.
While we show the R-code behind most of the analyses, we sometimes choose not to show all the code in the html version of the book. This is particularly the case for some of the illustrations. An interested reader can always consult the [public GitHub repository](https://github.com/TobiasRoth/BDAEcology) with the rmarkdown-files that were used to generate the book.
## How to contribute? {-}
It is open so that everybody with a [GitHub](https://github.com) account can make comments and suggestions for improvement. Readers can contribute in two ways. One way is to add an [issue](https://github.com/TobiasRoth/BDAEcology/issues). The second way is to contribute content directly through the edit button at the top of the page (i.e. a symbol showing a pencil in a square). That button is linked to the rmarkdown source file of each page. You can correct typos or add new text and then submit a [GitHub pull request](https://help.github.com/articles/about-pull-requests/). We try to respond to you as quickly as possible. We are looking forward to your contribution!
## Acknowledgments {-}
We thank *Yihui Xie* for providing [bookdown](https://bookdown.org/yihui/bookdown/), which makes it great fun to write open books such as ours.
We thank many anonymous students and collaborators who searched information on new software, reported updates and gave feedback on earlier versions of the book. Specifically, we thank <NAME> for looking up the difference between the bulk and tail ESS in the brm output, <NAME> for using the conditional logistic regression in rstanarm, ...
# The Bayesian paradigm {#bayesian_paradigm}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Introduction
## Summary
xxx
# (PART) BASIC STATISTICS FOR ECOLOGISTS {-}
# Introduction to PART I {#PART-I}
<a href="" target="_blank"><img src="images/part_I.jpg" width="410" style="display: block; margin: auto;" /></a>
------
During our courses we are sometimes asked to give an introduction to R-related topics covering data analysis, presentation of results or rather specialist topics in ecology. In this part we present these collected introductions and try to keep them updated. This is also a commented collection of R-code that we documented for our own work. We hope this might be useful also for other readers.
## Further reading
- [R for Data Science by <NAME> and <NAME>](http://r4ds.had.co.nz): Introduces the tidyverse framework. It explains how to get data into R, get it into the most useful structure, transform it, visualise it and model it.
# Important R-functions {#rfunctions}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Data preparation
## Figures
## Summary
# Daily nest survival {#dailynestsurv}
<!-- todo: explain why we need to start with first at day of detection, give literatur to describe the known fate model (e.g. King's book) -->
## Background
Analysis of nest survival is important for understanding the mechanisms of population dynamics. The life-span of a nest could be used as a measure of nest survival. However, this measure is very often biased towards nests that survived longer, because such nests are detected by ornithologists with higher probability [@Mayfield1975]. In order not to overestimate nest survival, daily nest survival conditional on survival to the previous day can be estimated.
## Models for estimating daily nest survival
Which model is best suited depends on the type of data available. The data may consist of:
1. Regular (e.g. daily) nest controls, all nests monitored from their first egg onward
2. Regular nest controls, nests found during the course of the study at different stages and nestling ages
3. Irregular nest controls, all nests monitored from their first egg onward
4. Irregular nest controls, nests found during the course of the study at different stages and nestling ages
Table: (\#tab:nestsurvmod) Models useful for estimating daily nest survival. Data numbers correspond to the descriptions above.
Model | Data | Software, R-code |
:-------|:------------------|:------------------|
Binomial or Bernoulli model | 1, (3) | `glm`, `glmer`,... |
Cox proportional hazard model | 1,2,3,4 | `brm`, soon: `stan_cox` |
Known fate model | 1, 2 | Stan code below |
Known fate model | 3, 4 | Stan code below |
Logistic exposure model | 1,2,3,4 | `glm`, `glmer` using a link function that depends on exposure time |
@Shaffer2004 explains how to adapt the link function in a Bernoulli model to account for having found the nests at different nest ages (exposure time). <NAME> explains how to implement the logistic exposure model in R [here](https://rpubs.com/bbolker/logregexp).
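As an illustration, a commonly used implementation of such a custom link function looks like the following sketch (closely following the approach described at the link above; the function name `logexp`, the argument `exposure` and the variable names in the usage example are our choices, not taken from the original text):

```r
# logistic exposure link for glm(): the probability of surviving an interval of
# `exposure` days is S^exposure, where logit(S) is the linear predictor
logexp <- function(exposure = 1) {
  linkfun <- function(mu) qlogis(mu^(1/exposure))
  linkinv <- function(eta) plogis(eta)^exposure
  mu.eta <- function(eta) exposure * plogis(eta)^(exposure - 1) * dlogis(eta)
  valideta <- function(eta) TRUE
  structure(list(linkfun = linkfun, linkinv = linkinv, mu.eta = mu.eta,
                 valideta = valideta, name = "logexp"),
            class = "link-glm")
}
# usage sketch (hypothetical data): survived = 1/0 per nest interval,
# expo = length of the interval in days
# mod <- glm(survived ~ age,
#            family = binomial(link = logexp(exposure = dat$expo)), data = dat)
```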
## Known fate model
A natural model that allows estimating daily nest survival is the known-fate survival model. It is a Markov model that models the state of a nest $i$ at day $t$ (whether it is alive, $y_{it}=1$ or not $y_{it}=0$) as a Bernoulli variable dependent on the state of the nest the day before.
$$ y_{it} \sim Bernoulli(y_{it-1}S_{it})$$
The daily nest survival $S_{it}$ can be linearly related to predictor variables that are measured on the nest or on the day level.
$$logit(S_{it}) = \textbf{X} \beta$$
It is also possible to add random effects if needed.
## The Stan model {#dailynestsurvstan}
The following Stan model code is saved as `daily_nest_survival.stan`.
```stan
data {
int<lower=0> Nnests; // number of nests
int<lower=0> last[Nnests]; // day of last observation (alive or dead)
int<lower=0> first[Nnests]; // day of first observation (alive or dead)
int<lower=0> maxage; // maximum of last
int<lower=0> y[Nnests, maxage]; // indicator of alive nests
real cover[Nnests]; // a covariate of the nest
real age[maxage]; // a covariate of the date
}
parameters {
vector[3] b; // coef of linear pred for S
}
model {
real S[Nnests, maxage-1]; // survival probability
for(i in 1:Nnests){
for(t in first[i]:(last[i]-1)){
S[i,t] = inv_logit(b[1] + b[2]*cover[i] + b[3]*age[t]);
}
}
// priors
b[1]~normal(0,5);
b[2]~normal(0,3);
b[3]~normal(0,3);
// likelihood
for (i in 1:Nnests) {
for(t in (first[i]+1):last[i]){
y[i,t]~bernoulli(y[i,t-1]*S[i,t-1]);
}
}
}
```
## Prepare data and run Stan
Data is from @Grendelmeier2018.
```r
load("RData/nest_surv_data.rda")
str(datax)
```
```
## List of 7
## $ y : int [1:156, 1:31] 1 NA 1 NA 1 NA NA 1 1 1 ...
## $ Nnests: int 156
## $ last : int [1:156] 26 30 31 27 31 30 31 31 31 31 ...
## $ first : int [1:156] 1 14 1 3 1 24 18 1 1 1 ...
## $ cover : num [1:156] -0.943 -0.215 0.149 0.149 -0.215 ...
## $ age : num [1:31] -1.65 -1.54 -1.43 -1.32 -1.21 ...
## $ maxage: int 31
```
```r
datax$y[is.na(datax$y)] <- 0 # Stan does not allow for NA's in the outcome
```
```r
# Run STAN
library(rstan)
mod <- stan(file = "stanmodels/daily_nest_survival.stan", data=datax,
chains=5, iter=2500, control=list(adapt_delta=0.9), verbose = FALSE)
```
## Check convergence
We love exploring the performance of the Markov chains by using the function `launch_shinystan` from the package `shinystan`.
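For a quick non-interactive check, the following sketch shows some of the diagnostics we typically look at (using the fitted object `mod` from above):

```r
library(shinystan)
# launch_shinystan(mod)        # interactive exploration in the browser
library(rstan)
stan_trace(mod, pars = "b")    # trace plots of the three coefficients
stan_rhat(mod)                 # histogram of R-hat values (should be close to 1)
summary(mod)$summary[, "Rhat"] # R-hat per parameter
```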
## Look at results
It looks like cover does not affect daily nest survival, but daily nest survival decreases with the age of the nestlings.
```r
#launch_shinystan(mod)
print(mod)
```
```
## Inference for Stan model: anon_model.
## 5 chains, each with iter=2500; warmup=1250; thin=1;
## post-warmup draws per chain=1250, total post-warmup draws=6250.
##
## mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat
## b[1] 4.04 0.00 0.15 3.76 3.94 4.04 4.14 4.35 3828 1
## b[2] 0.00 0.00 0.13 -0.25 -0.09 -0.01 0.08 0.25 4524 1
## b[3] -0.70 0.00 0.16 -1.02 -0.81 -0.69 -0.59 -0.39 3956 1
## lp__ -298.98 0.03 1.30 -302.39 -299.52 -298.65 -298.05 -297.53 2659 1
##
## Samples were drawn using NUTS(diag_e) at Thu Jan 19 22:33:33 2023.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
```
```r
# effect plot
bsim <- as.data.frame(mod)
nsim <- nrow(bsim)
newdat <- data.frame(age=seq(1, datax$maxage, length=100))
newdat$age.z <- (newdat$age-mean(1:datax$maxage))/sd((1:datax$maxage))
Xmat <- model.matrix(~age.z, data=newdat)
fitmat <- matrix(ncol=nsim, nrow=nrow(newdat))
for(i in 1:nsim) fitmat[,i] <- plogis(Xmat%*%as.numeric(bsim[i,c(1,3)]))
newdat$fit <- apply(fitmat, 1, median)
newdat$lwr <- apply(fitmat, 1, quantile, prob=0.025)
newdat$upr <- apply(fitmat, 1, quantile, prob=0.975)
plot(newdat$age, newdat$fit, ylim=c(0.8,1), type="l",
las=1, ylab="Daily nest survival", xlab="Age [d]")
lines(newdat$age, newdat$lwr, lty=3)
lines(newdat$age, newdat$upr, lty=3)
```
<div class="figure">
<img src="3.2_daily_nest_survival_files/figure-html/effplots-1.png" alt="Estimated daily nest survival probability in relation to nest age. Dotted lines are 95% uncertainty intervals of the regression line." width="672" />
<p class="caption">(\#fig:effplots)Estimated daily nest survival probability in relation to nest age. Dotted lines are 95% uncertainty intervals of the regression line.</p>
</div>
## Known fate model for irregular nest controls
When nests are controlled only irregularly, it may happen that a nest is found predated or dead after a longer break between controls. In that case, we only know that the nest was predated or died from other causes sometime between the last control at which it was still alive and the control at which it was found dead. We therefore need to tell the model that the nest could have died at any time during the interval in which we were not controlling.
To do so, we create a variable that indicates the time (e.g. day since first egg) when the nest was last seen alive (`lastlive`). A second variable indicates the time of the last check, which is either equal to `lastlive` when the nest survived until the last check, or larger than `lastlive` when a nest failure has been recorded. A last variable, `gap`, measures the time interval in which the nest failure occurred. A `gap` of zero means that the nest was still alive at the last control, a `gap` of 1 means that the nest failure occurred during the first day after `lastlive`, and a `gap` of 2 means that the nest failure occurred on either the first or the second day after `lastlive`.
```r
# time when nest was last observed alive
lastlive <- apply(datax$y, 1, function(x) max(c(1:length(x))[x==1]))
# time when nest was last checked (alive or dead)
lastcheck <- lastlive+1
# here, we turn the above data into a format that can be used for
# irregular nest controls. WOULD BE NICE TO HAVE A REAL DATA EXAMPLE!
# when nest was observed alive at the last check, then lastcheck equals lastlive
lastcheck[lastlive==datax$last] <- datax$last[lastlive==datax$last]
datax1 <- list(Nnests=datax$Nnests,
lastlive = lastlive,
lastcheck= lastcheck,
first=datax$first,
cover=datax$cover,
age=datax$age,
maxage=datax$maxage)
# time between last seen alive and first seen dead (= lastcheck)
datax1$gap <- datax1$lastcheck-datax1$lastlive
```
In the Stan model code, we specify the likelihood for each gap separately.
```stan
data {
int<lower=0> Nnests; // number of nests
int<lower=0> lastlive[Nnests]; // day of last observation (alive)
int<lower=0> lastcheck[Nnests]; // day of observed death or, if alive, last day of study
int<lower=0> first[Nnests]; // day of first observation (alive or dead)
int<lower=0> maxage; // maximum of last
real cover[Nnests]; // a covariate of the nest
real age[maxage]; // a covariate of the date
int<lower=0> gap[Nnests]; // obsdead - lastlive
}
parameters {
vector[3] b; // coef of linear pred for S
}
model {
real S[Nnests, maxage-1]; // survival probability
for(i in 1:Nnests){
for(t in first[i]:(lastcheck[i]-1)){
S[i,t] = inv_logit(b[1] + b[2]*cover[i] + b[3]*age[t]);
}
}
// priors
b[1]~normal(0,1.5);
b[2]~normal(0,3);
b[3]~normal(0,3);
// likelihood
for (i in 1:Nnests) {
for(t in (first[i]+1):lastlive[i]){
1~bernoulli(S[i,t-1]);
}
if(gap[i]==1){
target += log(1-S[i,lastlive[i]]); //
}
if(gap[i]==2){
target += log((1-S[i,lastlive[i]]) + S[i,lastlive[i]]*(1-S[i,lastlive[i]+1])); //
}
if(gap[i]==3){
target += log((1-S[i,lastlive[i]]) + S[i,lastlive[i]]*(1-S[i,lastlive[i]+1]) +
prod(S[i,lastlive[i]:(lastlive[i]+1)])*(1-S[i,lastlive[i]+2])); //
}
if(gap[i]==4){
target += log((1-S[i,lastlive[i]]) + S[i,lastlive[i]]*(1-S[i,lastlive[i]+1]) +
prod(S[i,lastlive[i]:(lastlive[i]+1)])*(1-S[i,lastlive[i]+2]) +
prod(S[i,lastlive[i]:(lastlive[i]+2)])*(1-S[i,lastlive[i]+3])); //
}
}
}
```
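The explicit terms above handle gaps of up to four days. As a sketch (not part of the model file used below), the same likelihood contribution could be written with a loop so that arbitrary gap lengths are handled:

```stan
// sketch: replace the gap-specific if-blocks by a loop over the gap length
if(gap[i] > 0){
  real p_died = 0;  // probability that the nest died within the gap
  real p_surv = 1;  // probability of surviving up to the current day
  for(g in 1:gap[i]){
    p_died += p_surv * (1 - S[i, lastlive[i] + g - 1]);
    p_surv *= S[i, lastlive[i] + g - 1];
  }
  target += log(p_died);
}
```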
```r
# Run STAN
mod1 <- stan(file = "stanmodels/daily_nest_survival_irreg.stan", data=datax1,
chains=5, iter=2500, control=list(adapt_delta=0.9), verbose = FALSE)
```
## Further reading {-}
Helpful links:
- https://deepai.org/publication/bayesian-survival-analysis-using-the-rstanarm-r-package [@Brilleman.2020]
- https://www.hammerlab.org/2017/06/26/introducing-survivalstan/
# MCMC using Stan {#stan}
## Background
Markov chain Monte Carlo (MCMC) simulation techniques were developed in the mid-1950s by physicists (Metropolis et al., 1953). Later, statisticians discovered MCMC (Hastings, 1970; Geman & Geman, 1984; Tanner & Wong, 1987; Gelfand et al., 1990; Gelfand & Smith, 1990). MCMC methods make it possible to obtain posterior distributions for parameters and latent variables (unobserved variables) of complex models. In parallel, personal computer capacities increased in the 1990s and user-friendly software such as the different programs based on the programming language BUGS (Spiegelhalter et al., 2003) came out. These developments boosted the use of Bayesian data analyses, particularly in genetics and ecology.
## Install `rstan`
In this book we use the program [Stan](http://mc-stan.org) to draw random samples from the joint posterior distribution of the model parameters given a model, the data, prior distributions, and initial values. To do so, it uses the “no-U-turn sampler,” which is a type of Hamiltonian Monte Carlo simulation [@Hoffman2014; @Betancourt2013_b], and optimization-based point estimation. These algorithms are more efficient than the ones implemented in BUGS programs and they can handle larger data sets. Stan works particularly well for hierarchical models [@Betancourt2013]. Stan runs on Windows, Mac, and Linux and can be used via the R interface `rstan`. Stan is automatically installed when the R package `rstan` is installed. For [installing rstan](https://github.com/stan-dev/rstan/wiki/RStan-Getting-Started), it is advised to follow closely the system-specific instructions.
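A minimal installation sketch (assuming a standard CRAN installation and a working C++ toolchain; the two options at the end are commonly recommended but optional):

```r
install.packages("rstan")                    # install rstan from CRAN
library(rstan)
rstan_options(auto_write = TRUE)             # cache compiled models on disk
options(mc.cores = parallel::detectCores())  # run the chains in parallel
```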
## Writing a Stan model {#firststanmod}
The statistical model is written in the Stan language and saved in a text file. The Stan language is rather strict, forcing the user to write unambiguous models. Stan is very well documented and the [Stan Documentation](http://mc-stan.org/users/documentation/index.html) contains a comprehensive Language Manual, a Wiki documentation and various tutorials.
We here provide a normal regression with one predictor variable as a worked example. The entire Stan model is as follows (saved as `linreg.stan`).
```{r engine='cat', engine.opts=list(file="stanmodels/linreg.stan",lang="stan")}
data {
int<lower=0> n;
vector[n] y;
vector[n] x;
}
parameters {
vector[2] beta;
real<lower=0> sigma;
}
model {
//priors
beta ~ normal(0,5);
sigma ~ cauchy(0,5);
// likelihood
y ~ normal(beta[1] + beta[2] * x, sigma);
}
```
A Stan model consists of different named blocks. These blocks are (from first to last): data, transformed data, parameters, transformed parameters, model, and generated quantities. The blocks must appear in this order. The model block is mandatory; all other blocks are optional.
In the *data* block, the type, dimension, and name of every variable has to be declared. Optionally, the range of possible values can be specified. For example, `vector[n] y;` means that y is a vector (type real) of length n, and `int<lower=0> n;` means that n is an integer with nonnegative values (the bounds, here 0, are included). Note that the restriction to a possible range of values is not strictly necessary but this will help specifying the correct model and it will improve speed. We also see that each line needs to be closed by a semicolon. In the *parameters* block, all model parameters have to be defined. The coefficients of the linear predictor constitute a vector of length 2, `vector[2] beta;`. Alternatively, `real beta[2];` could be used. The sigma parameter is a one-number parameter that has to be positive, therefore `real<lower=0> sigma;`.
The *model* block contains the model specification. Stan functions can handle vectors and we do not have to loop over all observations as typical for BUGS . Here, we use a [Cauchy distribution](#cauchydistri) as a prior distribution for sigma. This distribution can have negative values, but because we defined the lower limit of sigma to be 0 in the parameters block, the prior distribution actually used in the model is a truncated Cauchy distribution (truncated at zero). In Chapter \@ref(choosepriors) we explain how to choose prior distributions.
Further characteristics of the Stan language that are good to know include: The variance parameter for the normal distribution is specified as the standard deviation (like in R but different from BUGS, where the precision is used). If no prior is specified, Stan uses a uniform prior over the range of possible values as specified in the parameter block. Variable names must not contain periods, for example, `x.z` would not be allowed, but `x_z` is allowed. To comment out a line, use double forward-slashes `//`.
## Run Stan from R
We fit the model to simulated data. The data are passed to Stan as a named list (alternatively, a character vector containing the names of the data objects can be given). In our case, the list `datax` contains the objects `n`, `y`, and `x`.
The function `stan()` starts Stan and returns an object containing MCMCs for every model parameter. We have to specify the name of the file that contains the model specification, the data, the number of chains, and the number of iterations per chain we would like to have. The first half of the iterations of each chain is declared as the warm-up. During the warm-up, Stan is not simulating a Markov chain, because in every step the algorithm is adapted. After the warm-up the algorithm is fixed and Stan simulates Markov chains.
```{r, cache=TRUE}
library(rstan)
# Simulate fake data
n <- 50 # sample size
sigma <- 5 # standard deviation of the residuals
b0 <- 2 # intercept
b1 <- 0.7 # slope
x <- runif(n, 10, 30) # random numbers of the covariate
simresid <- rnorm(n, 0, sd=sigma) # residuals
y <- b0 + b1*x + simresid # calculate y, i.e. the data
# Bundle data into a list
datax <- list(n=length(y), y=y, x=x)
# Run STAN
fit <- stan(file = "stanmodels/linreg.stan", data=datax, verbose = FALSE)
```
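A quick look at the result might be done as follows (a sketch; the exact numbers depend on the simulated data):

```r
print(fit)                    # posterior summaries, n_eff and Rhat per parameter
draws <- rstan::extract(fit)  # list with arrays of posterior draws
# e.g. posterior probability that the slope is larger than 0.5
mean(draws$beta[, 2] > 0.5)
```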
## Further reading {-}
- [Stan-Homepage](http://mc-stan.org): It contains the documentation for Stan and a lot of tutorials.
# Normal Linear Models {#lm}
<a href="" target="_blank"><img src="images/snowfinch3.jpg" width="1658" style="display: block; margin: auto;" /></a>
------
## Linear regression
### Background
Linear regression is the basis of a large part of applied statistical analysis. Analysis of variance (ANOVA) and analysis of covariance (ANCOVA) can be considered special cases of linear regression, and generalized linear models are extensions of linear regression.
Typical questions that can be answered using linear regression are: How does $y$ change with changes in $x$? How is $y$ predicted from $x$? An ordinary linear regression (i.e., one numeric $x$ and one numeric $y$ variable) can be represented by a scatterplot of $y$ against $x$. We search for the line that fits best and describe how the observations scatter around this regression line (see Fig. \@ref(fig:figlm) for an example). The model formula of a simple linear regression with one continuous predictor variable $x_i$ (the subscript $i$ denotes the $i=1,\dots,n$ data points) is:
\begin{align}
\mu_i &=\beta_0 + \beta_1 x_i \\
y_i &\sim normal(\mu_i, \sigma^2)
(\#eq:lm)
\end{align}
While the first part of Equation \@ref(eq:lm) describes the regression line, the second part describes how the data points, also called observations, are distributed around the regression line (Figure \@ref(fig:illlm)). In other words: the observation $y_i$ stems from a normal distribution with mean $\mu_i$ and variance $\sigma^2$. The mean of the normal distribution, $\mu_i$, equals the sum of the intercept ($\beta_0$) and the product of the slope ($\beta_1$) and the continuous predictor value, $x_i$.
Equation \@ref(eq:lm) is called the data model, because it describes mathematically the process that has (or, better, that we think has) produced the data. This nomenclature also helps to distinguish data models from models for parameters such as prior or posterior distributions.
The differences between observation $y_i$ and the predicted values $\mu_i$ are the residuals (i.e., $\epsilon_i=y_i-\mu_i$). Equivalently to Equation \@ref(eq:lm), the regression could thus be written as:
\begin{align}
y_i &= \beta_0 + \beta_1 x_i + \epsilon_i\\
\epsilon_i &\sim normal(0, \sigma^2)
(\#eq:lmalternativ)
\end{align}
We prefer the notation in Equation \@ref(eq:lm) because, in this formula, the stochastic part (second row) is nicely separated from the deterministic part (first row) of the model, whereas, in the second notation \@ref(eq:lmalternativ) the first row contains both stochastic and deterministic parts.
For illustration, we here simulate a data set and below fit a linear regression to these simulated data. The advantage of simulating data is that the following analyses can be reproduced without having to read data into R. Further, for simulating data, we need to translate the algebraic model formula into R language, which helps us understand the model structure.
```r
set.seed(34) # set a seed for the random number generator
# define the data structure
n <- 50 # sample size
x <- runif(n, 10, 30) # sample values of the predictor variable
# define values for each model parameter
sigma <- 5 # standard deviation of the residuals
b0 <- 2 # intercept
b1 <- 0.7 # slope
# simulate y-values from the model
mu <- b0 + b1 * x # define the regression line (deterministic part)
y <- rnorm(n, mu, sd = sigma) # simulate y-values
# save data in a data.frame
dat <- tibble(x = x, y = y)
```
<div class="figure">
<img src="2.03-lm_files/figure-html/illlm-1.png" alt="Illustration of a linear regression. The blue line represents the deterministic part of the model, i.e., here regression line. The stochastic part is represented by a probability distribution, here the normal distribution. The normal distribution changes its mean but not the variance along the x-axis, and it describes how the data are distributed. The blue line and the orange distribution together are a statistical model, i.e., an abstract representation of the data which is given in black." width="672" />
<p class="caption">(\#fig:illlm)Illustration of a linear regression. The blue line represents the deterministic part of the model, i.e., here regression line. The stochastic part is represented by a probability distribution, here the normal distribution. The normal distribution changes its mean but not the variance along the x-axis, and it describes how the data are distributed. The blue line and the orange distribution together are a statistical model, i.e., an abstract representation of the data which is given in black.</p>
</div>
Using matrix notation equation \@ref(eq:lm) can also be written in one row:
$$\boldsymbol{y} \sim
normal(\boldsymbol{X} \boldsymbol{\beta}, \sigma^2\boldsymbol{I})$$
where $\boldsymbol{I}$ is the $n \times n$ identity matrix (it transforms the variance parameter to a $n \times n$ matrix with its diagonal elements equal to $\sigma^2$; $n$ is the sample size). The multiplication by $\boldsymbol{I}$ is necessary because we use vector notation, $\boldsymbol{y}$ instead of $y_{i}$. Here, $\boldsymbol{y}$ is the vector of all observations, whereas $y_{i}$ is a single observation, $i$. When using vector notation, we can write the linear predictor of the model, $\beta_0 + \beta_1 x_i$, as a multiplication of the vector of the model coefficients
$$\boldsymbol{\beta} =
\begin{pmatrix}
\beta_0 \\
\beta_1
\end{pmatrix}$$
times the model matrix
$$\boldsymbol{X} =
\begin{pmatrix}
1 & x_1 \\
\dots & \dots \\
1 & x_n
\end{pmatrix}$$
where $x_1, \dots, x_n$ are the observed values for the predictor variable, $x$. The first column of $\boldsymbol{X}$ contains only ones because the values in this column are multiplied with the intercept, $\beta_0$. To obtain the predicted value for each observation, $\boldsymbol{\mu}$, the product of the second element of $\boldsymbol{\beta}$, $\beta_1$, with the corresponding element in the second column of $\boldsymbol{X}$ is added to the intercept:
\begin{align}
\boldsymbol{X \beta}=
\begin{pmatrix}
1 & x_1 \\
\dots & \dots \\
1 & x_n
\end{pmatrix}
\times
\begin{pmatrix}
\beta_0 \\
\beta_1
\end{pmatrix} =
\begin{pmatrix}
\beta_0 + \beta_1x_1 \\
\dots \\
\beta_0 + \beta_1x_n
\end{pmatrix}=
\begin{pmatrix}
\hat{y}_1 \\
\dots \\
\hat{y}_n
\end{pmatrix} =
\boldsymbol{\mu}
(\#eq:lmmatrix)
\end{align}
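To make the matrix notation concrete, the following small sketch reproduces the deterministic part $\boldsymbol{X \beta}$ in R, using the simulated data and the true parameter values from above:

```r
X <- model.matrix(~ x, data = dat)  # n x 2 model matrix: a column of ones and x
head(X %*% c(b0, b1))               # first few elements of mu = X beta
```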
### Fitting a Linear Regression in R
In Equation \@ref(eq:lm), the fitted values $\mu_i$ are directly defined by the model coefficients, $\beta_{0}$ and $\beta_{1}$ . Therefore, when we can estimate $\beta_{0}$, $\beta_{1}$ , and $\sigma^2$, the model is fully defined. The last parameter $\sigma^2$ describes how the observations scatter around the regression line and relies on the assumption that the residuals are normally distributed. The estimates for the model parameters of a linear regression are obtained by searching for the best fitting regression line. To do so, we search for the regression line that minimizes the sum of the squared residuals. This model fitting method is called the least-squares method, abbreviated as LS. It has a very simple solution using matrix algebra [see e.g., @Aitkin.2009].
The least-squares estimates for the model parameters of a linear regression are obtained in R using the function `lm`.
```r
mod <- lm(y ~ x, data = dat)
coef(mod)
```
```
## (Intercept) x
## 2.0049517 0.6880415
```
```r
summary(mod)$sigma
```
```
## [1] 5.04918
```
The object `mod` produced by `lm` contains the estimates for the intercept, $\beta_0$, and the slope, $\beta_1$. The residual standard deviation $\hat{\sigma}$ is extracted using the function `summary`. We can show the result of the linear regression as a line in a scatter plot with the covariate (`x`) on the x-axis and the observations (`y`) on the y-axis (Fig. \@ref(fig:figlm)).
<div class="figure">
<img src="2.03-lm_files/figure-html/figlm-1.png" alt="Linear regression. Black dots = observations, blue solid line = regression line, orange dotted lines = residuals. The fitted values lie where the orange dotted lines touch the blue regression line." width="672" />
<p class="caption">(\#fig:figlm)Linear regression. Black dots = observations, blue solid line = regression line, orange dotted lines = residuals. The fitted values lie where the orange dotted lines touch the blue regression line.</p>
</div>
Conclusions drawn from a model depend on the model assumptions. When model assumptions are violated, estimates usually are biased and inappropriate conclusions can be drawn. We devote Chapter \@ref(residualanalysis) to the assessment of model assumptions, given its importance.
### Drawing Conclusions
To answer the question about how strongly $y$ is related to $x$ taking into account statistical uncertainty we look at the joint posterior distribution of $\boldsymbol{\beta}$ (the vector that contains $\beta_{0}$ and $\beta_{1}$) and $\sigma^2$, the residual variance. The function `sim` from the package `arm` calculates the joint posterior distribution and renders simulated values from this distribution.
<font size="1">
<div style="border: 2px solid grey;">
What does `sim` do?
It simulates parameter values from the joint posterior distribution of a model assuming flat prior distributions. For a normal linear regression, it first draws a random value, $\sigma^*$ from the marginal posterior distribution of $\sigma$, and then draws random values from the conditional posterior distribution for $\boldsymbol{\beta}$ given $\sigma^*$ [@Gelman.2014].
The conditional posterior distribution of the parameter vector $\boldsymbol{\beta}$, $p(\boldsymbol{\beta}|\sigma^*,\boldsymbol{y,X})$, can be analytically derived. With flat prior distributions, it is a uni- or multivariate normal distribution $p(\boldsymbol{\beta}|\sigma^*,\boldsymbol{y,X})=normal(\boldsymbol{\hat{\beta}},V_\beta(\sigma^*)^2)$ with:
\begin{align}
\boldsymbol{\hat{\beta}=(\boldsymbol{X^TX})^{-1}X^Ty}
(\#eq:sim)
\end{align}
and $V_\beta = (\boldsymbol{X^T X})^{-1}$.
The marginal posterior distribution of $\sigma^2$ is independent of specific values of $\boldsymbol{\beta}$. It is, for flat prior distributions, an inverse chi-square distribution $p(\sigma^2|\boldsymbol{y,X})=Inv-\chi^2(n-k,\hat{\sigma}^2)$, where $\hat{\sigma}^2 = \frac{1}{n-k}(\boldsymbol{y}-\boldsymbol{X}\boldsymbol{\hat{\beta}})^T(\boldsymbol{y}-\boldsymbol{X}\boldsymbol{\hat{\beta}})$, and $k$ is the number of parameters. The marginal posterior distribution of $\boldsymbol{\beta}$ can be obtained by integrating the conditional posterior distribution $p(\boldsymbol{\beta}|\sigma^2,\boldsymbol{y,X})=normal(\boldsymbol{\hat{\beta}},V_\beta\sigma^2)$ over the distribution of $\sigma^2$. This results in a uni- or multivariate $t$-distribution.
Because `sim` simulates values $\beta_0^*$ and $\beta_1^*$ always conditional on $\sigma^*$, a triplet of values ($\beta_0^*$, $\beta_1^*$, $\sigma^*$) is one draw of the joint posterior distribution. When we visualize the distribution of the simulated values for one parameter only, ignoring the values for the other, we display the marginal posterior distribution of that parameter. Thus, the distribution of all simulated values for the parameter $\beta_0$ is a $t$-distribution even if a normal distribution has been used for simulating the values. The $t$-distribution is a consequence of using a different $\sigma^2$-value for every draw of $\beta_0$.
</div>
</font>
Using the function `sim` from the package `arm`, we can draw values from the joint posterior distribution of the model parameters and describe the marginal posterior distribution of each model parameter using these simulated values.
```r
library(arm)
nsim <- 1000
bsim <- sim(mod, n.sim = nsim)
```
The function `sim` simulates (in our example) 1000 values from the joint posterior distribution of the three model parameters $\beta_0$ , $\beta_1$, and $\sigma$. These simulated values are shown in Figure \@ref(fig:simfirstexample).
<div class="figure">
<img src="2.03-lm_files/figure-html/simfirstexample-1.png" alt="Joint (scatterplots) and marginal (histograms) posterior distribution of the model parameters. The six scatterplots show, using different axes, the three-dimensional cloud of 1000 simulations from the joint posterior distribution of the three parameters." width="768" />
<p class="caption">(\#fig:simfirstexample)Joint (scatterplots) and marginal (histograms) posterior distribution of the model parameters. The six scatterplots show, using different axes, the three-dimensional cloud of 1000 simulations from the joint posterior distribution of the three parameters.</p>
</div>
The posterior distribution describes, given the data and the model, which values relative to each other are more likely to correspond to the parameter value we aim at measuring. It expresses the uncertainty of the parameter estimate. It shows what we know about the model parameter after having looked at the data and given the model is realistic.
The 2.5% and 97.5% quantiles of the marginal posterior distributions can be used as 95% uncertainty intervals of the model parameters. The function `coef` extracts the simulated values for the beta coefficients, returning a matrix with *nsim* rows and the number of columns corresponding to the number of parameters. In our example, the first column contains the simulated values from the posterior distribution of the intercept and the second column contains values from the posterior distribution of the slope. The "2" in the second argument of the apply-function (see Chapter \@ref(rmisc)) indicates that the `quantile` function is applied columnwise.
```r
apply(X = coef(bsim), MARGIN = 2, FUN = quantile, probs = c(0.025, 0.975)) %>%
round(2)
```
```
## (Intercept) x
## 2.5% -2.95 0.44
## 97.5% 7.17 0.92
```
We also can calculate an uncertainty interval of the estimated residual standard deviation, $\hat{\sigma}$.
```r
quantile(bsim@sigma, probs = c(0.025, 0.975)) %>%
round(1)
```
```
## 2.5% 97.5%
## 4.2 6.3
```
We can further get a posterior probability for specific hypotheses, such as “The slope parameter is larger than 1” or “The slope parameter is larger than 0.5”. These probabilities are the proportion of simulated values from the posterior distribution that are larger than 1 and 0.5, respectively.
```r
sum(coef(bsim)[,2] > 1) / nsim # alternatively: mean(coef(bsim)[,2]>1)
```
```
## [1] 0.008
```
```r
sum(coef(bsim)[,2] > 0.5) / nsim
```
```
## [1] 0.936
```
From this, there is very little evidence in the data that the slope is larger than 1, but we are quite confident that the slope is larger than 0.5 (assuming that our model is realistic).
We often want to show the effect of $x$ on $y$ graphically, with information about the uncertainty of the parameter estimates included in the graph. To draw such effect plots, we use the simulated values from the posterior distribution of the model parameters. From the deterministic part of the model, we know the regression line $\mu = \beta_0 + \beta_1 x_i$. The simulation from the joint posterior distribution of $\beta_0$ and $\beta_1$ gives 1000 pairs of intercepts and slopes that describe 1000 different regression lines. We can draw these regression lines in an x-y plot (scatter plot) to show the uncertainty in the regression line estimation (Fig. \@ref(fig:figlmer1), left). Note, that in this case it is not advisable to use `ggplot` because we draw many lines in one plot, which makes `ggplot` rather slow.
```r
par(mar = c(4, 4, 0, 0))
plot(x, y, pch = 16, las = 1,
     xlab = "Predictor (x)", ylab = "Outcome (y)")
for(i in 1:nsim) {
abline(coef(bsim)[i,1], coef(bsim)[i,2], col = rgb(0, 0, 0, 0.05))
}
```
<div class="figure">
<img src="2.03-lm_files/figure-html/figlmer1-1.png" alt="Regression with 1000 lines based on draws form the joint posterior distribution for the intercept and slope parameters to visualize the uncertainty of the estimated regression line." width="672" />
<p class="caption">(\#fig:figlmer1)Regression with 1000 lines based on draws form the joint posterior distribution for the intercept and slope parameters to visualize the uncertainty of the estimated regression line.</p>
</div>
A more convenient way to show uncertainty is to draw the 95% uncertainty interval, CrI, of the regression line. To this end, we first define new x-values for which we would like to have the fitted values (about 100 points across the range of x will produce smooth-looking lines when connected by line segments). We save these new x-values within the new tibble `newdat`. Then, we create a new model matrix that contains these new x-values (`newmodmat`) using the function `model.matrix`. We then calculate the 1000 fitted values for each element of the new x (one value for each of the 1000 simulated regressions, Fig. \@ref(fig:figlmer1)), using matrix multiplication (%*%). We save these values in the matrix “fitmat”. Finally, we extract the 2.5% and 97.5% quantiles for each x-value from fitmat, and draw the lines for the lower and upper limits of the credible interval (Fig. \@ref(fig:figlmer2)).
```r
# Calculate 95% credible interval
newdat <- tibble(x = seq(10, 30, by = 0.1))
newmodmat <- model.matrix( ~ x, data = newdat)
fitmat <- matrix(ncol = nsim, nrow = nrow(newdat))
for(i in 1:nsim) {fitmat[,i] <- newmodmat %*% coef(bsim)[i,]}
newdat$CrI_lo <- apply(fitmat, 1, quantile, probs = 0.025)
newdat$CrI_up <- apply(fitmat, 1, quantile, probs = 0.975)
# Make plot
regplot <-
ggplot(dat, aes(x = x, y = y)) +
geom_point() +
geom_smooth(method = lm, se = FALSE) +
geom_line(data = newdat, aes(x = x, y = CrI_lo), lty = 3) +
geom_line(data = newdat, aes(x = x, y = CrI_up), lty = 3) +
labs(x = "Predictor (x)", y = "Outcome (y)")
regplot
```
<div class="figure">
<img src="2.03-lm_files/figure-html/figlmer2-1.png" alt="Regression with 95% credible interval of the posterior distribution of the fitted values." width="672" />
<p class="caption">(\#fig:figlmer2)Regression with 95% credible interval of the posterior distribution of the fitted values.</p>
</div>
The interpretation of the 95% uncertainty interval is straightforward: We are 95% sure that the true regression line is within the credible interval (given the data and the model). As with all statistical results, this interpretation is only valid in the model world (if the world would look like the model). The larger the sample size, the narrower the interval, because each additional data point increases information about the true regression line.
The uncertainty interval measures statistical uncertainty of the regression line, but it does not describe how new observations would scatter around the regression line. If we want to describe where future observations will be, we have to report the posterior predictive distribution. We can get a sample of random draws from the posterior predictive distribution $\hat{y}|\boldsymbol{\beta},\sigma^2,\boldsymbol{X}\sim normal(\boldsymbol{X \beta}, \sigma^2)$ using the simulated joint posterior distributions of the model parameters, thus taking the uncertainty of the parameter estimates into account. We draw a new $\hat{y}$-value from $normal(\boldsymbol{X \beta}, \sigma^2)$ for each simulated set of model parameters. Then, we can visualize the 2.5% and 97.5% quantiles of this distribution for each new x-value.
```r
# increase number of simulation to produce smooth lines of the posterior
# predictive distribution
set.seed(34)
nsim <- 50000
bsim <- sim(mod, n.sim=nsim)
fitmat <- matrix(ncol=nsim, nrow=nrow(newdat))
for(i in 1:nsim) fitmat[,i] <- newmodmat%*%coef(bsim)[i,]
# prepare matrix for simulated new data
newy <- matrix(ncol=nsim, nrow=nrow(newdat))
# for each simulated fitted value, simulate one new y-value
for(i in 1:nsim) {
newy[,i] <- rnorm(nrow(newdat), mean = fitmat[,i], sd = bsim@sigma[i])
}
# Calculate 2.5% and 97.5% quantiles
newdat$pred_lo <- apply(newy, 1, quantile, probs = 0.025)
newdat$pred_up <- apply(newy, 1, quantile, probs = 0.975)
# Add the posterior predictive distribution to plot
regplot +
geom_line(data = newdat, aes(x = x, y = pred_lo), lty = 2) +
geom_line(data = newdat, aes(x = x, y = pred_up), lty = 2)
```
<div class="figure">
<img src="2.03-lm_files/figure-html/figlmer3-1.png" alt="Regression line with 95% uncertainty interval (dotted lines) and the 95% interval of the simulated predictive distribution (broken lines). Note that we increased the number of simulations to 50,000 to produce smooth lines." width="672" />
<p class="caption">(\#fig:figlmer3)Regression line with 95% uncertainty interval (dotted lines) and the 95% interval of the simulated predictive distribution (broken lines). Note that we increased the number of simulations to 50,000 to produce smooth lines.</p>
</div>
Of future observations, 95% are expected to be within the interval defined by the broken lines in Fig. \@ref(fig:figlmer3). Increasing sample size will not give a narrower predictive distribution because the predictive distribution primarily depends on the residual variance $\sigma^2$ which is a property of the data that is independent of sample size.
The way we produced Fig. \@ref(fig:figlmer3) is somewhat tedious compared to how easy we could have obtained the same figure using frequentist methods: `predict(mod, newdata = newdat, interval = "prediction")` would have produced the y-values for the lower and upper lines in Fig. \@ref(fig:figlmer3) in one R-code line. However, once we have a simulated sample of the posterior predictive distribution, we have much more information than is contained in the frequentist prediction interval. For example, we could give an estimate for the proportion of observations greater than 20, given $x = 25$.
```r
sum(newy[newdat$x == 25, ] > 20) / nsim
```
```
## [1] 0.44504
```
Thus, we expect 44% of future observations with $x = 25$ to be higher than 20. We can extract similar information for any relevant threshold value.
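For comparison, the frequentist shortcut mentioned above would look like this (a sketch; `newdat` is the data frame with the new x-values created earlier):

```r
pred <- predict(mod, newdata = newdat, interval = "prediction")
head(pred)   # columns fit, lwr and upr give the fitted value and prediction interval
```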
Another reason to learn the more complicated R code we presented here, compared to the frequentist methods, is that, for more complicated models such as mixed models, the frequentist methods to obtain confidence intervals of fitted values are much more complicated than the Bayesian method just presented. The latter can be used with only slight adaptations for mixed models and also for generalized linear mixed models.
### Interpretation of the R summary output
The least-squares solution for $\boldsymbol{\beta}$ is given in Equation \@ref(eq:sim). Most statistical software, including R, return an estimated frequentist standard error for each $\beta_k$. We extract these standard errors together with the estimates for the model parameters using the `summary` function.
```r
summary(mod)
```
```
##
## Call:
## lm(formula = y ~ x, data = dat)
##
## Residuals:
## Min 1Q Median 3Q Max
## -11.5777 -3.6280 -0.0532 3.9873 12.1374
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 2.0050 2.5349 0.791 0.433
## x 0.6880 0.1186 5.800 0.000000507 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 5.049 on 48 degrees of freedom
## Multiple R-squared: 0.412, Adjusted R-squared: 0.3998
## F-statistic: 33.63 on 1 and 48 DF, p-value: 0.0000005067
```
The summary output first gives a rough summary of the residual distribution. However, we will do more rigorous residual analyses in Chapter \@ref(residualanalysis). The estimates of the model coefficients follow. The column "Estimate" contains the estimates for the intercept $\beta_0$ and the slope $\beta_1$ . The column "Std. Error" contains the estimated (frequentist) standard errors of the estimates. The last two columns contain the t-value and the p-value of the classical t-test for the null hypothesis that the coefficient equals zero. The last part of the summary output gives the parameter $\sigma$ of the model, named "residual standard error" and the residual degrees of freedom.
We think the name "residual standard error" for "sigma" is confusing, because $\sigma$ is not a measurement of uncertainty of a parameter estimate like the standard errors of the model coefficients are. $\sigma$ is a model parameter that describes how the observations scatter around the fitted values, that is, it is a standard deviation. It is independent of sample size, whereas the standard errors of the estimates for the model parameters will decrease with increasing sample size. Such a standard error of the estimate of $\sigma$, however, is not given in the summary output. Note that, by using Bayesian methods, we could easily obtain the standard error of the estimated $\sigma$ by calculating the standard deviation of the posterior distribution of $\sigma$.
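For example, a sketch of how this could be obtained from the simulations drawn earlier with `sim`:

```r
sd(bsim@sigma)  # posterior standard deviation of sigma, a measure of its uncertainty
```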
The $R^2$ and the adjusted $R^2$ measure the proportion of variance in the outcome variable $y$ that is explained by the predictors in the model. $R^2$ is calculated from the sum of squared residuals, $SSR = \sum_{i=1}^{n}(y_i - \hat{y}_i)^2$, and the "total sum of squares", $SST = \sum_{i=1}^{n}(y_i - \bar{y})^2$, where $\bar{y}$ is the mean of $y$. $SST$ is a measure of total variance in $y$ and $SSR$
is a measure of the variance that cannot be explained by the model, thus $R^2 = 1- \frac{SSR}{SST}$ is a measure of the variance that can be explained by the model. If $SSR$ is close to $SST$, $R^2$ is close to zero and the model cannot explain a lot of variance. The smaller $SSR$, the closer $R^2$ is to 1. This version of $R^2$ approaches 1 as the number of model parameters approaches the sample size, even if none of the predictor variables correlates with the outcome. It is exactly 1 when the number of model parameters equals the sample size, because $n$ measurements can be exactly described by $n$ parameters. The adjusted $R^2$, $R^2_{adj} = \frac{var(y)-\hat\sigma^2}{var(y)}$, takes sample size $n$ and the number of model parameters $k$ into account (see the explanation of variance in Chapter \@ref(basics)). Therefore, the adjusted $R^2$ is recommended as a measurement of the proportion of explained variance.
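As a small sketch, $R^2$ can be computed by hand from the fitted regression of the first example and compared with the value in the summary output:

```r
SSR <- sum(residuals(mod)^2)          # sum of squared residuals
SST <- sum((dat$y - mean(dat$y))^2)   # total sum of squares
1 - SSR/SST                           # R^2
summary(mod)$r.squared                # same value as reported by summary()
```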
## Linear model with one categorical predictor (one-way ANOVA)
The aim of analysis of variance (ANOVA) is to compare means of an outcome variable $y$ between different groups. To do so in the frequentist’s framework, variances between and within the groups are compared using F-tests (hence the name “analysis of variance”). When doing an ANOVA in a Bayesian way, inference is based on the posterior distributions of the group means and the differences between the group means.
One-way ANOVA means that we only have one predictor variable, specifically a categorical predictor variable (in R defined as a "factor"). We illustrate the one-way ANOVA based on an example of simulated data (Fig. \@ref(fig:figanova)). We have measured weights of 30 virtual individuals for each of 3 groups. Possible research questions could be: How big are the differences between the group means? Are individuals from group 2 heavier than the ones from group 1? Which group mean is higher than 7.5 g?
```r
# settings for the simulation
set.seed(626436)
b0 <- 12 # mean of group 1 (reference group)
sigma <- 2 # residual standard deviation
b1 <- 3 # difference between group 1 and group 2
b2 <- -5 # difference between group 1 and group 3
n <- 90 # sample size
# generate data
group <- factor(rep(c("group 1","group 2", "group 3"), each=30))
simresid <- rnorm(n, mean=0, sd=sigma) # simulate residuals
y <- b0 +
as.numeric(group=="group 2") * b1 +
as.numeric(group=="group 3") * b2 +
simresid
dat <- tibble(y, group)
# make figure
dat %>%
ggplot(aes(x = group, y = y)) +
geom_boxplot(fill = "orange") +
labs(y = "Weight (g)", x = "") +
ylim(0, NA)
```
<div class="figure">
<img src="2.03-lm_files/figure-html/figanova-1.png" alt="Weights (g) of the 30 individuals in each group. The dark horizontal line is the median, the box contains 50% of the observations (i.e., the interquartile range), the whiskers mark the range of all observations that are less than 1.5 times the interquartile range away from the edge of the box." width="672" />
<p class="caption">(\#fig:figanova)Weights (g) of the 30 individuals in each group. The dark horizontal line is the median, the box contains 50% of the observations (i.e., the interquartile range), the whiskers mark the range of all observations that are less than 1.5 times the interquartile range away from the edge of the box.</p>
</div>
An ANOVA is a linear regression with a categorical predictor variable instead of a continuous one. The categorical predictor variable with $k$ levels is (as a default in R) transformed to $k-1$ indicator variables. An indicator variable is a binary variable containing 0 and 1 where 1 indicates a specific level (a category of the predictor variable). Often, one indicator variable is constructed for every level except for the reference level. In our example, the categorical variable is "group" with the three levels "group 1", "group 2", and "group 3" ($k = 3$). Group 1 is taken as the reference level (the default in R is the first level in alphabetical order), and for each of the other two groups an indicator variable is constructed, $I(group_i = 2)$ and $I(group_i = 3)$. The function $I()$ returns 1 if the expression is true and 0 otherwise. We can write the model as a formula:
\begin{align}
\mu_i &=\beta_0 + \beta_1 I(group_i=2) + \beta_2 I(group_i=3) \\
y_i &\sim normal(\mu_i, \sigma^2)
(\#eq:anova)
\end{align}
where $y_i$ is the $i$-th observation (weight measurement for individual $i$ in our example), and $\beta_{0,1,2}$ are the model coefficients. The residual variance is $\sigma^2$. The model coefficients $\beta_{0,1,2}$ constitute the deterministic part of the model. From the model formula it follows that the group means, $m_g$, are:
\begin{align}
m_1 &=\beta_0 \\
m_2 &=\beta_0 + \beta_1 \\
m_3 &=\beta_0 + \beta_2 \\
(\#eq:anovamw)
\end{align}
There are other possibilities to describe three group means with three parameters, for example:
\begin{align}
m_1 &=\beta_1 \\
m_2 &=\beta_2 \\
m_3 &=\beta_3 \\
(\#eq:anovamwalt)
\end{align}
In this case, the model formula would be:
\begin{align}
\mu_i &= \beta_1 I(group_i=1) + \beta_2 I(group_i=2) + \beta_3 I(group_i=3) \\
y_i &\sim normal(\mu_i, \sigma^2)
(\#eq:anovaalt)
\end{align}
The way the group means are calculated within a model is called the parameterization of the model. Different statistical software use different parameterizations. The parameterization used by R by default is the one shown in Equation \@ref(eq:anova). R automatically takes the first level as the reference (the first level is the first one alphabetically unless the user defines a different order for the levels). The mean of the first group (i.e., of the first factor level) is the intercept, $\beta_0$, of the model. The mean of another factor level is obtained by adding, to the intercept, the estimate of the corresponding parameter (which is the difference from the reference group mean).
The parameterization of the model is defined by the model matrix. In the case of a one-way ANOVA, there are as many columns in the model matrix as there are factor levels (i.e., groups); thus there are k factor levels and k model coefficients. Recall from Equation \@ref(eq:lmmatrix) that for each observation, the entry in the $j$-th column of the model matrix is multiplied by the $j$-th element of the model coefficients and the $k$ products are summed to obtain the fitted values. For a data set with $n = 5$ observations of which the first two are from group 1, the third from group 2, and the last two from group 3, the model matrix used for the parameterization described in Equation \@ref(eq:anovamw) and defined in R by the formula `~ group` is
\begin{align}
\boldsymbol{X}=
\begin{pmatrix}
1 & 0 & 0 \\
1 & 0 & 0 \\
1 & 1 & 0 \\
1 & 0 & 1 \\
1 & 0 & 1 \\
\end{pmatrix}
\end{align}
If parameterization of Equation \@ref(eq:anovamwalt) (corresponding R formula: `~ group - 1`) were used,
\begin{align}
\boldsymbol{X}=
\begin{pmatrix}
1 & 0 & 0 \\
1 & 0 & 0 \\
0 & 1 & 0 \\
0 & 0 & 1 \\
0 & 0 & 1 \\
\end{pmatrix}
\end{align}
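The following sketch reproduces the two model matrices above in R for the five hypothetical observations (the factor `g` is ours and not part of the data used later):

```r
g <- factor(c("group 1", "group 1", "group 2", "group 3", "group 3"))
model.matrix(~ g)      # parameterization with the intercept as the reference group mean
model.matrix(~ g - 1)  # parameterization with one parameter per group mean
```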
To obtain the parameter estimates for model parameterized according to Equation \@ref(eq:anovamw) we fit the model in R:
```r
# fit the model
mod <- lm(y~group, data=dat)
# parameter estimates
mod
```
```
##
## Call:
## lm(formula = y ~ group, data = dat)
##
## Coefficients:
## (Intercept) groupgroup 2 groupgroup 3
## 12.367 2.215 -5.430
```
```r
summary(mod)$sigma
```
```
## [1] 1.684949
```
The "Intercept" is $\beta_0$. The other coefficients are named with the factor name ("group") and the factor level (either "group 2" or "group 3"). These are $\beta_1$ and $\beta_2$ , respectively. Before drawing conclusions from an R output we need to examine whether the model assumptions are met, that is, we need to do a residual analysis as described in Chapter \@ref(residualanalysis).
Different questions can be answered using the above ANOVA: What are the group means? What is the difference in the means between group 1 and group 2? What is the difference between the means of the heaviest and lightest group? In a Bayesian framework we can directly assess how strongly the data support the hypothesis that the mean of the group 2 is larger than the mean of group 1. We first simulate from the posterior distribution of the model parameters.
```r
library(arm)
nsim <- 1000
bsim <- sim(mod, n.sim=nsim)
```
Then we obtain the posterior distributions for the group means according to the parameterization of the model formula (Equation \@ref(eq:anovamw)).
```r
m.g1 <- coef(bsim)[,1]
m.g2 <- coef(bsim)[,1] + coef(bsim)[,2]
m.g3 <- coef(bsim)[,1] + coef(bsim)[,3]
```
The histograms of the simulated values from the posterior distributions of the three means are given in Fig. \@ref(fig:figanovares). The three means are well separated and, based on our data, we are confident that the group means differ. From these simulated posterior distributions we obtain the means and use the 2.5% and 97.5% quantiles as limits of the 95% uncertainty intervals (Fig. \@ref(fig:figanovares), right).
```r
# save simulated values from posterior distribution in tibble
post <-
tibble(`group 1` = m.g1, `group 2` = m.g2, `group 3` = m.g3) %>%
gather("groups", "Group means")
# histograms per group
leftplot <-
ggplot(post, aes(x = `Group means`, fill = groups)) +
geom_histogram(aes(y=..density..), binwidth = 0.5, col = "black") +
labs(y = "Density") +
theme(legend.position = "top", legend.title = element_blank())
# plot mean and 95%-CrI
rightplot <-
post %>%
group_by(groups) %>%
dplyr::summarise(
mean = mean(`Group means`),
CrI_lo = quantile(`Group means`, probs = 0.025),
CrI_up = quantile(`Group means`, probs = 0.975)) %>%
ggplot(aes(x = groups, y = mean)) +
geom_point() +
geom_errorbar(aes(ymin = CrI_lo, ymax = CrI_up), width = 0.1) +
ylim(0, NA) +
labs(y = "Weight (g)", x ="")
multiplot(leftplot, rightplot, cols = 2)
```
<div class="figure">
<img src="2.03-lm_files/figure-html/figanovares-1.png" alt="Distribution of the simulated values from the posterior distributions of the group means (left); group means with 95% uncertainty intervals obtained from the simulated distributions (right)." width="672" />
<p class="caption">(\#fig:figanovares)Distribution of the simulated values from the posterior distributions of the group means (left); group means with 95% uncertainty intervals obtained from the simulated distributions (right).</p>
</div>
To obtain the posterior distribution of the difference between the means of group 1 and group 2, we simply calculate this difference for each draw from the joint posterior distribution of the group means.
```r
d.g1.2 <- m.g1 - m.g2
mean(d.g1.2)
```
```
## [1] -2.209551
```
```r
quantile(d.g1.2, probs = c(0.025, 0.975))
```
```
## 2.5% 97.5%
## -3.128721 -1.342693
```
The estimated difference is -2.21 g. In the small model world, we are 95% sure that the difference between the means of group 1 and 2 is between -3.13 g and -1.34 g.
How strongly do the data support the hypothesis that the mean of group 2 is larger than the mean of group 1? To answer this question we calculate the proportion of the draws from the joint posterior distribution for which the mean of group 2 is larger than the mean of group 1.
```r
sum(m.g2 > m.g1) / nsim
```
```
## [1] 1
```
This means that in all of the 1000 simulations from the joint posterior distribution, the mean of group 2 was larger than the mean of group 1. Therefore, there is a very high probability (i.e., it is close to 1; because probabilities are never exactly 1, we write >0.999) that the mean of group 2 is larger than the mean of group 1.
## Other variants of normal linear models: Two-way anova, analysis of covariance and multiple regression
Up to now, we introduced normal linear models with one predictor only. We can add more predictors to the model and these can be numerical or categorical ones. Traditionally, models with 2 or 3 categorical predictors are called two-way or three-way ANOVA, respectively. Models with a mixture of categorical and numerical predictors are called ANCOVA. And, models containing only numerical predictors are called multiple regressions. Nowadays, we only use the term "normal linear model" as an umbrella term for all these types of models.
While it is easy to add additional predictors in the R formula of the model, it becomes more difficult to interpret the coefficients of such multi-dimensional models. Two important topics arise with multi-dimensional models: *interactions* and *partial effects*. We dedicate the entire next chapter to partial effects and introduce interactions in this chapter using two examples. The first is a model including two categorical predictors and the second is a model with one categorical and one numeric predictor.
### Linear model with two categorical predictors (two-way ANOVA)
In the first example, we ask how large the differences in wing length are between age and sex classes of the Coal tit *Periparus ater*. Wing lengths were measured on 19 coal tit museum skins with known sex and age class.
```r
data(periparusater)
dat <- tibble(periparusater) # give the data a short handy name
dat$age <- recode_factor(dat$age, "4"="adult", "3"="juvenile") # replace EURING code
dat$sex <- recode_factor(dat$sex, "2"="female", "1"="male") # replace EURING code
```
To describe differences in wing length between the age classes or between the sexes a normal linear model with two categorical predictors is fitted to the data. The two predictors are specified on the right side of the model formula separated by the "+" sign, which means that the model is an additive combination of the two effects (as opposed to an interaction, see following).
```r
mod <- lm(wing ~ sex + age, data=dat)
```
After having seen that the residual distribution does not appear to violate the model assumptions (as assessed with diagnostic residual plots, see Chapter \@ref(residualanalysis)), we can draw inferences. We first have a look at the model parameter estimates:
```r
mod
```
```
##
## Call:
## lm(formula = wing ~ sex + age, data = dat)
##
## Coefficients:
## (Intercept) sexmale agejuvenile
## 61.3784 3.3423 -0.8829
```
```r
summary(mod)$sigma
```
```
## [1] 2.134682
```
R has taken the first level of the factors age and sex (as defined in the data.frame dat) as the reference levels. The intercept is the expected wing length for individuals having the reference level in age and sex, thus adult females. The other two parameters provide estimates of what has to be added to the intercept to get the expected wing length for the other levels. The parameter `sexmale` is the average difference between females and males. We can conclude that males have, on average, a 3.3 mm longer wing than females. Similarly, the parameter `agejuvenile` measures the difference between the age classes and we can conclude that, on average, juveniles have a 0.9 mm shorter wing than adults. When we insert the parameter estimates into the model formula, we get the recipe to calculate the expected value for each age and sex combination:
$\hat{y_i} = \hat{\beta_0} + \hat{\beta_1}I(sex=male) + \hat{\beta_2}I(age=juvenile)$ which yields
$\hat{y_i}$ = 61.4 $+$ 3.3 $I(sex=male) +$ -0.9 $I(age=juvenile)$.
Alternatively, we could use matrix notation. We construct a new data set that contains one virtual individual for each age and sex class.
```r
newdat <- tibble(expand.grid(sex=factor(levels(dat$sex)),
age=factor(levels(dat$age))))
# expand.grid creates a data frame with all combination of values given
newdat
```
```
## # A tibble: 4 × 2
## sex age
## <fct> <fct>
## 1 female adult
## 2 male adult
## 3 female juvenile
## 4 male juvenile
```
```r
newdat$fit <- predict(mod, newdata=newdat) # fast way of getting fitted values
# or
Xmat <- model.matrix(~sex+age, data=newdat) # creates a model matrix
newdat$fit <- Xmat %*% coef(mod)
```
For this new data set the model matrix contains four rows (one for each combination of age class and sex) and three columns. The first column contains only ones because the values of this column are multiplied by the intercept ($\beta_0$) in the matrix multiplication. The second column contains an indicator variable for males (so only the rows corresponding to males contain a one) and the third column has ones for juveniles.
\begin{align}
\hat{y} =
\boldsymbol{X \hat{\beta}} =
\begin{pmatrix}
1 & 0 & 0 \\
1 & 1 & 0 \\
1 & 0 & 1 \\
1 & 1 & 1 \\
\end{pmatrix}
\times
\begin{pmatrix}
61.4 \\
3.3 \\
-0.9
\end{pmatrix} =
\begin{pmatrix}
61.4 \\
64.7 \\
60.5 \\
63.8
\end{pmatrix} =
\boldsymbol{\mu}
(\#eq:lmmatrix)
\end{align}
The result of the matrix multiplication is a vector containing the expected wing length for adult and juvenile females and adult and juvenile males.
When creating the model matrix with `model.matrix`, care has to be taken that the columns in the model matrix match the parameters in the vector of model coefficients. To achieve that, the formula given to `model.matrix` must be identical to the formula of the fitted model (same order of terms!), and the factors in newdat must have the same levels, in the same order, as in the data the model was fitted to.
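A simple safeguard is to compare the column names of the model matrix with the names of the estimated coefficients before doing the matrix multiplication (a quick check, nothing more):
```r
# the columns of Xmat must correspond one-to-one to the model coefficients
colnames(Xmat)
names(coef(mod))
stopifnot(identical(colnames(Xmat), names(coef(mod))))
```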
To describe the uncertainty of the fitted values, we use 2000 sets of parameter values of the joint posterior distribution to obtain 2000 values for each of the four fitted values. These are stored in the object "fitmat". In the end, we extract for every fitted value, i.e., for every row in fitmat, the 2.5% and 97.5% quantiles as the lower and upper limits of the 95% uncertainty interval.
```r
nsim <- 2000
bsim <- sim(mod, n.sim=nsim)
fitmat <- matrix(ncol=nsim, nrow=nrow(newdat))
for(i in 1:nsim) fitmat[,i] <- Xmat %*% coef(bsim)[i,]
newdat$lwr <- apply(fitmat, 1, quantile, probs=0.025)
newdat$upr <- apply(fitmat, 1, quantile, probs=0.975)
```
```r
dat$sexage <- factor(paste(dat$sex, dat$age))
newdat$sexage <- factor(paste(newdat$sex, newdat$age))
dat$pch <- 21
dat$pch[dat$sex=="male"] <- 22
dat$col="blue"
dat$col[dat$age=="adult"] <- "orange"
par(mar=c(4,4,0.5,0.5))
plot(wing~jitter(as.numeric(sexage), amount=0.05), data=dat, las=1,
ylab="Wing length (mm)", xlab="Sex and age", xaxt="n", pch=dat$pch,
bg=dat$col, cex.lab=1.2, cex=1, cex.axis=1, xlim=c(0.5, 4.5))
axis(1, at=c(1:4), labels=levels(dat$sexage), cex.axis=1)
segments(as.numeric(newdat$sexage), newdat$lwr, as.numeric(newdat$sexage), newdat$upr, lwd=2,
lend="butt")
points(as.numeric(newdat$sexage), newdat$fit, pch=17)
```
<div class="figure">
<img src="2.03-lm_files/figure-html/fgwingpa-1.png" alt="Wing length measurements on 19 museumm skins of coal tits per age class and sex. Fitted values are from the additive model (black triangles) and from the model including an interaction (black dots). Vertical bars = 95% uncertainty intervals." width="672" />
<p class="caption">(\#fig:fgwingpa)Wing length measurements on 19 museumm skins of coal tits per age class and sex. Fitted values are from the additive model (black triangles) and from the model including an interaction (black dots). Vertical bars = 95% uncertainty intervals.</p>
</div>
We can see that the fitted values are not equal to the arithmetic means of the groups; this is especially clear for juvenile males. The fitted values are constrained because only three parameters were used to estimate four means. In other words, this model assumes that the age difference is equal in both sexes and, vice versa, that the difference between the sexes does not change with age. If the effect of sex changes with age, we would include an *interaction* between sex and age in the model. Including an interaction adds a fourth parameter enabling us to estimate the group means exactly. In R, an interaction is indicated with the `:` sign.
```r
mod2 <- lm(wing ~ sex + age + sex:age, data=dat)
# alternative formulations of the same model:
# mod2 <- lm(wing ~ sex * age, data=dat)
# mod2 <- lm(wing ~ (sex + age)^2, data=dat)
```
The formula for this model is $\hat{y_i} = \hat{\beta_0} + \hat{\beta_1}I(sex=male) + \hat{\beta_2}I(age=juvenile) + \hat{\beta_3}I(age=juvenile)I(sex=male)$. From this formula we get the following expected values for the sexes and age classes:
for adult females: $\hat{y} = \beta_0$
for adult males: $\hat{y} = \beta_0 + \beta_1$
for juveniles females: $\hat{y} = \beta_0 + \beta_2$
for juveniles males: $\hat{y} = \beta_0 + \beta_1 + \beta_2 + \beta_3$
The interaction parameter measures how much the difference between the sexes differs between the two age classes.
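To make this concrete, the sex difference within each age class can be read off the coefficients of `mod2` (a minimal sketch; `b2` is only a helper object):
```r
# sex difference within each age class, read off the coefficients of mod2
b2 <- coef(mod2)
unname(b2["sexmale"])                             # difference among adults (ca. 3.7 mm)
unname(b2["sexmale"] + b2["sexmale:agejuvenile"]) # difference among juveniles (ca. 2.7 mm)
# the interaction parameter is the difference between these two differences
```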
To obtain the fitted values the R-code above can be recycled with two adaptations. First, the model name needs to be changed to "mod2". Second, importantly, the model matrix needs to be adapted to the new model formula.
```r
newdat$fit2 <- predict(mod2, newdata=newdat)
bsim <- sim(mod2, n.sim=nsim)
Xmat <- model.matrix(~ sex + age + sex:age, data=newdat)
fitmat <- matrix(ncol=nsim, nrow=nrow(newdat))
for(i in 1:nsim) fitmat[,i] <- Xmat %*% coef(bsim)[i,]
newdat$lwr2 <- apply(fitmat, 1, quantile, probs=0.025)
newdat$upr2 <- apply(fitmat, 1, quantile, probs=0.975)
print(newdat[,c(1:5,7:9)], digits=3)
```
```
## # A tibble: 4 × 8
## sex age fit[,1] lwr upr fit2 lwr2 upr2
## <fct> <fct> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 female adult 61.4 59.3 63.3 61.1 58.8 63.5
## 2 male adult 64.7 63.3 66.2 64.8 63.3 66.4
## 3 female juvenile 60.5 58.4 62.6 60.8 58.2 63.4
## 4 male juvenile 63.8 61.7 66.0 63.5 60.7 66.2
```
These fitted values are now exactly equal to the arithmetic means of each group.
```r
tapply(dat$wing, list(dat$age, dat$sex), mean) # arithmetic mean per group
```
```
## female male
## adult 61.12500 64.83333
## juvenile 60.83333 63.50000
```
We can also see that the uncertainty of the fitted values is larger for the model with an interaction than for the additive model. This is because, in the model including the interaction, an additional parameter has to be estimated based on the same amount of data. Therefore, the information available per parameter is smaller than in the additive model. In the additive model, some information is pooled between the groups by making the assumption that the difference between the sexes does not depend on age.
The degree to which a difference in wing length is ‘important’ depends on the context of the study. Here, for example, we could consider effects of wing length on flight energetics and maneuverability or methodological aspects like measurement error. Mean between-observer difference in wing length measurement is around 0.3 mm [@Jenni.1989]. Therefore, we may consider that the interaction is important because its point estimate is larger than 0.3 mm.
```r
mod2
```
```
##
## Call:
## lm(formula = wing ~ sex + age + sex:age, data = dat)
##
## Coefficients:
## (Intercept) sexmale agejuvenile
## 61.1250 3.7083 -0.2917
## sexmale:agejuvenile
## -1.0417
```
```r
summary(mod2)$sigma
```
```
## [1] 2.18867
```
Further, we think a difference of 1 mm in wing length may be relevant compared to the among-individual variation, which has a standard deviation of around 2 mm. Therefore, we report the parameter estimates of the model including the interaction together with their uncertainty intervals.
Table: (\#tab:sumtabpa)Parameter estimates of the model for wing length of Coal tits with 95% uncertainty interval.
|Parameter | Estimate| lwr| upr|
|:-------------------|--------:|-----:|-----:|
|(Intercept) | 61.12| 58.85| 63.53|
|sexmale | 3.71| 0.93| 6.59|
|agejuvenile | -0.29| -3.93| 3.36|
|sexmale:agejuvenile | -1.04| -5.96| 3.90|
From these parameters we obtain an estimated difference in wing length between adult males and adult females of 3.7 mm. The posterior probability of the hypothesis that males have an average wing length that is at least 1 mm larger than that of females is `mean(bsim@coef[,2]>1)`, which is 0.97. Thus, there is some evidence that adult Coal tit males have substantially larger wings than adult females in these data. However, we do not draw further conclusions from these data because statistical uncertainty is large due to the low sample size.
### A linear model with a categorical and a numeric predictor (ANCOVA)
An analysis of covariance, ANCOVA, is a normal linear model that contains at least one factor and one continuous variable as predictor variables. The continuous variable is also called a covariate, hence the name analysis of covariance. An ANCOVA can be used, for example, when we are interested in how the biomass of grass depends on the distance from the surface of the soil to the ground water in two different species (*Alopecurus pratensis*, *Dactylis glomerata*). The two species were grown by @Ellenberg1953 in tanks that showed a gradient in distance from the soil surface to the ground water. The distance from the soil surface to the ground water is used as a covariate (‘water’). We further assume that the species react differently to the water conditions. Therefore, we include an interaction between species and water. The model formula is then
$\hat{y_i} = \beta_0 + \beta_1I(species=Dg) + \beta_2water_i + \beta_3I(species=Dg)water_i$
$y_i \sim normal(\hat{y_i}, \sigma^2)$
To fit the model, it is important to first check whether the factor is indeed defined as a factor and the continuous variable contains numbers (i.e., numeric or integer values) in the data frame.
```r
data(ellenberg)
index <- is.element(ellenberg$Species, c("Ap", "Dg")) & complete.cases(ellenberg$Yi.g)
dat <- ellenberg[index,c("Water", "Species", "Yi.g")] # select two species
dat <- droplevels(dat)
str(dat)
```
```
## 'data.frame': 84 obs. of 3 variables:
## $ Water : int 5 20 35 50 65 80 95 110 125 140 ...
## $ Species: Factor w/ 2 levels "Ap","Dg": 1 1 1 1 1 1 1 1 1 1 ...
## $ Yi.g : num 34.8 28 44.5 24.8 37.5 ...
```
Species is a factor with two levels and Water is an integer variable, so we are fine and we can fit the model
```r
mod <- lm(log(Yi.g) ~ Species + Water + Species:Water, data=dat)
# plot(mod) # 4 standard residual plots
```
We log-transform the biomass so that the residuals are closer to normally distributed; with this transformation the normal distribution assumption is met well. However, a slight banana-shaped relationship exists between the residuals and the fitted values, indicating a slightly non-linear relationship between biomass and water. Further, the residuals showed substantial autocorrelation because the grass biomass was measured in different tanks: measurements from the same tank were more similar than measurements from different tanks after correcting for the distance to water. Thus, the analysis we have done here suffers from pseudoreplication. We will re-analyze the example data in a more appropriate way in Chapter \@ref(lmer).
Let's have a look at the model matrix (first and last six rows only).
```r
head(model.matrix(mod)) # print the first 6 rows of the matrix
```
```
## (Intercept) SpeciesDg Water SpeciesDg:Water
## 24 1 0 5 0
## 25 1 0 20 0
## 26 1 0 35 0
## 27 1 0 50 0
## 28 1 0 65 0
## 29 1 0 80 0
```
```r
tail(model.matrix(mod)) # print the last 6 rows of the matrix
```
```
## (Intercept) SpeciesDg Water SpeciesDg:Water
## 193 1 1 65 65
## 194 1 1 80 80
## 195 1 1 95 95
## 196 1 1 110 110
## 197 1 1 125 125
## 198 1 1 140 140
```
The first column of the model matrix contains only 1s. These are multiplied by the intercept in the matrix multiplication that yields the fitted values. The second column contains the indicator variable for species *Dactylis glomerata* (Dg). Species *Alopecurus pratensis* (Ap) is the reference level. The third column contains the values for the covariate. The last column contains the product of the indicator for species Dg and water. This column specifies the interaction between species and water.
The parameters are the intercept, the difference between the species, a slope for water and the interaction parameter.
```r
mod
```
```
##
## Call:
## lm(formula = log(Yi.g) ~ Species + Water + Species:Water, data = dat)
##
## Coefficients:
## (Intercept) SpeciesDg Water SpeciesDg:Water
## 4.33041 -0.23700 -0.01791 0.01894
```
```r
summary(mod)$sigma
```
```
## [1] 0.9001547
```
These four parameters define two regression lines, one for each species (Figure \@ref(fig:fgbiom) Left). For Ap, it is $\hat{y_i} = \beta_0 + \beta_2water_i$, and for Dg it is $\hat{y_i} = (\beta_0 + \beta_1) + (\beta_2 + \beta_3)water_i$. Thus, $\beta_1$ is the difference in the intercept between the species and $\beta_3$ is the difference in the slope.
<div class="figure">
<img src="2.03-lm_files/figure-html/fgbiom-1.png" alt="Aboveground biomass (g, log-transformed) in relation to distance to ground water and species (two grass species). Fitted values from a model including an interaction species x water (left) and a model without interaction (right) are added. The dotted line indicates water=0." width="672" />
<p class="caption">(\#fig:fgbiom)Aboveground biomass (g, log-transformed) in relation to distance to ground water and species (two grass species). Fitted values from a model including an interaction species x water (left) and a model without interaction (right) are added. The dotted line indicates water=0.</p>
</div>
As a consequence of including an interaction in the model, the interpretation of the main effects becomes more difficult. From the above model output, we read that the intercept of the species Dg is lower than the intercept of the species Ap. However, from a graphical inspection of the data, we would expect that the average biomass of species Dg is higher than the one of species Ap. The estimated main effect of species is counter-intuitive because it is measured where water is zero (i.e., it is the difference in the intercepts and not between the mean biomasses of the species). Therefore, the main effect of species in the above model does not have a biologically meaningful interpretation. We have two possibilities to get a meaningful species effect. First, we could delete the interaction from the model (Figure \@ref(fig:fgbiom) Right). Then the difference in the intercepts reflects an average difference between the species. However, the fit of such an additive model is much worse compared to the model with interaction, and an average difference between the species may not make much sense because this difference depends so strongly on water. Therefore, we prefer to use a model including the interaction and may opt for the second possibility: we move the location where water equals 0 to the center of the data by transforming, specifically centering, the variable water: $water.c = water - mean(water)$. When the predictor variable (water) is centered, the main effect of species corresponds to the difference in fitted values between the species measured in the center of the data.
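A minimal sketch of this second possibility (the centered variable `Water.c` and the model object `mod.c` are introduced here only for illustration):
```r
# center the covariate water and refit the model including the interaction
dat$Water.c <- dat$Water - mean(dat$Water)
mod.c <- lm(log(Yi.g) ~ Species + Water.c + Species:Water.c, data=dat)
# the coefficient "SpeciesDg" of mod.c now measures the difference between the species
# at the average distance to the ground water
```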
For drawing biological conclusions from these data, we refer to Chapter \@ref(lmer), where we use a more appropriate model.
## Partial coefficients and some comments on collinearity
Many biologists think that it is forbidden to include correlated predictor variables in a model. They use variance inflation factors (VIF) to omit some of the variables. However, omitting important variables from the model just because a correlation coefficient exceeds a threshold value can have undesirable effects. Here, we explain why, and we present the usefulness and limits of partial coefficients (also called partial correlations or partial effects). We start with an example illustrating the usefulness of partial coefficients and then give some guidelines on how to deal with collinearity.
As an example, we look at hatching dates of Snowfinches and how these dates relate to the date when snow melt started (the first date in the season when a minimum of 5% of the ground is snow free). A thorough analysis of the data is presented by @Schano.2021. An important question is how well Snowfinches can adjust their hatching dates to the snow conditions. For Snowfinches, it is important to raise their nestlings during snow melt. Their nestlings grow faster when they are reared during the snow melt compared to after the snow has completely melted, because their parents find nutrient-rich insect larvae at the edges of melting snow patches.
```r
load("RData/snowfinch_hatching_date.rda")
# Pearson's correlation coefficient
cor(datsf$elevation, datsf$meltstart, use = "pairwise.complete")
```
```
## [1] 0.3274635
```
```r
mod <- lm(meltstart~elevation, data=datsf)
100*coef(mod)[2] # change in meltstart with 100m change in elevation
```
```
## elevation
## 2.97768
```
Hatching dates of Snowfinch broods were inferred from citizen science data from the Alps, where snow melt starts later at higher elevations compared to lower elevations. Thus, the start of snow melt is correlated with elevation (Pearson's correlation coefficient 0.33). On average, snow starts melting 3 days later with every 100 m increase in elevation.
```r
mod1 <- lm(hatchday.mean~meltstart, data=datsf)
mod1
```
```
##
## Call:
## lm(formula = hatchday.mean ~ meltstart, data = datsf)
##
## Coefficients:
## (Intercept) meltstart
## 167.99457 0.06325
```
From a normal linear regression of hatching date on the snow melt date, we obtain an estimate of 0.06 days delay in hatching date per one day later snow melt. This effect size describes the relationship in the data that were collected along an elevational gradient. Along the elevational gradient many factors change, such as average temperature, air pressure or sun radiation. All these factors may have an influence on the birds' decision to start breeding. Consequently, from the raw correlation between hatching dates and the start of snow melt we cannot conclude how Snowfinches react to changes in the start of snow melt, because the correlation seen in the data may be caused by other factors changing with elevation (such a correlation is called "pseudocorrelation"). However, we are interested in the correlation between hatching date and date of snow melt independent of other factors changing with elevation. In other words, we would like to measure how much, on average, hatching date is delayed when snow melt starts one day later while all other factors are kept constant. This is called the partial effect of snow melt date. Therefore, we include elevation as a covariate in the model.
```r
library(arm)
mod <- lm(hatchday.mean~elevation + meltstart, data=datsf)
mod
```
```
##
## Call:
## lm(formula = hatchday.mean ~ elevation + meltstart, data = datsf)
##
## Coefficients:
## (Intercept) elevation meltstart
## 154.383936 0.007079 0.037757
```
From this model, we obtain an estimate of 0.04 days delay in hatching date per one day later snow melt at a given elevation. That gives a difference in hatching date between early and late years (around one month difference in snow melt date) at a given elevation of 1.13 days (Figure \@ref(fig:sfexfig)). We further get an estimate of 0.71 days later hatching date for each 100 m increase in elevation. Thus, snow melt starting 18.75 days later corresponds to a similar delay in average hatching date as a 100 m increase in elevation.
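The two quantities reported above can be computed directly from the model coefficients (a small sketch):
```r
# delay in hatching date for a ca. one month (30 days) later start of snow melt
coef(mod)["meltstart"]*30  # ca. 1.13 days
# delay in hatching date per 100 m increase in elevation
coef(mod)["elevation"]*100 # ca. 0.71 days
```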
When we estimate the coefficient within a constant elevation (coloured regression lines in Figure \@ref(fig:sfexfig)), it is lower than the raw correlation and closer to a causal relationship, because it is corrected for elevation. However, in observational studies, we can never be sure whether the partial coefficients can be interpreted as a causal relationship unless we include all factors that influence hatching date. Nevertheless, partial effects give much more insight into a system compared to univariate analyses because we can separate the effects of simultaneously acting variables (that we have measured). The result indicates that Snowfinches may not react very sensitively to varying timing of snow melt, whereas at higher elevations they clearly breed later compared to lower elevations.
<div class="figure">
<img src="2.03-lm_files/figure-html/sfexfig-1.png" alt="Illustration of the partial coefficient of snow melt date in a model of hatching date. Panel A shows the entire raw data together with the regression lines drawn for three different elevations. The regression lines span the range of snow melt dates occurring at the respective elevation (shown in panel C). Panel B is the same as panel A, but zoomed in to the better see the regression lines and with an additional regression line (in black) from the model that does not take elevation into account." width="672" />
<p class="caption">(\#fig:sfexfig)Illustration of the partial coefficient of snow melt date in a model of hatching date. Panel A shows the entire raw data together with the regression lines drawn for three different elevations. The regression lines span the range of snow melt dates occurring at the respective elevation (shown in panel C). Panel B is the same as panel A, but zoomed in to the better see the regression lines and with an additional regression line (in black) from the model that does not take elevation into account.</p>
</div>
We have seen that it can be very useful to include more than one predictor variable in a model even if they are correlated with each other. In fact, there is nothing wrong with that. However, correlated predictors (collinearity) make things more complicated.
For example, partial regression lines should not be drawn across the whole range of values of a variable, to avoid extrapolating beyond the data. At 2800 m asl snow melt never starts at the beginning of March. Therefore, the blue regression line would not make sense for snow melt dates in March.
Further, sometimes correlations among predictors indicate that these predictors measure the same underlying aspect and we are actually interested in the effect of this underlying aspect on our response. For example, we could also include the date of the end of snow melt. Both variables, the start and the end of snow melt, measure the timing of snow melt. Including both as predictors in the model would result in partial coefficients that measure how much hatching date changes when snow melt starts one day later while the end date is held constant. That interpretation is a mixture of the effects of timing and duration rather than of snow melt timing alone. Similarly, the coefficient of the end of snow melt measures a mixture of duration and timing. Thus, if we include two variables that are correlated because they measure the same aspect (just a little bit differently), we get coefficients that are hard to interpret and may not measure what we actually are interested in. In such cases, we get model coefficients that are easier to interpret if we include just one variable for each aspect that we are interested in, e.g., one timing variable (such as the start of snow melt) and the duration of snow melt, which may or may not be correlated with the start of snow melt.
To summarize, the decision of what to do with correlated predictors primarily relies on the question we are interested in, i.e., what exactly should the partial coefficients be an estimate for.
A further drawback of collinearity is that model fitting can become difficult. When strong correlations are present, model fitting algorithms may fail. If they do not fail, the statistical uncertainty of the estimates often becomes large. This is because the partial coefficient of one variable needs to be estimated for constant values of the other predictors in the model, which means that a reduced range of values is available, as illustrated in Figure \@ref(fig:sfexfig) C. However, if uncertainty intervals (confidence, credible or compatibility intervals) are reported alongside the estimates, then using correlated predictors in the same model is absolutely fine, provided the fitting algorithm was successful.
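For the Snowfinch model above, such uncertainty intervals could be obtained, for example, from simulated draws of the posterior distribution, analogous to the earlier examples in this chapter (a sketch using `sim` from the `arm` package):
```r
# 95% uncertainty intervals for the partial coefficients of the hatching date model
bsim <- sim(mod, n.sim=2000)
apply(coef(bsim), 2, quantile, probs=c(0.025, 0.975))
```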
The correlations per se can be interesting. Further readings on how to visualize and analyse data with complex correlation structures:
- principal component analysis [@Manly.1994]
- path analyses, e.g. @Shipley.2009
- structural equation models [@Hoyle2012]
<a href="" target="_blank"><img src="images/ruchen.jpg" width="1452" style="display: block; margin: auto;" /></a>
## Ordered Factors and Contrasts {#orderedfactors}
In this chapter, we have seen that the model matrix is an $n \times k$ matrix (with $n$ = sample size and $k$ = number of model coefficients) that is multiplied by the vector of the $k$ model coefficients to obtain the fitted values of a normal linear model. The first column of the model matrix normally contains only ones. This column is multiplied by the intercept. The other columns contain the observed values of the predictor variables if these are numeric variables, or indicator variables (= dummy variables) for factor levels if the predictors are categorical variables (= factors). For categorical variables the model matrix can be constructed in a number of ways. How it is constructed determines how the model coefficients can be interpreted. For example, coefficients could represent differences between means of specific factor levels to the mean of the reference level. That is what we have introduced above. However, they could also represent a linear, quadratic or cubic effect of an ordered factor. Here, we show how this works.
An ordered factor is a categorical variable with levels that have a natural order, for example, ‘low’, ‘medium’ and ‘high’. How do we tell R that a factor is ordered? The swallow data contain a factor ‘nesting_aid’ that contains the type of nesting aid provided in a barn for the nesting swallows. The natural order of the levels is none < support (e.g., a wooden stick in the wall that helps support a nest built by the swallow) < artificial_nest < both (support and artificial nest). However, when we read in the data, R orders these levels alphabetically rather than according to the logical order.
```r
data(swallows)
levels(swallows$nesting_aid)
```
```
## [1] "artif_nest" "both" "none" "support"
```
And with the function contrasts we see how R will construct the model matrix.
```r
contrasts(swallows$nesting_aid)
```
```
## both none support
## artif_nest 0 0 0
## both 1 0 0
## none 0 1 0
## support 0 0 1
```
R will construct three dummy variables and call them ‘both’, ‘none’, and ‘support’. The variable ‘both’ will have an entry of one when the observation is ‘both’ and zero otherwise. Similarly, the other two dummy variables are indicator variables of the other two levels and ‘artif_nest’ is the reference level. The model coefficients can then be interpreted as the difference between ‘artif_nest’ and each of the other levels. The instruction how to transform a factor into columns of a model matrix is called the contrasts.
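We can look at the dummy variables that these contrasts produce directly in the model matrix (a quick look at the first rows):
```r
# first rows of the model matrix constructed from the factor nesting_aid
head(model.matrix(~ nesting_aid, data=swallows))
```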
Now, let's bring the levels into their natural order and define the factor as an ordered factor.
```r
swallows$nesting_aid <- factor(swallows$nesting_aid, levels=c("none", "support", "artif_nest", "both"), ordered=TRUE)
levels(swallows$nesting_aid)
```
```
## [1] "none" "support" "artif_nest" "both"
```
The levels are now in the natural order. R will, from now on, use this order for analyses, tables, and plots, and because we defined the factor to be an ordered factor, R will use polynomial contrasts:
```r
contrasts(swallows$nesting_aid)
```
```
## .L .Q .C
## [1,] -0.6708204 0.5 -0.2236068
## [2,] -0.2236068 -0.5 0.6708204
## [3,] 0.2236068 -0.5 -0.6708204
## [4,] 0.6708204 0.5 0.2236068
```
When using polynomial contrasts, R will construct three (= number of levels minus one) variables that are called ‘.L’, ‘.Q’, and ‘.C’ for linear, quadratic and cubic effects. The contrast matrix defines which numeric value will be inserted in each of the three corresponding columns in the model matrix for each observation, for example, an observation with ‘support’ in the factor ‘nesting_aid’ will get the values -0.224, -0.5 and 0.671 in the columns L, Q and C of the model matrix. These contrasts define yet another way to get 4 different group means:
$m_1 = \beta_0 - 0.671 \beta_1 + 0.5 \beta_2 - 0.224 \beta_3$
$m_2 = \beta_0 - 0.224 \beta_1 - 0.5 \beta_2 + 0.671 \beta_3$
$m_3 = \beta_0 + 0.224 \beta_1 - 0.5 \beta_2 - 0.671 \beta_3$
$m_4 = \beta_0 + 0.671 \beta_1 + 0.5 \beta_2 + 0.224 \beta_3$
The group means are the same, independent of whether a factor is defined as ordered or not. The ordering also has no effect on the variance that is explained by the factor ‘nesting_aid’ or the overall model fit. Only the model coefficients and their interpretation depend on whether a factor is defined as ordered or not. When we define a factor as ordered, the coefficients can be interpreted as linear, quadratic, cubic, or higher order polynomial effects. The number of the polynomials will always be the number of factor levels minus one (unless the intercept is omitted from the model in which case it is the number of factor levels). Linear, quadratic, and further polynomial effects normally are more interesting for ordered factors than single differences from a reference level because linear and polynomial trends tell us something about consistent changes in the outcome along the ordered factor levels. Therefore, an ordered factor with k levels is treated like a covariate consisting of the centered level numbers (-1.5, -0.5, 0.5, 1.5 in our case with four levels) and k-1 orthogonal polynomials of this covariate are included in the model. Thus, if we have an ordered factor A with three levels, `y~A` is equivalent to `y~x+I(x^2)`, with x=-1 for the lowest, x=0 for the middle and x=1 for the highest level.
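A small toy example with simulated data (introduced here only for illustration) shows this equivalence:
```r
# toy data: an ordered factor with three levels and the corresponding centered level numbers
A <- factor(rep(c("low", "medium", "high"), each=4),
            levels=c("low", "medium", "high"), ordered=TRUE)
x <- as.numeric(A) - 2  # centered level numbers: -1, 0, 1
set.seed(1)
y <- rnorm(12, mean=c(1, 3, 2)[as.numeric(A)])
# both models produce identical fitted values (the group means)
all.equal(fitted(lm(y ~ A)), fitted(lm(y ~ x + I(x^2))))
```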
Note that it is also possible to define own contrasts if we are interested in specific differences or trends. However, it is not trivial to find meaningful and orthogonal (= uncorrelated) contrasts.
## Quadratic and Higher Polynomial Terms
The straight regression line for the biomass of grass species Ap *Alopecurus pratensis* dependent on the distance to the ground water does not fit well (Figure \@ref(fig:fgbiom)). The residuals at low and high values of water tend to be positive and intermediate water levels are associated with negative residuals. This points out a possible violation of the model assumptions.
The problem is that the relationship between distance to water and biomass of species Ap is not linear. In real life, we often find non-linear relationships, but if the shape of the relationship is quadratic (plus, potentially, a few more polynomials) we can still use ‘linear modeling’ (the term 'linear' refers to the linear function used to describe the relationship between the outcome and the predictor variables: $f(x) = \beta_0 + \beta_1x + \beta_2x^2$ is a linear function compared to, e.g., $f(x) = \beta^x$, which is not a linear function). We simply add the quadratic term of the predictor variable, that is, water in our example, as a further predictor in the linear predictor:
$\hat{y_i} = \beta_0+\beta_1water_i+\beta_2water_i^2$.
A quadratic term can be fitted in R using the function `I()` which tells R that we want the squared values of distance to water. If we do not use `I()` the `^2` indicates a two-way interaction. The model specification is then `lm(log(Yi.g) ~ Water + I(Water^2), data=...)`.
The cubic term would be added by `+I(Water^3)`.
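For the grass biomass data used above, the quadratic model could thus be fitted as follows (a sketch; the object name `mod.q` is introduced here only for illustration):
```r
# quadratic model for the log-transformed biomass
mod.q <- lm(log(Yi.g) ~ Water + I(Water^2), data=dat)
```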
As with interactions, a polynomial term changes the interpretation of lower level polynomials. Therefore, we normally include all polynomials up to a specific degree. Furthermore, polynomials are normally correlated (if no special transformation is used, see below) which could cause problems when fitting the model such as non-convergence. To avoid collinearity among polynomials, so called orthogonal polynomials can be used. These are polynomials that are uncorrelated. To that end, we can use the function `poly` which creates as many orthogonal polynomials of the variable as we want:
`poly(dat$Water, 2)` creates two columns, the first one can be used to model the linear effect of water, the second one to model the quadratic term of water:
```r
t.poly <- poly(dat$Water, 2)
dat$Water.l <- t.poly[,1] # linear term for water
dat$Water.q <- t.poly[,2] # quadratic term for water
mod <- lm(log(Yi.g) ~ Water.l + Water.q, data=dat)
```
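We can quickly verify that the two orthogonal polynomial terms are indeed uncorrelated, in contrast to the raw linear and quadratic terms:
```r
round(cor(dat$Water.l, dat$Water.q), 10) # essentially zero
cor(dat$Water, dat$Water^2)              # raw polynomials are strongly correlated
```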
When orthogonal polynomials are used, the estimated linear and quadratic effects can be interpreted as purely linear and purely quadratic influences of the predictor on the outcome. The function poly applies a specific transformation to the original variables. To reproduce the transformation (e.g. for getting the corresponding orthogonal polynomials for new data used to draw an effect plot), the function predict can be used with the poly-object created based on the original data.
```r
newdat <- data.frame(Water = seq(0,130))
# transformation analogous to the one used to fit the model:
newdat$Water.l <- predict(t.poly, newdat$Water)[,1]
newdat$Water.q <- predict(t.poly, newdat$Water)[,2]
```
These transformed variables can then be used to calculate fitted values that correspond to the water values specified in the new data.
# Structural equation models {#SEM}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
> We should provide an example in Stan.
## Introduction
```r
#------------------------------------------------------------------------------------------------------
# General settings
#------------------------------------------------------------------------------------------------------
library(MASS)
library(rjags)
library(MCMCpack)
#------------------------------------------------------------------------------------------------------
# Simulation
#------------------------------------------------------------------------------------------------------
n <- 100
heffM <- 0.6 # effect of H on M
heffCS <- 0.0 # effect of H on Clutch size
meffCS <- 0.6 # effect of M on Clutch size
SigmaM <- matrix(c(0.1,0.04,0.04,0.1),2,2)
meffm1 <- 0.6
meffm2 <- 0.7
SigmaH <- matrix(c(0.1,0.04,0.04,0.1),2,2)
meffh1 <- 0.6
meffh2 <- -0.7
# Latent variables
H <- rnorm(n, 0, 1)
M <- rnorm(n, heffM * H, 0.1)
# Clutch size
CS <- rnorm(n, heffCS * H + meffCS * M, 0.1)
# Indicators
eM <- cbind(meffm1 * M, meffm2 * M)
datM <- matrix(NA, ncol = 2, nrow = n)
eH <- cbind(meffh1 * H, meffh2 * H)
datH <- matrix(NA, ncol = 2, nrow = n)
for(i in 1:n) {
datM[i,] <- mvrnorm(1, eM[i,], SigmaM)
datH[i,] <- mvrnorm(1, eH[i,], SigmaH)
}
#------------------------------------------------------------------------------
# JAGS Model
#------------------------------------------------------------------------------
dat <- list(datM = datM, datH = datH, n = n, CS = CS, #H = H, M = M,
S3 = matrix(c(1,0,0,1),nrow=2)/1)
# Function to create initial values
inits <- function() {
list(
meffh = runif(2, 0, 0.1),
meffm = runif(2, 0, 0.1),
heffM = runif(1, 0, 0.1),
heffCS = runif(1, 0, 0.1),
meffCS = runif(1, 0, 0.1),
tauCS = runif(1, 0.1, 0.3),
tauMH = runif(1, 0.1, 0.3),
tauH = rwish(3,matrix(c(.02,0,0,.04),nrow=2)),
tauM = rwish(3,matrix(c(.02,0,0,.04),nrow=2))
# M = as.numeric(rep(0, n))
)
}
t.n.thin <- 50
t.n.chains <- 2
t.n.burnin <- 20000
t.n.iter <- 50000
# Run JAGS
jagres <- jags.model('JAGS/BUGSmod1.R',data = dat, n.chains = t.n.chains, inits = inits, n.adapt = t.n.burnin)
params <- c("meffh", "meffm", "heffM", "heffCS", "meffCS")
mod <- coda.samples(jagres, params, n.iter=t.n.iter, thin=t.n.thin)
res <- round(data.frame(summary(mod)$quantiles[, c(3, 1, 5)]), 3)
res$TRUEVALUE <- c(heffCS, heffM, meffCS, meffh1, meffh2, meffm1, meffm2)
res
# Traceplots
post <- data.frame(rbind(mod[[1]], mod[[2]]))
names(post) <- dimnames(mod[[1]])[[2]]
par(mfrow = c(3,3))
param <- c("meffh[1]", "meffh[2]", "meffm[1]", "meffm[2]", "heffM", "heffCS", "meffCS")
traceplot(mod[, match(param, names(post))])
```
# Generalized linear mixed models {#glmm}
## Introduction
THIS CHAPTER IS UNDER CONSTRUCTION!!!
<!-- Steffis draft version, started 17.11.2021, fk worked on it 15.11.2022, svf revised it 22.11.2022-->
In chapter \@ref(lmer) on linear mixed effect models we have introduced how to analyze metric outcome variables for which a normal error distribution can be assumed (potentially after transformation), when the data have a hierarchical structure and, as a consequence, observations are not independent.
In chapter \@ref(glm) on generalized linear models we have introduced how to analyze outcome variables for which a normal error distribution can not be assumed, as for example binary outcomes or count data. More precisely, we have extended modelling outcomes with normal error to modelling outcomes with error distributions from the exponential family (e.g., binomial or Poisson).
Generalized linear mixed models (GLMM) combine the two complexities and are used to analyze outcomes with a non-normal error distribution when the data have a hierarchical structure. In this chapter, we will show how to analyze such data. Remember, a hierarchical structure of the data means that the data are collected at different levels, for example smaller and larger spatial units, or include repeated measurements in time on a specific subject. Typically, the outcome variable is measured/observed at the lowest level but other variables may be measured at different levels. A first example is introduced in the next section.
### Binomial Mixed Model
#### Background
<!-- text from old book, slightly modified
https://bookdown.org/yihui/rmarkdown-cookbook/bibliography.html
Items can be cited directly within the documentation using the syntax @key where key is the citation key in the first line of the entry, e.g., @R-base. To put citations in parentheses, use [@key]. To cite multiple entries, separate the keys by semicolons, e.g., [@key-1; @key-2; @key-3]. To suppress the mention of the author, add a minus sign before @, e.g., [-@R-base].
-->
To illustrate the binomial mixed model we use a subset of a data set used by @Gruebler2010 on barn swallow *Hirundo rustica* nestling survival (we selected a nonrandom sample to be able to fit a simple model; hence, the results do not add unbiased knowledge about the swallow biology!). For 63 swallow broods, we know the clutch size and the number of the nestlings that
fledged. The broods came from 51 farms (larger unit), thus some of the farms had more than one brood. Note that each farm can harbor one or several broods, and the broods are nested within farms (as opposed to crossed, see chapter \@ref(lmer)), i.e., each brood belongs to only one farm. There are three predictors measured at the level of the farm: colony size (the number of swallow broods on that farm), cow (whether there are cows on the farm or not), and dung heap (the number of dung heaps, piles of cow dung, within 500 m of the farm).
The aim was to assess how swallows profit from insects that are attracted by livestock on the farm and by dung heaps. Broods from the same farm are not independent of each other because they belong to the same larger unit (farm), and thus share the characteristics of the farm (measured or unmeasured). Predictor variables were measured at the level of the farm, and are thus the same for all broods from a farm. In the model described and fitted below, we account for the non-independence of these clutches when building the model by including a random intercept per farm to model random variation between farms.
<!-- we could also add a random slope model later on -->
The outcome variable is a proportion (proportion fledged from clutch) and thus consists of two values for each observation, as seen with the binomial model without random factors (Section \@ref(glm).2.2):
<!-- add correct chapter reference for GLM model -->
the number of chicks that fledged (successes) and the number of chicks that died (failures), i.e., the clutch size minus number that fledged.
The random factor "farm" adds a farm-specific deviation $b_g$ to the intercept in the linear predictor. These deviations are modeled as normally distributed with mean $0$ and standard deviation $\sigma_g$.
$$
y_i \sim binomial\left(p_i, n_i\right)\\
logit\left(p_i\right) = \beta_0 + b_{g[i]} + \beta_1\;colonysize_i + \beta_2\;I\left(cow_i = 1\right) + \beta_3\;dungheap_i\\
b_g \sim normal\left(0, \sigma_g\right)
$$
<!-- You may refer to these equations using \@ref(eq:y_binom), etc., fk: das hat Rmarkdown nicht geschluckt, ich habe nun die $$ verwendet-->
<!-- fk: can we hide/delete the selected farms in the code below?
svf: I do not see where you mean. fk: I already removed them; previously all farm numbers were listed in an R code comment-->
```r
# Data on Barn Swallow (Hirundo rustica) nestling survival on farms
# (a part of the data published in Grüebler et al. 2010, J Appl Ecol 47:1340-1347)
library(blmeco)
data(swallowfarms)
#?swallowfarms # to see the documentation of the data set
dat <- swallowfarms
str(dat)
```
```
## 'data.frame': 63 obs. of 6 variables:
## $ farm : int 1001 1002 1002 1002 1004 1008 1008 1008 1010 1016 ...
## $ colsize: int 1 4 4 4 1 11 11 11 3 3 ...
## $ cow : int 1 1 1 1 1 1 1 1 0 1 ...
## $ dung : int 0 0 0 0 1 1 1 1 2 2 ...
## $ clutch : int 8 9 8 7 13 7 9 16 10 8 ...
## $ fledge : int 8 0 6 5 9 3 7 4 9 8 ...
```
```r
# check number of farms in the data set
length(unique(dat$farm))
```
```
## [1] 51
```
#### Fitting a Binomial Mixed Model in R
##### Using the glmer function
```r
dat$colsize.z <- scale(dat$colsize) # z-transform values for better model convergence
dat$dung.z <- scale(dat$dung)
dat$die <- dat$clutch - dat$fledge
dat$farm.f <- factor(dat$farm) # for clarity we define farm as a factor
```
The `glmer` function uses the standard way to formulate a statistical model in R, with the outcome on the left, followed by the `~` symbol, meaning "explained by", followed by the predictors, which are separated by `+`. The notation for the random factor with only a random intercept was introduced in chapter \@ref(lmer) and is `(1|farm.f)` here.
Remember that for fitting a binomial model we have to provide the number of successful events (number of fledglings that survived) and the number of failures (those that died) within a two-column matrix that we create using the function `cbind`.
```r
# fit GLMM using glmer function from lme4 package
library(lme4)
mod.glmer <- glmer(cbind(fledge,die) ~ colsize.z + cow + dung.z + (1|farm.f) ,
data=dat,
family=binomial)
```
##### Assessing Model Assumptions for the glmer fit
<!--fk: Reference to Figures and Tables. \@ref(type:name)-->
The residuals of the model look fairly normal (top left panel of Figure \@ref(fig:assessmodelassumptions)), with slightly wider tails. The random intercepts for the farms look perfectly normal as they should. The plot of the residuals vs. fitted values (bottom left panel) shows a slight increase in the residuals with increasing fitted values. Positive correlations between the residuals and the fitted values are common in mixed models due to the shrinkage effect (chapter \@ref(lmer)). For the same reason, the fitted proportions slightly overestimate the observed proportions when these are large, but underestimate them when they are small (bottom right panel). What looks like a lack of fit here can be seen as preventing an overestimation of the among-farm variance, based on the assumption that the farms in the data are a random sample of farms belonging to the same population.
<!--fk: maybe checking the mean of the random effects is no longer needed in recent R versions? we may check that and if it is no longer needed, delete here-->
The mean of the random effects is close to zero as it should be. We check that because sometimes the `glmer` function fails to correctly separate the farm-specific intercepts from the overall intercept. A non-zero mean of the random effects does not indicate a lack of fit, but a failure of the model fitting algorithm. In such a case, we recommend using a different fitting algorithm, e.g. `brm` (see below) or `stan_glmer` from the `rstanarm` package.
A slight overdispersion (approximated dispersion parameter >1) seems to be present, but nothing to worry about.
```r
par(mfrow=c(2,2), mar=c(3,5,1,1))
# check normal distribution of residuals
qqnorm(resid(mod.glmer), main="qq-plot residuals")
qqline(resid(mod.glmer))
# check normal distribution of random intercepts
qqnorm(ranef(mod.glmer)$farm.f[,1], main="qq-plot, farm")
qqline(ranef(mod.glmer)$farm.f[,1])
# residuals vs fitted values to check homoscedasticity
plot(fitted(mod.glmer), resid(mod.glmer))
abline(h=0)
# plot data vs. predicted values
dat$fitted <- fitted(mod.glmer)
plot(dat$fitted,dat$fledge/dat$clutch)
abline(0,1)
```
<div class="figure">
<img src="2.07-glmm_files/figure-html/assessmodelassumptions-1.png" alt="Diagnostic plots to assess model assumptions for mod.glmer. Uppper left: quantile-quantile plot of the residuals vs. theoretical quantiles of the normal distribution. Upper rihgt: quantile-quantile plot of the random effects "farm". Lower left: residuals vs. fitted values. Lower right: observed vs. fitted values." width="576" />
<p class="caption">(\#fig:assessmodelassumptions)Diagnostic plots to assess model assumptions for mod.glmer. Uppper left: quantile-quantile plot of the residuals vs. theoretical quantiles of the normal distribution. Upper rihgt: quantile-quantile plot of the random effects "farm". Lower left: residuals vs. fitted values. Lower right: observed vs. fitted values.</p>
</div>
```r
# check distribution of random effects
mean(ranef(mod.glmer)$farm.f[,1])
```
```
## [1] -0.001690303
```
```r
# check for overdispersion
dispersion_glmer(mod.glmer)
```
```
## [1] 1.192931
```
```r
detach(package:lme4)
```
##### Using the brm function
Now we fit the same model using the function `brm` from the R package `brms`. This function allows fitting Bayesian generalized (non-)linear multivariate multilevel models using Stan [@Betancourt2013_b] for full Bayesian inference. We briefly introduce the fitting algorithm used by Stan, Hamiltonian Monte Carlo, in chapter \@ref(stan). When using the function `brm` there is no need to install `rstan` or write the model in the Stan language. A wide range of distributions and link functions are supported, and the function offers many more features. Here we use it to fit the model as specified by the formula object above.
Note that `brm` requires that a binomial outcome is specified in the format `successes|trials()`, which is the number of fledged nestlings out of the total clutch size in our case. In contrast, the `glmer` function requires specifying the number of nestlings that fledged and the number that died (which together sum up to the clutch size), in the format `cbind(successes, failures)`.
The family is also called `binomial` in `brm`, but would be `bernoulli` for a binary outcome, whereas `glmer` would use binomial in both situations (Bernoulli distribution is a special case of the binomial). However, it is slightly confusing that (at the time of writing this chapter) the documentation for `brmsfamily` did not mention the binomial family under Usage, where it probably went missing, but it is mentioned under Arguments for the argument family.
Prior distributions are an integral part of a Bayesian model, therefore we need to specify prior distributions. We can see what default prior distributions `brm` uses by applying the `get_prior` function to the model formula. The default prior for the effect sizes is a flat prior, which gives a density of 1 for any value between minus and plus infinity. Because this is not a proper probability distribution, it is also called an improper distribution. The intercept gets a t-distribution with a mean of 0, a standard deviation of 2.5 and 3 degrees of freedom. Transformed to the proportion scale (using the inverse-logit function), this t-distribution becomes something similar to a uniform distribution between 0 and 1, which can be seen as non-informative for a probability. For the among-farm standard deviation, `brm` uses the same t-distribution as for the intercept. However, because variance parameters such as standard deviations can only take on positive numbers, it will use only the positive half of the t-distribution (this is not seen in the output of `get_prior`). When we have no prior information on any parameter, or if we would like to base the results solely on the information in the data, we specify weakly informative prior distributions that do not noticeably affect the results but facilitate the fitting algorithm. This is true for the priors of the intercept and the among-farm standard deviation. However, for the effect sizes, we prefer specifying narrower distributions (see chapter \@ref(priors)). To do so, we use the function `prior`.
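To see what the default intercept prior implies on the proportion scale, we can transform random draws from this t-distribution with the inverse-logit function (a small sketch; the number of draws is arbitrary):
```r
# draws from a t-distribution with 3 df, location 0 and scale 2.5,
# transformed to the proportion scale with the inverse-logit function
x <- 2.5 * rt(10000, df=3)
hist(plogis(x), breaks=50, main=NA, xlab="intercept prior on the proportion scale")
```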
<!-- Question to fk: but the priors chosen below seem to be quite informative! Why do you prefer these over the default priors. answer of fk: normal(0,5) means that prior to looking at the data we give effect sizes of lower than -10 or larger than +10 low probability. An effect size of +/-10 in the logit-scale changes the probability from close to 0 to close to 1 or viceversa. This is, in my opinion essentially non-informative. I fitted the model with normal(0,5) and with the default flat priors and I I do not think that the estimated effects differ markedly. with normal(0,5): 0.40 (-0.06 - 0.88) vs. with the default flat priors: 0.41 (-0.10 - 0.94). very flat priors also contain information, i.e. that effect sizes of -1000 are equally plausible as effect sizes around 0, a statement that we would not support even prior to looking at the data.-->
To apply MCMC sampling we need some more arguments: `warmup` specifies the number of iterations during which we allow the algorithm to adapt to our specific model and to converge to the posterior distribution. These iterations should be discarded (similar to the burn-in period when using, e.g., Gibbs sampling); `iter` specifies the total number of iterations (including those discarded); `chains` specifies the number of chains; `init` specifies the starting values of the iterations. By default (`init=NULL`) or by setting `init="random"`, the initial values are chosen randomly, which is recommended because then different initial values are chosen for each chain, which helps to identify non-convergence. However, sometimes random initial values cause the Markov chains to behave badly.
<!-- Question to fk: you write default is random, but default is init=NULL. I adapted the sentence above. But Iis NULL and random for the inits exactly the same? why are there two options then? fk: in R version 4.0 the default is "random" and in R version 4.2. the default is NULL. looks like they changed most of the defaults to be specified with NULL, but what NULL means is what was the default before.... Maybe this will change in future again -->
Then you can either use the maximum likelihood estimates of the parameters as starting values, or simply ask the algorithm to start with zeros. `thin` specifies the thinning of the chain, i.e., whether all iterations should be kept (thin=1) or for example every 4th only (thin=4); `cores` specifies the number of cores used for the algorithm; `seed` specifies the random seed, allowing for replication of results.
```r
library(brms)
# check which parameters need a prior
get_prior(fledge|trials(clutch) ~ colsize.z + cow + dung.z + (1|farm.f),
data=dat,
family=binomial(link="logit"))
```
```
## prior class coef group resp dpar nlpar lb ub
## (flat) b
## (flat) b colsize.z
## (flat) b cow
## (flat) b dung.z
## student_t(3, 0, 2.5) Intercept
## student_t(3, 0, 2.5) sd 0
## student_t(3, 0, 2.5) sd farm.f 0
## student_t(3, 0, 2.5) sd Intercept farm.f 0
## source
## default
## (vectorized)
## (vectorized)
## (vectorized)
## default
## default
## (vectorized)
## (vectorized)
```
```r
# specify own priors
myprior <- prior(normal(0,5), class="b")
mod.brm <- brm(fledge|trials(clutch) ~ colsize.z + cow + dung.z + (1|farm.f) ,
data=dat, family=binomial(link="logit"),
prior=myprior,
warmup = 500,
iter = 2000,
chains = 2,
init = "random",
cores = 2,
seed = 123)
# note: thin=1 is default and we did not change this here.
```
<!-- Question to fk: I pasted this part of the code to this separate chunk. Do we need it and should I explain it?
and why did you exclude the random intercept? Or did I do this by mistake?, fk: only in this code we see that the prior for the variance parameter is restricted to positive values. I would not show the code, otherwise we have to explain the Stan code. It is a good idea to have it in a junk that is not shown.-->
##### Checking model convergence for the brm fit
We first check whether we find warnings in the R console about problems of the fitting algorithm. Warnings should be taken seriously. Often, the Stan online documentation (or typing `launch_shinystan(mod.brm)` into the R console) helps us to find out what to change in the call to the `brm` function to get a fit that runs smoothly. Once we have gotten rid of all warnings, we need to check how well the Markov chains mixed. We can either do that by scanning through the many diagnostic plots given by `launch_shinystan(mod.brm)`, or we create the most important plots ourselves, such as the traceplot (Figure \@ref(fig:checkconvergencemodelbrm)).
```r
par(mar=c(2,2,2,2))
mcmc_plot(mod.brm, type = "trace")
```
<div class="figure">
<img src="2.07-glmm_files/figure-html/checkconvergencemodelbrm-1.png" alt="Traceplot of the Markov chains. After convergence, both Markov chains should sample from the same stationary distribution. Indications of non-convergence would be, if the two chains diverge or vary around different means." width="672" />
<p class="caption">(\#fig:checkconvergencemodelbrm)Traceplot of the Markov chains. After convergence, both Markov chains should sample from the same stationary distribution. Indications of non-convergence would be, if the two chains diverge or vary around different means.</p>
</div>
<!--see chapter 5.3 here: https://www.rensvandeschoot.com/tutorials/generalised-linear-models-with-brms/, fk: this book is also still under construction. I would probably only add links to finished books.-->
##### Checking model fit by posterior predictive model checking
To assess how well the model fits the data we do posterior predictive model checking (Chapter \@ref(modelchecking)). For binomial as well as for Poisson models, comparing the standard deviation of the data with those of replicated data from the model is particularly important. If the standard deviation of the real data were much higher than those of the replicated data from the model, overdispersion would be an issue. However, here, the model is able to capture the variance in the data correctly (Figure \@ref(fig:ppbinomial)).
The fitted vs observed plot also shows a good fit.
```r
yrep <- posterior_predict(mod.brm)
sdyrep <- apply(yrep, 1, sd)
par(mfrow=c(1,3), mar=c(3,4,1,1))
hist(yrep, freq=FALSE, main=NA, xlab="Number of fledglings")
hist(dat$fledge, add=TRUE, col=rgb(1,0,0,0.3), freq=FALSE)
legend(10, 0.15, fill=c("grey",rgb(1,0,0,0.3)), legend=c("yrep", "y"))
hist(sdyrep)
abline(v=sd(dat$fledge), col="red", lwd=2)
plot(fitted(mod.brm)[,1], dat$fledge, pch=16, cex=0.6)
abline(0,1)
```
<div class="figure">
<img src="2.07-glmm_files/figure-html/ppbinomial-1.png" alt="Posterior predictive model checking: Histogram of the number of fledglings simulated from the model together with a histogram of the real data, and a histogram of the standard deviations of replicated data from the model together with the standard deviation of the data (vertical line in red). The third plot gives the fitted vs. observed values." width="672" />
<p class="caption">(\#fig:ppbinomial)Posterior predictive model checking: Histogram of the number of fledglings simulated from the model together with a histogram of the real data, and a histogram of the standard deviations of replicated data from the model together with the standard deviation of the data (vertical line in red). The third plot gives the fitted vs. observed values.</p>
</div>
After checking the diagnostic plots, the posterior predictive model checking and the general model fit, we assume that the model describes the data generating process reasonably well, so that we can proceed to drawing conclusions.
#### Drawing Conclusions
The generic `summary` function gives us the results for the model object containing the fitted model, and works for both the model fitted with `glmer` and `brm`. Let's start having a look at the summary from `mod.glmer`.
The summary provides the fitting method, the model formula, statistics for the model fit including the Akaike information criterion (AIC), the Bayesian information criterion (BIC), the scaled residuals, the random effects variance and information about observations and groups, a table with coefficient estimates for the fixed effects (with standard errors and a z-test for each coefficient) and correlations between fixed effects. We recommend always checking whether the number of observations and groups, i.e., 63 barn swallow nests from 51 farms here, is correct. This information shows whether the `glmer` function has correctly recognized the hierarchical structure in the data. Here, this is correct. To assess the associations between the predictor variables and the outcome analyzed, we need to look at the column "Estimate" in the table of fixed effects. This column contains the estimated model coefficients, and the standard error for these estimates is given in the column "Std. Error", along with a z-test for the null hypothesis of a coefficient of zero.
In the random effects table, the among-farm variance and standard deviation (square root of the variance) are given.
The function `confint` shows the 95% confidence intervals for the random effects (`.sig01`) and fixed effects estimates.
In the `summary` output from `mod.brm` we see the model formula and some information on the Markov chains after the warm-up. In the group-level effects (between-group standard deviations) and population-level effects (effect sizes, model coefficients) tables, some summary statistics of the posterior distribution of each parameter are given. The "Estimate" is the mean of the posterior distribution, and the "Est.Error" is the standard deviation of the posterior distribution (which is the standard error of the parameter estimate). Then we see the lower and upper limits of the 95% credible interval. Also, some statistics for measuring how well the Markov chains converged are given: the "Rhat" and the effective sample size (ESS). The bulk ESS tells us how many independent samples we have to describe the posterior distribution, and the tail ESS tells us how many samples the limits of the 95% credible interval are based on.
Because we used the logit link function, the coefficients are actually on the logit scale and are a bit difficult to interpret. What we can say is that positive coefficients indicate an increase and negative coefficients indicate a decrease in the proportion of nestlings fledged. For continuous predictors, such as colsize.z and dung.z, the coefficient refers to the change in the logit of the outcome with a change of one in the predictor (e.g., for colsize.z, an increase of one corresponds to an increase of one standard deviation of colsize). For categorical predictors, the coefficients represent the difference between one category and another (the reference category is the one not shown in the table).
To visualize the coefficients we could draw effect plots.
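For the model fitted with `brm`, one quick way to sketch such effect plots is the `conditional_effects` function from the brms package; this is only a sketch of a possible visualization, and the output is not shown here.
```r
# Effect plots for the brms fit: estimated effect of each predictor on the
# outcome, holding the other predictors at their reference values
# (z-transformed covariates at 0, i.e. at their mean).
conditional_effects(mod.brm)
```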
```r
# glmer
summary(mod.glmer)
```
```
## Generalized linear mixed model fit by maximum likelihood (Laplace
## Approximation) [glmerMod]
## Family: binomial ( logit )
## Formula: cbind(fledge, die) ~ colsize.z + cow + dung.z + (1 | farm.f)
## Data: dat
##
## AIC BIC logLik deviance df.resid
## 282.5 293.2 -136.3 272.5 58
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -3.2071 -0.4868 0.0812 0.6210 1.8905
##
## Random effects:
## Groups Name Variance Std.Dev.
## farm.f (Intercept) 0.2058 0.4536
## Number of obs: 63, groups: farm.f, 51
##
## Fixed effects:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) -0.09533 0.19068 -0.500 0.6171
## colsize.z 0.05087 0.11735 0.434 0.6646
## cow 0.39370 0.22692 1.735 0.0827 .
## dung.z -0.14236 0.10862 -1.311 0.1900
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Correlation of Fixed Effects:
## (Intr) clsz.z cow
## colsize.z 0.129
## cow -0.828 -0.075
## dung.z 0.033 0.139 -0.091
```
```r
confint.95 <- confint(mod.glmer); confint.95
```
```
## 2.5 % 97.5 %
## .sig01 0.16809483 0.7385238
## (Intercept) -0.48398346 0.2863200
## colsize.z -0.18428769 0.2950063
## cow -0.05360035 0.8588134
## dung.z -0.36296714 0.0733620
```
```r
# brm
summary(mod.brm)
```
```
## Family: binomial
## Links: mu = logit
## Formula: fledge | trials(clutch) ~ colsize.z + cow + dung.z + (1 | farm.f)
## Data: dat (Number of observations: 63)
## Draws: 2 chains, each with iter = 2000; warmup = 500; thin = 1;
## total post-warmup draws = 3000
##
## Group-Level Effects:
## ~farm.f (Number of levels: 51)
## Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
## sd(Intercept) 0.55 0.15 0.26 0.88 1.00 858 1513
##
## Population-Level Effects:
## Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
## Intercept -0.09 0.22 -0.52 0.34 1.00 3151 2341
## colsize.z 0.05 0.13 -0.21 0.30 1.00 2951 2043
## cow 0.40 0.25 -0.09 0.89 1.00 3269 2245
## dung.z -0.15 0.12 -0.39 0.08 1.00 2882 2257
##
## Draws were sampled using sampling(NUTS). For each parameter, Bulk_ESS
## and Tail_ESS are effective sample size measures, and Rhat is the potential
## scale reduction factor on split chains (at convergence, Rhat = 1).
```
From the results we conclude that in farms without cows (when cow=0), and for average colony sizes (when colsize.z=0) and an average number of dung heaps (when dung.z=0), the average nestling survival of Barn swallows is the inverse-logit function of the intercept, thus `plogis(-0.09)` = 0.48, with a 95% uncertainty interval of 0.37 - 0.58. We further see that colony size and number of dung heaps are less important than whether cows are present or not. Their estimated partial effects are small and their uncertainty intervals include only values close to zero. However, whether cows are present or not may be important for the survival of nestlings. The average nestling survival in farms with cows is `plogis(-0.09 + 0.4)` = 0.58. For getting the uncertainty interval of this survival estimate, we need to do the calculation for every simulation from the posterior distribution of both parameters.
```r
bsim <- posterior_samples(mod.brm)
# survival of nestlings on farms with cows:
survivalest <- plogis(bsim$b_Intercept + bsim$b_cow)
quantile(survivalest, probs=c(0.025, 0.975)) # 95% uncertainty interval
```
```
## 2.5% 97.5%
## 0.5078202 0.6400263
```
In medical research, it is standard to report the fixed-effects coefficients from GLMMs with binomial or Bernoulli error as odds ratios by taking the exponent (R function `exp` for $e^{()}$) of the coefficient on the logit scale. For example, the coefficient for cow from `mod.glmer`, 0.39 (95% CI from -0.05 to 0.86), corresponds to an odds ratio of exp(0.39) = 1.48 (95% CI from 0.95 to 2.36). This means that the odds for fledging (vs. not fledging) from a clutch from a farm with livestock present are about 1.5 times larger than the odds for fledging if no livestock is present (relative effect).
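As a sketch of this calculation in R, using the objects `mod.glmer` and `confint.95` created above, the odds ratio and its interval are obtained by exponentiating the estimate and the profile confidence limits:
```r
# Odds ratio for the effect of cows (livestock presence) and its 95% CI
exp(fixef(mod.glmer)["cow"])   # about 1.48
exp(confint.95["cow", ])       # about 0.95 to 2.36
```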
## Summary
<file_sep># Visualizations {#figures}
## Short Checklist for figures
1. The figure should represent the answer to the study question. Often, the classical types of plots such as a scatterplot, a bar plot, or an effects plot are sufficient. However, in many cases, adding a little bit of creativity can greatly
improve readability or the message of the figure.
2. Label the x- and y-axes. Make sure that the units are indicated either within the title or in the figure legend.
3. Start the y-axis at zero if the reference to zero is important for the interpretation of effects. The argument `ylim=c(0, max(dat$y))` in R is used for this purpose (see the example after this list).
4. Scale the axes so that all data are shown. Make sure that sample size is
indicated either in the figure or in the legend (or, at least, easy to find in the
text).
5. Use interpretable units. That means, if the variable on the x-axis has been
z-transformed to fit the model, back-transform the effects to the original scale.
6. Give the raw data whenever possible. Sometimes, a significant effect cannot be seen in the raw data because so many other variables have an influence on the outcome. Then, you may prefer showing the effect only (e.g., a regression line with a credible interval) and give the residual standard deviation in the figure legend. Even then, we think it is important to show the raw data graphically somewhere else in the paper or in the supplementary material. A scatterplot of the data can contain structures that are lost in summary statistics.
7. Draw the figures as simply as possible. Avoid 3D graphics. Delete all unnecessary elements.
8. Reduce the number of different colors to a minimum necessary. A color scale from orange to blue gives a gray scale in a black-and-white print. `colorRampPalette(c("orange", "blue"))(5)` produces five colors on a scale from orange to blue. Remember that around 8% of the northern European male population have difficulties distinguishing red from green but it is easier for them to distinguish orange from blue.
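The following sketch illustrates points 2, 3, and 8 of the checklist. The data frame `dat` with the variables `x`, `y`, and `group` is hypothetical and only serves to show the plotting arguments.
```r
# Hypothetical example data; in a real analysis dat would be your own data
set.seed(1)
dat <- data.frame(x = runif(50, 10, 20),
                  group = factor(sample(1:5, 50, replace = TRUE)))
dat$y <- 2 + 0.5 * dat$x + rnorm(50)
# orange-blue palette that remains readable in gray scale (point 8)
cols <- colorRampPalette(c("orange", "blue"))(5)
plot(y ~ x, data = dat,
     ylim = c(0, max(dat$y)),      # y-axis starts at zero (point 3)
     xlab = "Wing length [mm]",    # labelled axes with units (point 2)
     ylab = "Body mass [g]",
     pch = 16, col = cols[dat$group])
```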
## Further reading {-}
- [Data Visualization. A practical introduction](https://socviz.co): A practical introduction to data visualization in R. Good data visualizations also make it easier to communicate your ideas and findings to other people. Beyond that, producing effective plots from your own data is the best way to develop a good eye for reading and understanding graphs, good and bad, made by others.
- [Fundamentals of Data Visualization](https://serialmentor.com/dataviz/): A guide to making visualizations that accurately reflect the data, tell a story, and look professional. […] This is an online preview of the book “Fundamentals of Data Visualization” to be published with O’Reilly Media, Inc. Completed chapters will be posted here as they become available. The book is meant as a guide to making visualizations that accurately reflect the data, tell a story, and look professional. It has grown out of my experience of working with students and postdocs in my laboratory on thousands of data visualizations.
<file_sep># Prior distributions {#priors}
## Introduction
## How to choose a prior {#choosepriors}
> Table from Fränzi (CourseIII_glm_glmmm/course2018/presentations_handouts/presentations)
## Prior sensitivity
xxx
<file_sep># (PART) ECOLOGICAL MODELS {-}
# Introduction to PART III {#PART-III}
<a href="" target="_blank"><img src="images/part_IIII.jpg" width="410" style="display: block; margin: auto;" /></a>
------
This part is a collection of more complicated ecological models to analyse data that may not be analysed with the traditional linear models that we covered in [PART I](#PART-I) of this book.
## Model notations
It is unavoidable that different authors use different notations for the same thing, or that the same notation is used for different things. We try to use, whenever possible, a notation that is commonly used at the [International Statistical Ecology Congress ISEC](www.isec2018.org/home). Resulting from an earlier ISEC, @Thomson2009 give guidelines on which letter should be used for which parameter in order to achieve a standard notation, at least among people working with classical mark-recapture models. However, the alphabet has fewer letters than there are ecological parameters. Therefore, the same letter cannot stand for the same parameter across all papers, books and chapters. Here, we try to use the same letter for the same parameter within the same chapter.
<file_sep>
# Linear Mixed Effect Models{#lmer}
<a href="" target="_blank"><img src="images/himmelsherold.jpg" width="640" style="display: block; margin: auto;" /></a>
------
## Background
### Why Mixed Effects Models?
Mixed effects models (or hierarchical models; see @Gelman2007 for a discussion of the terminology) are used to analyze nonindependent, grouped, or hierarchical data. For example, when we measure growth rates of nestlings in different nests by taking mass measurements of each nestling several times during the nestling phase, the measurements are grouped within nestlings (because there are repeated measurements of each) and the nestlings are grouped within nests. Measurements from the same individual are likely to be more similar than measurements from different individuals, and nestlings from the same nest are likely to be more similar than nestlings from different nests. Measurements of the same group (here, the “groups” are individuals or nests) are not independent. If the grouping structure of the data is ignored in the model, the residuals do not fulfill the independence assumption.
Further, predictor variables can be measured on different hierarchical levels. For example, in each nest some nestlings were treated with a hormone implant whereas others received a placebo. Thus, the treatment is measured at the level of the individual, while clutch size is measured at the level of the nest. Clutch size was measured only once per nest but entered in the data file more than once (namely for each individual from the same nest). Such repeated entries result in pseudoreplication if we do not account for the hierarchical data structure in the model. Mixed models allow modeling of the hierarchical structure of the data and, therefore, account for pseudoreplication.
Mixed models are further used to analyze variance components. For example, when the nestlings were cross-fostered so that they were not raised by their genetic parents, we would like to estimate the proportions of the variance (in a measurement, e.g., wing length) that can be assigned to genetic versus to environmental differences.
These three problems, grouped data, repeated measures, and interest in variances, are solved by adding further variance parameters to the model. As a result, the linear predictor of such models contains parameters that are fixed and parameters that vary among the levels of a grouping variable. The latter are called "random effects". Thus, a mixed model contains fixed and random effects. Often the grouping variable, which is a categorical variable, i.e., a factor, is called the random effect, even though it is not the factor itself that is random. The levels of the factor are seen as a random sample from a bigger population of levels, and a distribution, usually the normal distribution, is fitted to the level-specific parameter values. Thus, a random effect in a model can be seen as a model (for a parameter) that is nested within the model for the data.
Predictors that are defined as fixed effects are either numeric or, if they are categorical, they have a finite (“fixed”) number of levels. For example, the factor “treatment” in the Barn owl study below has exactly two levels "placebo" and "corticosterone" and nothing more. In contrast, random effects have a theoretically infinite number of levels of which we have measured a random sample. For example, we have measured 10 nests, but there are many more nests in the world that we have not measured. Normally, fixed effects have a low number of levels whereas random effects have a large number of levels (at least 3!). For fixed effects we are interested in the specific differences between levels (e.g., between males and females), whereas for random effects we are only interested in the between-level (between-group, e.g., between-nest) variance rather than in differences between specific levels (e.g., nest A versus nest B).
Typical fixed effects are: treatment, sex, age classes, or season. Typical random effects are: nest, individual, field, school, or study plot. It depends sometimes on the aim of the study whether a factor should be treated as fixed or random. When we would like to compare the average size of a corn cob between specific regions, then we include region as a fixed factor. However, when we would like to know how the size of a corn cob is related to the irrigation system and we have several measurements within each of a sample of regions, then we treat region as a random factor.
### Random Factors and Partial Pooling
In a model with fixed factors, the differences of the group means to the mean of the reference group are separately estimated as model parameters. This produces $k-1$ (independent) model parameters, where $k$ is the number of groups (or number of factor levels). In contrast, for a random factor, the between-group variance is estimated and the $k$ group-specific means are assumed to be normally distributed around the population mean. These $k$ means are thus not independent. We usually call the differences between the specific mean of group $g$ and the mean of all groups $b_g$. They are assumed to be realizations of the same (in most cases normal) distribution with a mean of zero. They are like residuals. The variance of the $b_g$ values is the among-group variance.
Treating a factor as a random factor is equivalent to partial pooling of the data. There are three different ways to obtain means for grouped data. First, the grouping structure of the data can be ignored. This is called complete pooling (left panel in Figure \@ref(fig:pooling)).
Second, group means may be estimated separately for each group. In this case, the data from all other groups are ignored when estimating a group mean. No pooling occurs in this case (right panel in Figure \@ref(fig:pooling)).
Third, the data of the different groups can be partially pooled (i.e., the grouping variable is treated as a random effect). Thereby, the group means are weighted averages of the population mean and the unpooled group means. The weights are proportional to sample size and the inverse of the variance (see @Gelman2007, p. 252). Further, the estimated mean of all groups equals the mean of the group-specific means; thus, every group is weighted similarly when calculating the overall mean. In contrast, in the complete pooling case, the groups get weights proportional to their sample sizes.
Complete pooling | Partial pooling | No pooling |
:-------------------|:----------------------|:------------------|
$\hat{y_i} = \beta_0$ \ $y_i \sim normal(\hat{y_i}, \sigma^2)$ | $\hat{y_i} = \beta_0 + b_{g[i]}$ \ $b_g \sim normal(0, \sigma_b^2)$ \ $y_i \sim normal(\hat{y_i}, \sigma^2)$ | $\hat{y_i} = \beta_{0[g[i]]}$ \ $y_i \sim normal(\hat{y_i}, \sigma_g^2)$ |
<div class="figure">
<img src="2.05-lmer_files/figure-html/pooling-1.png" alt="Three possibilities to obtain group means for grouped data: complete pooling, partial pooling, and no pooling. Open symbols = data, orange dots with vertical bars = group means with 95% uncertainty intervals, horizontal black line with shaded interval = population mean with 95% uncertainty interval." width="672" />
<p class="caption">(\#fig:pooling)Three possibilities to obtain group means for grouped data: complete pooling, partial pooling, and no pooling. Open symbols = data, orange dots with vertical bars = group means with 95% uncertainty intervals, horizontal black line with shaded interval = population mean with 95% uncertainty interval.</p>
</div>
What is the advantage of analyses using partial pooling (i.e., mixed, hierarchical, or multilevel modelling) compared to the complete or no pooling analyses? Complete pooling ignores the grouping structure of the data. As a result, the uncertainty interval of the population mean may be too narrow. We are too confident in the result because we assume that all observations are independent when they are not. This is a typical case of pseudoreplication. On the other hand, the no pooling method (which is equivalent to treating the factor as fixed) has the danger of overestimation of the among-group variance because the group means are estimated independently of each other. The danger of overestimating the among-group variance is particularly large when sample sizes per group are low and within-group variance large. In contrast, the partial pooling method assumes that the group means are a random sample from a common distribution. Therefore, information is exchanged between groups. Estimated means for groups with low sample sizes, large variances, and means far away from the population mean are shrunk towards the population mean. Thus, group means that are estimated with a lot of imprecision (because of low sample size and high variance) are shrunk towards the population mean. How strongly they are shrunk depends on the precision of the estimates for the group specific means and the population mean.
An example will help make this clear. Imagine that we measured 60 nestling birds from 10 nests (6 nestlings per nest) and found that the average nestling mass at day 10 was around 20 g with an among-nest standard deviation of 2 g. Then, we measure only one nestling from one additional nest (from the same population) whose mass was 12 g. What do we know about the average mass of this new nest? The mean of the measurements for this nest is 12 g, but with n = 1 uncertainty is high. Because we know that the average mass of the other nests was 20 g, and because the new nest belonged to the same population, a value higher than 12 g is a better estimate for the average nestling mass of the new nest than the 12 g measurement of one single nestling, which could, by chance, have been an exceptionally light individual. This is the shrinkage that partial pooling allows in a mixed model. Because of this shrinkage, the estimates for group means from a mixed model are sometimes called shrinkage estimators. A consequence of the shrinkage is that the residuals are positively correlated with the fitted values.
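To make the three pooling approaches concrete, the following sketch shows how they could be fitted in R. The data frame `d` with a numeric variable `mass` and a factor `nest` is hypothetical.
```r
library(lme4)
# complete pooling: one common mean, grouping ignored
m.complete <- lm(mass ~ 1, data = d)
# no pooling: a separate mean for each nest, estimated independently
m.nopool <- lm(mass ~ nest - 1, data = d)
# partial pooling: nest means assumed to come from a common normal distribution
m.partial <- lmer(mass ~ 1 + (1 | nest), data = d)
# shrinkage: the nest-specific means from the mixed model lie between
# the raw nest means and the overall mean
coef(m.partial)$nest          # shrunk nest means
tapply(d$mass, d$nest, mean)  # raw (unpooled) nest means
```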
To summarize, mixed models are used to appropriately estimate among-group variance, and to account for non-independency among data points.
## Fitting a normal linear mixed model in R
To introduce the linear mixed model, we use repeated hormone measures of nestling Barn Owls *Tyto alba*. The cortbowl data set contains stress hormone data (corticosterone, variable ‘totCort’) of nestling Barn Owls that were either treated with a corticosterone implant or with a placebo implant as the control group. The aim of the study was to quantify the corticosterone increase due to the corticosterone implants [@Almasi.2009]. In each brood, one or two nestlings were implanted with a corticosterone implant and one or two nestlings with a placebo implant (variable ‘Implant’). Blood samples were taken just before implantation, and at days 2 and 20 after implantation.
```r
data(cortbowl)
dat <- cortbowl
dat$days <- factor(dat$days, levels=c("before", "2", "20"))
str(dat) # the data were sampled in 2004 and 2005 by the Swiss Ornithological Institute
```
```
## 'data.frame': 287 obs. of 6 variables:
## $ Brood : Factor w/ 54 levels "231","232","233",..: 7 7 7 7 8 8 9 9 10 10 ...
## $ Ring : Factor w/ 151 levels "898054","898055",..: 44 45 45 46 31 32 9 9 18 19 ...
## $ Implant: Factor w/ 2 levels "C","P": 2 2 2 1 2 1 1 1 2 1 ...
## $ Age : int 49 29 47 25 57 28 35 53 35 31 ...
## $ days : Factor w/ 3 levels "before","2","20": 3 2 3 2 3 1 2 3 2 2 ...
## $ totCort: num 5.76 8.42 8.05 25.74 8.04 ...
```
In total, there are 287 measurements of 151 individuals (variable ‘Ring’) of 54 broods. Because the measurements from the same individual are non-independent, we use a mixed model to analyze these data: Two additional arguments for a mixed model are: a) the mixed model allows prediction of corticosterone levels for an ‘average’ individual, whereas the fixed effect model allows prediction of corticosterone levels only for the 151 individuals that were sampled; and b) fewer parameters are needed. If we include individual as a fixed factor, we would use 150 parameters, while the random factor needs a much lower number of parameters.
We first create a graphic to show the development for each individual, separately for owls receiving corticosterone versus owls receiving a placebo (Figure \@ref(fig:corttest)).
<div class="figure">
<img src="2.05-lmer_files/figure-html/corttest-1.png" alt="Total corticosterone before and at day 2 and 20 after implantation of a corticosterone or a placebo implant. Lines connect measurements of the same individual." width="672" />
<p class="caption">(\#fig:corttest)Total corticosterone before and at day 2 and 20 after implantation of a corticosterone or a placebo implant. Lines connect measurements of the same individual.</p>
</div>
We fit a normal linear model with ‘Ring’ as a random factor, and ‘Implant’, ‘days’ and the interaction of ‘Implant’ $\times$ ‘days’ as fixed effects. Note that both ‘Implant’ and ‘days’ are defined as factors, thus R creates indicator variables for all levels except the reference level. Later, we will also include ‘Brood’ as a grouping level; for now, we ignore this level and start with a simpler (less perfect) model for illustrative purposes.
$\hat{y_i} = \beta_0 + b_{Ring[i]} + \beta_1I(days=2) + \beta_2I(days=20) + \beta_3I(Implant=P) + \beta_4I(days=2)I(Implant=P) + \beta_5I(days=20)I(Implant=P)$
$b_{Ring} \sim normal(0, \sigma_b)$
$y_i \sim normal(\hat{y_i}, \sigma)$
Several different functions to fit a mixed model have been written in R: `lme`, `gls`, and `gee` were among the first ones. Then `lmer` followed, and now `stan_lmer` and `brm` allow fitting a large variety of hierarchical models. We here start with using `lmer` from the package lme4 (which is automatically loaded to the R console when loading arm), because it is a kind of basis function also for `stan_lmer` and `brm`. Further, `sim` can treat lmer objects but none of the earlier ones.
The function `lmer` is used similarly to the function `lm`. The only difference is that the random factors are added in the model formula within parentheses. The ‘1’ stands for the intercept and the ‘|’ means ‘grouped by’. ‘(1|Ring)’, therefore, adds the random deviations for each individual to the average intercept. These deviations are the $b_{Ring}$ in the model formula above. Corticosterone data are log transformed to achieve normally distributed residuals.
After having fitted the model, in real life, we always first inspect the residuals before we look at the model output. However, that is a dilemma for this textbook. Here, we would like to explain how the model is constructed right after having shown the model code. Therefore, we do the residual analyses later, but in real life, we would do them now.
```r
mod <- lmer(log(totCort) ~ Implant + days + Implant:days + (1|Ring),
data=dat, REML=TRUE)
mod
```
```
## Linear mixed model fit by REML ['lmerMod']
## Formula: log(totCort) ~ Implant + days + Implant:days + (1 | Ring)
## Data: dat
## REML criterion at convergence: 611.9053
## Random effects:
## Groups Name Std.Dev.
## Ring (Intercept) 0.3384
## Residual 0.6134
## Number of obs: 287, groups: Ring, 151
## Fixed Effects:
## (Intercept) ImplantP days2 days20
## 1.91446 -0.08523 1.65307 0.26278
## ImplantP:days2 ImplantP:days20
## -1.71999 -0.09514
```
The output of the lmer-object tells us that the model was fitted using the REML-method, which is the restricted maximum likelihood method. The ‘REML criterion’ is the statistic describing the model fit for a model fitted by REML. The model output further contains the parameter estimates. These are grouped into a random effects and fixed effects section. The random effects section gives the estimates for the among-individual standard deviation of the intercept ($\sigma_{Ring} =$ 0.34) and the residual standard deviation ($\sigma =$ 0.61). The fixed effects section gives the estimates for the intercept ($\beta_0 =$ 1.91), which is the mean logarithm of corticosterone for an ‘average’ individual that received a corticosterone implant at the day of implantation. The other model coefficients are defined as follows: the difference in the logarithm of corticosterone between placebo- and corticosterone-treated individuals before implantation ($\beta_1 =$ -0.09), the difference between day 2 and before implantation for the corticosterone-treated individuals ($\beta_2 =$ 1.65), the difference between day 20 and before implantation for the corticosterone-treated individuals ($\beta_3 =$ 0.26), and the interaction parameters which tell us how the differences between day 2 and before implantation ($\beta_4 =$ -1.72), and day 20 and before implantation ($\beta_5 =$ -0.1), differ for the placebo-treated individuals compared to the corticosterone treated individuals.
Neither the model output shown above nor the summary function (not shown) give any information about the proportion of variance explained by the model such as an $R^2$. The reason is that it is not straightforward to obtain a measure of model fit in a mixed model, and different definitions of $R^2$ exist [@Nakagawa.2013].
The function `fixef` extracts the estimates for the fixed effects, the function `ranef` extracts the estimates for the random deviations from the population intercept for each individual. The `ranef`-object is a list with one element for each random factor in the model. We can extract the random effects for each ring using the `$Ring` notation.
```r
round(fixef(mod), 3)
```
```
## (Intercept) ImplantP days2 days20 ImplantP:days2
## 1.914 -0.085 1.653 0.263 -1.720
## ImplantP:days20
## -0.095
```
```r
head(ranef(mod)$Ring) # print the first 6 Ring effects
```
```
## (Intercept)
## 898054 0.24884979
## 898055 0.11845863
## 898057 -0.10788277
## 898058 0.06998959
## 898059 -0.08086498
## 898061 -0.08396839
```
## Restricted maximum likelihood estimation (REML)
<!-- we have not yet explained the ML-method. in the old book, chapter 5. we need to find a place for ML-method-->
For a mixed model the restricted maximum likelihood method is used by default instead of the maximum likelihood (ML) method. The reason is that the ML-method underestimates the variance parameters because this method assumes that the fixed parameters are known without uncertainty when estimating the variance parameters. However, the estimates of the fixed effects have uncertainty. The REML method uses a mathematical trick to make the estimates for the variance parameters independent of the estimates for the fixed effects. We recommend reading the very understandable description of the REML method in @Zuur.2009. For our purposes, the relevant difference between the two methods is that the ML-estimates are unbiased for the fixed effects but biased for the random effects, whereas the REML-estimates are biased for the fixed effects and unbiased for the random effects. However, when sample size is large compared to the number of model parameters, the differences between the ML- and REML-estimates become negligible. As a guideline, use REML if the interest is in the random effects (variance parameters), and ML if the interest is in the fixed effects. The estimation method can be chosen by setting the argument ‘REML’ to ‘FALSE’ (default is ‘TRUE’).
```r
mod <- lmer(log(totCort) ~ Implant + days + Implant:days + (1|Ring),
data=dat, REML=FALSE) # using ML
```
When we fit the model by `stan_lmer` from the rstanarm package or `brm` from the brms package, i.e., using the Bayes theorem instead of ML or REML, we do not have to care about this choice (of course!). The result from a Bayesian analysis is unbiased for all parameters (at least from a mathematical point of view; also parameters from a Bayesian model can be biased if the model violates assumptions or is confounded).
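As a sketch (not run here), the same model could be fitted in a Bayesian framework with `stan_lmer` from the rstanarm package, using its default priors:
```r
library(rstanarm)
# same model formula as for lmer; the Bayesian fit does not require
# a choice between ML and REML
mod.stan <- stan_lmer(log(totCort) ~ Implant + days + Implant:days + (1 | Ring),
                      data = dat)
```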
<file_sep># (PART) ECOLOGICAL MODELS {-}
# Introduction to PART III {#PART-III}
```{r fig.align='center', echo=FALSE, fig.link=''}
knitr::include_graphics('images/part_IIII.jpg', dpi = 150)
```
------
This part is a collection of more complicated ecological models to analyse data that may not be analysed with the traditional linear models that we covered in [PART I](#PART-I) of this book.
## Model notations
It is unavoidable that different authors use different notations for the same thing, or that the same notation is used for different things. We try to use, whenever possible, a notation that is commonly used at the [International Statistical Ecology Congress ISEC](www.isec2018.org/home). Resulting from an earlier ISEC, @Thomson2009 give guidelines on which letter should be used for which parameter in order to achieve a standard notation, at least among people working with classical mark-recapture models. However, the alphabet has fewer letters than there are ecological parameters. Therefore, the same letter cannot stand for the same parameter across all papers, books and chapters. Here, we try to use the same letter for the same parameter within the same chapter.
<file_sep>
# Capture-mark recapture model with a mixture structure to account for missing sex-variable for parts of the individuals {#cjs_with_mix}
## Introduction
In some species the identification of the sex is not possible for all individuals without sampling DNA. For example, morphological dimorphism is absent or so weak that part of the individuals cannot be assigned to one of the sexes. Particularly in ornithological long-term capture-recapture data sets, which are typically collected by volunteer bird ringers who normally do not have the possibility to analyse DNA, the sex identification is often missing for part of the individuals. For estimating survival, it would nevertheless be valuable to include the data of all individuals, to use the information on sex-specific effects on survival wherever possible, and to account for the fact that for part of the individuals the sex is not known. We here explain how a Cormack-Jolly-Seber model can be combined with a mixture model in order to allow for a joint analysis of individuals with and without identified sex.
We gave an introduction to the Cormack-Jolly-Seber model in Chapter 14.5 of the book @KornerNievergelt2015. Here, we expand this model by a mixture structure that allows including individuals with a missing categorical predictor variable, such as sex.
## Data description
```r
## simulate data
# true parameter values
theta <- 0.6 # proportion of males
nocc <- 15 # number of years in the data set
b0 <- matrix(NA, ncol=nocc-1, nrow=2)
b0[1,] <- rbeta((nocc-1), 3, 4) # capture probability of males
b0[2,] <- rbeta((nocc-1), 2, 4) # capture probability of females
a0 <- matrix(NA, ncol=2, nrow=2)
a1 <- matrix(NA, ncol=2, nrow=2)
a0[1,1]<- qlogis(0.7) # average annual survival for adult males
a0[1,2]<- qlogis(0.3) # average annual survival for juveniles
a0[2,1] <- qlogis(0.55) # average annual survival for adult females
a0[2,2] <- a0[1,2]
a1[1,1] <- 0
a1[1,2] <- -0.5
a1[2,1] <- -0.8
a1[2,2] <- a1[1,2]
nindi <- 1000 # number of individuals with identified sex
nindni <- 1500 # number of individuals with non-identified sex
nind <- nindi + nindni # total number of individuals
y <- matrix(ncol=nocc, nrow=nind)
z <- matrix(ncol=nocc, nrow=nind)
first <- sample(1:(nocc-1), nind, replace=TRUE)
sex <- sample(c(1,2), nind, prob=c(theta, 1-theta), replace=TRUE)
juvfirst <- sample(c(0,1), nind, prob=c(0.5, 0.5), replace=TRUE)
juv <- matrix(0, nrow=nind, ncol=nocc)
for(i in 1:nind) juv[i,first[i]] <- juvfirst[i]  # juvenile (1) or adult (0) at first capture
x <- runif(nocc-1, -2, 2) # a time dependent covariate
p <- b0 # recapture probability
phi <- array(NA, dim=c(2, 2, nocc-1))
# for ad males
phi[1,1,] <- plogis(a0[1,1]+a1[1,1]*x)
# for ad females
phi[2,1,] <- plogis(a0[2,1]+a1[2,1]*x)
# for juvs
phi[1,2,] <- phi[2,2,] <- plogis(a0[2,2]+a1[2,2]*x)
for(i in 1:nind){
z[i,first[i]] <- 1
y[i, first[i]] <- 1
for(t in (first[i]+1):nocc){
z[i, t] <- rbinom(1, size=1, prob=z[i,t-1]*phi[sex[i],juv[i,t-1]+1, t-1])
y[i, t] <- rbinom(1, size=1, prob=z[i,t]*p[sex[i],t-1])
}
}
y[is.na(y)] <- 0
```
The mark-recapture data set consists of capture histories of 2500 individuals over 15 time periods. For each time period $t$ and individual $i$ the capture history matrix $y$ contains $y_{it}=1$ if the individual $i$ is captured during time period $t$, or $y_{it}=0$ if the individual $i$ is not captured during time period $t$. The marking time period varies between individuals from 1 to 14. At the marking time period, the age of the individuals was classified either as juvenile or as adult. Juveniles turn into adults after one time period, thus age is known for all individuals during all time periods after marking. For 1000 individuals of the 2500 individuals, the sex is identified, whereas for 1500 individuals, the sex is unknown. The example data contain one covariate $x$ that takes on one value for each time period.
```r
# bundle the data for Stan
i <- 1:nindi
ni <- (nindi+1):nind
datax <- list(yi=y[i,], nindi=nindi, sex=sex[i], nocc=nocc,
yni=y[ni,], nindni=nindni, firsti=first[i], firstni=first[ni],
juvi=juv[i,]+1, juvni=juv[ni,]+1, year=1:nocc, x=x)
```
## Model description
The observations $y_{it}$, an indicator of whether individual $i$ was recaptured during time period $t$, are modelled conditional on the latent true state of the individual birds $z_{it}$ (0 = dead or permanently emigrated, 1 = alive and at the study site) as a Bernoulli variable. The probability $P(y_{it} = 1)$ is the product of the probability that an alive individual is recaptured, $p_{it}$, and the state of the bird $z_{it}$ (alive = 1, dead = 0). Thus, a dead bird cannot be recaptured, whereas for a bird alive during time period $t$, the recapture probability equals $p_{it}$:
$$y_{it} \sim Bernoulli(z_{it}p_{it})$$
The latent state variable $z_{it}$ is a Markovian variable with the state at time $t$ being dependent on the state at time $t-1$ and the apparent survival probability $\phi_{it}$:
$$z_{it} \sim Bernoulli(z_{it-1}\phi_{it})$$
We use the term apparent survival in order to indicate that the parameter $\phi$ is a product of site fidelity and survival. Thus, individuals that permanently emigrated from the study area cannot be distinguished from dead individuals.
In both models, the parameters $\phi$ and $p$ were modelled as sex-specific. However, for parts of the individuals, sex could not be identified, i.e. sex was missing. Ignoring these missing values would most likely lead to a bias because they were not missing at random. The probability that sex can be identified is increasing with age and most likely differs between sexes. Therefore, we included a mixture model for the sex:
$$Sex_i \sim Categorical(q_i)$$
where $q_i$ is a vector of length 2, containing the probability of being a male and a female, respectively. In this way, the sex of the non-identified individuals was assumed to be male or female with probability $q[1]$ and $q[2]=1-q[1]$, respectively. This model corresponds to the finite mixture model introduced by @Pledger2003 in order to account for unknown classes of birds (heterogeneity). However, in our case, for parts of the individuals the class (sex) was known.
In the example model, we constrain apparent survival to be linearly dependent on a covariate x with different slopes for males, females and juveniles using the logit link function.
$$logit(\phi_{it}) = a0_{sex-age-class[it]} + a1_{sex-age-class[it]}x_i$$
Annual recapture probability was modelled for each year and age and sex class independently:
$$p_{it} = b0_{t,sex-age-class[it]}$$
Uniform prior distributions were used for all probability parameters (parameter space limited to values between 0 and 1). For the intercepts $a0$ we used normal distributions with a mean of 0 and a standard deviation of 1.5, and for the slopes $a1$ normal distributions with a mean of 0 and a standard deviation of 3 (see the Stan code below).
## The Stan code
The trick for coding the CMR-mixture model in Stan is to formulate the model 3 times:
1. For the individuals with identified sex
2. For the males that were not identified
3. For the females that were not identified
Then for the non-identified individuals a mixture model is formulated that assigns a probability of being a female or a male to each individual.
```stan
data {
int<lower=2> nocc; // number of capture events
int<lower=0> nindi; // number of individuals with identified sex
int<lower=0> nindni; // number of individuals with non-identified sex
int<lower=0,upper=2> yi[nindi,nocc]; // CH[i,k]: individual i captured at k
int<lower=0,upper=nocc-1> firsti[nindi]; // year of first capture
int<lower=0,upper=2> yni[nindni,nocc]; // CH[i,k]: individual i captured at k
int<lower=0,upper=nocc-1> firstni[nindni]; // year of first capture
int<lower=1, upper=2> sex[nindi];
int<lower=1, upper=2> juvi[nindi, nocc];
int<lower=1, upper=2> juvni[nindni, nocc];
int<lower=1> year[nocc];
real x[nocc-1]; // a covariate
}
transformed data {
int<lower=0,upper=nocc+1> lasti[nindi]; // last[i]: ind i last capture
int<lower=0,upper=nocc+1> lastni[nindni]; // last[i]: ind i last capture
lasti = rep_array(0,nindi);
lastni = rep_array(0,nindni);
for (i in 1:nindi) {
for (k in firsti[i]:nocc) {
if (yi[i,k] == 1) {
if (k > lasti[i]) lasti[i] = k;
}
}
}
for (ii in 1:nindni) {
for (kk in firstni[ii]:nocc) {
if (yni[ii,kk] == 1) {
if (kk > lastni[ii]) lastni[ii] = kk;
}
}
}
}
parameters {
real<lower=0, upper=1> theta[nindni]; // probability of being male for non-identified individuals
real<lower=0, upper=1> b0[2,nocc-1]; // intercept of p
real a0[2,2]; // intercept for phi
real a1[2,2]; // coefficient for phi
}
transformed parameters {
real<lower=0,upper=1>p_male[nindni,nocc]; // capture probability
real<lower=0,upper=1>p_female[nindni,nocc]; // capture probability
real<lower=0,upper=1>p[nindi,nocc]; // capture probability
real<lower=0,upper=1>phi_male[nindni,nocc-1]; // survival probability
real<lower=0,upper=1>chi_male[nindni,nocc+1]; // probability that an individual
// is never recaptured after its
// last capture
real<lower=0,upper=1>phi_female[nindni,nocc-1]; // survival probability
real<lower=0,upper=1>chi_female[nindni,nocc+1]; // probability that an individual
// is never recaptured after its
// last capture
real<lower=0,upper=1>phi[nindi,nocc-1]; // survival probability
real<lower=0,upper=1>chi[nindi,nocc+1]; // probability that an individual
// is never recaptured after its
// last capture
{
int k;
int kk;
for(ii in 1:nindi){
if (firsti[ii]>1) {
for (z in 1:(firsti[ii]-1)){
phi[ii,z] = 1;
}
}
for(tt in firsti[ii]:(nocc-1)) {
// linear predictor for phi:
phi[ii,tt] = inv_logit(a0[sex[ii], juvi[ii,tt]] + a1[sex[ii], juvi[ii,tt]]*x[tt]);
}
}
for(ii in 1:nindni){
if (firstni[ii]>1) {
for (z in 1:(firstni[ii]-1)){
phi_female[ii,z] = 1;
phi_male[ii,z] = 1;
}
}
for(tt in firstni[ii]:(nocc-1)) {
// linear predictor for phi:
phi_male[ii,tt] = inv_logit(a0[1, juvni[ii,tt]] + a1[1, juvni[ii,tt]]*x[tt]);
phi_female[ii,tt] = inv_logit(a0[2, juvni[ii,tt]]+ a1[2, juvni[ii,tt]]*x[tt]);
}
}
for(i in 1:nindi) {
// linear predictor for p for identified individuals
for(w in 1:firsti[i]){
p[i,w] = 1;
}
for(kkk in (firsti[i]+1):nocc)
p[i,kkk] = b0[sex[i],year[kkk-1]];
chi[i,nocc+1] = 1.0;
k = nocc;
while (k > firsti[i]) {
chi[i,k] = (1 - phi[i,k-1]) + phi[i,k-1] * (1 - p[i,k]) * chi[i,k+1];
k = k - 1;
}
if (firsti[i]>1) {
for (u in 1:(firsti[i]-1)){
chi[i,u] = 0;
}
}
chi[i,firsti[i]] = (1 - p[i,firsti[i]]) * chi[i,firsti[i]+1];
}// close definition of transformed parameters for identified individuals
for(i in 1:nindni) {
// linear predictor for p for non-identified individuals
for(w in 1:firstni[i]){
p_male[i,w] = 1;
p_female[i,w] = 1;
}
for(kkkk in (firstni[i]+1):nocc){
p_male[i,kkkk] = b0[1,year[kkkk-1]];
p_female[i,kkkk] = b0[2,year[kkkk-1]];
}
chi_male[i,nocc+1] = 1.0;
chi_female[i,nocc+1] = 1.0;
k = nocc;
while (k > firstni[i]) {
chi_male[i,k] = (1 - phi_male[i,k-1]) + phi_male[i,k-1] * (1 - p_male[i,k]) * chi_male[i,k+1];
chi_female[i,k] = (1 - phi_female[i,k-1]) + phi_female[i,k-1] * (1 - p_female[i,k]) * chi_female[i,k+1];
k = k - 1;
}
if (firstni[i]>1) {
for (u in 1:(firstni[i]-1)){
chi_male[i,u] = 0;
chi_female[i,u] = 0;
}
}
chi_male[i,firstni[i]] = (1 - p_male[i,firstni[i]]) * chi_male[i,firstni[i]+1];
chi_female[i,firstni[i]] = (1 - p_female[i,firstni[i]]) * chi_female[i,firstni[i]+1];
} // close definition of transformed parameters for non-identified individuals
} // close block of transformed parameters exclusive parameter declarations
} // close transformed parameters
model {
// priors
theta ~ beta(1, 1);
for (g in 1:(nocc-1)){
b0[1,g]~beta(1,1);
b0[2,g]~beta(1,1);
}
a0[1,1]~normal(0,1.5);
a0[1,2]~normal(0,1.5);
a1[1,1]~normal(0,3);
a1[1,2]~normal(0,3);
a0[2,1]~normal(0,1.5);
  a0[2,2]~normal(a0[1,2],0.01); // for juveniles, we assume that survival is independent of sex
a1[2,1]~normal(0,3);
a1[2,2]~normal(a1[1,2],0.01);
// likelihood for identified individuals
for (i in 1:nindi) {
if (lasti[i]>0) {
for (k in firsti[i]:lasti[i]) {
if(k>1) target+= (log(phi[i, k-1]));
if (yi[i,k] == 1) target+=(log(p[i,k]));
else target+=(log1m(p[i,k]));
}
}
target+=(log(chi[i,lasti[i]+1]));
}
// likelihood for non-identified individuals
for (i in 1:nindni) {
real log_like_male = 0;
real log_like_female = 0;
if (lastni[i]>0) {
for (k in firstni[i]:lastni[i]) {
if(k>1){
log_like_male += (log(phi_male[i, k-1]));
log_like_female += (log(phi_female[i, k-1]));
}
if (yni[i,k] == 1){
log_like_male+=(log(p_male[i,k]));
log_like_female+=(log(p_female[i,k]));
}
else{
log_like_male+=(log1m(p_male[i,k]));
log_like_female+=(log1m(p_female[i,k]));
}
}
}
log_like_male += (log(chi_male[i,lastni[i]+1]));
log_like_female += (log(chi_female[i,lastni[i]+1]));
target += log_mix(theta[i], log_like_male, log_like_female);
}
}
```
## Call Stan from R, check convergence and look at results
```r
# Run STAN
library(rstan)
fit <- stan(file = "stanmodels/cmr_mixture_model.stan", data=datax, verbose = FALSE)
# for the above simulated data (2500 individuals x 15 time periods)
# computing time is around 48 hours on an Intel Core i7 laptop
# for larger data sets, we recommend moving the transformed parameters block
# to the model block in order to avoid monitoring of p_male, p_female,
# phi_male and phi_female producing memory problems
# launch_shinystan(fit) # diagnostic plots
summary(fit)
```
```
## mean se_mean sd 2.5% 25%
## b0[1,1] 0.60132367 0.0015709423 0.06173884 0.48042366 0.55922253
## b0[1,2] 0.70098709 0.0012519948 0.04969428 0.60382019 0.66806698
## b0[1,3] 0.50293513 0.0010904085 0.04517398 0.41491848 0.47220346
## b0[1,4] 0.28118209 0.0008809447 0.03577334 0.21440931 0.25697691
## b0[1,5] 0.34938289 0.0009901335 0.03647815 0.27819918 0.32351323
## b0[1,6] 0.13158569 0.0006914740 0.02627423 0.08664129 0.11286629
## b0[1,7] 0.61182981 0.0010463611 0.04129602 0.53187976 0.58387839
## b0[1,8] 0.48535193 0.0010845951 0.04155762 0.40559440 0.45750793
## b0[1,9] 0.52531291 0.0008790063 0.03704084 0.45247132 0.50064513
## b0[1,10] 0.87174780 0.0007565552 0.03000936 0.80818138 0.85259573
## b0[1,11] 0.80185454 0.0009425675 0.03518166 0.73173810 0.77865187
## b0[1,12] 0.33152443 0.0008564381 0.03628505 0.26380840 0.30697293
## b0[1,13] 0.42132288 0.0012174784 0.04140382 0.34062688 0.39305210
## b0[1,14] 0.65180372 0.0015151039 0.05333953 0.55349105 0.61560493
## b0[2,1] 0.34237039 0.0041467200 0.12925217 0.12002285 0.24717176
## b0[2,2] 0.18534646 0.0023431250 0.07547704 0.05924694 0.12871584
## b0[2,3] 0.61351083 0.0024140550 0.07679100 0.46647727 0.56242546
## b0[2,4] 0.37140208 0.0024464965 0.06962399 0.24693888 0.32338093
## b0[2,5] 0.19428215 0.0034618302 0.11214798 0.02800056 0.11146326
## b0[2,6] 0.27371336 0.0026553769 0.09054020 0.11827243 0.20785316
## b0[2,7] 0.18611173 0.0014387436 0.05328492 0.09122869 0.14789827
## b0[2,8] 0.25648337 0.0018258589 0.05287800 0.16255769 0.21913271
## b0[2,9] 0.20378754 0.0021367769 0.07380004 0.07777998 0.15215845
## b0[2,10] 0.52679548 0.0024625568 0.08696008 0.36214334 0.46594844
## b0[2,11] 0.47393354 0.0032593161 0.10555065 0.28843967 0.39781278
## b0[2,12] 0.22289155 0.0017082729 0.05551514 0.12576797 0.18203335
## b0[2,13] 0.26191486 0.0024159794 0.07016314 0.14106495 0.21234017
## b0[2,14] 0.65111737 0.0055743944 0.18780555 0.29279480 0.50957591
## a0[1,1] 0.95440670 0.0013771881 0.04808748 0.86301660 0.92146330
## a0[1,2] 0.01529770 0.0469699511 1.46995922 -2.82218067 -0.95533706
## a0[2,1] 0.16384995 0.0049928331 0.12634422 -0.06399631 0.07533962
## a0[2,2] 0.01535679 0.0469634175 1.47006964 -2.81864060 -0.95515751
## a1[1,1] 0.15937249 0.0028992587 0.08864790 -0.01288607 0.10017613
## a1[1,2] 0.08055953 0.1007089857 3.02148727 -5.95525636 -1.96662599
## a1[2,1] -0.83614134 0.0074143920 0.18655882 -1.21033848 -0.95698565
## a1[2,2] 0.08071668 0.1006904255 3.02145647 -5.94617355 -1.96508733
## 50% 75% 97.5% n_eff Rhat
## b0[1,1] 0.60206306 0.6431566 0.7206343 1544.5301 1.002331
## b0[1,2] 0.70165494 0.7355204 0.7946280 1575.4617 1.001482
## b0[1,3] 0.50367411 0.5330078 0.5898079 1716.3196 1.001183
## b0[1,4] 0.27997512 0.3046483 0.3544592 1649.0040 1.000760
## b0[1,5] 0.34936442 0.3751935 0.4191138 1357.3073 1.002072
## b0[1,6] 0.12987449 0.1481661 0.1873982 1443.8040 1.003676
## b0[1,7] 0.61203228 0.6397577 0.6933929 1557.5904 1.001458
## b0[1,8] 0.48513822 0.5134314 0.5672066 1468.1355 1.002511
## b0[1,9] 0.52534212 0.5501747 0.5994060 1775.7335 1.000824
## b0[1,10] 0.87324112 0.8934047 0.9258033 1573.3747 1.000719
## b0[1,11] 0.80300311 0.8261868 0.8675033 1393.1817 1.001172
## b0[1,12] 0.33044476 0.3552199 0.4052902 1794.9956 1.000566
## b0[1,13] 0.42116690 0.4492297 0.5026942 1156.5339 1.000289
## b0[1,14] 0.64956850 0.6864706 0.7607107 1239.4056 1.004061
## b0[2,1] 0.33493631 0.4251416 0.6150923 971.5524 1.004049
## b0[2,2] 0.17981663 0.2358847 0.3446097 1037.6210 1.001474
## b0[2,3] 0.61326419 0.6644156 0.7628427 1011.8737 1.005727
## b0[2,4] 0.36837778 0.4158585 0.5190457 809.8949 1.003803
## b0[2,5] 0.17910449 0.2591418 0.4533117 1049.4733 1.001499
## b0[2,6] 0.26739172 0.3299594 0.4685139 1162.6006 1.001170
## b0[2,7] 0.18254607 0.2198969 0.3003156 1371.6455 1.000878
## b0[2,8] 0.25280556 0.2895585 0.3704113 838.7174 1.005624
## b0[2,9] 0.19724053 0.2501298 0.3694806 1192.8747 1.003687
## b0[2,10] 0.52587075 0.5845730 0.7061694 1247.0027 1.002851
## b0[2,11] 0.46874445 0.5392302 0.7046892 1048.7425 0.999473
## b0[2,12] 0.21961656 0.2580782 0.3397127 1056.1081 1.000907
## b0[2,13] 0.25601959 0.3056204 0.4142888 843.3960 1.003130
## b0[2,14] 0.65824835 0.7973674 0.9698829 1135.0669 1.003838
## a0[1,1] 0.95368445 0.9862439 1.0515747 1219.2071 1.003898
## a0[1,2] 0.01633534 0.9911055 2.9717839 979.4231 1.003726
## a0[2,1] 0.15519648 0.2472483 0.4230776 640.3489 1.004625
## a0[2,2] 0.01587281 0.9898084 2.9659552 979.8429 1.003744
## a1[1,1] 0.15647489 0.2205720 0.3354845 934.8953 1.007190
## a1[1,2] 0.06683287 2.1568781 6.0295208 900.1297 1.003701
## a1[2,1] -0.83503982 -0.7075691 -0.4814539 633.1119 1.010568
## a1[2,2] 0.06586905 2.1557247 6.0239735 900.4432 1.003704
```
<file_sep># This script will be run at the start of each chapter
rm(list = ls())
# Libraries
library(knitr)
library(blmeco)
library(tidyverse)
library(Rmisc)
# Knitr settings
options(scipen = 6);
opts_chunk$set(echo = TRUE, hide = TRUE, cache = FALSE, warning = FALSE, message = FALSE,
fig.width = 8)#fig.asp=.45
# Display of inline R results within text
inline_hook <- function(x) {
if (is.numeric(x)) {
x <- format(x, nsmall = 2, digits = 2)
}
x
}
knit_hooks$set(inline = inline_hook)
<file_sep>
# Prior distributions {#priors}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Introduction
The prior is an integral part of a Bayesian model. We must specify one.
When to use informative priors: In practice (management, politics, etc.) we would like to base our decisions on all information available. Therefore, we consider it responsible to include informative priors in applied research whenever possible. Priors allow combining information from the literature with the information in the data, or combining information from different data sets.
When to use non-informative priors: In basic research, when results should only report the information in the current data set. Results from a case study may later be used in a meta-analysis that assumes independence across the different studies included.
## How to choose a prior {#choosepriors}
<!-- Tabelle von Fränzi (CourseIII_glm_glmmm/course2018/presentations_handouts/presentations) -->
important reference: @Lemoine.2019
TODO
### Priors for variance parameters
@Gelman.2006 discusses advantages of using folded t-distributions or cauchy distributions as prior distributions for variance parameters in hierarchical models.
When specifying t-distributions, we find it hard to imagine how the distribution looks for given parameter values. Therefore, we simulate values from different distributions and look at the histograms. Because the parameterisation of the t-distribution differs among software languages, it is important to simulate from the distribution as specified in the software in which the model is finally fitted. In Figure \@ref(fig:tdjags) we give some examples of folded t-distributions specified in jags using different values for the precision (second parameter) and degrees of freedom (third parameter).
```{r tdjags, fig.align='center', echo=FALSE, fig.link='', fig.cap="Folded t-distributions with different precisions and degrees of freedom. The panel titles give the jags code of the distribution. Dark blue vertical lines indicate 90% quantiles, light-blue lines indicate 98% quantiles."}
knitr::include_graphics('images/example_t_distributions_jags.jpg', dpi = 150)
```
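As a sketch of how such prior simulations can be done in R (this is not the code that produced the figure above, and the chosen precisions and degrees of freedom are just examples): jags parameterises `dt(0, tau, k)` with a precision `tau`, which corresponds to a scale of `1/sqrt(tau)`, and folding is achieved by taking absolute values.
```r
set.seed(3456)
nsim <- 10000
tau <- c(1, 0.1, 0.01)  # example precisions (jags parameterisation)
k <- 2                  # example degrees of freedom
par(mfrow = c(1, 3))
for (j in seq_along(tau)) {
  x <- abs(rt(nsim, df = k) / sqrt(tau[j]))  # folded t with scale 1/sqrt(tau)
  hist(x[x < quantile(x, 0.98)], breaks = 50, freq = FALSE,
       main = paste0("dt(0, ", tau[j], ", ", k, ")"), xlab = "Simulated value")
  abline(v = quantile(x, c(0.9, 0.98)), col = c("darkblue", "lightblue"), lwd = 2)
}
```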
GIVE EXAMPLE FOR STAN TOO
## Prior sensitivity
xxx
<file_sep>
# Model comparison and multimodel inference {#model_comparison}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Introduction
literature to refer to: @Tredennick2021
## Summary
xxx
<file_sep>
# Basics of statistics {#basics}
This chapter introduces some important terms useful for doing data analyses.
It also introduces the essentials of the classical frequentist tests such as the t-test. Even though we will not use null hypothesis tests later [@Amrhein.2019], we introduce them here because we need to understand the scientific literature. For each classical test, we provide a suggestion for how to present the statistical results without using null hypothesis tests. We further discuss some differences between Bayesian and frequentist statistics.
## Variables and observations
Empirical research involves data collection. Data are collected by recording measurements of variables for observational units. An observational unit may be, for example, an individual, a plot, a time interval or a combination of those. The collection of all units ideally builds a random sample of the entire population of units we are interested in. The measurements (or observations) of the random sample are stored in a data table (sometimes also called a data set, but a data set may include several data tables; a collection of data tables belonging to the same study or system is normally bundled and stored in a data base). A data table is a collection of variables (columns). Data tables are normally handled as objects of class `data.frame` in R. All measurements on a row of a data table belong to the same observational unit. The variables can be of different scales (Table \@ref(tab:scalemeasurement)).
Table: (\#tab:scalemeasurement) Scales of measurements
Scale | Examples | Properties | Coding in R |
:-------|:------------------|:------------------|:--------------------|
Nominal | Sex, genotype, habitat | Identity (values have a unique meaning) | `factor()` |
Ordinal | Elevational zones | Identity and magnitude (values have an ordered relationship) | `ordered()` |
Numeric | Discrete: counts; continuous: body weight, wing length | Identity, magnitude, and intervals or ratios | `integer()` `numeric()` |
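As a small illustration of the coding in R (the values are hypothetical):
```r
sex <- factor(c("male", "female", "female"))            # nominal
zone <- ordered(c("low", "mid", "high"),
                levels = c("low", "mid", "high"))       # ordinal
counts <- c(0L, 2L, 5L)                                 # numeric, discrete (integer)
wing <- c(57.3, 61.2, 59.8)                             # numeric, continuous
```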
The aim of many studies is to describe how a variable of interest ($y$) is related to one or more predictor variables ($x$). How these variables are named differs between authors. The y-variable is called "outcome variable", "response" or "dependent variable". The x-variables are called "predictors", "explanatory variables" or "independent variables". The choice of the terms for x and y is a matter of taste. We avoid the terms "dependent" and "independent" variables because often we do not know whether the variable $y$ in fact depends on the $x$ variables and also, often the x-variables are not independent of each other. In this book, we try to use "outcome" and "predictor" variables because these terms sound most neutral to us in that they refer to how the statistical model is constructed rather than to a real-life relationship.
## Displaying and summarizing data
### Histogram
While nominal and ordinal variables are summarized by giving the absolute number or the proportion of observations for each category, numeric variables normally are summarized by a location and a scatter statistics, such as the mean and the standard deviation or the median and some quantiles. The distribution of a numeric variable is graphically displayed in a histogram (Fig. \@ref(fig:histogram)).
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/histogram-1.png" alt="Histogram of the length of ell of statistics course participants." width="672" />
<p class="caption">(\#fig:histogram)Histogram of the length of ell of statistics course participants.</p>
</div>
To draw a histogram, the variable is displayed on the x-axis and the $x_i$-values are assigned to classes. The edges of the classes are called ‘breaks’. They can be set with the argument `breaks=` within the function `hist`. The values given in the `breaks=` argument must at least span the values of the variable. If the argument `breaks=` is not specified, R searches for break values that make the histogram look smooth. The number of observations falling into each class is given on the y-axis. The y-axis can be re-scaled so that the area of the histogram equals 1 by setting the argument `freq=FALSE`. In that case, the values on the y-axis correspond to the density values of a probability distribution (Chapter \@ref(distributions)).
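A minimal example, assuming that the variable `ell` in the data frame `dat` contains values between 30 and 50 cm:
```r
# histogram with user-defined breaks; freq=FALSE re-scales the y-axis
# so that the area of the histogram equals 1 (density scale)
hist(dat$ell, breaks = seq(30, 50, by = 2), freq = FALSE,
     xlab = "Length of ell [cm]", main = NA)
```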
### Location and scatter
Location statistics are mean, median or mode. A common mean is the
- arithmetic mean: $\hat{\mu} = \bar{x} = \frac{1}{n} \sum_{i=1}^{n} x_i$ (R function `mean`),
where $n$ is the sample size. The parameter $\mu$ is the (unknown) true mean of the entire population of which the $1,...,n$ measurements are a random sample of. $\bar{x}$ is called the sample mean and used as an estimate for $\mu$. The hat ($\hat{\ }$) above a parameter indicates that the parameter value is obtained from a sample and, therefore, it may be different from the true value.
The median is the 50% quantile. We find 50% of the measurements below and 50% above the median. If $x_1,..., x_n$ are the ordered measurements of a variable, then the median is:
- median $= x_{(n+1)/2}$ for odd $n$, and median $= \frac{1}{2}(x_{n/2} + x_{n/2+1})$ for even $n$ (R function `median`).
The mode is the value that occurs with the highest frequency or that has the highest density.
Scatter also is called spread, scale or variance. Variance parameters describe how far away from the location parameter single observations can be found, or how the measurements are scattered around their mean. The variance is defined as the average squared difference between the observations and the mean:
- variance $\hat{\sigma^2} = s^2 = \frac{1}{n-1}\sum_{i=1}^{n}(x_i-\bar{x})^2$
The term $(n-1)$ is called the degrees of freedom. It is used in the denominator of the variance formula instead of $n$ to prevent underestimating the variance. Because $\bar{x}$ is on average closer to the $x_i$ than the unknown true mean $\mu$ would be, the variance would be underestimated if $n$ were used in the denominator.
<!-- <font size="1"> The maximum likelihood estimate of the variance corresponds to the variance formula using $n$ instead of $n-1$ in the denominator, see, e.g., @Royle.2008b.</font> -->
The variance is used to compare the degree of scatter among different groups. However, its values are difficult to interpret because of the squared unit. Therefore, the square root of the variance, the standard deviation, is normally reported:
- standard deviation $\hat{\sigma} = s = \sqrt{s^2}$ (R function `sd`)
The standard deviation is approximately the average deviation of an observation from the sample mean. In the case of a [normal distribution][normdist], about two thirds (68%) of the data are expected within one standard deviation around the mean.
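These location and scatter statistics are directly available in R; a minimal sketch with made-up measurements:

```r
# hypothetical measurements
xm <- c(4.1, 3.8, 5.2, 4.4, 6.0, 3.9, 4.7)
mean(xm)     # arithmetic mean
median(xm)   # 50% quantile
var(xm)      # variance (denominator n - 1)
sd(xm)       # standard deviation, sqrt(var(xm))
```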
The variance and standard deviation each describe the scatter with a single value. Thus, we have to assume that the observations are scattered symmetrically around their mean in order to get a picture of the distribution of the measurements. When the measurements are spread asymmetrically (skewed distribution), then it may be more precise to describe the scatter with more than one value. Such statistics could be quantiles from the lower and upper tail of the data.
Quantiles inform us about both location and spread of a distribution. The $p$th-quantile is the value with the property that a proportion $p$ of all values are less than or equal to the value of the quantile. The median is the 50% quantile. The 25% quantile and the 75% quantile are also called the lower and upper quartiles, respectively. The range between the 25% and the 75% quartiles is called the interquartile range. This range includes 50% of the distribution and is also used as a measure of scatter. The R function `quantile` extracts sample quantiles. The median, the quartiles, and the interquartile range can be graphically displayed using box-and-whisker plots (boxplots for short, R function `boxplot`). The thick horizontal bars are the medians (Fig. \@ref(fig:boxplot)). The boxes mark the interquartile range. The whiskers reach out to the last observation within 1.5 times the interquartile range from the quartile. Circles mark observations beyond 1.5 times the interquartile range from the quartile.
```r
par(mar=c(5,4,1,1))
boxplot(ell~car, data=dat, las=1, ylab="Length of ell [cm]",
col="tomato", xaxt="n")
axis(1, at=c(1,2), labels=c("Not owning a car", "Car owner"))
n <- table(dat$car)
axis(1, at=c(1,2), labels=paste("n=", n, sep=""), mgp=c(3,2, 0))
```
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/boxplot-1.png" alt="Boxplot of the length of ell of statistics course participants who are or ar not owner of a car." width="672" />
<p class="caption">(\#fig:boxplot)Boxplot of the length of ell of statistics course participants who are or ar not owner of a car.</p>
</div>
The boxplot is an appealing tool for comparing location, variance and distribution of measurements among groups.
### Correlations
A correlation measures the strength with which characteristics of two variables are associated with each other (co-occur). If both variables are numeric, we can visualize the correlation using a scatterplot.
```r
par(mar=c(5,4,1,1))
plot(temp~ell, data=dat, las=1, xlab="Length of ell [cm]",
ylab="Comfort temperature [°C]",
pch=16)
```
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/scatterplot-1.png" alt="Scatterplot of the length of ell and the comfort temperature of statistics course participants." width="672" />
<p class="caption">(\#fig:scatterplot)Scatterplot of the length of ell and the comfort temperature of statistics course participants.</p>
</div>
The covariance between variable $x$ and $y$ is defined as:
- covariance $q = \frac{1}{n-1}\sum_{i=1}^{n}((x_i-\bar{x})*(y_i-\bar{y}))$ (R function `cov`)
As with the variance, the unit of the covariance (the product of the units of $x$ and $y$) makes its values difficult to interpret. A standardized covariance is the Pearson correlation coefficient:
- Pearson correlation coefficient: $r=\frac{\sum_{i=1}^{n}(x_i-\bar{x})(y_i-\bar{y})}{\sqrt{\sum_{i=1}^{n}(x_i-\bar{x})^2\sum_{i=1}^{n}(y_i-\bar{y})^2}}$ (R function `cor`)
Means, variances, standard deviations, covariances and correlations are sensitive to outliers. Single observations containing extreme values normally have a disproportionate influence on these statistics. When outliers are present in the data, we may prefer a more robust correlation measure such as the Spearman correlation or Kendall's tau. Both are based on the ranks of the measurements instead of the measurements themselves.
- Spearman correlation coefficient: correlation between rank(x) and rank(y) (R function `cor(x,y, method="spearman")`)
- Kendall's tau: $\tau = 1-\frac{4I}{n(n-1)}$, where $I$ = number of pairs $(i,k)$ for which $(x_i < x_k)$ & $(y_i > y_k)$ or vice versa (R function `cor(x,y, method="kendall")`)
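As a sketch, and assuming the data frame `dat` with the variables `ell` and `temp` used in the scatterplot above, the three correlation measures could be obtained as:

```r
cor(dat$ell, dat$temp)                       # Pearson
cor(dat$ell, dat$temp, method = "spearman")  # Spearman (rank based)
cor(dat$ell, dat$temp, method = "kendall")   # Kendall's tau
```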
### Principal components analyses PCA
Principal components analysis (PCA) is a multivariate correlation analysis. A multidimensional data set with $k$ variables can be seen as a cloud of points (observations) in a $k$-dimensional space. Imagine we could move around in that space and look at the cloud from different locations. From some locations the data look highly correlated, whereas from others we cannot see the correlation. That is what PCA does: it rotates the coordinate system (defined by the original variables) of the data cloud so that the correlations are no longer visible. The axes of the new coordinate system are linear combinations of the original variables. They are called principal components. There are as many principal components as there are original variables, i.e. $k$: $pc_1, ..., pc_k$. The principal components meet further requirements:
- the first component explains most variance
- the second component explains most of the remaining variance and is perpendicular (= uncorrelated) to the first one
- third component explains most of the remaining variance and is perpendicular to the first two
- ...
For example, in a two-dimensional data set $(x_1, x_2)$ the principal components become
$pc_{1i} = b_{11}x_{1i} + b_{12}x_{2i}$
$pc_{2i} = b_{21}x_{1i} + b_{22}x_{2i}$ with $b_{jk}$ being the loading of principal component $j$ on original variable $k$. Fig. \@ref(fig:principal) shows the two principal components for a two-dimensional data set. They can be calculated using matrix algebra: the principal components are the eigenvectors of the covariance or correlation matrix.
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/principal-1.png" alt="Principal components of a two dimensional data set based on the covariance matrix (green) and the correlation matrix (brown)." width="672" />
<p class="caption">(\#fig:principal)Principal components of a two dimensional data set based on the covariance matrix (green) and the correlation matrix (brown).</p>
</div>
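The eigen-decomposition mentioned above can be carried out directly in R; a minimal sketch, assuming the two numeric vectors `x1` and `x2` shown in the figure:

```r
# eigenvectors of the covariance or correlation matrix give the
# directions (loadings) of the principal components
eigen(cov(cbind(x1, x2)))$vectors  # based on the covariance matrix
eigen(cor(cbind(x1, x2)))$vectors  # based on the correlation matrix
```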
The choice between the correlation and the covariance matrix is essential. The covariance matrix is an unstandardized correlation matrix. Therefore, the measurement units, i.e., whether cm or m are used, influence the result of a PCA based on the covariance matrix: the result changes when we change the unit of one variable. Basing the PCA on the covariance matrix only makes sense when the variances are comparable among the variables, i.e., if all variables are measured in the same unit and we would like to weight each variable according to its variance. If this is not the case, the PCA should be based on the correlation matrix.
```r
pca <- princomp(cbind(x1,x2)) # PCA based on covariance matrix
pca <- princomp(cbind(x1,x2), cor=TRUE) # PCA based on correlation matrix
loadings(pca)
```
```
##
## Loadings:
## Comp.1 Comp.2
## x1 0.707 0.707
## x2 0.707 -0.707
##
## Comp.1 Comp.2
## SS loadings 1.0 1.0
## Proportion Var 0.5 0.5
## Cumulative Var 0.5 1.0
```
The loadings measure the correlation of each variable with the principal components. They inform us about what aspect of the data each component is measuring. The signs of the loadings are arbitrary, thus we can multiply them by -1 without changing the PCA. Sometimes this can be handy for describing the meaning of a principal component in a paper. For example, @Zbinden.2018 combined the number of hunting licenses, the duration of the hunting period and the number of black grouse cocks that were allowed to be hunted per hunter into a principal component in order to measure hunting pressure. All three variables had a negative loading on the first component, so that high values of the component meant low hunting pressure. Before the subsequent analyses, for which a measure of hunting pressure was of interest, the authors changed the signs of the loadings so that the component measured hunting pressure.
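A minimal sketch of such a sign change, using the `pca` object from above (the flipped objects are hypothetical names introduced here for illustration):

```r
# reversing the sign of the first component does not change the PCA,
# only the direction in which the component is interpreted
loadings1_flipped <- -loadings(pca)[, 1]
scores1_flipped   <- -pca$scores[, 1]
```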
The proportion of variance explained by each component is, besides the loadings, an important piece of information. If the first few components explain the main part of the variance, it means that maybe not all $k$ variables are necessary to describe the data or, in other words, that the original $k$ variables contain a lot of redundant information.
```r
# extract the variance captured by each component
summary(pca)
```
```
## Importance of components:
## Comp.1 Comp.2
## Standard deviation 1.2679406 0.6263598
## Proportion of Variance 0.8038367 0.1961633
## Cumulative Proportion 0.8038367 1.0000000
```
<font size="1">Ridge regression is similar to doing a PCA within a linear model while components with low variance are shrinked to a higher degree than components with a high variance.</font>
## Inferential statistics
### Uncertainty
> there is never a "yes-or-no" answer
> there will always be uncertainty
[Amrhein (2017)](https://peerj.com/preprints/26857)
The decision whether an effect is important or not cannot be made based on data alone. For making a decision we should, besides the data, carefully consider the consequences of each decision, the aims we would like to achieve, and the risk, i.e. how bad it is to make the wrong decision. Structured decision making or decision analyses provide methods to combine consequences of decisions, objectives of different stakeholders and risk attitudes of decision makers to make optimal decisions [@Hemming2022; @Runge2020]. In most data analyses, particularly in basic research and when working on case studies, we normally do not consider the consequences of decisions. However, the results will be more useful when presented in a way that other scientists can use them for a meta-analysis, or stakeholders and politicians can use them for making better decisions. Useful results always include information on the size of a parameter of interest, e.g. the effect of a drug or an average survival, together with an uncertainty measure.
Therefore, statistics describes patterns of the process that presumably has generated the data and quantifies the uncertainty of the described patterns that is due to the fact that the data are just a random sample from the larger population we would like to learn about.
Quantification of uncertainty is only possible if:
1. the mechanisms that generated the data are known
2. the observations are a random sample from the population of interest
Most studies aim at understanding the mechanisms that generated the data, thus these mechanisms are most likely not known beforehand. To overcome that problem, we construct models, e.g. statistical models, that are (strong) abstractions of the data generating process, and we report the model assumptions. All uncertainty measures are conditional on the model we used to analyze the data, i.e., they are only reliable if the model describes the data generating process realistically. Because most statistical models do not describe the data generating process perfectly, the true uncertainty is almost always much higher than the one we report.
In order to obtain a random sample from the population under study, a good study design is a prerequisite.
To illustrate how inference about a big population is drawn from a small sample, we here use simulated data. The advantage of using simulated data is that the mechanism that generated the data is known as well as the big population.
Imagine there are 300000 PhD students in the world and we would like to know how many statistics courses they have taken on average before they started their PhD (Fig. \@ref(fig:histtruesample)). We use random number generators (`rpois` and `rgamma`) to simulate a number for each of the 300000 virtual students. We use these 300000 numbers as the big population that in real life we can almost never sample in total. Normally, we know the number of courses only for a small sample of students. To simulate that situation, we draw 12 numbers at random from the 300000 (R function `sample`). Then, we estimate the average number of statistics courses students take before they start a PhD from the sample of 12 students, and we compare that mean to the true mean of the 300000 students.
```r
# simulate the virtual true population
set.seed(235325) # set seed for random number generator
# simulate fake data of the whole population
# using an overdispersed Poisson distribution,
# i.e. a Poisson distribution of which the mean
# has a gamma distribution
statscourses <- rpois(300000, rgamma(300000, 2, 3))
# draw a random sample from the population
n <- 12 # sample size
y <- sample(statscourses, 12, replace=FALSE)
```
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/histtruesample-1.png" alt="Histogram of the number of statistics courses of 300000 virtual PhD students have taken before their PhD started. The rugs on the x-axis indicate the random sample of 12 out of the 300000 students. The black vertical line indicates the mean of the 300000 students (true mean) and the blue line indicates the mean of the sample (sample mean)." width="672" />
<p class="caption">(\#fig:histtruesample)Histogram of the number of statistics courses of 300000 virtual PhD students have taken before their PhD started. The rugs on the x-axis indicate the random sample of 12 out of the 300000 students. The black vertical line indicates the mean of the 300000 students (true mean) and the blue line indicates the mean of the sample (sample mean).</p>
</div>
We observe the sample mean; what do we know about the population mean? There are two different approaches to answer this question. 1) We could ask how much the sample mean would scatter if we repeated the study many times. This approach is called frequentist statistics. 2) We could ask, for any possible value, what the probability is that it is the true population mean. To do so, we use probability theory, and that approach is called Bayesian statistics.
Both approaches use (essentially similar) models. Only the mathematical techniques to calculate uncertainty measures differ between the two approaches. In cases when, besides the data, no other information is used to construct the model, the results are approximately identical (at least for large enough sample sizes).
A frequentist 95% confidence interval (blue horizontal segment in Fig. \@ref(fig:CImean)) is constructed such that, if you were to (hypothetically) repeat the experiment or sampling many many times, 95% of the intervals constructed would contain the true value of the parameter (here the mean number of courses). From the Bayesian posterior distribution (pink in Fig. \@ref(fig:CImean)) we could construct a 95% interval (e.g., by using the 2.5% and 97.5% quantiles). This interval has traditionally been called credible interval. It can be interpreted that we are 95% sure that the true mean is inside that interval.
Both the confidence interval and the posterior distribution correspond to the statistical uncertainty of the sample mean, i.e., they measure how far away the sample mean could be from the true mean. In this virtual example, we know that the true mean is 0.66, thus somewhere in the lower part of the 95% CI or in the lower quantiles of the posterior distribution. In real life, we do not know the true mean. The grey histogram in Fig. \@ref(fig:CImean) shows how the means of many different virtual samples of 12 students scatter around the true mean. The 95% interval of these virtual means corresponds to the 95% CI, and the variance of these virtual means corresponds to the variance of the posterior distribution. This virtual example shows that the posterior distribution and the 95% CI correctly measure the statistical uncertainty (variance, width of the interval); however, we never know exactly how far the sample mean is from the true mean.
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/CImean-1.png" alt="Histogram of means of repeated samples from the true populations. The scatter of these means visualize the true uncertainty of the mean in this example. The blue vertical line indicates the mean of the original sample. The blue segment shows the 95% confidence interval (obtained by fequensist methods) and the violet line shows the posterior distribution of the mean (obtained by Bayesian methods)." width="672" />
<p class="caption">(\#fig:CImean)Histogram of means of repeated samples from the true populations. The scatter of these means visualize the true uncertainty of the mean in this example. The blue vertical line indicates the mean of the original sample. The blue segment shows the 95% confidence interval (obtained by fequensist methods) and the violet line shows the posterior distribution of the mean (obtained by Bayesian methods).</p>
</div>
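The idea behind the grey histogram can be mimicked with a few lines of R, drawing many virtual samples of 12 students from the simulated population defined above; a minimal sketch:

```r
# mean of each of 10000 virtual samples of 12 students
virtualmeans <- replicate(10000,
                          mean(sample(statscourses, 12, replace = FALSE)))
quantile(virtualmeans, probs = c(0.025, 0.975))  # spans 95% of the virtual means
```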
Uncertainty intervals only are reliable if the model is a realistic abstraction of the data generating process (or if the model assumptions are realistic).
Because both terms, confidence and credible interval, suggest that the interval indicates confidence or credibility while the intervals actually show uncertainty, it has been suggested to rename them compatibility or uncertainty intervals [@Gelman.2019].
### Standard error
The standard error (SE) is, besides the uncertainty interval, an alternative way to measure uncertainty. It measures an average deviation of the sample mean from the (unknown) true population mean. The frequentist method for obtaining the SE is based on the central limit theorem. According to the central limit theorem, the sum of independent, not necessarily normally distributed random numbers is normally distributed when the sample size is large enough (Chapter \@ref(distributions)). Because the mean is a sum (divided by a constant, the sample size), the distribution of the means of many samples can be assumed to be normal. The standard deviation (SD) of these many means is called the standard error (SE). It can be shown mathematically that the standard error SE equals the standard deviation SD of the sample divided by the square root of the sample size:
- frequentist SE = SD/sqrt(n) = $\frac{\hat{\sigma}}{\sqrt{n}}$
- Bayesian SE: Using Bayesian methods, the SE is the SD of the posterior distribution.
It is very important to keep the difference between SE and SD in mind! The SD measures the scatter of the data, whereas the SE measures the statistical uncertainty of the mean (or of another estimated parameter, Fig. \@ref(fig:sesd)). The SD is a descriptive statistic describing a characteristic of the data, whereas the SE is an inferential statistic showing us how far away the sample mean possibly is from the true mean. When the sample size increases, the SE becomes smaller, whereas the SD does not change (given the added observations are drawn at random from the same big population as the ones already in the sample).
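As a minimal sketch, SD and frequentist SE of the sample of 12 students drawn above could be computed as:

```r
sd(y)                    # SD: describes the scatter of the data
sd(y) / sqrt(length(y))  # SE = SD / sqrt(n): uncertainty of the sample mean
```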
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/sesd-1.png" alt="Illustration of the difference between SD and SE. The SD measures the scatter in the data (sample, tickmarks on the x-axis). The SD is an estimate for the scatter in the big population (grey histogram, normally not known). The SE measures the uncertainty of the sample mean (in blue). The SE measures approximately how far, in average the sample mean (blue) is from the true mean (brown)." width="672" />
<p class="caption">(\#fig:sesd)Illustration of the difference between SD and SE. The SD measures the scatter in the data (sample, tickmarks on the x-axis). The SD is an estimate for the scatter in the big population (grey histogram, normally not known). The SE measures the uncertainty of the sample mean (in blue). The SE measures approximately how far, in average the sample mean (blue) is from the true mean (brown).</p>
</div>
## Bayes theorem and the common aim of frequentist and Bayesian methods
### Bayes theorem for discrete events
The Bayes theorem describes the probability of event A conditional on event B (the probability of A after B has already occurred) from the probability of B conditional on A and the two probabilities of the events A and B:
$P(A|B) = \frac{P(B|A)P(A)}{P(B)}$
Imagine event A is "The person likes wine as a birthday present." and event B is "The person has no car.". The conditional probability of A given B is the probability that a person not owning a car likes wine. Answers from students on whether they have a car and what they like as a birthday present are summarized in Table \@ref(tab:winecar).
Table: (\#tab:winecar) Cross table of the student's birthday preference and car ownership.
car/birthday | flowers | wine | **sum** |
:------|:---------------|:---------------|:------------------|
no car | 6 | 9 | **15** |
car | 1 | 6 | **7** |
**sum** | **7**| **15**| **22** |
We can apply the Bayes theorem to obtain the probability that a student likes wine given he or she has no car, $P(A|B)$. Let's assume that only the ones who prefer wine go together for a glass of wine at the bar after the statistics course. While they drink wine, they tell each other about their cars and obtain the probability that a student who likes wine has no car, $P(B|A) = 0.6$. During the statistics class, the teacher asked the students about their car ownership and birthday preference. Therefore, they know that $P(A) =$ likes wine $= 0.68$ and $P(B) =$ no car $= 0.68$. With this information, they can obtain the probability that a student likes wine given he or she has no car, even if not all students without cars went to the bar: $P(A|B) = \frac{0.6*0.68}{0.68} = 0.6$.
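The numbers of this example can be reproduced directly from the cross table; a minimal sketch:

```r
pBgivenA <- 9 / 15   # P(no car | wine), learned at the bar
pA <- 15 / 22        # P(likes wine)
pB <- 15 / 22        # P(no car)
pBgivenA * pA / pB   # P(wine | no car) = 0.6, the same as 9/15 in the table
```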
### Bayes theorem for continuous parameters
When we use the Bayes theorem for analyzing data, the aim is to make probability statements about parameters. Because most parameters are measured on a continuous scale, we use probability density functions to describe what we know about them. The Bayes theorem can be formulated for probability density functions, denoted with $p(\theta)$ for a parameter $\theta$ (for examples of probability density functions see Chapter \@ref(distributions)).
What we are interested in is the probability of the parameter $\theta$ given the data, i.e., $p(\theta|y)$. This probability density function is called the posterior distribution of the parameter $\theta$. Here is the Bayes theorem formulated for obtaining the posterior distribution of a parameter from the data $y$ and the prior distribution of the parameter $p(\theta)$, assuming a model for the data generating process. The data model is defined by the likelihood, which specifies how the data $y$ are distributed given the parameters, $p(y|\theta)$:
$p(\theta|y) = \frac{p(y|\theta)p(\theta)}{p(y)} = \frac{p(y|\theta)p(\theta)}{\int p(y|\theta)p(\theta) d\theta}$
The probability of the data $p(y)$ is also called the scaling constant, because it is a constant. It is the integral of the likelihood over all possible values of the parameter(s) of the model.
### Estimating a mean assuming that the variance is known
For illustration, we first describe a simple (unrealistic) example for which it is almost possible, even for non-mathematicians, to follow the mathematical steps for solving the Bayes theorem. Even if we cannot follow all steps, this example illustrates the principle of how the Bayes theorem works for continuous parameters. The example is unrealistic because we assume that the variance $\sigma^2$ in the data $y$ is known.
We construct a data model by assuming that $y$ is normally distributed:
$p(y|\theta) = normal(\theta, \sigma)$, with $\sigma$ known. The function $normal$ defines the probability density function of the normal distribution (Chapter \@ref(distributions)).
The parameter, for which we would like to get the posterior distribution is $\theta$, the mean. We specify a prior distribution for $\theta$. Because the normal distribution is a conjugate prior for a normal data model with known variance, we use the normal distribution. Conjugate priors have nice mathematical properties (see Chapter \@ref(priors)) and are therefore preferred when the posterior distribution is obtained algebraically.
That is the prior:
$p(\theta) = normal(\mu_0, \tau_0)$
With the above data, data model and prior, the posterior distribution of the mean $\theta$ is defined by:
$p(\theta|y) = normal(\mu_n, \tau_n)$, where
$\mu_n= \frac{\frac{1}{\tau_0^2}\mu_0 + \frac{n}{\sigma^2}\bar{y}}{\frac{1}{\tau_0^2}+\frac{n}{\sigma^2}}$ and
$\frac{1}{\tau_n^2} = \frac{1}{\tau_0^2} + \frac{n}{\sigma^2}$
$\bar{y}$ is the arithmetic mean of the data. Because only this value is needed in order to obtain the posterior distribution, it is called the sufficient statistic.
From the mathematical formulas above and also from Fig. \@ref(fig:triplot) we see that the mean of the posterior distribution is a weighted average between the prior mean and $\bar{y}$ with weights equal to the precisions ($\frac{1}{\tau_0^2}$ and $\frac{n}{\sigma^2}$).
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/triplot-1.png" alt="Hypothetical example showing the result of applying the Bayes theorem for obtaining a posterior distribution of a continuous parameter. The likelhood is defined by the data and the model, the prior is expressing the knowledge about the parameter before looking at the data. Combining the two distributions using the Bayes theorem results in the posterior distribution." width="4900" />
<p class="caption">(\#fig:triplot)Hypothetical example showing the result of applying the Bayes theorem for obtaining a posterior distribution of a continuous parameter. The likelhood is defined by the data and the model, the prior is expressing the knowledge about the parameter before looking at the data. Combining the two distributions using the Bayes theorem results in the posterior distribution.</p>
</div>
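As a minimal numerical sketch of the update formulas above (all numbers, i.e. prior, known sigma and data, are hypothetical):

```r
mu0 <- 0; tau0 <- 2          # prior mean and prior standard deviation
sigma <- 1                   # known standard deviation of the data
yh <- c(0.8, 1.2, 0.5, 1.0)  # hypothetical data
nh <- length(yh)
prec_n <- 1/tau0^2 + nh/sigma^2                      # posterior precision 1/tau_n^2
mu_n  <- (mu0/tau0^2 + nh*mean(yh)/sigma^2) / prec_n # posterior mean
tau_n <- sqrt(1/prec_n)                              # posterior standard deviation
c(mu_n, tau_n)
```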
### Estimating the mean and the variance
We now move to a more realistic example, which is estimating the mean and the variance of a sample of weights of Snowfinches *Montifringilla nivalis* (Fig. \@ref(fig:ssp)). To analyze those data, a model with two parameters (the mean and the variance or standard deviation) is needed. The data model (or likelihood) is specified as $p(y|\theta, \sigma) = normal(\theta, \sigma)$.
<div class="figure" style="text-align: center">
<img src="images/snowfinch2.JPG" alt="Snowfinches stay above the treeline for winter. They come to feeders." width="949" />
<p class="caption">(\#fig:ssp)Snowfinches stay above the treeline for winter. They come to feeders.</p>
</div>
```r
# weight (g)
y <- c(47.5, 43, 43, 44, 48.5, 37.5, 41.5, 45.5)
n <- length(y)
```
Because there are two parameters, we need to specify a two-dimensional prior distribution. We looked up in @Gelman2014 that the conjugate prior distribution in our case is a Normal-Inverse-Chisquare distribution:
$p(\theta, \sigma) = N-Inv-\chi^2(\mu_0, \sigma_0^2/\kappa_0; v_0, \sigma_0^2)$
From the same reference we looked up what the posterior distribution looks like in our case:
$p(\theta,\sigma|y) = \frac{p(y|\theta, \sigma)p(\theta, \sigma)}{p(y)} = N-Inv-\chi^2(\mu_n, \sigma_n^2/\kappa_n; v_n, \sigma_n^2)$, with
$\mu_n= \frac{\kappa_0}{\kappa_0+n}\mu_0 + \frac{n}{\kappa_0+n}\bar{y}$,
$\kappa_n = \kappa_0+n$,
$v_n = v_0 +n$,
$v_n\sigma_n^2=v_0\sigma_0^2+(n-1)s^2+\frac{\kappa_0n}{\kappa_0+n}(\bar{y}-\mu_0)^2$
For this example, we need the arithmetic mean $\bar{y}$ and the variance $s^2$ of the sample for obtaining the posterior distribution. Therefore, these two statistics are the sufficient statistics.
The above formulas look intimidating, but we rarely do these calculations by hand. In most cases we let `R` do them for us by simulating many numbers from the posterior distribution, e.g., using the function `sim` from the package arm [@Gelman.2007]. In the end, we can visualize the distribution of these many numbers to have a look at the posterior distribution.
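A minimal sketch of how such simulations could be obtained for the Snowfinch weights `y` defined above (the exact call used to produce the figures is not shown in the text):

```r
library(arm)
nsim <- 10000
mod <- lm(y ~ 1)               # model with a mean and a residual standard deviation
bsim <- sim(mod, n.sim = nsim) # draws from the joint posterior distribution
head(bsim@coef)                # simulated values for the mean
head(bsim@sigma)               # simulated values for the standard deviation
```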
In Fig. \@ref(fig:jointpdist) the two-dimensional $(\theta, \sigma)$ posterior distribution is visualized using simulated values. The two-dimensional distribution is called the joint posterior distribution. The mountain of dots in Fig. \@ref(fig:jointpdist) visualizes the Normal-Inverse-Chisquare distribution that we calculated above. When all values of one parameter are displayed in a histogram, ignoring the values of the other parameter, this is called the marginal posterior distribution. Algebraically, the marginal distribution is obtained by integrating one of the two parameters out of the joint posterior distribution. This step is definitively much easier when simulated values from the posterior distribution are available!
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/jointpdist-1.png" alt="Visualization of the joint posterior distribution for the mean and standard deviation of Snowfinch weights. The lower left panel shows the two-dimensional joint posterior distribution, whereas the upper and right panel show the marginal posterior distributions of each parameter separately." width="672" />
<p class="caption">(\#fig:jointpdist)Visualization of the joint posterior distribution for the mean and standard deviation of Snowfinch weights. The lower left panel shows the two-dimensional joint posterior distribution, whereas the upper and right panel show the marginal posterior distributions of each parameter separately.</p>
</div>
The marginal posterior distribution of every parameter is what we normally report in a paper to describe statistical uncertainty.
In our example, the marginal distribution of the mean is a t-distribution (Chapter \@ref(distributions)). Frequentist statistical methods also use a t-distribution to describe the uncertainty of an estimated mean when the variance is not known. Thus, frequentist methods come to the same solution using a completely different approach and different techniques. Doesn't that dramatically increase our trust in statistical methods?
## Classical frequentist tests and alternatives
### Nullhypothesis testing
Null hypothesis testing constructs a model that describes how the data would look if the effect we expect did not exist. Then, the observed data are compared to what this null model predicts. If the data do not look like the null model predicts, we reject that model and accept that our expectation may be true.
To decide whether the data look like the null model predicts, the p-value is used. The p-value is the probability of observing the data or more extreme data given the null hypothesis is true.
Small p-values indicate that it is rather unlikely to observe the data or more extreme data given the null hypothesis $H_0$ is true.
Null hypothesis testing is problematic. We discuss some of the problems after having introduced the most commonly used classical tests.
### Comparison of a sample with a fixed value (one-sample t-test)
In some studies, we would like to compare the data to a theoretical value. The theoretical value is a fixed value, e.g. calculated based on physical, biochemical, ecological or any other theory. The statistical task is then to compare the mean of the data including its uncertainty with the theoretical value. The result of such a comparison may be an estimate of the mean of the data with its uncertainty or an estimate of the difference of the mean of the data to the theoretical value with the uncertainty of this difference.
<!-- fk: <NAME>: include a figure that shows the data in a box and the fixed value and the mean with CI, and one only showing data with stars-->
For example, a null hypothesis could be $H_0:$"The mean of Snowfinch weights is exactly 40g."
A normal distribution with a mean of $\mu_0=40$ and a variance equal to the estimated variance in the data, $s^2$, is then assumed to describe how we would expect the data to look if the null hypothesis were true. From that model it is possible to calculate the distribution of hypothetical means of many different hypothetical samples of sample size $n$. The result is a t-distribution (Fig. \@ref(fig:nht)). In classical tests, this distribution is standardized so that its variance is one. Then the sample mean, or in classical tests a standardized difference between the sample mean and the hypothetical mean of the null hypothesis (here 40g), called the test statistic $t = \frac{\bar{y}-\mu_0}{\frac{s}{\sqrt{n}}}$, is compared to that (standardized) t-distribution. If the test statistic falls well within the expected distribution, the null hypothesis is accepted: the data are well compatible with the null hypothesis. However, if the test statistic falls in the tails or outside the distribution, then the null hypothesis is rejected and we could write that the mean weight of Snowfinches is statistically significantly different from 40g. Unfortunately, we cannot infer the probability of the null hypothesis, nor how relevant the result is.
<div class="figure" style="text-align: center">
<img src="1.1-prerequisites_files/figure-html/nht-1.png" alt="Illustration of a one-sample t-test. The blue histogram shows the distribution of the measured weights with the sample mean (lightblue) indicated as a vertical line. The black line is the t-distribution that shows how hypothetical sample means are expected to be distributed if the big population of Snowfinches has a mean weight of 40g (i.e., if the null hypothesis was true). Orange area shows the area of the t-distribution that lays equal or farther away from 40g than the sample mean. The orange area is the p-value." width="768" />
<p class="caption">(\#fig:nht)Illustration of a one-sample t-test. The blue histogram shows the distribution of the measured weights with the sample mean (lightblue) indicated as a vertical line. The black line is the t-distribution that shows how hypothetical sample means are expected to be distributed if the big population of Snowfinches has a mean weight of 40g (i.e., if the null hypothesis was true). Orange area shows the area of the t-distribution that lays equal or farther away from 40g than the sample mean. The orange area is the p-value.</p>
</div>
We can use the r-function `t.test` to calculate the p-value of a one sample t-test.
```r
t.test(y, mu=40)
```
```
##
## One Sample t-test
##
## data: y
## t = 3.0951, df = 7, p-value = 0.01744
## alternative hypothesis: true mean is not equal to 40
## 95 percent confidence interval:
## 40.89979 46.72521
## sample estimates:
## mean of x
## 43.8125
```
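The test statistic and the p-value in the output above can also be reproduced by hand, following the formula for $t$ given earlier; a minimal sketch:

```r
tval <- (mean(y) - 40) / (sd(y) / sqrt(length(y)))  # standardized difference
pval <- 2 * pt(-abs(tval), df = length(y) - 1)      # two-sided p-value
round(c(t = tval, p = pval), 4)
```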
The output of the r-function `t.test` also includes the mean and the 95% confidence interval (or compatibility or uncertainty interval) of the mean. The CI could, alternatively, be obtained as the 2.5% and 97.5% quantiles of a t-distribution with a mean equal to the sample mean, degrees of freedom equal to the sample size minus one and a standard deviation equal to the standard error of the mean.
```r
# lower limit of 95% CI
mean(y) + qt(0.025, df=length(y)-1)*sd(y)/sqrt(n)
```
```
## [1] 40.89979
```
```r
# upper limit of 95% CI
mean(y) + qt(0.975, df=length(y)-1)*sd(y)/sqrt(n)
```
```
## [1] 46.72521
```
When applying the Bayes theorem to obtain the posterior distribution of the mean, we end up with the same t-distribution as described above, provided we use flat prior distributions for the mean and the standard deviation. Thus, the two different approaches end up with the same result!
```r
par(mar=c(4.5, 5, 2, 2))
hist(y, col="blue", xlim=c(30,52), las=1, freq=FALSE, main=NA, ylim=c(0, 0.3))
abline(v=mean(y), lwd=2, col="lightblue")
abline(v=40, lwd=2)
lines(density(bsim@coef))
text(45, 0.3, "posterior distribution\nof the mean of y", cex=0.8, adj=c(0,1), xpd=NA)
```
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/unnamed-chunk-8-1.png" alt="Illustration of the posterior distribution of the mean. The blue histogram shows the distribution of the measured weights with the sample mean (lightblue) indicated as a vertical line. The black line is the posterior distribution that shows what we know about the mean after having looked at the data. The area under the posterior density function that is larger than 40 is the posterior probability of the hypothesis that the true mean Snwofinch weight is larger than 40g." width="672" />
<p class="caption">(\#fig:unnamed-chunk-8)Illustration of the posterior distribution of the mean. The blue histogram shows the distribution of the measured weights with the sample mean (lightblue) indicated as a vertical line. The black line is the posterior distribution that shows what we know about the mean after having looked at the data. The area under the posterior density function that is larger than 40 is the posterior probability of the hypothesis that the true mean Snwofinch weight is larger than 40g.</p>
</div>
The posterior probability of the hypothesis that the true mean Snowfinch weight is larger than 40g, $P(H:\mu>40)$, is equal to the proportion of simulated random values from the posterior distribution, saved in the vector `bsim@coef`, that are larger than 40.
```r
# Two ways of calculating the proportion of values
# larger than a specific value within a vector of values
round(sum(bsim@coef[,1]>40)/nrow(bsim@coef),2)
```
```
## [1] 0.99
```
```r
round(mean(bsim@coef[,1]>40),2)
```
```
## [1] 0.99
```
```r
# Note: logical values TRUE and FALSE become
# the numeric values 1 and 0 within the functions sum() and mean()
```
We, thus, can be pretty sure that the mean Snowfinch weight (in the big world population) is larger than 40g. Such a conclusion is not very informative, because it does not tell us how much larger we can expect the mean Snowfinch weight to be. Therefore, we prefer reporting a credible interval (or compatibility interval or uncertainty interval) that tells us what values for the mean Snowfinch weight are compatible with the data (given the data model we used realistically reflects the data generating process). Based on such an interval, we can conclude that we are pretty sure that the mean Snowfinch weight is between 40 and 48g.
```r
# 80% credible interval, compatibility interval, uncertainty interval
quantile(bsim@coef[,1], probs=c(0.1, 0.9))
```
```
## 10% 90%
## 42.07725 45.54080
```
```r
# 95% credible interval, compatibility interval, uncertainty interval
quantile(bsim@coef[,1], probs=c(0.025, 0.975))
```
```
## 2.5% 97.5%
## 40.90717 46.69152
```
```r
# 99% credible interval, compatibility interval, uncertainty interval
quantile(bsim@coef[,1], probs=c(0.005, 0.995))
```
```
## 0.5% 99.5%
## 39.66181 48.10269
```
### Comparison of the locations between two groups (two-sample t-test)
Many research questions aim at measuring differences between groups. For example, we could be curious to know how different in size car owners are from people not owning a car.
A boxplot is a nice way to visualize the ell length measurements of two (or more) groups (Fig. \@ref(fig:boxplt)). From the boxplot we do not see how many observations are in the two samples, but we can add that information to the plot. The boxplot visualizes the samples, but it does not show what we know about the big (unmeasured) population means. To show that, we need to add a compatibility interval (or uncertainty interval, credible interval, confidence interval; in brown in Fig. \@ref(fig:boxplt)).
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/boxplt-1.png" alt="Ell length of car owners (Y) and people not owning a car (N). Horizontal bar = median, box = interquartile range, whiskers = extremest observation within 1.5 times the interquartile range from the quartile, circles=observations farther than 1.5 times the interquartile range from the quartile. Filled brown circles = means, vertical brown bars = 95% compatibility interval." width="672" />
<p class="caption">(\#fig:boxplt)Ell length of car owners (Y) and people not owning a car (N). Horizontal bar = median, box = interquartile range, whiskers = extremest observation within 1.5 times the interquartile range from the quartile, circles=observations farther than 1.5 times the interquartile range from the quartile. Filled brown circles = means, vertical brown bars = 95% compatibility interval.</p>
</div>
When we add the two means with a compatibility interval, we see what we know about the two means, but we still do not see what we know about the difference between the two means. The uncertainties of the means do not show the uncertainty of the difference between the means. To do so, we need to extract the difference between the two means from a model that describes (abstractly) how the data have been generated. Such a model is a linear model, which we will introduce in Chapter \@ref(lm). Its second parameter measures the difference in the means of the two groups, and from the simulated posterior distribution we can extract a 95% compatibility interval.
Thus, we can conclude that the average ell length of car owners is with high probability between 0.5 cm smaller and 2.5 cm larger than the average ell length of people not owning a car.
```r
mod <- lm(ell~car, data=dat)
mod
```
```
##
## Call:
## lm(formula = ell ~ car, data = dat)
##
## Coefficients:
## (Intercept) carY
## 43.267 1.019
```
```r
bsim <- sim(mod, n.sim=nsim)
quantile(bsim@coef[,"carY"], prob=c(0.025, 0.5, 0.975))
```
```
## 2.5% 50% 97.5%
## -0.501348 1.014478 2.494324
```
The corresponding two-sample t-test gives a p-value for the null hypothesis "The difference between the two means equals zero.", a confidence interval for the difference, and the two means. While the function `lm` gives the difference Y minus N, the function `t.test` gives the difference N minus Y. Luckily, the two means are also given in the output, so we know which group mean is the larger one.
```r
t.test(ell~car, data=dat, var.equal=TRUE)
```
```
##
## Two Sample t-test
##
## data: ell by car
## t = -1.4317, df = 20, p-value = 0.1677
## alternative hypothesis: true difference in means between group N and group Y is not equal to 0
## 95 percent confidence interval:
## -2.5038207 0.4657255
## sample estimates:
## mean in group N mean in group Y
## 43.26667 44.28571
```
In both of the approaches we used to compare the two means, the Bayesian posterior distribution of the difference and the t-test with its confidence interval of the difference, we used a data model. We thus assumed that the observations are normally distributed. In some cases, such an assumption is not reasonable, and then the result is not reliable. In such cases, we can either search for a more realistic model or use non-parametric (also called distribution-free) methods. Nowadays, we have almost infinite possibilities to construct data models (e.g. generalized linear models and beyond). Therefore, we normally start looking for a model that fits the data better. However, in former days, all these possibilities did not exist (or were not easily available for non-mathematicians). Therefore, we here introduce two such non-parametric methods, the Wilcoxon test (or Mann-Whitney U-test) and the randomisation test.
Some of the distribution-free statistical methods are based on the ranks instead of the values of the observations. The principle of the Wilcoxon test is to rank the observations and sum the ranks per group. It is not completely true that non-parametric methods do not have a model. The model of the Wilcoxon test "knows" how the difference in the sum of the ranks between two groups is distributed given the means of the two groups do not differ (null hypothesis). Therefore, it is possible to get a p-value, e.g. with the function `wilcox.test`.
```r
wilcox.test(ell~car, data=dat)
```
```
##
## Wilcoxon rank sum test with continuity correction
##
## data: ell by car
## W = 34.5, p-value = 0.2075
## alternative hypothesis: true location shift is not equal to 0
```
The continuity correction mentioned in the output tells us that ranking was ambiguous because some values are equal. Equal values that need to be ranked are called ties.
The result of the Wilcoxon test tells us how probable it is to observe the difference in the rank sum between the two samples, or a more extreme difference, given the means of the two groups are equal. That is at least something.
A similar result is obtained by using a randomisation test. This test is not based on ranks but on the original values. The aim of the randomisation is to simulate the distribution of the difference in the arithmetic means of the two groups assuming this difference were zero. To do so, the observed values are randomly allocated to the two groups. Because of this random allocation, we expect that, if we repeat that virtual experiment many times, the average difference between the group means is zero (both virtual samples are drawn from the same big population).
We can use a loop in R to repeat the random re-assignment to the two groups and, each time, extract the difference between the group means. As a result, we have a vector of many (`nsim`) values that all are possible differences between the group means given the two samples were drawn from the same population. The proportion of these values that have an equal or larger absolute value than the observed difference gives the probability that the observed or a larger difference is observed given the null hypothesis is true; thus, that proportion is a p-value.
```r
diffH0 <- numeric(nsim)
for(i in 1:nsim){
randomcars <- sample(dat$car)
rmod <- lm(ell~randomcars, data=dat)
diffH0[i] <- coef(rmod)["randomcarsY"]
}
mean(abs(diffH0)>abs(coef(mod)["carY"])) # p-value
```
```
## [1] 0.1858
```
Visualizing the possible differences between the group means given the null hypothesis were true shows that the observed difference is well within what is expected if the two groups did not differ in their means (Fig. \@ref(fig:ranhist)).
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/ranhist-1.png" alt="Histogram if differences between the means of randomly assigned groups (grey) and the difference between the means of the two observed groups (red)" width="672" />
<p class="caption">(\#fig:ranhist)Histogram if differences between the means of randomly assigned groups (grey) and the difference between the means of the two observed groups (red)</p>
</div>
The randomization test results in a p-value, and we can also report the observed difference between the group means. However, it does not tell us which values of the difference would all be compatible with the data. We do not get an uncertainty measurement for the difference.
In order to get a compatibility interval without assuming a distribution for the data (thus non-parametric) we could bootstrap the samples.
Bootstrapping is sampling observations from the data with replacement. For example, if we have a sample of 8 observations, we draw 8 times a random observation from these 8 observations, each time assuming that all 8 observations are available. Thus, a bootstrapped sample may include some observations several times, whereas others are missing. In this way, we simulate the variance in the data that is due to the fact that our data do not contain the whole big population.
Also bootstrapping can be programmed in R using a loop.
```r
diffboot <- numeric(nsim)
for(i in 1:nsim){
ngroups <- 1
while(ngroups==1){
bootrows <- sample(1:nrow(dat), replace=TRUE)
ngroups <- length(unique(dat$car[bootrows]))
}
rmod <- lm(ell~car, data=dat[bootrows,])
diffboot[i] <- coef(rmod)[2]
}
quantile(diffboot, prob=c(0.025, 0.975))
```
```
## 2.5% 97.5%
## -0.3395643 2.4273810
```
The resulting values for the difference between the two group means can be interpreted as the distribution of those differences, if we had repeated the study many times (Fig. \@ref(fig:histboot)).
A 95% interval of the distribution corresponds to a 95% compatibility interval (or confidence interval or uncertainty interval).
```r
hist(diffboot); abline(v=coef(mod)[2], lwd=2, col="red")
```
<div class="figure">
<img src="1.1-prerequisites_files/figure-html/histboot-1.png" alt="Histogram of the boostrapped differences between the group means (grey) and the observed difference." width="672" />
<p class="caption">(\#fig:histboot)Histogram of the boostrapped differences between the group means (grey) and the observed difference.</p>
</div>
For both methods, the randomisation test and bootstrapping, we have to assume that all observations are independent. Randomization and bootstrapping become complicated or even unfeasible when data are structured.
## Summary
Bayesian data analysis is applying the Bayes theorem for summarizing knowledge based on data, priors and the model assumptions.
Frequentist statistics is quantifying uncertainty by hypothetical repetitions.
# Capture-mark recapture model with a mixture structure to account for missing sex-variable for parts of the individuals {#cjs_with_mix}
## Introduction
In some species the identification of the sex is not possible for all individuals without sampling DNA. For example, morphological dimorphism is absent or so weak that part of the individuals cannot be assigned to one of the sexes. Particularly in ornithological long-term capture-recapture data sets, which typically are collected by volunteer bird ringers who normally do not have the possibility to analyse DNA, the sex identification is often missing for part of the individuals. For estimating survival, it would nevertheless be valuable to include the data of all individuals, use the information on sex-specific effects on survival wherever possible, but account for the fact that for part of the individuals the sex is not known. We here explain how a Cormack-Jolly-Seber model can be combined with a mixture model in order to allow for a joint analysis of individuals with and without identified sex.
We gave an introduction to the Cormack-Jolly-Seber model in Chapter 14.5 of the book @KornerNievergelt2015. Here we expand this model by a mixture structure that allows including individuals with a missing categorical predictor variable, such as sex.
## Data description
```{r datasim, echo=TRUE}
## simulate data
# true parameter values
theta <- 0.6 # proportion of males
nocc <- 15 # number of years in the data set
b0 <- matrix(NA, ncol=nocc-1, nrow=2)
b0[1,] <- rbeta((nocc-1), 3, 4) # capture probability of males
b0[2,] <- rbeta((nocc-1), 2, 4) # capture probability of females
a0 <- matrix(NA, ncol=2, nrow=2)
a1 <- matrix(NA, ncol=2, nrow=2)
a0[1,1]<- qlogis(0.7) # average annual survival for adult males
a0[1,2]<- qlogis(0.3) # average annual survival for juveniles
a0[2,1] <- qlogis(0.55) # average annual survival for adult females
a0[2,2] <- a0[1,2]
a1[1,1] <- 0
a1[1,2] <- -0.5
a1[2,1] <- -0.8
a1[2,2] <- a1[1,2]
nindi <- 1000 # number of individuals with identified sex
nindni <- 1500 # number of individuals with non-identified sex
nind <- nindi + nindni # total number of individuals
y <- matrix(ncol=nocc, nrow=nind)
z <- matrix(ncol=nocc, nrow=nind)
first <- sample(1:(nocc-1), nind, replace=TRUE)
sex <- sample(c(1,2), nind, prob=c(theta, 1-theta), replace=TRUE)
juvfirst <- sample(c(0,1), nind, prob=c(0.5, 0.5), replace=TRUE)
juv <- matrix(0, nrow=nind, ncol=nocc)
for(i in 1:nind) juv[i,first[i]] <- juvfirst[i] # age class at first capture (1 = juvenile)
x <- runif(nocc-1, -2, 2) # a time dependent covariate
p <- b0 # recapture probability
phi <- array(NA, dim=c(2, 2, nocc-1))
# for ad males
phi[1,1,] <- plogis(a0[1,1]+a1[1,1]*x)
# for ad females
phi[2,1,] <- plogis(a0[2,1]+a1[2,1]*x)
# for juvs
phi[1,2,] <- phi[2,2,] <- plogis(a0[2,2]+a1[2,2]*x)
for(i in 1:nind){
z[i,first[i]] <- 1
y[i, first[i]] <- 1
for(t in (first[i]+1):nocc){
z[i, t] <- rbinom(1, size=1, prob=z[i,t-1]*phi[sex[i],juv[i,t-1]+1, t-1])
y[i, t] <- rbinom(1, size=1, prob=z[i,t]*p[sex[i],t-1])
}
}
y[is.na(y)] <- 0
```
The mark-recapture data set consists of capture histories of `r round(nind)` individuals over `r round(nocc)` time periods. For each time period $t$ and individual $i$ the capture history matrix $y$ contains $y_{it}=1$ if the individual $i$ is captured during time period $t$, or $y_{it}=0$ if the individual $i$ is not captured during time period $t$. The marking time period varies between individuals from 1 to `r round(nocc-1)`. At the marking time period, the age of the individuals was classified either as juvenile or as adult. Juveniles turn into adults after one time period, thus age is known for all individuals during all time periods after marking. For `r round(nindi)` of the `r round(nind)` individuals the sex is identified, whereas for `r round(nindni)` individuals the sex is unknown. The example data contain one covariate $x$ that takes on one value for each time period.
```{r}
# bundle the data for Stan
i <- 1:nindi
ni <- (nindi+1):nind
datax <- list(yi=y[i,], nindi=nindi, sex=sex[i], nocc=nocc,
yni=y[ni,], nindni=nindni, firsti=first[i], firstni=first[ni],
juvi=juv[i,]+1, juvni=juv[ni,]+1, year=1:nocc, x=x)
```
## Model description
The observations $y_{it}$, indicators of whether individual $i$ was recaptured during time period $t$, are modelled conditional on the latent true state of the individual birds $z_{it}$ (0 = dead or permanently emigrated, 1 = alive and at the study site) as Bernoulli variables. The probability $P(y_{it} = 1)$ is the product of the probability that an alive individual is recaptured, $p_{it}$, and the state of the bird $z_{it}$ (alive = 1, dead = 0). Thus, a dead bird cannot be recaptured, whereas for a bird alive during time period $t$, the recapture probability equals $p_{it}$:
$$y_{it} \sim Bernoulli(z_{it}p_{it})$$
The latent state variable $z_{it}$ is a Markovian variable with the state at time $t$ being dependent on the state at time $t-1$ and the apparent survival probability $\phi_{it}$:
$$z_{it} \sim Bernoulli(z_{it-1}\phi_{it})$$
We use the term apparent survival in order to indicate that the parameter $\phi$ is a product of site fidelity and survival. Thus, individuals that permanently emigrated from the study area cannot be distinguished from dead individuals.
In both models, the parameters $\phi$ and $p$ were modelled as sex-specific. However, for part of the individuals, sex could not be identified, i.e. sex was missing. Ignoring these missing values would most likely lead to a bias because they were not missing at random: the probability that sex can be identified increases with age and most likely differs between the sexes. Therefore, we included a mixture model for the sex:
$$Sex_i \sim Categorical(q_i)$$
where $q_i$ is a vector of length 2, containing the probability of being a male and a female, respectively. In this way, the sex of the non-identified individuals was assumed to be male or female with probability $q[1]$ and $q[2]=1-q[1]$, respectively. This model corresponds to the finite mixture model introduced by @Pledger2003 in order to account for unknown classes of birds (heterogeneity). However, in our case, for parts of the individuals the class (sex) was known.
In the example model, we constrain apparent survival to be linearly dependent on a covariate $x$, with different slopes for males, females and juveniles, using the logit link function:
$$logit(\phi_{it}) = a0_{sex-age-class[it]} + a1_{sex-age-class[it]}x_t$$
Annual recapture probability was modelled for each year and age and sex class independently:
$$p_{it} = b0_{t,sex-age-class[it]}$$
Uniform prior distributions were used for all probability parameters, whose parameter space is limited to values between 0 and 1. For the intercepts $a0$ we used normal distributions with a mean of 0 and a standard deviation of 1.5, and for the slopes $a1$ normal distributions with a mean of 0 and a standard deviation of 3.
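To get a feeling for what a normal prior on the logit scale implies for the survival probabilities themselves, one can simulate from the prior and back-transform (a quick sketch, not part of the original analysis):
```{r, eval=FALSE}
# implied prior for apparent survival when a0 ~ normal(0, 1.5) and x = 0
hist(plogis(rnorm(10000, 0, 1.5)), breaks = 30,
     main = "implied prior for phi at x = 0", xlab = "apparent survival")
```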
## The Stan code
The trick for coding the CMR-mixture model in Stan is to formulate the model 3 times:
1. For the individuals with identified sex
2. For the males that were not identified
3. For the females that were not identified
Then, for the non-identified individuals, a mixture model is formulated that assigns to each individual a probability of being male or female.
```{r engine='cat', engine.opts=list(file="stanmodels/cmr_mixture_model.stan",lang="stan")}
data {
int<lower=2> nocc; // number of capture events
int<lower=0> nindi; // number of individuals with identified sex
int<lower=0> nindni; // number of individuals with non-identified sex
int<lower=0,upper=2> yi[nindi,nocc]; // CH[i,k]: individual i captured at k
int<lower=0,upper=nocc-1> firsti[nindi]; // year of first capture
int<lower=0,upper=2> yni[nindni,nocc]; // CH[i,k]: individual i captured at k
int<lower=0,upper=nocc-1> firstni[nindni]; // year of first capture
int<lower=1, upper=2> sex[nindi];
int<lower=1, upper=2> juvi[nindi, nocc];
int<lower=1, upper=2> juvni[nindni, nocc];
int<lower=1> year[nocc];
real x[nocc-1]; // a covariate
}
transformed data {
int<lower=0,upper=nocc+1> lasti[nindi]; // last[i]: ind i last capture
int<lower=0,upper=nocc+1> lastni[nindni]; // last[i]: ind i last capture
lasti = rep_array(0,nindi);
lastni = rep_array(0,nindni);
for (i in 1:nindi) {
for (k in firsti[i]:nocc) {
if (yi[i,k] == 1) {
if (k > lasti[i]) lasti[i] = k;
}
}
}
for (ii in 1:nindni) {
for (kk in firstni[ii]:nocc) {
if (yni[ii,kk] == 1) {
if (kk > lastni[ii]) lastni[ii] = kk;
}
}
}
}
parameters {
real<lower=0, upper=1> theta[nindni]; // probability of being male for non-identified individuals
real<lower=0, upper=1> b0[2,nocc-1]; // intercept of p
real a0[2,2]; // intercept for phi
real a1[2,2]; // coefficient for phi
}
transformed parameters {
real<lower=0,upper=1>p_male[nindni,nocc]; // capture probability
real<lower=0,upper=1>p_female[nindni,nocc]; // capture probability
real<lower=0,upper=1>p[nindi,nocc]; // capture probability
real<lower=0,upper=1>phi_male[nindni,nocc-1]; // survival probability
real<lower=0,upper=1>chi_male[nindni,nocc+1]; // probability that an individual
// is never recaptured after its
// last capture
real<lower=0,upper=1>phi_female[nindni,nocc-1]; // survival probability
real<lower=0,upper=1>chi_female[nindni,nocc+1]; // probability that an individual
// is never recaptured after its
// last capture
real<lower=0,upper=1>phi[nindi,nocc-1]; // survival probability
real<lower=0,upper=1>chi[nindi,nocc+1]; // probability that an individual
// is never recaptured after its
// last capture
{
int k;
int kk;
for(ii in 1:nindi){
if (firsti[ii]>1) {
for (z in 1:(firsti[ii]-1)){
phi[ii,z] = 1;
}
}
for(tt in firsti[ii]:(nocc-1)) {
// linear predictor for phi:
phi[ii,tt] = inv_logit(a0[sex[ii], juvi[ii,tt]] + a1[sex[ii], juvi[ii,tt]]*x[tt]);
}
}
for(ii in 1:nindni){
if (firstni[ii]>1) {
for (z in 1:(firstni[ii]-1)){
phi_female[ii,z] = 1;
phi_male[ii,z] = 1;
}
}
for(tt in firstni[ii]:(nocc-1)) {
// linear predictor for phi:
phi_male[ii,tt] = inv_logit(a0[1, juvni[ii,tt]] + a1[1, juvni[ii,tt]]*x[tt]);
phi_female[ii,tt] = inv_logit(a0[2, juvni[ii,tt]]+ a1[2, juvni[ii,tt]]*x[tt]);
}
}
for(i in 1:nindi) {
// linear predictor for p for identified individuals
for(w in 1:firsti[i]){
p[i,w] = 1;
}
for(kkk in (firsti[i]+1):nocc)
p[i,kkk] = b0[sex[i],year[kkk-1]];
chi[i,nocc+1] = 1.0;
k = nocc;
while (k > firsti[i]) {
chi[i,k] = (1 - phi[i,k-1]) + phi[i,k-1] * (1 - p[i,k]) * chi[i,k+1];
k = k - 1;
}
if (firsti[i]>1) {
for (u in 1:(firsti[i]-1)){
chi[i,u] = 0;
}
}
chi[i,firsti[i]] = (1 - p[i,firsti[i]]) * chi[i,firsti[i]+1];
}// close definition of transformed parameters for identified individuals
for(i in 1:nindni) {
// linear predictor for p for non-identified individuals
for(w in 1:firstni[i]){
p_male[i,w] = 1;
p_female[i,w] = 1;
}
for(kkkk in (firstni[i]+1):nocc){
p_male[i,kkkk] = b0[1,year[kkkk-1]];
p_female[i,kkkk] = b0[2,year[kkkk-1]];
}
chi_male[i,nocc+1] = 1.0;
chi_female[i,nocc+1] = 1.0;
k = nocc;
while (k > firstni[i]) {
chi_male[i,k] = (1 - phi_male[i,k-1]) + phi_male[i,k-1] * (1 - p_male[i,k]) * chi_male[i,k+1];
chi_female[i,k] = (1 - phi_female[i,k-1]) + phi_female[i,k-1] * (1 - p_female[i,k]) * chi_female[i,k+1];
k = k - 1;
}
if (firstni[i]>1) {
for (u in 1:(firstni[i]-1)){
chi_male[i,u] = 0;
chi_female[i,u] = 0;
}
}
chi_male[i,firstni[i]] = (1 - p_male[i,firstni[i]]) * chi_male[i,firstni[i]+1];
chi_female[i,firstni[i]] = (1 - p_female[i,firstni[i]]) * chi_female[i,firstni[i]+1];
} // close definition of transformed parameters for non-identified individuals
} // close block of transformed parameters (excluding the parameter declarations)
} // close transformed parameters
model {
// priors
theta ~ beta(1, 1);
for (g in 1:(nocc-1)){
b0[1,g]~beta(1,1);
b0[2,g]~beta(1,1);
}
a0[1,1]~normal(0,1.5);
a0[1,2]~normal(0,1.5);
a1[1,1]~normal(0,3);
a1[1,2]~normal(0,3);
a0[2,1]~normal(0,1.5);
a0[2,2]~normal(a0[1,2],0.01); // for juveniles, we assume that the intercept and the effect of the covariate are independent of sex
a1[2,1]~normal(0,3);
a1[2,2]~normal(a1[1,2],0.01);
// likelihood for identified individuals
for (i in 1:nindi) {
if (lasti[i]>0) {
for (k in firsti[i]:lasti[i]) {
if(k>1) target+= (log(phi[i, k-1]));
if (yi[i,k] == 1) target+=(log(p[i,k]));
else target+=(log1m(p[i,k]));
}
}
target+=(log(chi[i,lasti[i]+1]));
}
// likelihood for non-identified individuals
for (i in 1:nindni) {
real log_like_male = 0;
real log_like_female = 0;
if (lastni[i]>0) {
for (k in firstni[i]:lastni[i]) {
if(k>1){
log_like_male += (log(phi_male[i, k-1]));
log_like_female += (log(phi_female[i, k-1]));
}
if (yni[i,k] == 1){
log_like_male+=(log(p_male[i,k]));
log_like_female+=(log(p_female[i,k]));
}
else{
log_like_male+=(log1m(p_male[i,k]));
log_like_female+=(log1m(p_female[i,k]));
}
}
}
log_like_male += (log(chi_male[i,lastni[i]+1]));
log_like_female += (log(chi_female[i,lastni[i]+1]));
target += log_mix(theta[i], log_like_male, log_like_female);
}
}
```
## Call Stan from R, check convergence and look at results
```{r runstan, eval=FALSE}
# Run STAN
library(rstan)
fit <- stan(file = "stanmodels/cmr_mixture_model.stan", data=datax, verbose = FALSE)
# for above simulated data (25000 individuals x 15 time periods)
# computing time is around 48 hours on an Intel Core i7 laptop
# for larger data sets, we recommend moving the transformed parameters block
# to the model block to avoid monitoring p_male, p_female,
# phi_male and phi_female, which can cause memory problems
# launch_shinystan(fit) # diagnostic plots
summary(fit)
```
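Besides `launch_shinystan()`, a few standard convergence checks can be done directly with rstan functions (a sketch, assuming the fitted object `fit` from above):
```{r, eval=FALSE}
print(fit, pars = c("a0", "a1", "b0"))   # check Rhat and effective sample sizes
traceplot(fit, pars = c("a0", "a1"))     # visual check of chain mixing
```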
```{r savefit, eval=FALSE, echo=FALSE}
#save(sumfit2, file="stanmodels/summaryfit_cmrmix.rda")
```
```{r loadfit, echo=FALSE}
load("stanmodels/summaryfit_cmrmix.rda")
sumfit2
```
<file_sep># (PART) BAYESIAN DATA ANALYSIS {-}
# Introduction to PART II {#PART-II}
```{r fig.align='center', echo=FALSE, fig.link=''}
knitr::include_graphics('images/part_II.jpg', dpi = 150)
```
------
## Further reading {-}
A really good introductory book on Bayesian data analysis is [@McElreath2016]. It starts with a thorough introduction to applying Bayes' theorem for drawing inference from data and carefully discusses what can and cannot be concluded from statistical results. We like this book very much.
We like looking up statistical methods in papers and books written by <NAME> [e.g. @Gelman2014] and <NAME> (e.g. [@Hastie2009, @Efron2016]) because both explain complicated things in a concise and understandable way.
<file_sep>
# Generalized linear models {#glm}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Introduction
## Summary
xxx
<file_sep>
# Modeling spatial data using GLMM {#spatial_glmm}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Introduction
## Summary
xxx
<file_sep># Bayesian Data Analysis in Ecology with R and Stan
This repository mainly contains the files to produce the online book "Bayesian Data Analysis in Ecology with R and Stan".
## Content
1. **Top level of the repository**
The top level of the repository mainly contains the files that are used by [bookdown](https://bookdown.org/yihui/bookdown/) to build the online book. These include the `.yml` files with the different settings of the book and the `.Rmd` files that contain the content of the book, with one such file for each chapter.
2. **admin-Folder**
Contains administrative notes.
3. **docs-Folder**
This folder contains all the files that are used to produce the page published at https://tobiasroth.github.io/BDAEcology. Note that if the book is built locally using [bookdown](https://bookdown.org/yihui/bookdown/), the resulting files for the html version are saved in the folder `docs_local`. Whenever we add new content that we are happy with, we manually copy the content of `docs_local` to `docs` and thus publish the new content. As a consequence, https://tobiasroth.github.io/BDAEcology may correspond to older versions of the `.Rmd` files at the top level of the repository.
4. **future_chapters-Folder**
This folder contains the `.Rmd` files for chapters that we may eventually include in the book. Currently, these chapters are mostly under construction.
5. **images**
Contains pictures that are used in the book. Note that this folder does not include the figures produced by the `.Rmd` files.
6. **RData-Folder**
This folder contains some of the data that we use in the book. The data files are stored as `.RData` files that can be directly loaded into R using the function `load()`. By convention, the name of the file is also the name of the R object that will be loaded, e.g. `load("RData/elevation.RData")` will load a tibble named `elevation`. Note that most of the data files that we use in the book are instead available from the R package [`blmeco`](https://github.com/fraenzi/blmeco).
7. **references-Folder**
Contains the `.bib` files with the database of references used in the book. The file `References_fk.bib` is an export of the database maintained by Fränzi and should not be changed. Additional references, or references that you would like to improve, should be added to `References_new.bib`.
8. **settings-Folder**
9. **stanmodels-Folder**
Contains the Stan model descriptions for all the models used in the book.
## How to contribute
In order to contribute you need to [join GitHub](https://github.com/join).
You can contribute in several ways:
- To make a general comment or add a wish for new content you can add an [issue](https://github.com/TobiasRoth/BDAEcology/issues).
- The second way is to contribute content directly through the edit button at the top of the page (i.e. a symbol showing a pencil in a square). That button is linked to the rmarkdown source file of each page. You can correct typos or add new text and then submit a [GitHub pull request](https://help.github.com/articles/about-pull-requests/).
- You can download the entire repository to your local computer, improve the text or R code, run the code to test it and, as soon as you are happy with the improvement, submit the entire change as a pull request.
We try to respond to you as quickly as possible. We are looking forward to your contribution!
## Contributors
Thanks a lot to the [people that contributed to this book](https://github.com/TobiasRoth/BDAEcology/graphs/contributors). We would also like to acknowledge the following people, who contributed to this book by means other than GitHub:
- <NAME>
## Dependencies
- The main data files we use as examples in the book are contained in the R-Package `blmeco` available from cran or from https://github.com/fraenzi/blmeco. <file_sep>
# Reproducible research {#reproducibleresearch}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Summary
## Further reading
- [Rmarkdown](https://bookdown.org/yihui/rmarkdown/): The first official book authored by the core R Markdown developers that provides a comprehensive and accurate reference to the R Markdown ecosystem. With R Markdown, you can easily create reproducible data analysis reports, presentations, dashboards, interactive applications, books, dissertations, websites, and journal articles, while enjoying the simplicity of Markdown and the great power of R and other languages.
- [Bookdown by <NAME>](https://bookdown.org/yihui/bookdown/): A guide to authoring books with R Markdown, including how to generate figures and tables, and insert cross-references, citations, HTML widgets, and Shiny apps in R Markdown. The book can be exported to HTML, PDF, and e-books (e.g. EPUB). The book style is customizable. You can easily write and preview the book in RStudio IDE or other editors, and host the book wherever you want (e.g. bookdown.org). Our book is written using bookdown.
<file_sep># Decisions / guidelines
Here we write down various decisions and guidelines.
## Figures
- We use `ggplot` for standard plots and base graphics for special plots.
## Text references
- In the text we always use the abbreviation `Fig.` when referring to a figure. Examples:
- In Fig. 1 we show ...
- This is how it is (Fig. 1).
- In the text we always use the abbreviation `Tab.` when referring to a table. Examples:
- In Tab. 1 we show ...
- This is how it is (Tab. 1).
## Formulas
We try to use the parameter notation as consistently as possible:
- *Estimates*: We use ^ (hat) when we mean the estimate of a parameter, for example when the mean of the data is used as an estimate of the population mean (mu^ = mean(of the data), mu = true mean of the population) or when we report an estimate ($\hat{\beta} = 0.34$).
We should avoid $\hat{y}$, because the observation is known and is not estimated.
## External data
- Data files are stored in the package blmeco whenever possible
## References
Use entries from `References_fk.bib` (exported from Citavi, fk). Add new references to `References_neu.bib`.
<file_sep># (PART) APPENDICES {-}
# References {-}
<file_sep># Zero-inflated Poisson Mixed Model {#zeroinflated-poisson-lmm}
## Introduction
Usually we describe the outcome variable with a single distribution, such as the normal distribution in the case of linear (mixed) models, and Poisson or binomial distributions in the case of generalized linear (mixed) models. In life sciences, however, quite often the data are actually generated by more than one process. In such cases the distribution of the data could be the result of two or more different distributions. If we do not account for these different processes, our inferences are likely to be biased. In this chapter, we introduce a mixture model that explicitly includes two processes that generated the data. The zero-inflated Poisson model is a mixture of a binomial and a Poisson distribution. We believe that such two- (or more-) process models are very useful tools in the life sciences because they can help uncover the different processes that generate the data we observe.
## Example data
We use the `blackstork` data from the `blmeco` package. They contain the breeding success of black storks in Latvia. The data were collected and kindly provided by <NAME>. They contain the number of nestlings of more than 300 [black stork](https://en.wikipedia.org/wiki/Black_stork) nests in different years.
Counting animals or plants is a typical example of data that contain a lot of zero counts. For example, the number of nestlings produced by a breeding pair is often zero because the whole nest was depredated or because a catastrophic event occurred such as a flood. However, when the nest succeeds, the number of nestlings varies among the successful nests depending on how many eggs the female has laid, how much food the parents could bring to the nest, or other factors that affect the survival of a nestling in an intact nest. Thus the factors that determine how many zero counts there are in the data differ from the factors that determine how many nestlings there are, if a nest survives. Count data that are produced by two different processes--one produces the zero counts and the other the variance in the count for the ones that were not zero in the first process--are called zero-inflated data. Histograms of zero-inflated data look bimodal, with one peak at zero (Figure \@ref(fig:histblackstork)).
<div class="figure">
<img src="3.1-zeroinflated-poisson-lmm_files/figure-html/histblackstork-1.png" alt="Histogram of the number of nestlings counted in black stork nests *Ciconia nigra* in Latvia (n = 1130 observations of 279 nests)." width="672" />
<p class="caption">(\#fig:histblackstork)Histogram of the number of nestlings counted in black stork nests *Ciconia nigra* in Latvia (n = 1130 observations of 279 nests).</p>
</div>
## Model
The Poisson distribution does not fit such data well, because the data contain more zero counts than expected under the Poisson distribution. Mullahy (1986) and Lambert (1992) formulated two different types of models that combine the two processes in one model, and therefore account for the excess of zero values in the data and allow the two processes to be analysed separately.
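As a quick check of this statement, one can compare the observed proportion of zero counts with the proportion expected under a Poisson distribution with the same mean (a sketch; the data object `dat` and the column name `nestlings` are placeholders):
```{r, eval=FALSE}
# observed vs. Poisson-expected proportion of zero counts
mean(dat$nestlings == 0)                   # observed proportion of zeros
dpois(0, lambda = mean(dat$nestlings))     # expected under a Poisson with the same mean
```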
The hurdle model (Mullahy, 1986) combines a left-truncated count data model (Poisson or negative binomial distribution that only describes the distribution of data larger than zero) with a zero-hurdle model that describes the distribution of the data that are either zero or nonzero. In other words, the hurdle model divides the data into two data subsets, the zero counts and the nonzero counts, and fits two separate models to each subset of the data. To account for this division of the data, the two models assume left truncation (all measurements below 1 are missing in the data) and right censoring (all measurements larger than 1 have the value 1), respectively, in their error distributions. A hurdle model can be fitted in R using the function hurdle from the package pscl (Jackman, 2008). See the tutorial by Zeileis et al. (2008) for an introduction.
In contrast to the hurdle model, the zero-inflated models (Mullahy, 1986; Lambert, 1992) combine a Bernoulli model (zero vs. nonzero) with a conditional Poisson model; conditional on the Bernoulli process being nonzero. Thus this model allows for a mixture of zero counts: some zero counts are zero because the outcome of the Bernoulli process was zero (these zero counts are sometimes called structural zero values), and others are zero because their outcome from the Poisson process was zero. The function `zeroinfl` from the package pscl fits zero-inflated models (Zeileis et al., 2008).
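To illustrate how these two functions are called (a sketch with placeholder variable names, not run here): the formula part left of the `|` specifies the count model and the part right of it the zero model.
```{r, eval=FALSE}
library(pscl)
mod_hurdle   <- hurdle(nestlings ~ year | year, data = dat, dist = "poisson")
mod_zeroinfl <- zeroinfl(nestlings ~ year | year, data = dat, dist = "poisson")
```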
The zero-inflated model may seem to reflect the true process that has generated the data more closely than the hurdle model. However, sometimes the fit of zero-inflated models is impeded by high correlation of the model parameters between the zero model and the count model. In such cases, a hurdle model may cause fewer problems.
Both functions (hurdle and zeroinfl) from the package pscl do not allow the inclusion of random factors. The functions MCMCglmm from the package MCMCglmm (Hadfield, 2010) and glmmadmb from the package glmmADMB (http://glmmadmb.r-forge.r-project.org/) provide the possibility to account for zero-inflation within a GLMM. However, these functions are not very flexible in the types of zero-inflated models they can fit; for example, glmmadmb only includes a constant proportion of zero values. A zero-inflation model using BUGS is described in Kéry and Schaub (2012). Here we use Stan to fit a zero-inflated model. Once we understand the basic model code, it is easy to add predictors and/or random effects to both the zero and the count model.
The example data contain numbers of nestlings in black stork *Ciconia nigra* nests in Latvia, collected by <NAME> and collaborators at 279 nests between 1979 and 2010. Black storks build solid and large aeries on branches of large trees. The same aerie is used for up to 17 years until it collapses. The black stork population in Latvia has drastically declined over the last decades. Here, we use the nestling data as presented in Figure \@ref(fig:histblackstork) to describe whether the number of black stork nestlings produced in Latvia decreased over time. We use a zero-inflated Poisson model to separately estimate temporal trends for nest survival and for the number of nestlings in successful nests. Since the same nests have been measured repeatedly over 1 to 17 years, we add nest ID as a random factor to both models, the Bernoulli and the Poisson model. After the first model fit, we saw that the between-nest variance in the number of nestlings for the successful nests was close to zero. Therefore, we decided to delete the random effect from the Poisson model. Here is our final model:
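In formulas, the model described above can be written as follows (the symbol names are ours and the exact parameterisation may differ slightly); $z_{it}=1$ marks a structural zero produced by the Bernoulli part, and the year trends are estimated separately in the two parts:
$$z_{it} \sim Bernoulli(\theta_{it}), \quad logit(\theta_{it}) = a_0 + a_1 year_t + g_i, \quad g_i \sim normal(0, \sigma_g)$$
$$y_{it} \sim Poisson(\lambda_{it}(1 - z_{it})), \quad log(\lambda_{it}) = b_0 + b_1 year_t$$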
$z_{it}$ is a latent (unobserved) variable that takes the values 0 or 1 for each nest $i$ during year $t$. It indicates a "structural zero", that is, if $z_{it} = 1$ the number of nestlings $y_{it}$ always is zero, because the expected value in the Poisson model, $\lambda_{it}(1 - z_{it})$, becomes zero. If $z_{it} = 0$, the expected value in the Poisson model becomes $\lambda_{it}$.
To fit this model in Stan, we first write the Stan model code and save it in a separate text file named "zeroinfl.stan".
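What this Stan file has to implement is the marginalised zero-inflated Poisson likelihood. As a sketch of that likelihood in R (not the Stan code itself):
```{r}
# marginalised zero-inflated Poisson density:
# Pr(y = 0)        = theta + (1 - theta) * dpois(0, lambda)
# Pr(y = k, k > 0) = (1 - theta) * dpois(k, lambda)
dzip <- function(y, theta, lambda) {
  ifelse(y == 0,
         theta + (1 - theta) * dpois(0, lambda),
         (1 - theta) * dpois(y, lambda))
}
sum(log(dzip(c(0, 0, 2, 4), theta = 0.3, lambda = 3)))  # log-likelihood of a toy data set
```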
Here is a handy package:
https://cran.r-project.org/web/packages/GLMMadaptive/vignettes/ZeroInflated_and_TwoPart_Models.html
<file_sep># Data preparation {#datamanip}
## Basic operations
Load all packages with `library(tidyverse)`, or only `library(dplyr)`.
```{r}
dat <- iris %>%
as.tibble() %>%
filter(Sepal.Length > 5) %>%
group_by(Species) %>%
summarise(n = n(),
mittel = mean(Petal.Length))
```
## Joining tables {#joindata}
Describe how `left_join()` works.
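As a minimal sketch with made-up data: `left_join()` keeps all rows of the first (left) table and adds the matching columns of the second table, matching rows via the `by` variable.
```{r}
library(dplyr)
counts <- tibble(Species = c("setosa", "virginica"), n = c(10, 3))
info <- tibble(Species = c("setosa", "versicolor", "virginica"),
               colour = c("blue", "red", "violet"))
left_join(counts, info, by = "Species")
```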
## Further reading
- [R for Data Science by <NAME> and <NAME>](http://r4ds.had.co.nz): Introduces the tidyverse framework. It explains how to get data into R, get it into the most useful structure, transform it, visualise it and model it.
<file_sep># run multinomial model in stan
# fit model
library(blmeco)
library(birdring)
library(rstan)
p <- c(0.1, 0.3, 0.6)
N <- rep(40, 20)
y <- rmultinom(20, N, prob=p)
datax <- list(y=t(y), nrow=20, ncol=3, ncolp1= 4)
parameters <- c("p")
str(datax)
head(datax)
test.mod <- stan(file="MR mnl_2_ohneJahrohneSite.stan", data=datax, iter=1000, chains=2)
<file_sep>
# Posterior predictive model checking {#modelchecking}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Introduction
## Summary
xxx
<file_sep># (PART) BASIC STATISTICS FOR ECOLOGISTS {-}
# Introduction to PART I {#PART-I}
```{r fig.align='center', echo=FALSE, fig.link=''}
knitr::include_graphics('images/part_I.jpg', dpi = 150)
```
------
During our courses we are sometimes asked to give an introduction to R-related topics covering data analysis, presentation of results or rather specialist topics in ecology. In this part we present these collected introductions and try to keep them updated. It is also a commented collection of R code that we documented for our own work. We hope this might be useful for other readers as well.
## Further reading
- [R for Data Science by <NAME> and <NAME>](http://r4ds.had.co.nz): Introduces the tidyverse framework. It explains how to get data into R, get it into the most useful structure, transform it, visualise it and model it.
<file_sep>```{r}
source("settings/settings.R")
```
# Spatial analyses and maps {#rgis}
```{r, echo=FALSE, cache=TRUE}
load("RData/elevation.RData")
data("frogs")
frogs <- frogs %>% as.tibble()
```
- Almost everything that we can do with a traditional geographic information system we can also do within R.
- If we do it in R, we get the useful benefits of a script-based approach that allows for reproducible analyses (see Chapter \@ref(reproducibleresearch)) and that can be scaled to many more objects or larger data sets.
- Here we simply introduce the packages and functions that we most often use when working with spatial data.
## Data types
### Raster data {#rasterdata}
Very broadly speaking, we divide spatial data into two categories, raster data and all other types of data for points, lines or polygons. Raster data consists of a grid (i.e. a matrix) where each cell of the grid contains one or several values representing the spatial information. The R-package `raster` is very efficient for raster data.
We can use the function `raster()` to load raster data from files. Most of the common file formats for raster data such as .geotiff, .tif or .grd are supported. However, raster data can also be converted from tables with one column for the x-coordinates, one column for the y-coordinates and one column for the spatial information. The coordinates must be from a regular grid. For example, the freely available [topographic data of Switzerland](https://www.bfs.admin.ch/bfs/de/home/dienstleistungen/geostat/geodaten-bundesstatistik/topografie.html) are collected on a 100m x 100m grid. In the following example the tibble `elevation` contains the elevation data from the canton of Aargau and is converted into raster data using the function `rasterFromXYZ()`.
```{r, fig.asp=1, fig.cap="Meters above sea level (m) across the canton of Aargau in Switzerland."}
library(raster)
ra <- rasterFromXYZ(elevation)
plot(ra)
```
### Geometry data {#geometrydata}
All geometry data types are composed of points. The spatial location of a point is defined by its x and y coordinates. Using several points one can then define lines (sequence of points connected by straight lines) and polygons (sequence of points that form a closed ring). Points, lines and polygons are the geometries we usually work with. We use the package `sf` to work with geometry data types. Its functions are very efficient to work with all spatial data other than raster data. It also links to GDAL (i.e. a computer software library for reading and writing raster and vector geospatial data formats) and proj.4 (i.e. a library for performing conversions between cartographic projections), which are important tools when working with different sources of spatial data.
We can use the function `st_read()` to read geometry data from a file or a database. In the following example, however, we convert the tibble `frogs` into a simple feature collection. The data file `frogs`, formatted as a tibble, contains different columns including the counts, variables that describe the ponds, as well as the spatial coordinates of the counts. The simple feature collection looks rather similar to the original tibble; however, instead of the x and y columns it now contains the column `geometry`. With the simple feature collection we can work pretty much in the same way as we are used to working with tibbles. For example, we can filter only the data from 2011, select the geometries and plot them on top of the raster with the elevation across the entire canton of Aargau (see \@ref(rasterdata) for the raster data).
```{r, fig.asp=1, fig.cap="Locations of the ponds where frogs were counted in 2011. The background image shows the elevation (meter above sea level)."}
library(sf)
dat <- frogs %>% st_as_sf(coords = c("x", "y"), crs = 21781)
plot(ra)
dat %>% filter(year == 2011) %>%
st_geometry() %>%
plot(add = TRUE, pch = "x")
```
## Basic functions
In this chapter we briefly describe some functions that we often use when working with spatial data in R.
### Coordinate systems
An important aspect of spatial data is the coordinate reference system (CRS). A CRS determines, for instance, where the center of the map is, the units of the coordinates and more. [PROJ.4](https://proj4.org/#) is an open source software library that is commonly used for CRS transformations. Most commonly used CRSs have been assigned an EPSG code.
The EPSG (European Petroleum Survey Group) code is a unique ID that can be used to identify a CRS. Thus, if we know the EPSG code, it is rather simple to transform spatial data into another CRS. To search for the correct EPSG code we can use https://www.epsg-registry.org or http://www.spatialreference.org
The following code shows how to assign a CRS to existing data and how to transform the coordinate system of raster data and sf data, respectively.
```{r, results='hide'}
# Assign CRS for raster data
crs(ra) <- CRS("+init=epsg:21781")
# Assign CRS for sf data
st_crs(dat) <- 21781
# Transform raster data to WGS84
projectRaster(ra, crs = CRS("+init=epsg:4326"))
# Transform sf data to WGS84
st_transform(dat, crs = 4326)
```
### Joining spatial data
Joining two non-spatial datasets relies on a shared variable (`key`), using for instance the function `left_join()`, as described in chapter \@ref(joindata). In a spatial context, we apply the exact same concept, except that the key is a shared area of geographic space. Note, however, that people with a background in geographic information systems may call these operations differently, such as spatial overlay or intersection.
For [geometry data](#geometrydata) we can use the function `st_join()`. As an example, we aim to add the biogeographic region to the counts of the number of frogs in ponds of the canton of Aargau. We transform the `frogs` data into a spatial object (i.e. a simple feature collection). The polygons of the biogeographic regions of Switzerland are available in the object `bgr`. Additional data are available for each polygon, and we aim to extract the information from the column `BIOGREG_R6`, which contains the name of the biogeographic region. We add this column to the frogs data using the function `st_join()` as follows.
```{r}
load("RData/bgr.RData")
bgr <- bgr %>%
st_transform(21781)
dat <- frogs %>% st_as_sf(coords = c("x", "y"), crs = 21781)
dat <- dat %>% st_join(bgr["BIOGREG_R6"])
```
## Further reading
- [Geocomputation with R](https://geocompr.robinlovelace.net): This online-book is aimed at people who want to do spatial data analysis, visualization and modeling using open source software and reproducible workflows.
- [Spatial Data Analysis and Modeling with R](http://rspatial.org/index.html): Online introduction to do spatial analyses with R. Good introduction to coordinate systems and projections.
<file_sep>
# Prior distributions {#priors}
THIS CHAPTER IS UNDER CONSTRUCTION!!!
## Introduction
The prior is an integral part of a Bayesian model. We must specify one.
When to use informative priors: in practice (management, politics, etc.) we would like to base our decisions on all available information. Therefore, we consider it responsible to include informative priors in applied research whenever possible. Priors allow us to combine information from the literature with information in the data, or to combine information from different data sets.
When to use non-informative priors: in basic research, when the results should report only the information in the current data set. Results from a case study may later be used in a meta-analysis that assumes independence across the different studies included.
## How to choose a prior {#choosepriors}
<!-- Tabelle von Fränzi (CourseIII_glm_glmmm/course2018/presentations_handouts/presentations) -->
important reference: @Lemoine.2019
TODO
### Priors for variance parameters
@Gelman.2006 discusses advantages of using folded t-distributions or Cauchy distributions as prior distributions for variance parameters in hierarchical models.
When specifying t-distributions, we find it hard to imagine what the distributions look like for given parameter values. Therefore, we simulate values from different distributions and look at the histograms. Because the parameterisation of the t-distribution differs among software languages, it is important to use the software in which the model is finally fitted. In Figure \@ref(fig:tdjags) we give some examples of folded t-distributions specified in jags, using different values for the precision (second parameter) and degrees of freedom (third parameter).
<div class="figure" style="text-align: center">
<a href="" target="_blank"><img src="images/example_t_distributions_jags.jpg" alt="Folded t-distributions with different precisions and degrees of freedom. The panel titles give the jags code of the distribution. Dark blue vertical lines indicate 90% quantiles, light-blue lines indicate 98% quantiles." width="3840" /></a>
<p class="caption">(\#fig:tdjags)Folded t-distributions with different precisions and degrees of freedom. The panel titles give the jags code of the distribution. Dark blue vertical lines indicate 90% quantiles, light-blue lines indicate 98% quantiles.</p>
</div>
GIVE EXAMPLE FOR STAN TOO
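A corresponding sketch for Stan: there, a folded (half-) t-distribution is obtained by declaring the parameter with `<lower=0>` and giving it a `student_t(df, 0, scale)` prior. What such a prior looks like can again be simulated in R:
```{r, eval=FALSE}
# simulate folded (half-) t-distributions with given degrees of freedom and scale,
# as implied by a student_t prior on a parameter declared with <lower=0> in Stan
halft <- function(n, df, scale) abs(scale * rt(n, df = df))
par(mfrow = c(1, 3))
for (pars in list(c(3, 1), c(3, 2), c(7, 1.5))) {
  x <- halft(10000, df = pars[1], scale = pars[2])
  hist(x[x < 20], breaks = 50, xlab = "value",
       main = paste0("df = ", pars[1], ", scale = ", pars[2]))
  abline(v = quantile(x, c(0.9, 0.99)), col = c("darkblue", "lightblue"))
}
```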
## Prior sensitivity
xxx
|
40fff3ef8c128f794697cb23231099001566c6b6
|
[
"Markdown",
"R",
"RMarkdown"
] | 45 |
RMarkdown
|
TobiasRoth/BDAEcology
|
74423faad7a978e60c36d54d9c9fc7dc83415198
|
d6cb01b2822316a21949dd6655aa46ba265e5fe7
|
refs/heads/main
|
<file_sep># Summer-15Days-Internship-AkashTechnolabs
Mobile App Development - Android
<file_sep>package com.example.emi.Activity;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Bundle;
import com.example.emi.Api.Api;
import com.example.emi.Api.ApiServices;
import com.example.emi.Model.ReminderdataModel;
import com.example.emi.Model.Result;
import com.example.emi.R;
import com.google.android.material.snackbar.Snackbar;
import androidx.annotation.NonNull;
import androidx.appcompat.app.ActionBarDrawerToggle;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import androidx.drawerlayout.widget.DrawerLayout;
import androidx.recyclerview.widget.ItemTouchHelper;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
import static com.example.emi.Activity.LoginActivity.login_userdata;
public class HomeActivity extends AppCompatActivity implements Drawer_Adepter.selectdraweritem {
Homepage_Recycler_Adepter homepage_recycler_adepter;
RecyclerView order_recycler_view_home;
ArrayList<ReminderdataModel> reminderdataModels = new ArrayList<>();
LinearLayout linear;
RecyclerView recycler_drawer;
ArrayList<String> Drawer_item_name = new ArrayList<>();
ArrayList<Integer> Drawer_item_icon = new ArrayList<Integer>();
DrawerLayout drawerLayout;
Drawer_Adepter drawer_adepter;
public static final String login_user_mobile = "login_user_mobile";
public static final String login_user_name = "login_user_name";
SharedPreferences sharedPreferences;
TextView user_name_show, user_phone_show;
Dialog dialog;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.drawerlyout);
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
if (android.os.Build.VERSION.SDK_INT >= 21) {
Window window = this.getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
window.setStatusBarColor(this.getResources().getColor(R.color.skyblue));
}
order_recycler_home();
drawer();
final ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(HomeActivity.this, drawerLayout, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
drawerLayout.addDrawerListener(toggle);
toggle.syncState();
sharedPreferences = getSharedPreferences(login_userdata, MODE_PRIVATE);
user_reminder_get();
linear = findViewById(R.id.linear);
user_name_show = findViewById(R.id.user_name_show);
user_phone_show = findViewById(R.id.user_phone_show);
user_phone_show.setText(sharedPreferences.getString(login_user_mobile, ""));
user_name_show.setText(sharedPreferences.getString(login_user_name, ""));
dialog = new Dialog(HomeActivity.this, R.style.MyAlertDialogStyle);
dialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
enableSwipeToDeleteAndUndo();
}
public void order_recycler_home() {
order_recycler_view_home = findViewById(R.id.order_recycler_view_home);
homepage_recycler_adepter = new Homepage_Recycler_Adepter(reminderdataModels, HomeActivity.this);
order_recycler_view_home.setLayoutManager(new LinearLayoutManager(this));
order_recycler_view_home.setAdapter(homepage_recycler_adepter);
}
public void drawer() {
Drawer_item_name.add("New Reminder");
Drawer_item_icon.add(R.drawable.ic_add_reminder);
Drawer_item_name.add("Change Password");
Drawer_item_icon.add(R.drawable.ic_padlock);
Drawer_item_name.add("Notification");
Drawer_item_icon.add(R.drawable.ic_notification);
Drawer_item_name.add("Share Appliction");
Drawer_item_icon.add(R.drawable.ic_share__1_);
Drawer_item_name.add("About Us");
Drawer_item_icon.add(R.drawable.ic_info);
recycler_drawer = findViewById(R.id.recycler_drawer);
drawerLayout = findViewById(R.id.drawerlayout);
drawer_adepter = new Drawer_Adepter(Drawer_item_name, Drawer_item_icon, HomeActivity.this);
recycler_drawer.setLayoutManager(new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false));
recycler_drawer.setAdapter(drawer_adepter);
}
@Override
public void openselecteddraweritem(int pos) {
switch (pos) {
case 0:
drawerLayout.close();
Intent intent = new Intent(HomeActivity.this, AddReminder.class);
startActivity(intent);
break;
case 1:
drawerLayout.close();
Intent intent1 = new Intent(HomeActivity.this, ChangePassword.class);
startActivity(intent1);
break;
case 2:
drawerLayout.close();
Intent intent2 = new Intent(HomeActivity.this, NotificationActivity.class);
startActivity(intent2);
break;
case 3:
drawerLayout.close();
break;
case 4:
dialog.setContentView(R.layout.about_as_app_dialog);
dialog.show();
break;
case 5:
drawerLayout.close();
break;
}
}
public void notifiction_home(View view) {
Intent intent2 = new Intent(HomeActivity.this, NotificationActivity.class);
startActivity(intent2);
}
public void logout(View view) {
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.clear();
editor.commit();
Intent intent2 = new Intent(HomeActivity.this, MainActivity.class);
startActivity(intent2);
finish();
}
void user_reminder_get() {
ProgressDialog progressDialog = new ProgressDialog(HomeActivity.this);
progressDialog.setMessage("Please wait...");
progressDialog.show();
Retrofit retrofit = new Retrofit.Builder()
.baseUrl(Api.BASE_URL)
.addConverterFactory(GsonConverterFactory.create())
.build();
ApiServices apiServices = retrofit.create(ApiServices.class);
Call<Result> call = apiServices.get_reminder(sharedPreferences.getString(login_user_mobile, ""));
call.enqueue(new Callback<Result>() {
@Override
public void onResponse(Call<Result> call, Response<Result> response) {
progressDialog.dismiss();
if (response.body() != null) {
if (response.body().getSuccess()) {
reminderdataModels.clear();
reminderdataModels.addAll(response.body().getReminderdataModels());
homepage_recycler_adepter.notifyDataSetChanged();
} else {
Toast.makeText(HomeActivity.this, response.body().getMsg(), Toast.LENGTH_SHORT).show();
}
} else {
Toast.makeText(HomeActivity.this, "Something Wrong", Toast.LENGTH_SHORT).show();
}
}
@Override
public void onFailure(Call<Result> call, Throwable t) {
progressDialog.dismiss();
Toast.makeText(HomeActivity.this, t.getMessage(), Toast.LENGTH_SHORT).show();
}
});
}
private void enableSwipeToDeleteAndUndo() {
SwipeToDeleteCallback swipeToDeleteCallback = new SwipeToDeleteCallback(this) {
@Override
public void onSwiped(@NonNull RecyclerView.ViewHolder viewHolder, int i) {
final int position = viewHolder.getAdapterPosition();
// keep a reference to the removed item so it can be restored via the UNDO action
final ReminderdataModel item = homepage_recycler_adepter.getData().get(position);
homepage_recycler_adepter.removeItem(position);
Snackbar snackbar = Snackbar
        .make(linear, "Item was removed from the list.", Snackbar.LENGTH_LONG);
snackbar.setAction("UNDO", new View.OnClickListener() {
    @Override
    public void onClick(View view) {
        homepage_recycler_adepter.restoreItem(item, position);
        order_recycler_view_home.scrollToPosition(position);
}
});
snackbar.setActionTextColor(Color.YELLOW);
snackbar.show();
}
};
ItemTouchHelper itemTouchhelper = new ItemTouchHelper(swipeToDeleteCallback);
itemTouchhelper.attachToRecyclerView(order_recycler_view_home);
}
}<file_sep>package com.example.emi.Activity;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import com.example.emi.Model.ReminderdataModel;
import com.example.emi.R;
import java.util.ArrayList;
public class Homepage_Recycler_Adepter extends RecyclerView.Adapter<Homepage_Recycler_Adepter.ViewHolder> {
ArrayList<ReminderdataModel> reminderdataModels;
Context context;
public Homepage_Recycler_Adepter(ArrayList<ReminderdataModel> reminderdataModels, Context context) {
this.reminderdataModels = reminderdataModels;
this.context = context;
}
@NonNull
@Override
public ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
LayoutInflater layoutInflater = LayoutInflater.from(parent.getContext());
View listitem = layoutInflater.inflate(R.layout.home_recycler_design, parent, false);
ViewHolder viewHolder = new ViewHolder(listitem);
return viewHolder;
}
@Override
public void onBindViewHolder(@NonNull ViewHolder holder, int position) {
holder.order_id.setText(reminderdataModels.get(position).getReminder_id());
holder.emi_company.setText(reminderdataModels.get(position).getEmi_company());
holder.premium_date.setText(reminderdataModels.get(position).getPremium_date());
holder.premium_price.setText(reminderdataModels.get(position).getEmi_premium());
}
@Override
public int getItemCount() {
return reminderdataModels.size();
}
public void removeItem(int position) {
reminderdataModels.remove(position);
notifyItemRemoved(position);
}
public void restoreItem(ReminderdataModel item, int position) {
    // re-insert a previously removed item at its old position (used by the UNDO action)
    reminderdataModels.add(position, item);
    notifyItemInserted(position);
}
public ArrayList<ReminderdataModel> getData() {
return reminderdataModels;
}
public class ViewHolder extends RecyclerView.ViewHolder {
TextView order_id,emi_company,premium_date,premium_price;
public ViewHolder(@NonNull View itemView) {
super(itemView);
order_id = itemView.findViewById(R.id.order_id);
emi_company = itemView.findViewById(R.id.emi_company);
premium_date = itemView.findViewById(R.id.premium_date);
premium_price = itemView.findViewById(R.id.premium_price);
}
}
}
<file_sep>package com.example.gridview;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import java.util.ArrayList;
public class RecyclerAdepter extends RecyclerView.Adapter<RecyclerAdepter.ViewHolder> {
ArrayList<String> details;
Context context;
public RecyclerAdepter(ArrayList<String> details, Context context) {
this.details = details;
this.context = context;
}
@NonNull
@Override
public ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
LayoutInflater layoutInflater = LayoutInflater.from(parent.getContext());
View listitem = layoutInflater.inflate(R.layout.recycler_design, parent, false);
ViewHolder viewHolder = new ViewHolder(listitem);
return viewHolder;
}
@Override
public void onBindViewHolder(@NonNull ViewHolder holder, int position) {
holder.news_heading.setText(details.get(position));
}
@Override
public int getItemCount() {
return details.size();
}
public class ViewHolder extends RecyclerView.ViewHolder {
TextView news_heading;
public ViewHolder(@NonNull View itemView) {
super(itemView);
news_heading = itemView.findViewById(R.id.newsheading);
}
}
}
|
fcc3ce9b91f6e098c970ecec358b62d70e8e69d9
|
[
"Markdown",
"Java"
] | 4 |
Markdown
|
arjav208/Summer-15Days-Internship-AkashTechnolabs
|
622aa6923fc44e608212560457f4fc415b182bf6
|
26cb1813ffd6e802a073cd709592296a7218b058
|
refs/heads/master
|
<repo_name>Ezhs/briefjs<file_sep>/.eslintrc.js
module.exports = {
extends: "eslint-config-sprite",
plugins: ["html"],
globals: {
briefjs: true,
},
}
<file_sep>/README.md
# BriefJS
Dead-simple declarative JavaScript framework for building UI.
## Why BriefJS?
- Tiny size. (_< 3kb gzipped_)
- Zero dependence.
- Pure ES6.
- No compiler. (_Directly uses tagged template strings_).
- Stateless.
- Fast and extendable.
## Installation
From CDN:
```html
<script src="https://unpkg.com/briefjs/dist/brief.min.js"></script>
```
With NPM:
```bash
npm install briefjs
```
## Example
```js
const {tags, component, render} = briefjs;
const {div, span} = tags;
function randomColor() {
const r = Math.round(Math.random() * 255);
const g = Math.round(Math.random() * 255);
const b = Math.round(Math.random() * 255);
return `rgb(${r},${g},${b})`;
}
const MyTag = component({
props: {
color: 'red;',
onclick: 'void(0)',
},
render(props, slot) {
return div({style: {color: props.color}, onclick: props.onclick})`
${span({ref: 'foo'})`1`}
${span`${props.color}`}
${slot}
`;
},
updated() {
console.log(this.refs);
},
});
const Outer = component({
render(props, slot) {
let color = randomColor();
const onclick = () => {
color = randomColor();
this.update();
};
return MyTag({color, onclick})`${slot}`;
},
updated() {
this.node.addEventListener('mousedown', () => {
console.log('mousedown');
});
},
});
const tpl = div`
${Outer`
${span`abc`}
`}
${span`3`}
${span`4`}
${div`
${span`5`}
`}
`;
render(tpl, document.getElementById('app'));
```
|
ad729c98528a2bac490d4ded86027df110635725
|
[
"JavaScript",
"Markdown"
] | 2 |
JavaScript
|
Ezhs/briefjs
|
10be38face9279c075b5025844fdbca57d0dcba0
|
162f24ad9c86626a52bb00e3e9d445ed2961aeca
|
refs/heads/master
|
<file_sep>create database if not exists bamazon;
use bamazon;
create table products (
item_id int not null auto_increment,
product_name varchar (80) null,
department_name varchar (200) null,
price decimal (10,4) null,
stick_quantity int (100) null,
primary key (item_id)
);
select * from products;
<file_sep>const MySQL = require("mysql");
const inquirer = require("inquirer");
const connection = MySQL.createConnection ({
host: "localhost",
port: 3306,
user: "root",
password: "",
database: "bamazon",
});
connection.connect(function(error){
if (error) throw error;
inventory();
});
function inventory() {
connection.query("select * from products", function(error, res){
if (error) throw error;
for (var i=0; i<res.length; i++) {
console.log("_____________");
console.log("id:" + res[i].item_id);
console.log("name:" + res[i].product_name);
console.log("price:" + res[i].price);
}
purchases();
})
};
function purchases() {
inquirer.prompt([{
type: "input",
name: "product_id",
message: "Please insert the ID of you want to buy?",
filter: Number
},
{
type: "input",
name: "Quantity",
message: "How many units of the product do you want to buy?",
filter: Number
}
])
.then(function(purchases){
var items_idy = purchases.product_id;
var item_qty = purchases.Quantity;
var query = "select * from products where ?";
connection.query(query, {item_id:items_idy}, function(error, res){
if (error) throw error;
if (res.length===0){
console.log("Error! That ID doesn't exist. Please try with a valid ID");
inventory();
}
else {
var product_info = res[0];
if (item_qty<=product_info.stick_quantity) {
console.log(product_info.product_name + " is available in stock\n");
var subtract = "UPDATE products SET stick_quantity = " + (product_info.stick_quantity-item_qty) + " WHERE item_id = " + items_idy;
connection.query(subtract, function(error, res) {
if (error) throw error;
console.log("Your order has been placed");
console.log("You ordered " + product_info.product_name );
console.log("The quantity you ordered was " + item_qty);
console.log("TOTAL: " + (product_info.price*item_qty));
connection.end();
})
}
else {
console.log("Your order cannot be placed, the quantity you require is not as long available in our inventory\n");
setTimeout(function(){
inventory()
},5000)
}
}
})
})
}
<file_sep>use bamazon;
insert into products (product_name, department_name, price, stick_quantity) values
("PULLEY", "BEECHCRAFT", 4000, 3),
("PISTON ASSEMBLY", "BEECHCRAFT", 8000, 2),
("BEARING", "BEECHCRAFT", 16000, 17),
("COVER SLIP", "BEECHCRAFT", 3000, 0),
("<NAME>", "CESSNA", 10000, 50),
("TENSION UNIT ANTENA", "CESSNA", 9000, 38),
("<NAME>", "CESSNA", 2000, 19),
("THERMOCOUPLE", "FOKKER-50/60", 7500, 21),
("<NAME>", "FOKKER-50/60", 27000, 27),
("BOARD TERMINAL", "FOKKER-50/60", 60000, 31);
select * from products;
|
a065f365a9838ecb2bf0a322134a0f16d1d37c0b
|
[
"JavaScript",
"SQL"
] | 3 |
SQL
|
CamilaChica/Bamazon
|
d520b254519ad3ed258162fb3eb858686a3f0fc6
|
f5b57545979d200fe74b74b7e67dc84033169d2a
|
refs/heads/master
|
<file_sep><?php
/**
* phalcon_phpunit.
*
* @author Haow1 <<EMAIL>>
* @version $Id$
*/
use Phalcon\Config;
return new Config([
'view' => [
'dir' => 'view'
]
]);
<file_sep><?php
/**
* phalcon_phpunit.
*
* @author wumouse <<EMAIL>>
* @version $Id$
*/
namespace Model;
/**
* Response body
*
* @package Model
*/
class ResponseBody implements \JsonSerializable
{
/**
* Status
*
* @var int
*/
protected $status = 1;
/**
* Info
*
* @var string
*/
protected $info = '';
/**
* Data
*
* @var mixed
*/
protected $data = [];
/**
* @return int
*/
public function getStatus()
{
return $this->status;
}
/**
* @param int $status
* @return ResponseBody
*/
public function setStatus($status)
{
$this->status = $status;
return $this;
}
/**
* @return string
*/
public function getInfo()
{
return $this->info;
}
/**
* @param string $info
* @return ResponseBody
*/
public function setInfo($info)
{
$this->info = $info;
return $this;
}
/**
* @return mixed
*/
public function getData()
{
return $this->data;
}
/**
* @param mixed $data
* @return ResponseBody
*/
public function setData($data)
{
$this->data = $data;
return $this;
}
/**
* Implements the JSON serialization interface
*/
function jsonSerialize()
{
return get_object_vars($this);
}
}
<file_sep><?php
/**
* phalcon_phpunit.
*
* Entry file
*
* @author wumouse <<EMAIL>>
* @version $Id$
*/
use Config\Bootstrap;
use Phalcon\DI\FactoryDefault;
require __DIR__ . '/../config/Bootstrap.php';
$bootstrap = new Bootstrap();
$application = $bootstrap->getApplication($di = new FactoryDefault(), 'index');
<file_sep><?php
/**
* phalcon_phpunit.
*
* Service configuration file
*
* @author wumouse <<EMAIL>>
* @version $Id$
*/
namespace Config;
use Phalcon\Config;
use Phalcon\DI;
use Phalcon\DI\FactoryDefault;
use Phalcon\DiInterface;
use Phalcon\Loader;
use Phalcon\Mvc\Application;
use Phalcon\Mvc\View;
/**
*
*
* @package Config
*/
class Bootstrap
{
/**
*
*
* @var string
*/
protected $appName;
/**
*
*
* @var string
*/
protected $projectDir;
/**
*
*
* @var string
*/
protected $appDir;
/**
*
*
* @var Config
*/
protected $config;
/**
*
*
* @var string
*/
protected $controllerDefaultNamespace;
/**
* @param string $appName
* @return Bootstrap
*/
private function setAppName($appName)
{
$this->appName = $appName;
$this->projectDir = __DIR__ . '/..';
$this->appDir = $this->projectDir . '/apps/' . $appName;
if (!stream_resolve_include_path($this->appDir)) {
throw new \InvalidArgumentException('Please make sure the app dir name same as app name');
}
$this->controllerDefaultNamespace = ucfirst($appName) . '\\Controller';
return $this;
}
/**
*
*
* @param string $appName
* @param FactoryDefault $di
* @return Application
*/
public function getApplication(FactoryDefault $di, $appName)
{
$this->setAppName($appName);
$this->initDi($di);
$di->get('dispatcher')->setDefaultNamespace($this->controllerDefaultNamespace);
$application = new Application($di);
return $application;
}
/**
*
*
* @param DiInterface $di
*/
private function initDi($di)
{
$methodsWithOrder = [
'registerConfig',
'registerLoader',
'registerVolt',
'registerView',
'registerResponseBody',
];
foreach ($methodsWithOrder as $method) {
$callback = [$this, $method];
if (is_callable($callback)) {
call_user_func($callback, $di);
}
}
}
/**
*
*
* @param DI $di
*/
public function registerConfig($di)
{
/** @var Config $config */
$config = require $this->projectDir . '/config/app.php';
$appConfigPath = $this->appDir . '/config/app.php';
if (stream_resolve_include_path($appConfigPath)) {
$config->merge(require $appConfigPath);
}
$this->config = $config;
$di->set('config', $config, true);
}
/**
*
*
* @param DI $di
*/
public function registerLoader($di)
{
$loader = new Loader();
$config = $this->config;
$namespaces = [
$this->controllerDefaultNamespace => $this->appDir . '/Controller',
'Model' => $this->projectDir . '/model',
];
if ($config->offsetExists('namespaces')) {
    // merge additional namespaces defined in the app config
    $namespaces += $config->get('namespaces')->toArray();
}
$loader->registerNamespaces($namespaces);
if ($config->offsetExists('classes')) {
$loader->registerClasses($config->get('classes'));
}
$loader->register();
$di->set('loader', $loader, true);
}
/**
*
*
* @param DI $di
*/
public function registerView($di)
{
$di->set('view', function () {
$view = new View();
$view->setViewsDir($this->appDir . '/' . $this->config->view->dir);
$view->registerEngines([
'.volt' => 'volt',
]);
return $view;
}, true);
}
/**
*
*
* @param DI $di
*/
public function registerVolt($di)
{
$di->set('volt', function () use ($di) {
$volt = new View\Engine\Volt($di->get('view'));
$volt->setOptions([
'compiledPath' => '/tmp/compiled',
]);
return $volt;
}, true);
}
/**
*
*
* @param FactoryDefault $di
*/
public function registerResponseBody($di)
{
$di->set('responseBody', 'Model\ResponseBody', true);
}
}
<file_sep><?php
/**
* phalcon_phpunit.
*
* @author wumouse <<EMAIL>>
* @version $Id$
*/
namespace Api\Controller;
/**
* 默认控制器
*
* @package Api\Controller
*/
class IndexController extends ControllerBase
{
/**
* 默认action
*
* @return \Model\ResponseBody
*/
public function indexAction()
{
$id = $this->dispatcher->getParam(0);
$sql = "SELECT * FROM robots WHERE id = :id";
$result = $this->db->query($sql, ['id' => $id]);
return $this->setData($result->fetch());
}
}
<file_sep><?php
/**
* phalcon_phpunit.
*
* @author Haow1 <<EMAIL>>
* @version $Id$
*/
namespace Test\Index\Controller;
use Index\Controller\IndexController;
use Model\ResponseBody;
use Phalcon\DI;
use Phalcon\Http\Request;
/**
*
*
* @package Test\Index\Controller
*/
class IndexControllerTest extends \PHPUnit_Framework_TestCase
{
/**
*
*
* @var IndexController
*/
protected $controller;
/**
*
*
* @var DI
*/
protected $di;
protected function setUp()
{
parent::setUp();
$this->controller = new IndexController();
$this->di = DI::getDefault();
}
public function testIndexAction()
{
$name = 'gaga';
$_POST['name'] = $name;
/** @var ResponseBody $responseBody */
$responseBody = $this->controller->indexAction();
$this->assertEquals($responseBody->getStatus(), 1);
$this->assertEquals($responseBody->getData(), $name);
}
}
<file_sep><?php
/**
* phalcon_phpunit.
*
* @author Haow1 <<EMAIL>>
* @version $Id$
*/
namespace Index\Controller;
use Model\ResponseBody;
use Phalcon\Mvc\Controller;
/**
*
*
* @property ResponseBody responseBody
* @package Index\Controller
*/
class IndexController extends Controller
{
/**
*
*
* @return ResponseBody
*/
public function indexAction()
{
$name = $this->request->getPost('name', 'string');
$this->responseBody->setData($name);
return $this->responseBody;
}
}
<file_sep><?php
/**
* phalcon_phpunit.
*
* 公共配置文件
*
* @author wumouse <<EMAIL>>
* @version $Id$
*/
namespace Config;
use Phalcon\Config;
return new Config([
'debug' => 1,
'view' => [
'dir' => '',
],
'db' => [
'host' => '127.0.0.1',
'dbname' => 'phalcon_test',
'username' => 'root',
'password' => '<PASSWORD>',
]
]);
<file_sep><?php
/**
* phalcon_phpunit.
*
* 入口文件
*
* @author wumouse <<EMAIL>>
* @version $Id$
*/
use Config\Bootstrap;
use Phalcon\DI\FactoryDefault;
require __DIR__ . '/../config/Bootstrap.php';
$bootstrap = new Bootstrap();
$application = $bootstrap->getApplication($di = new FactoryDefault(), 'index');
try {
$application->handle();
} catch (Exception $e) {
echo $e;
} finally {
echo $application->response->getContent();
}
<file_sep><?php
/**
* phalcon_phpunit.
*
* @author wumouse <<EMAIL>>
* @version $Id$
*/
namespace Api\Controller;
use Model\ResponseBody;
use Phalcon\Mvc\Controller;
/**
* Controller base class
*
* @package Api\Controller
*/
class ControllerBase extends Controller
{
/**
* @param int $status
* @return ResponseBody
*/
public function setStatus($status)
{
return $this->responseBody->setStatus($status);
}
/**
* @param string $info
* @return ResponseBody
*/
public function setInfo($info)
{
return $this->responseBody->setInfo($info);
}
/**
* @param mixed $data
* @return ResponseBody
*/
public function setData($data)
{
return $this->responseBody->setData($data);
}
}
<file_sep># phalcon_phpunit
A project for learning unit testing, with tests built on a Phalcon framework application.
<file_sep><?php
/**
* phalcon_phpunit.
*
* @author wumouse <<EMAIL>>
* @version $Id$
*/
namespace Api\Config;
use Phalcon\Config;
return new Config([
'view' => [
'dir' => APP_DIR . '/view/',
]
]);
|
8de334e68e5b9078705a35f519a82a81f05b3f7d
|
[
"Markdown",
"PHP"
] | 12 |
PHP
|
wumouse/phalcon_phpunit
|
c799363d03eb5ee6361277e4eaae61b55dcd49f9
|
af8431dad61a8f754c8fede292abeba3e809caf7
|
refs/heads/master
|
<file_sep><?php
// Proxy for the niconico getthumbinfo API: returns thumbnail/metadata XML for the given video id.
$q = urlencode($_GET["id"]);
$url = "http://ext.nicovideo.jp/api/getthumbinfo/" . $q;
echo file_get_contents($url);
?><file_sep><?php
// Proxy for the niconico RSS search endpoint: returns the search feed XML for the given query.
$q = urlencode($_GET["query"]);
$searchurl = "http://zio3.net/nicoRss/Search.ashx?p=" . $q;
$file = file_get_contents($searchurl);
echo $file;
<file_sep>// Fetch niconico search results for the current query via the nicosearch.php proxy and render thumbnail links.
function result(){
var r=document.getElementById("result");
r.innerHTML="";
document.getElementById("q").focus();
var q=document.getElementById("q").value;
document.getElementById("resultword").textContent=('Search results for "' + q + '"');
var url=("nicosearch.php?query="+q);
var xhr=new XMLHttpRequest();
xhr.open("GET",url,true);
xhr.overrideMimeType('text/xml');
xhr.send(null);
xhr.onreadystatechange=function(){
if (xhr.readyState == 4) {
var resultxml = xhr.responseXML;
if (resultxml) {
var items = resultxml.getElementsByTagName("item");
for (var i = 0; i < items.length; i++) {
var item = items[i];
var title = item.getElementsByTagName("title")[0].textContent;
var link = item.getElementsByTagName("link")[0].textContent;
var id = link.replace("http:\/\/www.nicovideo.jp\/watch\/sm", "");
if (id.match(/^[0-9]/)) {
var movielink=$(document.createElement("a")).attr("href",link).addClass("movielink");
var movieinfo=$(document.createElement("div")).append(
movielink.append(
$(document.createElement("img")).attr(
"src", ("http://tn-skr2.smilevideo.jp/smile?i=" + id)
).addClass("thumbnail"))
).addClass("movieinfo");
movielink.append(document.createElement("br"));
movielink.append(
$(document.createElement("span")).text(title).addClass("movietitle")
)
//movieinfo.append(document.createElement("br"));
//movieinfo.append(document.createElement("br"));
movieinfo.append(
$(document.createElement("a")).attr("href",link).attr("target","_blank").text("■").css({
textDecoration:"none"
})
);
$("#result").append(movieinfo);
}
}
}
}
}
}
|
66a67e115faa2af8c3a7a632fd34e1f10be36031
|
[
"JavaScript",
"PHP"
] | 3 |
PHP
|
pastak/niconico_faster_search
|
be857e0331c5b2fc3375ebd41796bd04742707e4
|
0809dbe53900c39d83aedad62b7111e26dcdb8ee
|
refs/heads/master
|
<file_sep># @jcoreio/condition-solano
[semantic-release](https://github.com/semantic-release/semantic-release) plugin to check [Solano CI](https://solanolabs.com/) environment before publishing.
[](https://travis-ci.org/jcoreio/condition-solano)
[](https://codecov.io/gh/jcoreio/condition-solano)
[](https://greenkeeper.io/)
Verify that `semantic-release` is running:
- on Solano CI
- on the right git branch and not on a PR build
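In practice these checks come down to inspecting Solano's environment variables (the same ones exercised by this repo's test suite). A simplified sketch of the idea, not the plugin's actual implementation, which reports failures as `SemanticReleaseError`s with the codes shown:
```js
// Illustrative sketch only; the function name is hypothetical, and the error codes and
// environment variables are taken from this repo's tests.
function verifySolanoEnv(branch) {
  const { SOLANO_PROFILE_NAME, TDDIUM_PR_ID, TDDIUM_CURRENT_BRANCH } = process.env
  if (!SOLANO_PROFILE_NAME) throw new Error('ENOSOLANO: not running on Solano CI')
  if (TDDIUM_PR_ID) throw new Error('EPULLREQUEST: skipping release on pull request builds')
  if (TDDIUM_CURRENT_BRANCH !== branch) throw new Error('EBRANCHMISMATCH: not on the configured release branch')
}
```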
### Options
| Option | Description | Default |
| --------------------- | -------------------------------------------------------------------- | ------------------------------------------------------ |
| `githubToken` | **Required.** The GitHub token used to authenticate with the GitHub API. | `process.env.GH_TOKEN` or `process.env.GITHUB_TOKEN` |
| `githubUrl` | The GitHub Enterprise endpoint. | `process.env.GH_URL` or `process.env.GITHUB_URL` |
| `githubApiPathPrefix` | The GitHub Enterprise API prefix. | `process.env.GH_PREFIX` or `process.env.GITHUB_PREFIX` |
## Configuration
The plugin is used by default by [semantic-release](https://github.com/semantic-release/semantic-release), so no specific configuration is required if `githubToken`, `githubUrl`, and `githubApiPathPrefix` are set via environment variables.
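For reference, the test suite in this repo invokes the condition programmatically with a plugin config and a `semantic-release` options object. A minimal sketch of that call shape (the `require` path and the repository URL below are placeholders, not verified API):
```js
// Sketch mirroring the invocation used in this repo's tests; token, branch and repository URL are placeholders.
const condition = require('@jcoreio/condition-solano')

async function verifyRelease() {
  await condition(
    { githubToken: process.env.GH_TOKEN },
    { options: { branch: 'master', repositoryUrl: 'git+https://github.com/owner/repo.git' } }
  )
}
```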
If you are building with multiple versions of node, use Solano build `profiles` and check the `$SOLANO_PROFILE_NAME` in
the `post_build` hook to make sure you only run `semantic-release` on the desired version of node.
```yml
plan:
- node_4
- node_6
- node_8
profiles:
node_4:
nodejs:
version: '4.4'
node_6:
nodejs:
version: '6.11.5'
node_8:
nodejs:
version: '8.9.0'
hooks:
post_build: |
# Only publish if all tests have passed
if [[ "passed" != "$TDDIUM_BUILD_STATUS" ]]; then
echo "\$TDDIUM_BUILD_STATUS = $TDDIUM_BUILD_STATUS"
echo "Will only publish on passed builds"
exit
fi
# Only publish on 'node_8' profile
if [[ "node_8" != "$SOLANO_PROFILE_NAME" ]]; then
echo "Will only publish on 'node_8' profile"
exit
fi
npm run semantic-release
<file_sep>import nock from 'nock';
export default function authenticate({
githubToken = '<PASSWORD>',
githubUrl = 'https://api.github.com',
githubApiPathPrefix = '',
} = {}) {
return nock(`${githubUrl}/${githubApiPathPrefix}`, {reqheaders: {authorization: `token ${githubToken}`}});
}
<file_sep>import test from 'ava';
import SemanticReleaseError from '@semantic-release/error';
import nock from 'nock';
import authenticate from './helpers/mock-github';
// Save the current process.env
const envBackup = Object.assign({}, process.env);
test.beforeEach(() => {
// Delete env variables in case they are on the machine running the tests
delete process.env.GH_TOKEN;
delete process.env.GITHUB_TOKEN;
delete process.env.GH_URL;
delete process.env.GITHUB_URL;
delete process.env.GH_PREFIX;
delete process.env.GITHUB_PREFIX;
delete process.env.SOLANO_PROFILE_NAME;
delete process.env.TDDIUM_CURRENT_BRANCH;
delete process.env.TDDIUM_PR_ID;
});
test.afterEach.always(() => {
// Restore process.env
process.env = envBackup;
// Reset nock
nock.cleanAll();
});
test.serial('Only runs on solano', async t => {
const condition = require('../');
const error = await t.throws(condition({}, {options: {}}));
t.true(error instanceof SemanticReleaseError);
t.is(error.code, 'ENOSOLANO');
});
test.serial('Not running on pull requests', async t => {
const condition = require('../');
process.env.SOLANO_PROFILE_NAME = 'default';
process.env.TDDIUM_PR_ID = '105';
const error = await t.throws(condition({}, {options: {}}));
t.true(error instanceof SemanticReleaseError);
t.is(error.code, 'EPULLREQUEST');
});
test.serial('Does not run on non-master branch by default', async t => {
const condition = require('../');
process.env.SOLANO_PROFILE_NAME = 'default';
process.env.TDDIUM_CURRENT_BRANCH = 'notmaster';
const error = await t.throws(condition({}, {options: {branch: 'master'}}));
t.true(error instanceof SemanticReleaseError);
t.is(error.code, 'EBRANCHMISMATCH');
});
test.serial('Does not run on master if branch configured as "foo"', async t => {
const condition = require('../');
process.env.SOLANO_PROFILE_NAME = 'default';
process.env.TDDIUM_CURRENT_BRANCH = 'master';
const error = await t.throws(condition({}, {options: {branch: 'foo'}}));
t.true(error instanceof SemanticReleaseError);
t.is(error.code, 'EBRANCHMISMATCH');
});
test.serial('Runs otherwise', async t => {
const condition = require('../');
process.env.SOLANO_PROFILE_NAME = 'default';
process.env.TDDIUM_CURRENT_BRANCH = 'master';
const owner = 'test_user';
const repo = 'test_repo';
const githubToken = 'github_token';
const pro = false;
const github = authenticate({githubToken})
.get(`/repos/${owner}/${repo}`)
.reply(200, {private: pro});
const result = await condition(
{githubToken},
{options: {branch: 'master', repositoryUrl: `git+https://github.com/${owner}/${repo}.git`}}
);
t.falsy(result);
t.true(github.isDone());
});
|
2e94dabdd8a2ef107ab2b38ae65cd441e0279dc3
|
[
"Markdown",
"JavaScript"
] | 3 |
Markdown
|
jcoreio/condition-solano
|
99063adc1aaa02510b7f9d974e6e7df38cdb2ed9
|
381f641f38e6201382dbee8a7e41236f36817bcb
|
refs/heads/master
|
<repo_name>pongsakorn-ruadsong/SwissRE<file_sep>/app/javascript/containers/termLifeInsurance.jsx
import React, { PropTypes } from 'react'
import { render } from 'react-dom'
import { Link } from 'react-router-dom'
import header from '../../../public/images/header-icon-01.png'
import icon01 from '../../../public/images/Icon-01.png'
import icon02 from '../../../public/images/Icon-02.png'
import icon03 from '../../../public/images/Icon-03.png'
import icon04 from '../../../public/images/Icon-04.png'
import Reason from '../components/shared/reason'
import CurrentUser from '../services/currentUser'
export default class TermLifeInsurance extends React.Component {
constructor(props) {
super(props)
this.state = {
currentUser: (new CurrentUser).get(),
}
}
render() {
console.log(this.props)
return (
<div className='grey-bg'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>Term Life Insurance</h2>
<h4>
Best Insurance helps you get the best
insurance quickly and easily!
</h4>
</div>
</div>
</div>
</div>
<div className='container'>
<div className='row'>
<div className='col-12'>
<div>
<div className='sr-termLifeInsurance__container'>
<div>
<div className="row">
<div className="col-12">
<div className="row">
<div className="col-12 sr-termLifeInsurance__div__title">
<text className="sr-termLifeInsurance__title">Term Life Insurance</text>
</div>
<div className="col-12 sr-termLifeInsurance__div__image">
<img className="sr-termLifeInsurance__image" src={header} />
</div>
<div className="col-12 sr-termLifeInsurance__div__subtitle">
<text className="sr-termLifeInsurance__subtitle">Some things are worth everything. Protect them.</text>
</div>
<div className="col-12 sr-termLifeInsurance__div__verticalLine">
<div className="sr-termLifeInsurance__verticalLine" />
</div>
<div className="col-12 sr-termLifeInsurance__div__title_blue">
<text className="sr-termLifeInsurance__title_blue">Reasons why you should purchase this insurance</text>
</div>
</div>
</div>
<div className="col-12">
<div >
<Reason image={icon01} title="Support your life" content="Ensure your loved one's financial future is protected, in the event you become terminally ill or unexpectedly pass away." />
<Reason image={icon02} title="Gamified Journey" content="Ensure your loved one's financial future is protected, in the event you become terminally ill or unexpectedly pass away." />
<Reason image={icon03} title="Get rewards as a subscriber" content="Ensure your loved one's financial future is protected, in the event you become terminally ill or unexpectedly pass away." />
</div>
</div>
<div className="col-12">
<div className="row">
<div className="col-12">
<Link className="button-224 btn btn-dark-blue btn-lg btn-dark-blue--large-text" to={"/"}>Back</Link>
<Link className="button-224 btn btn-green btn-lg btn-green--large-text" to={this.state.currentUser.id ? `/step/TERM` : `/login/TERM`}>Get a quote</Link>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
)
}
}
<file_sep>/app/javascript/containers/layouts/navLayout.jsx
import React from 'react'
import {
BrowserRouter,
Route,
Link,
Redirect
} from 'react-router-dom'
import Nav from '../../components/shared/nav'
import Footer from '../../components/shared/footer'
import CurrentAdmin from '../../services/currentAdmin'
export default class NavLayout extends React.Component {
constructor(props) {
super(props)
}
renderComponent(matchProps) {
if ((new CurrentAdmin).loggedIn()) {
return (
<div className='sr-page'>
<div className='container-fluid np'>
<div>
<Nav />
<this.props.component {...matchProps} {...this.props} />
<Footer />
</div>
</div>
</div>
)
} else {
return (
<Redirect to={{
pathname: '/password',
state: { from: this.props.location }
}}/>
)
}
}
render() {
return(
<Route exact={this.props.exact == undefined ? true : this.props.exact}
path={this.props.path}
render={matchProps => (this.renderComponent(matchProps))} />
)
}
}
<file_sep>/lib/playbasis/content.rb
module Playbasis
class Content < Model
def all(language = 'English')
http_get('/Content', { query_params: { language: language } })
end
end
end
<file_sep>/app/javascript/containers/sms.jsx
import React from 'react'
export default class Sms extends React.Component {
constructor(props) {
super(props)
}
render() {
return (
<div className='grey-bg page-sms'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>We'll contact you soon!</h2>
<h4>
Best Insurance helps you get the best
insurance quickly and easily!
</h4>
</div>
</div>
</div>
</div>
<div className="wrap-content p-l-15 p-r-15 container">
<div className='sr-page-box'>
<div className="container">
<div className='row'>
<div className='col-12'>
<div className='sr-page-box__white text-center'>
<h3>Check your phone</h3>
<p className="page-sms__content">Our offer and redeemed prize have been sent to your phone.</p>
<div className="icon-sms m-b-20"></div>
<a href="/" className="redeem-btn">OK</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
)
}
}
<file_sep>/app/javascript/components/shared/reward.jsx
import React from 'react';
export default class Reward extends React.Component {
constructor(props) {
super(props)
}
render() {
console.log(this.props)
return (
<div className="col-12 col-md-6">
<div className="item">
<div className="item-detail">
<div className="desc">
<div className="row">
<div className="col-md-12 col-xs-12 col-sm-12 col-lg-6">
<img className="item-cover" src={this.props.image}></img>
</div>
<div className="col-md-12 col-xs-12 col-sm-12 col-lg-6 item-desc">
<h4>{this.props.title}</h4>
<div>{this.props.description}</div>
<div className="item-bottom">
<h3 className="text-right">
{this.props.point} pts
</h3>
<a href={"/reward/" + this.props.goods_id} className="redeem-btn">Redeem</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
)
}
}
// <hr></hr>
// <p><strong><img alt="" src="https://images.pbapp.net/data/983f0ece60743bab0e2f379ade6583c8.png"></img></strong> <img alt="" src="https://images.pbapp.net/data/cab332d328cfba01fea3d75585117fbd.png"></img></p>
// <p><img alt="" src="https://images.pbapp.net/data/8ec355c7563b6cbf51a6085f4c57846f.png"></img> <big><strong>300 points</strong></big></p>
// <p><strong><big>Cinnabon® ClassNameic Roll</big></strong></p>
// <p><big>Warm dough, legendary Makara® Cinnamon, topped with rich cream cheese frosting. </big></p>
<file_sep>/app/javascript/services/currentUser.js
import _ from 'underscore'
import humps from 'humps'
class CurrentUser {
constructor() {
}
set(user) {
user = humps.camelizeKeys(user);
localStorage.setItem('currentUser.id', user.id)
localStorage.setItem('currentUser.email', user.email)
localStorage.setItem('currentUser.firstName', user.firstName)
localStorage.setItem('currentUser.lastName', user.lastName)
}
get() {
return {
id: localStorage.getItem('currentUser.id'),
email: localStorage.getItem('currentUser.email'),
firstName: localStorage.getItem('currentUser.firstName'),
lastName: localStorage.getItem('currentUser.lastName')
}
}
reset() {
localStorage.removeItem('currentUser.id')
localStorage.removeItem('currentUser.email')
localStorage.removeItem('currentUser.firstName')
localStorage.removeItem('currentUser.lastName')
}
deleteAllCookies(document) {
console.log(document)
var cookies = document.cookie.split("; ");
for (var i = 0; i < cookies.length; i++) {
var cookie = cookies[i];
var eqPos = cookie.indexOf("=");
var name = eqPos > -1 ? cookie.substr(0, eqPos) : cookie;
document.cookie = name + "=;expires=Thu, 01 Jan 1970 00:00:00 GMT";
}
}
}
export default CurrentUser
<file_sep>/app/javascript/utils/connector.js
import _ from 'underscore'
class Connector {
constructor() {
}
get(url, callback) {
fetch(url, {
method: 'GET',
credentials: "same-origin"
}).then(function (response) {
return response.json();
}).then(function (json) {
callback(json);
});
}
post(url, body, callback) {
fetch(url, {
method: 'POST',
credentials: "same-origin",
body: JSON.stringify(body),
headers: this.defaultHeaders(),
}).then(function (response) {
return response.json();
}).then(function (json) {
callback(json);
});
}
delete(url, callback) {
fetch(url, {
method: 'DELETE',
credentials: "same-origin",
headers: this.defaultHeaders(),
}).then(function (response) {
return response.json();
}).then(function (json) {
callback(json);
});
}
getCSRFToken() {
return _.find(document.getElementsByTagName("meta"), (meta) => {
return meta.name === "csrf-token"
}).content
}
defaultHeaders() {
return {
'X-CSRF-Token': this.getCSRFToken(),
'Accept': 'application/json',
'Content-Type': 'application/json'
}
}
}
export default Connector
<file_sep>/app/javascript/components/shared/greenFooter.jsx
import React from 'react'
import { render } from 'react-dom'
import { Link } from 'react-router-dom'
import logo from '../../assets/images/logo.png'
class GreenFooter extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<div>
<div className='sr-green-footer'>
<div className='row'>
<div className='col-9 col-lg-6 np'>
<div className='sr-green-footer__main'>
<div className='sr-green-footer__terms'>
<a href='#' className='btn btn-dark-blue sr-green-footer__language'>Language</a>
<br/>
Terms of business<br/>
Copyright © 2017 SwissRe. All rights reserved.
</div>
</div>
</div>
<div className='col-3 col-lg-6 np'>
<div className='sr-green-footer__social-icon-container'>
<div className='social-icon sr-green-footer__social-icon'><a href='#'><i className='fa fa-twitter'></i></a></div>
<div className='social-icon sr-green-footer__social-icon'><a href='#'><i className='fa fa-linkedin'></i></a></div>
<div className='social-icon sr-green-footer__social-icon sr-green-footer__social-icon--last'><a href='#'><i className='fa fa-youtube-play'></i></a></div>
</div>
</div>
</div>
</div>
</div>
);
}
}
export default GreenFooter
<file_sep>/app/javascript/containers/journeyQuestions.jsx
import React, { PropTypes } from 'react'
import Question from '../components/shared/question'
import moment from 'moment';
import Connector from '../utils/connector'
import CurrentUser from '../services/currentUser'
import Translations from '../services/translations'
import { Route, Redirect } from 'react-router'
import StepBar from '../components/shared/stepBar'
import Stepper from '../components/shared/stepper'
import 'whatwg-fetch'
const QUIZZ_1 = '58fdd6cad81e46ce778b456e';
const QUIZZ_2A = '58ff26a23b6fd2cb358b45e0';
const QUIZZ_2B = '59434c013b6fd24f2b8b456d';
const QUIZZ_3 = '58ff20e03b6fd2cb358b45df';
class JourneyQuestions extends React.Component {
constructor(props) {
super(props);
this.state = { redirect: '' }
this.renderQuestion = this.renderQuestion.bind(this);
this.getData = this.getData.bind(this);
}
onAnswer(answer) {
var _this = this;
_this.setState({ question: { component: "LOADING" }, redirect: answer.redirect });
(new Connector).post('/api/questions/' + _this.props.match.params.quizid + '/answer',
{
player_id: (new CurrentUser).get().id,
question_id: _this.state.question.question_id,
option_id: answer.option_id,
answer: answer.response
},
function (json) {
if (answer.redirect) {
_this.props.history.push(answer.redirect);
}
else {
_this.getData();
}
}
);
}
componentDidMount() {
this.getData();
}
getData() {
var _this = this;
(new Connector).get(
'/api/quizzes/' + _this.props.match.params.quizid + '/question' +
'?player_id=' + localStorage.getItem('currentUser.id'),
function (json) {
if (json.error) {
if (_this.props.match.params.quizid == QUIZZ_1) {
_this.props.history.push('/congratulation1');
} else if (_this.props.match.params.quizid == QUIZZ_2A) {
_this.props.history.push('/congratulation2');
} else if (_this.props.match.params.quizid == QUIZZ_2B) {
_this.props.history.push('/congratulation2');
} else {
_this.props.history.push('/congratulation2');
}
} else {
_this.setState({ question: json });
}
});
}
renderQuestion() {
return <Question {...this.state.question } quizzId={this.props.match.params.quizid} translations={this.props.translations} response={this.onAnswer.bind(this)} />;
}
render() {
return (
<div className='grey-bg'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>Journey Description</h2>
<h4>
Best Insurance helps you get the best
insurance quickly and easily!
</h4>
</div>
</div>
</div>
</div>
<Stepper />
<div className="wrap-content">
{this.renderQuestion()}
</div>
</div>
);
}
}
export default JourneyQuestions
<file_sep>/app/javascript/components/question/singleChoice.jsx
import React, {PropTypes} from 'react';
import Translations from '../../services/translations';
import CurrentLocal from '../../services/currentLocal'
class SingleChoice extends React.Component {
constructor(props) {
super(props);
this.state = {
question: props,
response: "",
option_id: null
}
this.handleOptionChange = this.handleOptionChange.bind(this);
}
componentDidMount() {
var _this = this;
$('input').iCheck({checkboxClass: 'icheckbox_flat-blue', radioClass: 'iradio_flat-blue'}).on('ifChecked', function(changeEvent) {
_this.handleOptionChange(changeEvent);
});
}
handleOptionChange(changeEvent) {
var gifUrl = this.props.componentValue.choices[changeEvent.target.value].image;
var image = document.getElementById('gif-image-' + changeEvent.target.value);
image.src = gifUrl
this.setState({
response: this.props.componentValue.choices[changeEvent.target.value].value,
option_id: this.props.componentValue.choices[changeEvent.target.value].id
}, () => {
this.props.response(this.state);
});
}
render() {
var local = (new CurrentLocal).get()
var questionsImage = [];
var questionsInput = [];
for (var i = 0; i <= this.props.componentValue.choices.length - 1; i++) {
var classname = "sr-question__label col-" + Math.round(Math.min((12 / this.props.componentValue.choices.length), 5))
questionsImage.push(
<label className={classname} key={i}>
<img className='sr-question__radio_image' id={'gif-image-' + i} src={this.props.componentValue.choices[i].description}/>
</label>
)
questionsInput.push(
<label className={classname} key={i}>
<input className='sr-question__radio' type="radio" name="gender" value={i} onChange={this.handleOptionChange}/>
<div className="label">{Translations.trans(this.props.translations[local.local], this.props.componentValue.choices[i].label)}</div>
</label>
)
}
return (
<div>
<div className="question-bg" style={{ background: "url(" + this.props.question_image + ")", backgroundSize: 'contain' }}>
{questionsImage}
</div>
<div>
{questionsInput}
</div>
</div>
);
}
}
export default SingleChoice
<file_sep>/app/controllers/api/questions_controller.rb
module Api
class QuestionsController < ApplicationController
def answer
result = client.questions.answer(params[:id], params[:player_id], params[:token], params[:question_id], params[:option_id], params[:answer] ? params[:answer] : nil)
if result[:body]['success'] == false
result = { message: result[:body]['message'] }
else
result = result[:body]['response']['result']
end
render json: result
end
end
end
<file_sep>/lib/playbasis/auth.rb
module Playbasis
class Auth < Model
def auth
request.call(:post,
uri: "/Auth",
body: {
api_key: @config.api_key,
api_secret: @config.api_secret
})
end
end
end
<file_sep>/app/javascript/containers/congratulation.jsx
import React, { PropTypes } from 'react'
import { Link } from 'react-router-dom'
import Timer from 'react-timer'
import AppStatus from '../services/appStatus'
import Wheel from '../assets/images/wheel.svg'
class Congratulation extends React.Component {
constructor(props) {
super(props);
this.state = {
timer: null,
counter: 15,
timerEnded: false
};
}
componentDidMount() {
let timer = setInterval(this.tick.bind(this), 1000);
this.setState({ timer });
}
componentWillUnmount() {
clearInterval(this.state.timer);
}
tick() {
var counter = this.state.counter;
if (counter > 0) {
this.setState({
counter: counter - 1
});
} else {
this.setState({
timerEnded: true
}, () => { this.props.history.push('/step'); });
}
}
getStep() {
return Number((new AppStatus).get().step) + 1;
}
render() {
var congratulation_title = "You successfully completed Section " + this.getStep() + ".";
var button = this.state.timerEnded ?
<Link to={`/step`}
className='col-12 btn btn-accent btn-accent--large-text sr-congratulation__button'>Continue</Link> :
<Link to={this.props.link}
className='col-12 btn btn-accent btn-accent--large-text sr-congratulation__button'>Play ({this.state.counter})</Link>
return (
<div className='grey-bg'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>Journey Description</h2>
<h4>Best Insurance helps you get the best <br /> insurance quickly and easily!</h4>
</div>
</div>
</div>
</div>
<div className="wrap-content container m-t--100">
<div className='sr-question__container p-15'>
<div className='row' style={{ maxWidth: "400px", margin: "auto" }}>
<div className='col-12'>
<div>
<div>
<h2 className="sr-question__accent__title">Congratulations!</h2>
</div>
<div>
<text>{congratulation_title}</text>
</div>
<div className="sr-congratulation__container">
<div className="sr-congratulation__image">
<img src={Wheel} className="sr-congratulation__image" />
</div>
<div className="sr-congratulation__image_text">
<text className="sr-congratulation__main_text">Play a mini game, win additional points, and redeem your points by completing all missions.</text>
</div>
</div>
</div>
</div>
<div className="col-12">
{button}
</div>
</div>
</div>
</div>
</div>
);
}
}
export default Congratulation
<file_sep>/lib/playbasis/question.rb
module Playbasis
class Question < Model
COMPONENT = {
ADDRESS: 'ADDRESS',
BIRTHDAY: 'BIRTHDAY',
SC: 'SC',
SCAS: 'SCAS',
SLIDER: 'SLIDER',
BINARY: 'BINARY',
ERROR: 'ERROR'
}
def get(quiz_id, player_id)
http_get("Quiz/#{quiz_id}/question", { query_params: { player_id: player_id, random: 2 } })
end
def answer(quiz_id, player_id, token, question_id, option_id, answer)
http_post("Quiz/#{quiz_id}/answer", {
player_id: player_id,
token: access_token,
question_id: question_id,
option_id: option_id,
answer: answer
})
end
end
end
<file_sep>/app/javascript/components/shared/footer.jsx
import React from 'react'
import { render } from 'react-dom'
import { Link } from 'react-router-dom'
import logo from '../../../../public/images/swissre_logo.png'
class Footer extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<div className="bg-main-blue sr-footer">
<div className="container">
<div className=''>
<div className='row'>
<div className='col-9'>
<div className='row mb-10'>
<div className='col-12'>
<img src={logo} alt="logo" className='sr-footer__logo-footer' />
<div className='clearfix'></div>
</div>
</div>
<div className='row mb-10'>
<div className='col-12'>
{/*<Link to={`/local`} className='btn btn-dark-blue btn-sm lang'>
Language
</Link>*/}
</div>
</div>
<div className='row'>
<div className='col-12'>
<p className='sr-footer__terms'>Terms of business<br />Copyright © 2017 SwissRe. All rights reserved.</p>
</div>
</div>
</div>
<div className='col-3'>
<div className='sr-footer__social-icon-container'>
<div className='social-icon sr-footer__social-icon'><i className='fa fa-twitter'></i></div>
<div className='social-icon sr-footer__social-icon'><i className='fa fa-facebook'></i></div>
<div className='social-icon sr-footer__social-icon'><i className='fa fa-youtube'></i></div>
</div>
</div>
</div>
</div>
</div>
</div>
);
}
}
export default Footer
<file_sep>/app/javascript/components/shared/stepBar.jsx
import React from 'react'
import AppStatus from '../../services/appStatus'
export default class StepBar extends React.Component {
constructor(props) {
super(props);
this.state = {
step: {
active1: "",
active2: "",
active3: "",
active4: ""
}
}
}
componentDidMount() {
var step = (new AppStatus).get().step;
var steps = {};
for (var i = 1; i <= step; i++) {
var key = "active"+i;
steps[key] = "active"
}
this.setState({ step: steps })
}
render() {
return (
<div className="step-bar">
<ul className="step-list">
<li className={"step " + this.state.step.active1}>
<div className="circle">1</div>
</li>
<li className={"step " + this.state.step.active2}>
<div className="circle">2</div>
</li>
<li className={"step " + this.state.step.active3}>
<div className="circle">3</div>
</li>
<li className={"step " + this.state.step.active4}>
<div className="circle">4</div>
</li>
</ul>
</div>
)
}
}
<file_sep>/app/javascript/services/currentLocal.js
import _ from 'underscore'
class CurrentLocal {
constructor() {
}
localEnum() {
return {
EN: "en",
CH: "ch",
TA: "tw",
HK: "hk",
SNGP: "sg",
ML: "my",
TH: "th",
VIET: "vn",
PH: "ph",
KOR: "kr",
JPN: "jp",
ID: "id",
IND: "in"
}
}
set(local) {
localStorage.setItem('currentUser.local', local)
}
setCountry(local) {
localStorage.setItem('currentUser.country', local)
}
get() {
return {
local: localStorage.getItem('currentUser.local') == null ? (new CurrentLocal).localEnum().EN : localStorage.getItem('currentUser.local'),
country: localStorage.getItem('currentUser.country') == null ? (new CurrentLocal).localEnum().EN : localStorage.getItem('currentUser.country')
}
}
reset() {
localStorage.removeItem('currentUser.local')
localStorage.removeItem('currentUser.country')
}
}
export default CurrentLocal
<file_sep>/lib/playbasis/configuration.rb
module Playbasis
class Configuration
OPTIONS = {
api_key: -> { ENV['PLAYBASIS_API_KEY'] },
api_secret: -> { ENV['PLAYBASIS_API_SECRET'] },
base_url: 'https://api.pbapp.net',
storage: {}
}.freeze
attr_accessor(*OPTIONS.keys)
def initialize
OPTIONS.each do |name, val|
value = val.respond_to?(:lambda?) && val.lambda? ? val.call : val
instance_variable_set("@#{name}", value)
end
end
def [](option)
send(option)
end
def to_hash
OPTIONS.keys.each_with_object({}) do |option, hash|
hash[option.to_sym] = send(option)
end
end
def merge(options)
OPTIONS.keys.each do |name|
instance_variable_set("@#{name}", options[name]) if options[name]
end
end
end
end
<file_sep>/app/javascript/containers/step.jsx
import React, { PropTypes } from 'react'
import { Link } from 'react-router-dom'
import Stepper from '../components/shared/stepper'
import Loading from '../components/shared/loading'
import logo from '../assets/images/logo.png'
import lock from '../assets/images/lock.png'
import Connector from '../utils/connector'
import Translations from '../services/translations'
import StepBar from '../components/shared/stepBar'
import CurrentLocal from '../services/currentLocal'
import StepController from '../services/step'
import ReactLoading from 'react-loading';
import Lodash from 'lodash'
import AppStatus from '../services/appStatus'
class LastRow extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<Link to={'/welldone/'} className='sr-step__link'>
<StepRow name='Check out offers' description='Section final' selected={this.props.step > this.props.quizzes.length} displaySeparator={false} />
</Link>
)
}
}
class StepRow extends React.Component {
constructor(props) {
super(props);
}
render() {
var boxClass = this.props.selected ? "sr-step__box__container selected" : "sr-step__box__container";
var dividerClass = this.props.selected ? "sr-step__box__container selected" : "sr-step__box__container";
var divider = this.props.displaySeparator ? <div className={this.props.selected ? "sr-step__vertical-line selected" : "sr-step__vertical-line"} /> : <div />;
var roundedIcon = this.props.selected ? "sr-step-image__rounded-circle green" : "sr-step-image__rounded-circle bleu";
var icon = this.props.selected ? "sr-step-image__icon icon_gift" : "sr-step-image__icon icon_lock";
return (
<div className=''>
<div className='sr-step__box__overlay'>
<div className={boxClass}>
<div className='media'>
<div className='d-flex align-self-center mr-3'>
<div className={roundedIcon}>
<div className={icon}></div>
</div>
</div>
<div className='media-body sr-step__table'>
<div className='sr-step__table_cell'>
<h4>{this.props.description}</h4>
<h2>{this.props.name}</h2>
</div>
</div>
</div>
</div>
{divider}
</div>
</div>
)
}
}
class Step extends React.Component {
constructor(props) {
super(props);
this.state = {
quizzes: [],
step: 0,
loading: true
}
this.getLastQuizz.bind(this)
this.getType.bind(this)
}
getLastQuizz() {
var _this = this;
(new Connector).get('/api/quizzes/player/' + localStorage.getItem('currentUser.id') + '/recent',
function (json) {
_this.setState(
{
step: (new StepController).getQuizzCompletedIndex(_this.state.quizzes, Lodash.isEmpty(json) ? null : json),
loading: false
}, () => { _this.forceUpdate() })
});
}
componentDidMount() {
var _this = this;
_this.setState({ loading: true });
(new Connector).get('/api/quizzes?tags='+ this.getType(), function (json) {
_this.setState({ quizzes: json }, () => { _this.getLastQuizz(); })
});
}
getType(){
if(this.props.match.params.type != null){
var appStatus = (new AppStatus).get();
appStatus.type = this.props.match.params.type;
(new AppStatus).set(appStatus);
return this.props.match.params.type;
}else{
return (new AppStatus).get().type;
}
}
render() {
var body = null;
var local = (new CurrentLocal).get()
let lastRow = null;
if (this.state.step == this.state.quizzes.length) {
lastRow = <LastRow {... this.state} />
} else {
lastRow = <StepRow name='Check out offers' description='Section final' selected={this.state.step > this.state.quizzes.length} displaySeparator={false} />
}
if (this.props.translations && this.state.loading) {
body = (<div className='sr-step__container'>
<div style={{display: 'table', margin: '44px auto'}}>
<ReactLoading type={"spin"} color={"#FC3DBF"} height='124' width='124'/>
</div>
</div>)
} else if (this.props.translations && !this.state.loading) {
body = (
<div className='sr-step__container'>
{
Lodash.orderBy(this.state.quizzes, ['weight'], ['asc']).map((quiz, i) => {
if (this.state.step == i) {
return (
<Link key={i} to={'/questions/' + quiz.quiz_id} className='sr-step__link'>
<StepRow name={Translations.trans(this.props.translations[local.local], quiz.name)} description={Translations.trans(this.props.translations[local.local], quiz.description)} selected={this.state.step > i} displaySeparator={true}
translations={this.props.translations} />
</Link>
)
} else {
return (
<StepRow name={Translations.trans(this.props.translations[local.local], quiz.name)} description={Translations.trans(this.props.translations[local.local], quiz.description)} key={i} selected={this.state.step > i} displaySeparator={true}
translations={this.props.translations} />
)
}
})
}
{lastRow}
</div>
)
} else {
body = (<Loading />)
}
return (
<div className='grey-bg'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>Insurance Journey</h2>
<h4>
Best Insurance helps you get the best
insurance quickly and easily!
</h4>
</div>
</div>
</div>
</div>
<Stepper {... this.state}/>
<div className="wrap-content p-l-15 p-r-15">
{body}
</div>
</div>
);
}
}
export default Step
<file_sep>/lib/playbasis/model.rb
module Playbasis
class Model
def initialize(config)
@config = config
end
protected
def http_get(uri, opts = { query_params: {} })
request.call(:get, uri: uri, query_params: {
api_key: @config.api_key,
api_secret: @config.api_secret,
}.merge(opts[:query_params]))
end
def http_post(uri, body)
request.call(:post, uri: uri, body: body)
end
private
def access_token
@access_token ||= Playbasis::AccessToken.new(@config).get
end
def request
Request.new(@config)
end
end
end
<file_sep>/app/javascript/components/shared/infoBox.jsx
import React, { PropTypes } from 'react'
import { ViewPager, Frame, Track, View } from 'react-view-pager'
class InfoBox extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<View>
<div className="sr-infoBox__corner col-10" style={{border: "2px solid " + this.props.color}}>
<div>
<img src={this.props.icon} className="sr-infoBox__img" />
</div>
<div className="sr-infoBox__text">
{ (this.props.badge) ?
(
<div className="row">
<div className="col-md-5"><img className="sr-infoBox__badge" src={this.props.badge} /></div>
<div className="col-md-7"><text>{this.props.text}</text></div>
</div>
)
:
(
<text>{this.props.text}</text>
)
}
</div>
</div>
</View>
);
}
}
export default InfoBox
<file_sep>/app/javascript/containers/rewardDetail.jsx
import React from 'react'
import Connector from '../utils/connector'
export default class RewardDetail extends React.Component {
constructor(props) {
super(props)
this.state = {
good: {
image: "",
title: "",
desc: "",
redeem: { point: { point_value: "" } }
}
}
this.getData = this.getData.bind(this);
}
componentDidMount() {
this.getData();
}
getData() {
var _this = this;
(new Connector).get('/api/goods/' + _this.props.match.params.id,
function (json) {
_this.setState(
{ good: json }, () => { })
});
}
render() {
return (
<div className='grey-bg'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>Get your Reward!</h2>
<h4>
Best Insurance helps you get the best
insurance quickly and easily!
</h4>
</div>
</div>
</div>
</div>
<div className="wrap-content p-l-15 p-r-15">
<div className='sr-page-box'>
<div className="container">
<div className='row'>
<div className='col-12'>
<div className='sr-page-box__white'>
<div className="row m-t-15 item">
<div className="col-md-12 col-sm-12">
<div className="desc" dangerouslySetInnerHTML={{ __html: this.state.good.description }} />
<a href="/sms" className="redeem-btn">Redeem</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
)
}
}
<file_sep>/app/javascript/containers/payment.jsx
import React, { PropTypes } from 'react'
import { render } from 'react-dom'
import { Link } from 'react-router-dom'
import creditCard from '../assets/images/credit_cards.svg'
class Total extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<div className="sr-payment__total__container">
<div className='row sr-payment__total__product'>
<div className='col-6'>
<div className="align-left">
<text>Insurance Product</text>
</div>
</div>
<div className='col-6'>
<div className="align-right">
<text>$20,000</text>
</div>
</div>
</div>
<div className="sr-payment__total__separator" />
<div className='row sr-payment__total__total'>
<div className='col-6'>
<div className="align-left">
<text>Total</text>
</div>
</div>
<div className='col-6'>
<div className="align-right">
<text>$20,000</text>
</div>
</div>
</div>
</div>
);
}
}
class CreditCard extends React.Component {
constructor(props) {
super(props);
this.state = {
number: '',
expirationMonth: '',
expirationYear: '',
ccv: '',
name: ''
}
}
onNumberChange(e) {
this.setState({ number: e.target.value });
}
onExpirationMonthChange(e) {
this.setState({ expirationMonth: e.target.value });
}
onExpirationYearChange(e) {
this.setState({ expirationYear: e.target.value });
}
onCCVChange(e) {
this.setState({ ccv: e.target.value });
}
onNameChange(e) {
this.setState({ name: e.target.value });
}
render() {
return (
<form>
<div className='row'>
<div className='col-12'>
<div className="form-group">
<input type='text' name='cardNumber'
value={this.state.number}
onChange={this.onNumberChange.bind(this)}
className='form-control sr-payment__credit-card__input' placeholder='Card Number' />
</div>
</div>
</div>
<div className='row'>
<div className='col-12 '>
<div className="form-group">
<div className="sr-payment__credit-card__input">
<div className="row ">
<text className="col-6 col-sm-6 sr-payment__credit-card__hint">Expiration</text>
<input type='number' name='expiration'
value={this.state.expirationMonth}
onChange={this.onExpirationMonthChange.bind(this)}
className='col-3 col-sm-3 sr-payment__credit-card__picker' placeholder='MM' />
<input type='number' name='expiration'
value={this.state.expirationYear}
onChange={this.onExpirationYearChange.bind(this)}
className='col-3 col-sm-3 sr-payment__credit-card__picker' placeholder='YYYY' />
</div>
</div>
</div>
</div>
</div>
<div className='row'>
<div className='col-12'>
<div className="form-group">
<input type='text' name='credit-card'
value={this.state.ccv}
onChange={this.onCCVChange.bind(this)}
className='form-control sr-payment__credit-card__input' placeholder='CCV' />
</div>
</div>
</div>
<div className='row'>
<div className='col-12'>
<div className="form-group">
<input type='text' name='name'
value={this.state.name}
onChange={this.onNameChange.bind(this)}
className='form-control sr-payment__credit-card__input' placeholder='Name on card' />
</div>
</div>
</div>
</form>
);
}
}
class Payment extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<div className='grey-bg'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>Check<br />your payment</h2>
<h4>Best Insurance helps you get the best <br /> insurance quickly and easily!</h4>
</div>
</div>
</div>
</div>
<div className="wrap-content container m-t--100">
<div className='sr-question__container p-15'>
<div className='row'>
<div className='col-12'>
<div>
<div>
<h2 className="sr-question__accent__title">Payment</h2>
</div>
<div>
<img src={creditCard} className="sr-payment__image" />
</div>
<div>
<Total />
</div>
<div>
<CreditCard />
</div>
</div>
<div>
<Link
to={`/reward`}
className='sr-page-box__btn btn btn-green btn-lg btn-green--large-text fw'>
Continue
</Link>
</div>
</div>
</div>
</div>
</div>
</div>
);
}
}
export default Payment
<file_sep>/app/javascript/components/shared/imageButton.jsx
import React, { PropTypes } from 'react'
class ImageButton extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<div className="ribtn-container">
<img src={this.props.image} className="ribtn-image" />
<div className="ribtn-middle"/>
</div>
);
}
}
export default ImageButton<file_sep>/app/javascript/components/shared/noNav.jsx
import React from 'react'
import { render } from 'react-dom'
import { Link } from 'react-router-dom'
import logo from '../../../../public/images/swissre_logo.png'
import logout from '../../../../public/images/logout.png'
import CurrentUser from '../../services/currentUser'
import CurrentLocal from '../../services/currentLocal'
import AppStatus from '../../services/appStatus'
import Connector from '../../utils/connector'
class NoNav extends React.Component {
constructor(props) {
super(props);
this.toggleUserModal = this.toggleUserModal.bind(this)
this.logout = this.logout.bind(this)
this.state = {
showModal: false
}
}
toggleUserModal(e) {
e.preventDefault();
this.setState({
showModal: !this.state.showModal
})
}
logout(e) {
var _this = this;
e.preventDefault();
(new Connector).delete('/api/sessions', function(json) {
if(_.isEmpty(json.errors)) {
(new CurrentUser).reset();
(new CurrentLocal).reset();
(new AppStatus).reset();
(new CurrentUser).deleteAllCookies(document);
window.location.href = '/'
} else {
// Handle errors
}
})
}
render() {
var user = null;
var modal = null;
var current = (new CurrentUser).get()
if(this.state.showModal) {
modal = (
<div className='logout-modal'>
<ul>
<li><a href='#' onClick={this.logout}>Sign Out</a></li>
</ul>
</div>
)
}
if (current.id) {
user = (
<div className='col-6'>
<div className='sr-nav__user'>
<div className='sr-nav__user-image'>
<svg width="50px" height="50px" viewBox="0 0 50 50" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlnsXlink="http://www.w3.org/1999/xlink">
<defs />
<g id="Page-1" stroke="none" strokeWidth='01' fill="none" fillRule="evenodd">
<g id='03' transform="translate(-258.000000, -122.000000)" fill="#FFFFFF">
<path d="M283.087057,160.55453 L283.087057,160.554147 L282.911602,160.554147 L270.6389,160.554147 C270.6389,151.579152 278.521379,151.581258 280.270573,149.234459 L280.470738,148.16411 C278.013209,146.918881 276.278382,143.916234 276.278382,140.404463 C276.278382,135.777904 279.287944,132.026702 282.999521,132.026702 C286.711098,132.026702 289.720661,135.777904 289.720661,140.404463 C289.720661,143.886353 288.016672,146.870229 285.591898,148.135378 L285.819837,149.351492 C287.738931,151.584515 295.359185,151.730088 295.359185,160.55453 L283.087057,160.55453 Z M282.999583,122 C269.192853,122 258,133.193112 258,146.99975 C258,160.806055 269.192853,172 282.999583,172 C296.806313,172 308,160.806055 308,146.99975 C308,133.192779 296.806313,122 282.999583,122 L282.999583,122 Z" id="Fill-1" />
</g>
</g>
</svg>
</div>
<h6 className='sr-nav__user-name'>Hi, {current.firstName} <div display="inline" onClick={this.logout}><img src={logout} alt="logo" className='logout-button' /> Log out</div></h6>
</div>
</div>
)
}
return (
<div className='sr-nav'>
<div className="container">
<div className='row no-gutters'>
<div className='col-6'>
<div className='sr-nav__container'>
<Link to="/">
<img src={logo} alt="logo" className='sr-nav__logo' />
</Link>
<div className='clearfix'></div>
</div>
</div>
{user}
</div>
</div>
</div>
);
}
}
export default NoNav
<file_sep>/app/javascript/containers/adminLogout.jsx
import React, { PropTypes } from 'react'
import { Route, Redirect } from 'react-router'
import CurrentAdmin from '../services/currentAdmin'
class AdminLogout extends React.Component {
constructor(props) {
super(props);
}
componentWillMount() {
(new CurrentAdmin).logout()
window.location.href = '/'
}
render() {
return null;
}
}
export default AdminLogout
<file_sep>/app/javascript/containers/passwordProtection.jsx
import React, { PropTypes } from 'react'
import Connector from '../utils/connector'
import { Route, Redirect } from 'react-router'
import _ from 'underscore'
import CurrentUser from '../services/currentUser'
import Nav from '../components/shared/nav'
import GreenFooter from '../components/shared/greenFooter'
import CurrentAdmin from '../services/currentAdmin'
import 'whatwg-fetch'
class PasswordProtection extends React.Component {
constructor(props) {
super(props);
this.state = {
password: '',
error: null
}
}
onPasswordChange(e) {
this.setState({ password: e.target.value });
}
handleSubmit(e) {
e.preventDefault()
if ((new CurrentAdmin).login(this.state.password)) {
window.location.href = '/'
} else {
this.setState({ error: 'Invalid Password' })
}
}
render() {
var error = null;
if (this.state.error) {
error = (
<div className='row'>
<div className='col-12'>
<div className='sr-password__error'>{this.state.error}</div>
</div>
</div>
)
}
return (
<div className='sr-page'>
<div className='container-fluid np'>
<div>
<Nav />
<div className='grey-bg'>
<div className='sr-password'>
<div className='row'>
<div className='col-12 col-md-6 offset-md-3 col-lg-4 offset-lg-7'>
<form onSubmit={this.handleSubmit.bind(this)}>
<div className='row'>
<div className='col-12'>
<div className="form-group">
<input type='password' name='password' value={this.state.password} onChange={this.onPasswordChange.bind(this)}
className='form-control sr-password__input' placeholder='<PASSWORD>'/>
</div>
</div>
</div>
{error}
<div className='row'>
<div className='col-12'>
<button type='submit' className='sr-page-box__btn btn btn-dark-blue btn-lg btn-green--large-text fw'>Log In</button>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
<GreenFooter />
</div>
</div>
</div>
);
}
}
export default PasswordProtection
<file_sep>/README.md
# README
## Starting the app
Start the Rails server:
```
rails s
```
Then start the Webpack dev server:
```
./bin/webpack-dev-server
```
## Deployment
Precompile the assets.
```
RAILS_ENV=production rails assets:precompile
```
Commit them and push to production.
<file_sep>/app/javascript/components/question/singleChoiceAndSlider.jsx
import React, { PropTypes } from 'react'
import ReactBootstrapSlider from 'react-bootstrap-slider';
import SingleChoice from '../../components/question/singleChoice';
import Slider from '../../components/question/slider';
import Translations from '../../services/translations';
class SingleChoiceSlider extends React.Component {
constructor(props) {
super(props);
this.state = {
response: {
radio: "",
slider: 0
}
}
}
singleChoiceChangeValue(event) {
var response = this.state.response;
response.radio = event;
this.setState({ response: response }, () => { this.props.response(this.state.response) });
}
sliderChangeValue(event) {
var response = this.state.response;
response.slider = event;
this.setState({ response: response }, () => { this.props.response(this.state.response) });
}
render() {
return (
<div>
<SingleChoice {...this.props} translations={this.props.translations} response={this.singleChoiceChangeValue.bind(this)} />
<Slider {...this.props} translations={this.props.translations} response={this.sliderChangeValue.bind(this)} />
</div>
);
}
}
export default SingleChoiceSlider<file_sep>/lib/playbasis/good.rb
module Playbasis
class Good < Model
def all
http_get('/Goods')
end
def get(id)
http_get("/Goods/#{id}")
end
end
end
<file_sep>/lib/playbasis/request.rb
module Playbasis
class Request
def initialize(config)
@config = config
end
def call(method, uri:, body: nil, query_params: nil, token: nil,
auth: true, conn: new_conn, content_type: nil)
options = { uri: uri, conn: conn }
options[:body] = body if body
if auth && token
options[:body] ||= {}
options[:body][:token] = token
end
options[:query_params] = query_params if query_params
options[:content_type] = content_type if content_type
resp = send(method, options)
begin
{ status: resp.status, body: JSON.parse(resp.body) }
rescue JSON::ParserError
{ status: resp.status, body: {} }
end
end
def get(uri:, query_params: nil, conn: new_conn)
conn.get do |req|
req.url uri
req.params = query_params if query_params
end
end
def post(uri:, body:, conn: new_conn, content_type: nil)
content_type ||= 'application/x-www-form-urlencoded' #'application/json'
conn.post do |req|
req.url uri
req.headers['Content-Type'] = content_type
req.body = content_type == 'application/json' ? body.to_json : body
end
end
private
def new_conn
@conn = Faraday.new(url: @config.base_url)
end
end
end
<file_sep>/app/controllers/api/quizzes_controller.rb
module Api
class QuizzesController < ApplicationController
def index
result = client.quizzes.all(params[:tags])
result = result[:body]['response']['result']
render json: result
end
def recent
result = client.quizzes.recent(params[:id])
result = result[:body]['response']['result']
render json: result
end
def details
result = client.quizzes.get(params[:id])
result = result[:body]['response']['result']
render json: result
end
def question
result = client.questions.get(params[:id], params[:player_id])
result = result[:body]['response']['result']
if !result.nil?
question = result['question']
component = Playbasis::Question::COMPONENT[:ERROR]
if result['question'].include? "#"
question_arr = result['question'].split('#')
question = question_arr.last
component = question_arr.first
end
json_data = {
component: component,
title: '',
question_id: result['question_id'],
question: question,
question_image: result['question_image'],
defaultValue: result['default_answer'],
componentValue: {
choices: []
}
}
if result['options']
result['options'].each do |option|
json_data[:componentValue][:choices] << {
id: option['option_id'],
label: option['option'],
value: option['option_id'],
image: option['option_image'],
range_interval: option['range_interval'],
min: option['range_min'],
max: option['range_max'],
description: option['description'],
}
end
end
else
json_data = {error: "no data"}
end
render json: json_data
end
end
end
<file_sep>/app/javascript/components/shared/reason.jsx
import React from 'react';
export default class Reason extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<div className="row">
<div className="col-1 col-md-2">
</div>
<div className="col-10 col-md-8">
<div className='sr-reasons__container'>
<img className="sr-reasons__image" src={this.props.image} />
<div className="sr-reasons__textbox card-text">
<text className="sr-reasons__title">{this.props.title}</text>
<text className="sr-reasons__content">{this.props.content}</text>
</div>
</div>
</div>
<div className="col-1 col-md-10">
</div>
</div>
)
}
}<file_sep>/config/initializers/playbasis.rb
Playbasis.configure do |config|
config.api_key = '1639882295'
config.api_secret = '<KEY>'
end
<file_sep>/app/javascript/components/shared/benefit.jsx
import React from 'react'
export default class Benefit extends React.Component {
render() {
return (
<div className="circle">
<div className={this.props.icon}></div>
<div className="text benefit__text">{this.props.title}</div>
</div>
)
}
}
<file_sep>/config/routes.rb
Rails.application.routes.draw do
namespace :api do
get '/contents', to: 'contents#index'
get '/quizzes', to: 'quizzes#index'
get '/goods', to: 'goods#all'
get '/goods/:id', to: 'goods#details'
get '/quizzes/:id/details', to: 'quizzes#details'
get '/quizzes/:id/question', to: 'quizzes#question'
get '/quizzes/player/:id/recent', to: 'quizzes#recent'
post '/questions/:id/answer', to: 'questions#answer'
delete '/sessions', to: 'sessions#destroy'
resources :users, only: [:create]
end
get '*path', :to => 'pages#home'
root to: 'pages#home'
end
<file_sep>/lib/playbasis/quiz.rb
module Playbasis
class Quiz < Model
def all(tags)
http_get('/Quiz/list',{query_params: {tags:tags}})
end
def recent(id)
http_get("Quiz/player/#{id}/1")
end
def get(id)
http_get("/Quiz/#{id}/detail")
end
end
end
<file_sep>/app/javascript/components/question/binary.jsx
import React, {PropTypes} from 'react'
import Translations from '../../services/translations';
import CurrentLocal from '../../services/currentLocal'
import Connector from '../../utils/connector'
import { Route, Redirect } from 'react-router'
class Binary extends React.Component {
constructor(props) {
super(props);
this.state = {
response: null,
option_id: null,
quizz: {
name: "",
description: ""
},
redirect: '',
}
this.renderImage = this.renderImage.bind(this);
this.getData = this.getData.bind(this);
}
componentDidMount() {
this.getData();
var _this = this;
$('input').iCheck({
checkboxClass: 'icheckbox_flat-blue',
radioClass: 'iradio_flat-blue'
}).on('ifChecked', function (changeEvent) {
_this.handleChange(changeEvent);
});
}
handleChange(event) {
console.log(event)
var local = (new CurrentLocal).get()
var gifUrl = this.props.componentValue.choices[event.target.value].image;
var image = document.getElementById('gif-image-' + event.target.value);
image.src = gifUrl
var choiceId = event.target.dataset.choiceId;
var val = Translations.trans((this.props.translations ? this.props.translations[local.local] : ''), this.props.componentValue.choices[event.target.value].label).toLowerCase();
var redirect = '';
if(val.indexOf('yes') >= 0) {
redirect = '/thankyou'
}
this.setState({
redirect: redirect,
response: this.props.componentValue.choices[choiceId].value,
option_id: this.props.componentValue.choices[choiceId].id
}, () => {
this.props.response(this.state);
});
}
getData() {
var _this = this;
(new Connector).get('/api/quizzes/' + _this.props.quizzId + '/details' + '?player_id=' + localStorage.getItem('currentUser.id'), function(json) {
_this.setState({quizz: json});
});
}
renderImage() {
if (this.props.question_image != null && !this.props.question_image.includes("no_image")) {
// return <img className="sr-slider__image" src={this.props.question_image;}/>
return this.props.question_image;
}
}
render() {
var local = (new CurrentLocal).get()
var questionsImage = [];
var questionsInput = [];
for (var i = 0; i <= this.props.componentValue.choices.length - 1; i++) {
questionsImage.push(
<label key={i} className="col-6">
<img className='sr-question__radio_image' id={'gif-image-' + i} src={this.props.componentValue.choices[i].description} />
</label>
)
questionsInput.push(
<label key={i} className="col-6">
<input className='sr-question__radio' type="radio" name="gender" data-choice-id={i} value={i} />
<div className="label">{Translations.trans((this.props.translations ? this.props.translations[local.local] : ''), this.props.componentValue.choices[i].label)}</div>
</label>
)
}
return (
<div className='sr-question__container'>
<div>
<div>
<div style={{ paddingBottom: "30px" }}>
<div className="question-bg" style={{ background: "url(" + this.renderImage() + ")", backgroundSize: 'contain' }}>
{questionsImage}
</div>
<div>
{questionsInput}
</div>
</div>
</div>
</div>
</div>
);
}
}
export default Binary
<file_sep>/lib/playbasis/access_token.rb
module Playbasis
class AccessToken
attr_accessor :config, :storage
def initialize(config)
@config = config
@storage = @config.storage
end
def get
auth = storage['authentication']
# Reuse the cached token while it is still valid (assumes date_expire is a parseable datetime string).
return auth['token'] if auth && Time.parse(auth['date_expire'].to_s) > Time.now
auth = Auth.new(@config).auth[:body]['response']
storage['authentication'] = auth
auth['token']
end
end
end
<file_sep>/app/javascript/containers/login.jsx
import React, { PropTypes } from 'react'
import { render } from 'react-dom'
import { Link } from 'react-router-dom'
import Connector from '../utils/connector'
import { Route, Redirect } from 'react-router'
import _ from 'underscore'
import CurrentUser from '../services/currentUser'
import 'whatwg-fetch'
class Login extends React.Component {
constructor(props) {
super(props);
this.state = {
email: '',
firstName: '',
lastName: '',
redirect: false
}
}
onEmailChange(e) {
this.setState({ email: e.target.value });
}
onFirstNameChange(e) {
this.setState({ firstName: e.target.value });
}
onLastNameChange(e) {
this.setState({ lastName: e.target.value });
}
handleSubmit(e) {
e.preventDefault()
var _this = this;
var data = {
user: {
email: this.state.email,
first_name: this.state.firstName,
last_name: this.state.lastName
}
}
var connector = new Connector
connector.post('/api/users', data, function (json) {
// Handle result
if (_.isEmpty(json.errors)) {
(new CurrentUser).set(json)
_this.setState({ redirect: true })
} else {
// Handle errors
}
})
}
render() {
if (this.state.redirect) {
return <Redirect push to={"/validateSms/" + this.props.match.params.type + "/" + this.state.email} />;
}
return (
<div className='grey-bg'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>Journey Description</h2>
<h4>
Best Insurance helps you get the best insurance fast and easily!
</h4>
</div>
</div>
</div>
</div>
<div className="wrap-content container">
<div className='sr-page-box'>
<div className='row'>
<div className='col-12'>
<div className='sr-page-box__container'>
<h3>
Complete the steps<br />
Unlock the gift box<br />
Get a personalized pricing
</h3>
</div>
</div>
</div>
<div className='row'>
<div className='col-12'>
<div className='sr-page-box__container'>
<form onSubmit={this.handleSubmit.bind(this)}>
<div className='row'>
<div className='col-6'>
<div className="form-group">
<input type='text' name='firstName' value={this.state.firstName} onChange={this.onFirstNameChange.bind(this)} className='form-control' placeholder='<NAME>' />
</div>
</div>
<div className='col-6'>
<div className="form-group">
<input type='text' name='lastName' value={this.state.lastName} onChange={this.onLastNameChange.bind(this)} className='form-control' placeholder='<NAME>' />
</div>
</div>
</div>
<div className='row'>
<div className='col-12'>
<div className="form-group">
<input type='tel' name='phone' value={this.state.email} onChange={this.onEmailChange.bind(this)} className='form-control' placeholder='Mobile no' />
</div>
</div>
</div>
<div className='row'>
<div className='col-12'></div>
</div>
<div className='row'>
<div className='col-10 offset-1'>
<Link className="button-224 btn btn-dark-blue btn-lg btn-dark-blue--large-text" to={"/"}>Back</Link>
<button type='submit' className='button-224 btn btn-green btn-lg btn-green--large-text'>Start</button>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
);
}
}
export default Login
<file_sep>/app/javascript/containers/winning.jsx
import React, { PropTypes } from 'react'
import { Link } from 'react-router-dom'
class Winning extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<div className='grey-bg'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>{this.props.title}</h2>
<h4>Best Insurance helps you get the best <br /> insurance fast and easily!</h4>
</div>
</div>
</div>
</div>
<div className="wrap-content container m-t--100">
<div className='sr-question__container p-15'>
<div className='row'>
<div className='col-12'>
<div>
<div>
<h2 className="sr-question__accent__title">{this.props.subtitle}</h2>
</div>
<div>
<text>{this.props.match.params.text + " points"}</text>
</div>
<div>
<img className="sr-winning__image" src={this.props.image} />
</div>
</div>
<div>
<Link to={`/step`} className='redeem-btn'>
Continue
</Link>
</div>
</div>
</div>
</div>
</div>
</div>
);
}
}
export default Winning
<file_sep>/app/controllers/api/users_controller.rb
module Api
class UsersController < ApplicationController
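# Registers the player with Playbasis; error_code '0201' appears to mean the player already exists, in which case we fall back to a login call.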
def create
@user = User.new(user_params)
if @user.valid?
result = client.users.register(@user.id, @user.email, @user.first_name, @user.last_name)
if result[:body]['error_code'] == '0201'
result = client.users.login(@user.id)
end
session[:current_user_id] = @user.id
render json: @user
else
# The frontend checks json.errors, so an invalid user must surface its validation errors.
render json: { errors: @user.errors }
end
end
private
def user_params
params.require(:user).permit(:email, :first_name, :last_name)
end
end
end
<file_sep>/lib/playbasis/client.rb
module Playbasis
class Client
attr_reader :config
RESOURCES = {
auth: Auth,
users: User,
contents: Content,
quizzes: Quiz,
questions: Question,
goods: Good
}
def initialize(options = nil)
@config = load_config(options)
end
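# Define one reader per resource so callers can write client.users, client.quizzes, and so on.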
RESOURCES.each do |resource, klass|
define_method resource do
klass.new(@config)
end
end
private
def load_config(options)
return Playbasis.configuration unless options
config = Configuration.new
config.merge(Playbasis.configuration.to_hash.merge(options))
config
end
end
end
<file_sep>/public/game/js/loader.js
////////////////////////////////////////////////////////////
// CANVAS LOADER
////////////////////////////////////////////////////////////
/*!
*
* START CANVAS PRELOADER - This is the function that runs to preload canvas assets
*
*/
function initPreload(){
toggleLoader(true);
checkMobileEvent();
$(window).resize(function(){
resizeGameFunc();
});
resizeGameFunc();
loader = new createjs.LoadQueue(false);
manifest=[{src:'assets/background.jpg', id:'background'},
{src:'assets/logo.png', id:'logo'},
{src:'assets/human_face_Spritesheet6x5.png', id:'humanFace'},
{src:'assets/human_left_Spritesheet5x4.png', id:'humanLeft'},
{src:'assets/human_right_Spritesheet5x4.png', id:'humanRight'},
{src:'assets/table.png', id:'table'},
{src:'assets/roll.png', id:'sushiRoll'},
{src:'assets/numberBackground.png', id:'numberBackground'},
{src:'assets/timer.png', id:'timer'},
{src:'assets/timerIndicator.png', id:'timerIndicator'},
{src:'assets/plate.png', id:'plate'},
{src:'assets/leftover.png', id:'leftover'}];
for(n=1;n<=humanSequence;n++){
manifest.push({src:'assets/human00'+formatNumber(n,2)+'.png', id:'human'+n})
}
for(n=0;n<food_arr.length;n++){
manifest.push({src:food_arr[n].src, id:'food'+n})
}
soundOn = true;
if($.browser.mobile || isTablet){
if(!enableMobileSound){
soundOn=false;
}
}
if(soundOn){
manifest.push({src:'assets/sounds/music.ogg', id:'music'})
manifest.push({src:'assets/sounds/click.ogg', id:'soundClick'})
manifest.push({src:'assets/sounds/fail.ogg', id:'soundFail'})
//manifest.push({src:'assets/sounds/eat.ogg', id:'soundEat'})
manifest.push({src:'assets/sounds/plate1.ogg', id:'soundPlate1'})
manifest.push({src:'assets/sounds/plate2.ogg', id:'soundPlate2'})
manifest.push({src:'assets/sounds/reset.ogg', id:'soundReset'})
manifest.push({src:'assets/sounds/score.ogg', id:'soundScore'})
manifest.push({src:'assets/sounds/type.ogg', id:'soundType'})
manifest.push({src:'assets/sounds/result.ogg', id:'soundResult'})
createjs.Sound.alternateExtensions = ["mp3"];
loader.installPlugin(createjs.Sound);
}
loader.addEventListener("complete", handleComplete);
loader.on("progress", handleProgress, this);
loader.loadManifest(manifest);
}
/*!
*
* CANVAS PRELOADER UPDATE - This is the function that runs to update preloader progress
*
*/
function handleProgress() {
$('#mainLoader').html(Math.round(loader.progress*100)+'%');
}
/*!
*
* CANVAS PRELOADER COMPLETE - This is the function that runs when preloader is complete
*
*/
function handleComplete() {
toggleLoader(false);
initMain();
};
/*!
*
* TOGGLE LOADER - This is the function that runs to display/hide loader
*
*/
function toggleLoader(con){
if(con){
$('#mainLoader').show();
}else{
$('#mainLoader').hide();
}
}<file_sep>/lib/playbasis/user.rb
module Playbasis
class User < Model
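# Thin wrappers around the Playbasis Player endpoints; each request sends the current access token in its body.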
def register(id, email, first_name, last_name)
request.call(:post,
uri: "/Player/#{id}/register",
body: {
token: access_token,
username: id,
email: email,
first_name: first_name,
last_name: last_name
}
)
end
def login(id)
request.call(:post,
uri: "/Player/#{id}/login",
body: {
token: access_token,
id: id
}
)
end
end
end
<file_sep>/app/javascript/components/shared/comment.jsx
import React from 'react'
import { ViewPager, Frame, Track, View } from 'react-view-pager'
export default class Comment extends React.Component {
render() {
var stars = []
for (var i = 1; i <= this.props.rate; i++) {
stars.push(<div className="icon-star" key={i}></div>)
}
return (
<View>
<div className="comment-box comment-box-margin">
<div className="comment-body">
<div className="ratings">
{stars}
</div>
<div className="comment">
{this.props.comment}
</div>
<div className="sub-comment">{this.props.sub_comment}</div>
</div>
<div className="comment-footer">
<div className="username">{this.props.username}</div>
</div>
</div>
</View>
)
}
}
<file_sep>/app/javascript/services/step.js
import Lodash from 'lodash'
import AppStatus from '../services/appStatus'
class StepController {
constructor() {
}
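// Derives the player's current step from the completed-quiz list (json[0] is assumed to be the most recently completed quiz) and caches it in AppStatus.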
getQuizzCompletedIndex(quizzes, json) {
var step = 0;
if (json != null) {
var quiz_id = json[0].quiz_id;
quizzes.forEach((quiz, i) => {
if (Lodash.isEqual(quiz_id, quiz.quiz_id)) {
step = i + 1;
}
});
}
var appStatus = (new AppStatus).get();
appStatus.step = step;
(new AppStatus).set(appStatus);
return step;
}
}
export default StepController
<file_sep>/app/javascript/components/shared/question.jsx
import React, { PropTypes } from 'react'
import SingleChoice from '../../components/question/singleChoice'
import Slider from '../../components/question/slider'
import Birthdate from '../../components/question/birthdate'
import Address from '../../components/question/address'
import SingleChoiceAndSlider from '../../components/question/singleChoiceAndSlider'
import Binary from '../../components/question/binary'
import Error from '../../components/question/error'
import Loading from './loading'
import LoadingComponent from '../../components/question/loading'
import Translations from '../../services/translations'
import StepBar from '../../components/shared/stepBar'
import CurrentLocal from '../../services/currentLocal'
import ReactCSSTransitionReplace from 'react-css-transition-replace';
class QuestionComponent extends React.Component {
constructor(props) {
super(props);
this.state = {
response: null
}
this.onAnswer = this.onAnswer.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
this.getSingle = this.getSingle.bind(this);
this.renderImage = this.renderImage.bind(this);
}
onAnswer(changeEvent) {
this.setState({
response: changeEvent
});
}
handleSubmit(event) {
// Only forward an answer once the child component has reported one; != null also covers undefined.
if (this.state.response != null) {
this.props.response(this.state.response);
} else {
//this.props.response(this.props.defaultValue);
}
}
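// Maps the question type code from the API (SC, SLIDER, BIRTHDAY, ADDRESS, SCAS, ERROR, BINARY, LOADING) to the matching React component.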
getSingle() {
switch (this.props.component) {
case "SC":
return (<SingleChoice key="SingleChoice" {...this.props} response={this.onAnswer} />);
case "SLIDER":
return (<Slider key="Slider" {...this.props} response={this.onAnswer} />);
case "BIRTHDAY":
return (<Birthdate key="Birthdate" {...this.props} response={this.onAnswer} />);
case "ADDRESS":
return (<Address key="Address" {...this.props} response={this.onAnswer} />);
case "SCAS":
return (<SingleChoiceAndSlider key="SingleChoiceAndSlider" {...this.props} response={this.onAnswer} />);
case "ERROR":
return (<Error key="Error" {...this.props} response={this.onAnswer} />);
case "BINARY":
return (<Binary key="Binary" {...this.props} response={this.onAnswer} />);
case "LOADING":
return (<LoadingComponent key="Loading" {...this.props} response={this.onAnswer} />);
}
}
getButton() {
switch (this.props.component) {
case "LOADING":
return <div />
default:
return <button className='sr-question__button' onClick={this.handleSubmit}>ok</button>
}
}
renderImage() {
if (this.props.question_image != null && !this.props.question_image.includes("no_image")) {
// return <img className="sr-slider__image" src={this.props.question_image} />
return this.props.question_image;
}
}
render() {
var questions = this.getSingle();
var local = (new CurrentLocal).get()
var okButton = this.getButton();
return (
<div className='container'>
<div className='row'>
<div className='col-12'>
{
questions ?
<div className='sr-question__container'>
<div>
<div className="sr-question__text__title__box">
<text className="sr-question__text__title">{Translations.trans(this.props.translations[local.local], this.props.title)}</text>
</div>
<div>
<text className="sr-question__text__question">{Translations.trans(this.props.translations[local.local], this.props.question)}</text>
</div>
</div>
<div className="question-bg" style={{ background: "url1(" + this.renderImage() + ")" }}>
<form>
<ReactCSSTransitionReplace transitionName="fade-wait" transitionEnterTimeout={200} transitionLeaveTimeout={200}>
{questions}
</ReactCSSTransitionReplace>
</form>
</div>
<div>
{okButton}
</div>
</div>
:
<Loading />
}
</div>
</div>
</div>
);
}
}
class Question extends React.Component {
constructor(props) {
super(props);
this.state = {
response: null
}
this.onAnswer = this.onAnswer.bind(this);
}
onAnswer(changeEvent) {
this.setState({ response: changeEvent }, () => { this.props.response(this.state.response); });
}
getQuestion() {
switch (this.props.component) {
// case "BINARY":
// return (<Binary {...this.props} response={this.onAnswer} />);
default:
return (<QuestionComponent {...this.props} response={this.onAnswer} />);
}
}
render() {
return (
<div className='sr-question__box'>
{this.getQuestion()}
</div>
);
}
}
export default Question
<file_sep>/app/javascript/services/currentAdmin.js
import _ from 'underscore'
import humps from 'humps'
class CurrentAdmin {
login(password) {
// TODO: Replace with remote password check
if (password !== '<PASSWORD>') { return false; }
localStorage.setItem('currentAdmin.loggedIn', true)
return true;
}
logout() {
localStorage.removeItem('currentAdmin.loggedIn')
}
loggedIn() {
// TODO: Remote check
return localStorage.getItem('currentAdmin.loggedIn') == 'true'
}
}
export default CurrentAdmin
<file_sep>/app/javascript/components/question/address.jsx
import React, {PropTypes} from 'react'
import Translations from '../../services/translations';
import CurrentLocal from '../../services/currentLocal';
class Address extends React.Component {
constructor(props) {
super(props);
this.state = {
response: "China",
option_id: props.componentValue.choices[0].id
}
this.onCountryChange = this.onCountryChange.bind(this);
}
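// The country picker is rendered by the jQuery msDropdown plugin, so selection changes arrive through its change callback rather than a React onChange.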
componentDidMount() {
// send default value to question.jsx (handleSubmit())
this.props.response(this.state);
var _this = this;
var country = $("#countries").msDropdown({
on: {
change: function(data, ui) {
_this.onCountryChange(data);
}
}
}).data("dd");
// $("select#countries").prop('selectedIndex', 3);
}
onCountryChange(event) {
// console.log(event)
this.setState({
response: event.title
}, () => {
this.props.response(this.state);
});
}
render() {
var local = (new CurrentLocal).get()
return (
<div className="sr-address__container">
<select value={local.country} name="countries" id="countries" style={{
width: 300 + "px"
}}>
<option value='cn' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag cn" data-title="China">China</option>
<option value='tw' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag tw" data-title="Taiwan">Taiwan</option>
<option value='hk' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag hk" data-title="Hong Kong">Hong Kong</option>
<option value='sg' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag sg" data-title="Singapore">Singapore</option>
<option value='my' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag my" data-title="Malaysia">Malaysia</option>
<option value='th' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag th" data-title="Thailand">Thailand</option>
<option value='vn' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag vn" data-title="Vietnam">Vietnam</option>
<option value='ph' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag ph" data-title="Philippines">Philippines</option>
<option value='kr' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag kr" data-title="South Korea">South Korea</option>
<option value='jp' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag jp" data-title="Japan">Japan</option>
<option value='id' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag id" data-title="Indonesia">Indonesia</option>
<option value='in' data-image="images/msdropdown/icons/blank.gif" data-imagecss="flag in" data-title="India">India</option>
</select>
</div>
);
}
}
export default Address
<file_sep>/app/javascript/components/shared/pagger.jsx
import React, { PropTypes } from 'react'
import AppStatus from '../../services/appStatus'
import next from '../../../../public/images/next_buttom.png'
import previous from '../../../../public/images/previous_buttom.png'
import dot from '../../../../public/images/dot.png'
class Pagger extends React.Component {
constructor(props) {
super(props);
this.state = {
page: this.props.page
}
}
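// Renders one dot per group of four items and reports page changes to the parent through the response callback.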
nextClick(changeEvent) {
this.setState({
page: this.props.page - 1
}, () => { this.props.response(this.state); });
}
previousClick(changeEvent) {
this.setState({
page: this.props.page + 1
}, () => { this.props.response(this.state); });
}
render() {
var rows = [];
for (var i = 0; i < this.props.numberOfItems; i += 4) {
rows.push(<img key={i} src={dot} className="sr-pagger__dot"/>)
}
var previousClass = this.state.page > 0 ? "sr-pagger__button sr-pagger__previous" : "sr-pagger__button sr-pagger__previous sr-pagger__visible";
var nextClass = this.state.page < (this.props.numberOfItems/4) && this.props.numberOfItems > 4 ? "sr-pagger__button sr-pagger__next" : "sr-pagger__button sr-pagger__next sr-pagger__visible";
return (
<div className='col-12'>
<div className='center'>
<img src={previous} className={previousClass} onClick={this.nextClick.bind(this)} />
{rows}
<img src={next} className={nextClass} onClick={this.previousClick.bind(this)}/>
</div>
</div>
)
}
}
export default Pagger<file_sep>/app/validators/user.rb
class User
include ActiveModel::Validations
attr_accessor :email, :first_name, :last_name
def initialize(params)
@email = params[:email]
@first_name = params[:first_name]
@last_name = params[:last_name]
end
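# Derives a stable player id by hashing the identifying fields, so the same person always maps to the same Playbasis player.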
def id
@id ||= Digest::SHA1.hexdigest("#{@email}#{@first_name}#{@last_name}")
end
validates :email, presence: true
validates :first_name, presence: true
validates :last_name, presence: true
end
<file_sep>/app/controllers/api/goods_controller.rb
module Api
class GoodsController < ApplicationController
def all
result = client.goods.all()
result = result[:body]['response']['goods_list']
render json: result
end
def details
result = client.goods.get(params[:id])
result = result[:body]['response']['goods']
render json: result
end
end
end
<file_sep>/app/javascript/containers/layouts/noNavLayout.jsx
import React from 'react'
import {
BrowserRouter,
Route,
Link
} from 'react-router-dom'
import NoNav from '../../components/shared/noNav'
import Footer from '../../components/shared/footer'
export default class NoNavLayout extends React.Component {
constructor(props) {
super(props)
}
render() {
return(
<Route exact={this.props.exact == undefined ? true : this.props.exact} path={this.props.path} render={matchProps => (
<div className='sr-page'>
<div className='container-fluid np'>
<div>
<NoNav />
<this.props.component {...matchProps} {...this.props} />
<Footer />
</div>
</div>
</div>
)} />
)
}
}
<file_sep>/public/game/js/game.js
////////////////////////////////////////////////////////////
// GAME
////////////////////////////////////////////////////////////
/*!
*
* GAME SETTING CUSTOMIZATION START
*
*/
var startButtonText = 'TAP TO START'; //text for start button
var howToPlayText = 'Select two plates whose calories add up to the number on the scale.';
var humanSequence = 20; //total human sequence
var humanX = 300; //human position x
var humanY = 120; //human position y
//food array
var food_number_arr = [100, 150, 200, 300, 350, 400, 450, 500, 550, 600, 700, 750, 800, 900, 2750, 2500, 2200];
var food_arr = [{src:'assets/food1.png', regX:39, regY:-37},
{src:'assets/food2.png', regX:33, regY:-33},
{src:'assets/food3.png', regX:36, regY:-41},
{src:'assets/food4.png', regX:28, regY:-33},
{src:'assets/food5.png', regX:50, regY:-6},
{src:'assets/food6.png', regX:40, regY:-14},
{src:'assets/food7.png', regX:35, regY:-7},
{src:'assets/food8.png', regX:30, regY:-36},
{src:'assets/food9.png', regX:33, regY:-22},
{src:'assets/food10.png', regX:34, regY:-17},
{src:'assets/food11.png', regX:50, regY:-22},
{src:'assets/food12.png', regX:38, regY:-27},
{src:'assets/food13.png', regX:30, regY:-17},
{src:'assets/food14.png', regX:45, regY:-17},
{src:'assets/food15.png', regX:34, regY:-34},
{src:'assets/food16.png', regX:45, regY:-17},
{src:'assets/food17.png', regX:45, regY:-37}];
//plate place position
var place_arr = [{x:433, y:174},{x:586, y:174}];
var totalRollPlates = 8; //total plate slot to roll
var mathQuestionTextSpeed = .5; //math question text animation speed
var gameTimer = 45000; //game timer for each math question
var gameScoreText = '[NUMBER]'; //game score text
var gameScoreNum = 50; //game score number
var gameScoreOnTime = true; //enable to get score base on game timer left
var sumCorrectColour = '#2DB200'; //sum correct colour
var sumWrongColour = '#D90000'; //sum wrong colour
//level settings
var level_arr = {sum:5, //starting sum
sumIncrease:5, //sum increase
plateTotal:6, //starting total plate
plateRollingSpeed:1, //starting plate rolling speed
targetScore:20, //next target score increase
rollSpeedIncrease:.5, //roll sushi speed when reach target score
plateTotalIncrease:0}; //total plate increase when reach target score
var resultScoreText = 'YOUR SCORE [NUMBER]'; //text for score, [NUMBER] will replace to score
/*!
*
* GAME SETTING CUSTOMIZATION END
*
*/
var playerData = {score:0, speed:1, sum:0, targetScore:0, fat:1, plate:0, timer:0, timerCount:0};
var gameData = {foodArray:[], foodNum:0, foodIndexArray:[], numberArray:[], plateArray:[], rollArray:[], placeArray:[]};
var endRangeNumber = 55;
/*!
*
* GAME BUTTONS - This is the function that runs to setup button event
*
*/
function buildGameButton(){
setupGameArray();
}
function setupGameButton(){
stage.cursor = "pointer";
stage.addEventListener("click", handlerMethod);
}
function removeGameButton(){
stage.cursor = null;
stage.removeEventListener("click", handlerMethod);
}
function handlerMethod(evt) {
switch (evt.type){
case 'click':
playSound('soundClick');
goPage('game');
break;
}
}
/*!
*
* DISPLAY PAGES - This is the function that runs to display pages
*
*/
var curPage=''
function goPage(page){
curPage=page;
mainContainer.visible=false;
humanContainer.visible=false;
gameContainer.visible=false;
resultContainer.visible=false;
removeGameButton();
stopAnimateButton(buttonStart);
var targetContainer = ''
switch(page){
case 'main':
targetContainer = mainContainer;
setupGameButton();
startAnimateButton(buttonStart);
createMainSushiNumber();
break;
case 'game':
targetContainer = gameContainer;
humanContainer.visible=true;
startGame();
break;
case 'result':
targetContainer = resultContainer;
playSound('soundResult');
resultScoreTxt.text = resultScoreText.replace('[NUMBER]', playerData.score);
stopGame();
saveGame(playerData.score);
setTimeout(function () {
window.top.location.href = "/winningpoint/" + playerData.score; // redirect to the winning-point page with the final score
}, 3000);
break;
}
targetContainer.alpha=0;
targetContainer.visible=true;
$(targetContainer)
.clearQueue()
.stop(true,true)
.animate({ alpha:1 }, 500);
}
/*!
*
* START ANIMATE BUTTON - This is the function that runs to play blinking animation
*
*/
function startAnimateButton(obj){
obj.alpha=0;
$(obj)
.animate({ alpha:1}, 500)
.animate({ alpha:0}, 500, function(){
startAnimateButton(obj);
});
}
/*!
*
* STOP ANIMATE BUTTON - This is the function that runs to stop blinking animation
*
*/
function stopAnimateButton(obj){
obj.alpha=0;
$(obj)
.clearQueue()
.stop(true,true);
}
/*!
*
* SETUP GAME ARRAY - This is the function that runs to setup game array
*
*/
function setupGameArray(){
for(n=0;n<food_arr.length;n++){
gameData.foodArray.push(n);
}
for(n=0;n<place_arr.length;n++){
gameData.placeArray.push(false);
}
var positionSplit = (canvasW+(endRangeNumber*2))/totalRollPlates;
for(n=0;n<totalRollPlates;n++){
gameData.rollArray.push({x:positionSplit*n, y:canvasH/100 *77}); // graph plate position
}
}
/*!
*
* START GAME - This is the function that runs to start play game
*
*/
function startGame(){
playerData.score = 0;
playerData.timer = 0;
playerData.timerCount = gameTimer;
scoreTxt.text = gameScoreText.replace('[NUMBER]', playerData.score);
playerData.fat = 1;
playerData.sum = level_arr.sum;
playerData.targetScore = level_arr.targetScore;
playerData.plate = level_arr.plateTotal;
playerData.speed = level_arr.plateRollingSpeed;
humanLeftAnimate.y = humanRightAnimate.y = humanY - 60;
humanFaceAnimate.gotoAndStop('static');
humanLeftAnimate.gotoAndStop('static');
humanRightAnimate.gotoAndStop('static');
updateHuman();
//shuffleFoodArray();
createNumber();
beforeDate = new Date();
}
/*!
*
* STOP GAME - This is the function that runs to stop play game
*
*/
function stopGame(){
toggleGameTimer(false);
TweenMax.killTweensOf(humanContainer);
TweenMax.killTweensOf(humanFaceAnimate);
TweenMax.killTweensOf(humanLeftAnimate);
TweenMax.killTweensOf(humanRightAnimate);
TweenMax.killTweensOf(playerData);
TweenMax.killTweensOf(numberTxt);
TweenMax.killTweensOf(numberAnimateTxt);
TweenMax.killTweensOf(sumData);
if ( typeof displayB == 'function' ) {
displayB();
}
}
/*!
*
* SAVE GAME - This is the function that runs to save game
*
*/
function saveGame(score){
/*$.ajax({
type: "POST",
url: 'saveResults.php',
data: {score:score},
success: function (result) {
console.log(result);
}
});*/
}
/*!
*
* UPDATE LEVEL - This is the function that runs to update level
*
*/
var level = 0;
function updateLevel(con){
if (con){
playerData.sum += level_arr.sumIncrease;
if(gameScoreOnTime){
playerData.score += gameScoreNum + (level*10);
}else{
playerData.score += gameScoreNum;
}
level++;
scoreTxt.text = gameScoreText.replace('[NUMBER]', playerData.score);
if(playerData.sum >= playerData.targetScore){
playerData.fat++;
playerData.fat = playerData.fat > humanSequence ? humanSequence : playerData.fat;
updateHuman();
playerData.targetScore += level_arr.targetScore;
playerData.plate += level_arr.plateTotalIncrease;
playerData.plate = playerData.plate > (totalRollPlates-2) ? (totalRollPlates-2) : playerData.plate;
playerData.speed += level_arr.rollSpeedIncrease;
}
}
createNumber();
}
/*!
*
* CREATE SUM NUMBER - This is the function that runs to create new sum number
*
*/
function createNumber(){
numberTxt.alpha = 1;
numberAnimateTxt.alpha = 0;
toggleGameTimer(true);
sumTxt.text = '';
var rand1 = Math.floor(generateRandom(food_number_arr.length-3));
var rand2 = Math.floor(generateRandom(food_number_arr.length-3));
var correct1 = food_number_arr[rand1];
var correct2 = food_number_arr[rand2];
playerData.sum = correct1 + correct2;
numberTxt.text = numberAnimateTxt.text = playerData.sum;
animateNumber();
gameData.numberArray = [];
gameData.numberArray.push(correct1);
gameData.numberArray.push(correct2);
gameData.foodIndexArray = [];
gameData.foodIndexArray.push(rand1);
gameData.foodIndexArray.push(rand2);
for(n=0;n<playerData.plate;n++){
var rand = Math.floor(Math.random()*food_number_arr.length);
if(playerData.sum == food_number_arr[rand]){
rand = Math.floor(Math.random()*food_number_arr.length);
}
gameData.foodIndexArray.push(rand);
gameData.numberArray.push(food_number_arr[rand]);
}
shuffleFoodAndIndexArray(gameData.numberArray, gameData.foodIndexArray);
createPlates();
}
/*!
*
* CREATE MAIN SUSHI NUMBER - This is the function that runs to create main sushi number
*
*/
function createMainSushiNumber(){
gameData.numberArray = [];
gameData.foodIndexArray = [];
for(n=0;n<totalRollPlates-1;n++){
rand = Math.floor(generateRandom(food_number_arr.length));
gameData.foodIndexArray.push(rand);
gameData.numberArray.push(food_number_arr[rand]);
}
createPlates();
}
/*!
*
* GENERATE RANDOM NUMBER - This is the function that runs to generate random number
*
*/
function generateRandom(number){
var generateNumber = 0;
for(g=0;g<1;g++){
generateNumber = Math.floor(Math.random()*number);
if(generateNumber == 0 || isDecimalExist(generateNumber)){
g--;
}
}
return generateNumber;
}
/*!
*
* UPDATE HUMAN - This is the function that runs to update human
*
*/
function updateHuman(){
for(h=1;h<=humanSequence;h++){
$.human[h].visible = false;
}
$.human[Math.floor(playerData.fat)].visible = true;
humanFaceAnimate.y = humanY - 115;
humanFaceAnimate.y -= playerData.fat/20 * 30;
humanLeftAnimate.x = canvasW/100 * 40;
humanRightAnimate.x = canvasW/100 * 60;
humanLeftAnimate.x -= playerData.fat/20 * 130;
humanRightAnimate.x += playerData.fat/20 * 130;
}
/*!
*
* ANIMATE HUMAN - This is the function that runs to animate human
*
*/
function animateHuman(con,string){
var speedNum = .2; // tween duration; declared up front so the failure branch below also gets a defined value
if(con){
animateNumber('correct',string);
playSound('soundScore');
TweenMax.to(humanContainer, speedNum, {y:-20, overwrite:true, onComplete:function(){
TweenMax.to(humanContainer, speedNum, {y:0, overwrite:true});
}});
TweenMax.to(humanFaceAnimate, .5, {overwrite:true, onComplete:function(){
//playSound('soundEat');
for(n=0;n<gameData.placeArray.length;n++){
for(s=0;s<gameData.plateArray.length;s++){
if(gameData.plateArray[s].place == n){
gameData.plateArray[s].food.visible = false;
gameData.plateArray[s].leftover.visible = true;
}
}
}
humanFaceAnimate.gotoAndPlay('anime');
humanLeftAnimate.gotoAndPlay('anime');
humanRightAnimate.gotoAndPlay('anime');
TweenMax.to(humanFaceAnimate, 1.5, {overwrite:true, onComplete:updateLevel(con)});
}});
}else{
playSound('soundFail');
animateNumber('wrong',string);
humanFaceAnimate.gotoAndPlay('fail');
TweenMax.to(humanContainer, speedNum, {y:-20, overwrite:true, onComplete:function(){
TweenMax.to(humanContainer, speedNum, {y:0, overwrite:true});
}});
TweenMax.to(humanFaceAnimate, .5, {overwrite:true, onComplete:function(){
//playSound('soundEat');
for(n=0;n<gameData.placeArray.length;n++){
for(s=0;s<gameData.plateArray.length;s++){
if(gameData.plateArray[s].place == n){
gameData.plateArray[s].food.visible = false;
gameData.plateArray[s].leftover.visible = true;
}
}
}
humanFaceAnimate.gotoAndPlay('anime');
humanLeftAnimate.gotoAndPlay('anime');
humanRightAnimate.gotoAndPlay('anime');
TweenMax.to(humanFaceAnimate, 1.5, {overwrite:true, onComplete:updateLevel(con)});
}});
}
}
/*!
*
* ANIMATE SUM NUMBER - This is the function that runs to animate sum number
*
*/
function animateNumber(con,string){
var speedNum = .1;
if(con == undefined){
numberTxt.alpha = 1;
TweenMax.to(numberTxt, speedNum, {alpha:.2, overwrite:true, onComplete:function(){
TweenMax.to(numberTxt, speedNum, {alpha:1, overwrite:true, onComplete:function(){
TweenMax.to(numberTxt, speedNum, {alpha:.2, overwrite:true, onComplete:function(){
TweenMax.to(numberTxt, speedNum, {alpha:1, overwrite:true});
}});
}});
}});
}else {
if(con == 'correct'){
numberAnimateTxt.color = sumCorrectColour;
}else{
numberAnimateTxt.color = sumWrongColour;
}
sumData.count = 0;
sumData.oldCount = 0;
sumData.string = string;
sumData.length = string.length;
TweenMax.to(sumData, mathQuestionTextSpeed, {count:sumData.length, overwrite:true, ease:Linear.easeNone, onUpdate:function(){
if(sumData.oldCount != Math.floor(sumData.count)){
sumData.oldCount = Math.floor(sumData.count);
playSound('soundType');
numberAnimateTxt.text = sumData.string.substring(0, sumData.oldCount);
}
}});
numberTxt.alpha = 0;
numberAnimateTxt.alpha = 1;
TweenMax.to(numberAnimateTxt, speedNum, {alpha:.2, overwrite:true, onComplete:function(){
TweenMax.to(numberAnimateTxt, speedNum, {alpha:1, overwrite:true, onComplete:function(){
TweenMax.to(numberAnimateTxt, speedNum, {alpha:.2, overwrite:true, onComplete:function(){
TweenMax.to(numberAnimateTxt, speedNum, {alpha:1, overwrite:true, onComplete:function(){
}});
}});
}});
}});
}
}
/*!
*
* CREATE SUSHI PLATES - This is the function that runs to create sushi plates
*
*/
function createPlates(){
plateContainer.removeAllChildren();
gameData.plateArray = [];
for(n=0;n<place_arr.length;n++){
gameData.placeArray[n] = false;
}
var slot_arr = [];
for(n=0;n<gameData.rollArray.length;n++){
slot_arr.push(n);
}
for(n=0;n<gameData.numberArray.length;n++){
createPlate(n, slot_arr[n]);
}
playSound('soundReset');
}
/*!
*
* CREATE SUSHI PLATE - This is the function that runs to create sushi plate
*
*/
function createPlate(num, rollID){
var foodRandomNum = Math.floor(gameData.foodIndexArray[num]);
var newPlate = plate.clone();
var newFood = $.food[foodRandomNum].clone();
var newLeftover = leftover.clone();
newLeftover.visible = false;
if(randomBoolean()){
newLeftover.scaleX = -1;
}
var newPlateNumber = new createjs.Text();
newPlateNumber.font = "40px ostrich_sansheavy";
newPlateNumber.color = "#ffffff";
newPlateNumber.text = gameData.numberArray[num];
newPlateNumber.textAlign = "center";
newPlateNumber.textBaseline='alphabetic';
plateContainer.addChild(newPlate, newFood, newLeftover, newPlateNumber);
gameData.plateArray.push({plate:newPlate, leftover:newLeftover, food:newFood, number:newPlateNumber, amount:gameData.numberArray[num], roll:true, rollID:rollID, place:-1});
newPlate.id = num;
createPlateEvent(newPlate);
}
/*!
*
* CREATE PLATE EVENT - This is the function that runs to create plate click event
*
*/
function createPlateEvent(obj){
obj.cursor = "pointer";
obj.addEventListener("click", function(evt) {
if(curPage == 'game')
toggleTakePlate(evt.target.id);
});
}
/*!
*
* TOGGLE TAKE PLATE - This is the function that runs to toggle take plate
*
*/
function toggleTakePlate(id){
var randomPlateNum = Math.floor(Math.random()*2)+1;
playSound('soundPlate'+randomPlateNum);
if(gameData.plateArray[id].roll){
var checkPlace_arr = [];
for(t=0;t<gameData.placeArray.length;t++){
if(!gameData.placeArray[t]){
checkPlace_arr.push(t);
}
}
if(checkPlace_arr.length > 0){
var randomPlaceNum = Math.floor(Math.random()*checkPlace_arr.length);
gameData.placeArray[checkPlace_arr[randomPlaceNum]] = true;
gameData.plateArray[id].place = checkPlace_arr[randomPlaceNum];
gameData.plateArray[id].roll = false;
gameData.plateArray[id].plate.x = place_arr[checkPlace_arr[randomPlaceNum]].x;
gameData.plateArray[id].plate.y = place_arr[checkPlace_arr[randomPlaceNum]].y;
checkSumEquatation();
}
}else{
gameData.placeArray[gameData.plateArray[id].place] = false;
gameData.plateArray[id].place = -1;
gameData.plateArray[id].roll = true;
}
}
/*!
*
* CHECK SUM EQUATION - This is the function that runs to check whether the sum equation is correct or wrong
*
*/
function checkSumEquatation(){
var sum_arr = [];
for(n=0;n<gameData.placeArray.length;n++){
for(s=0;s<gameData.plateArray.length;s++){
if(gameData.plateArray[s].place == n){
sum_arr.push(gameData.plateArray[s].amount);
}
}
}
if(sum_arr.length > 1){
toggleGameTimer(false);
var sumString = String(sum_arr[0]+'+'+sum_arr[1]+'='+(sum_arr[0]+sum_arr[1]));
var correctAnswer = false;
if((sum_arr[0]+sum_arr[1]) == playerData.sum){
correctAnswer = true;
}
animateHuman(correctAnswer,sumString);
}
}
/*!
*
* ANIMATE SUM - This is the function that runs to animate the sum text
*
*/
var sumData = {count:0, oldCount:0, length:0};
function animateSum(string, result){
sumTxt.x = canvasW/100 * 45;
sumTxt.y = canvasH/100 * 61;
sumData.count = 0;
sumData.oldCount = 0;
sumData.string = string;
sumData.length = string.length;
TweenMax.to(sumData, mathQuestionTextSpeed, {count:sumData.length, overwrite:true, ease:Linear.easeNone, onUpdate:function(){
if(sumData.oldCount != Math.floor(sumData.count)){
sumData.oldCount = Math.floor(sumData.count);
playSound('soundType');
sumTxt.text = sumData.string.substring(0, sumData.oldCount);
}
}, onComplete:function(){
animateHuman(result,string);
}});
}
/*!
*
* SHUFFLE FOOD - This is the function that runs to shuffle food
*
*/
function shuffleFoodArray(){
shuffle(gameData.foodArray);
}
function shuffleFoodAndIndexArray(array,array2) {
var currentIndex = array.length
, temporaryValue
, randomIndex
;
// While there remain elements to shuffle...
while (0 !== currentIndex) {
// Pick a remaining element...
randomIndex = Math.floor(Math.random() * array.length);
currentIndex -= 1;
// And swap it with the current element.
temporaryValue = array[currentIndex];
array[currentIndex] = array[randomIndex];
array[randomIndex] = temporaryValue;
temporaryValue = array2[currentIndex];
array2[currentIndex] = array2[randomIndex];
array2[randomIndex] = temporaryValue;
}
return array;
}
/*!
*
* GAME TIMER - This is the function that runs for game timer
*
*/
var gameTimerUpdate = false;
var nowDate;
var beforeDate;
function toggleGameTimer(con){
if(con){
playerData.timerCount = gameTimer;
updateTimer();
}
gameTimerUpdate = con;
}
function updateTimer(){
var rotateNum = (playerData.timer/playerData.timerCount) * 360;
timerIndicator.rotation = rotateNum;
}
/*!
*
* GAME LOOP - This is the function that runs to loop game
*
*/
function updateGame(event){
if(gameTimerUpdate){
nowDate = new Date();
var elapsedTime = (nowDate.getTime() - beforeDate.getTime());
playerData.timer = elapsedTime;
updateTimer();
if(playerData.timer >= playerData.timerCount){
toggleGameTimer(false);
goPage('result');
}
}
sushiRoll.x = ((sushiRoll.x + playerData.speed) % sushiRoll.tileW)-(rollImg.width);
for(p=0;p<gameData.rollArray.length;p++){
gameData.rollArray[p].x += playerData.speed;
if(gameData.rollArray[p].x > canvasW+endRangeNumber){
gameData.rollArray[p].x = -(endRangeNumber);
}
}
for(p=0;p<gameData.plateArray.length;p++){
if(gameData.plateArray[p].roll){
gameData.plateArray[p].plate.x = gameData.rollArray[gameData.plateArray[p].rollID].x;
gameData.plateArray[p].plate.y = gameData.rollArray[gameData.plateArray[p].rollID].y;
}
gameData.plateArray[p].leftover.y = gameData.plateArray[p].plate.y;
gameData.plateArray[p].food.y = gameData.plateArray[p].plate.y;
gameData.plateArray[p].number.y = gameData.plateArray[p].plate.y - 25; // number position graph
gameData.plateArray[p].leftover.x = gameData.plateArray[p].plate.x;
gameData.plateArray[p].food.x = gameData.plateArray[p].plate.x;
gameData.plateArray[p].number.x = gameData.plateArray[p].plate.x + 2;
}
}
<file_sep>/app/javascript/containers/thankYou.jsx
import React, { PropTypes } from 'react'
import { Link } from 'react-router-dom'
import smile from '../assets/images/smile.svg'
import CurrentUser from '../services/currentUser'
class ThankYou extends React.Component {
constructor(props) {
super(props);
}
render() {
var current = (new CurrentUser).get()
return (
<div className='grey-bg'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>We will contact<br />you soon!</h2>
<h4>Best Insurance helps you get the best <br /> insurance fast and easily!</h4>
</div>
</div>
</div>
</div>
<div className="wrap-content container m-t--100">
<div className='sr-question__container p-15'>
<div className='row'>
<div className='col-12'>
<div>
<div>
<h2 className="sr-question__accent__title">Thank you!</h2>
</div>
<div>
<text>Dear {current.firstName},<br />
An agent will contact you very soon<br />
to discuss further.<br />
Thank you for your patience.
</text>
</div>
<div>
<img src={smile} className="sr-thankyou__image" />
</div>
</div>
<div>
<Link to={`/`} className='col-12 btn btn-green btn-green--large-text'>
Continue
</Link>
</div>
</div>
</div>
</div>
</div>
</div>
);
}
}
export default ThankYou
<file_sep>/app/javascript/components/shared/product.jsx
import React from 'react'
import { ViewPager, Frame, Track, View } from 'react-view-pager'
export default class Product extends React.Component {
constructor(props) {
super(props)
this.getImageWidth = this.getImageWidth.bind(this);
}
handleOnMouseUp() {
if (this.props.index == this.props.indice) {
this.props.clickMethod(this.props.link)
}
}
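// The active card (index == indice) gets the highest z-index and full width; its neighbours shrink step by step to suggest depth.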
getZindex() {
if (this.props.index == this.props.indice) {
return "quote-box-zindex10"
} else if (this.props.index == this.props.indice + 1 || this.props.index == this.props.indice - 1) {
return "quote-box-zindex9"
} else {
return "quote-box-zindex8"
}
}
getImageWidth() {
if (this.props.index == this.props.indice) {
return this.props.width + "px"
} else if (this.props.index == this.props.indice + 1 || this.props.index == this.props.indice - 1) {
return (this.props.width - 20) + "px"
} else {
return (this.props.width - 30) + "px"
}
}
render() {
var quoteClass = this.props.index == this.props.indice ? "quote-box-blue" : "quote-box-white"
var titleClass = this.props.index == this.props.indice ? "sr-home__title__white" : "sr-home__title__blue"
var zindex = this.getZindex.bind(this)
return (
<View>
<div href="javascript:void(0)" onMouseUp={this.handleOnMouseUp.bind(this)}>
<div className={"quote-box " + quoteClass + " " + zindex()} >
<div className={"title " + titleClass}>
{this.props.title}
</div>
<img className="sr-home__icon " style={{ width: this.getImageWidth() }} src={this.props.index == this.props.indice ? this.props.icon_selected : this.props.icon}></img>
</div>
</div>
</View>
)
}
}
<file_sep>/app/javascript/games/sickcrush/main.jsx
import React, { PropTypes } from 'react'
import { Link } from 'react-router-dom'
class SickCrush extends React.Component {
constructor(props) {
super(props);
this.state = {
}
}
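// cr_createRuntime and cr_setSuspended appear to come from the exported Construct 2 runtime bundled with the project; the canvas below is its render target.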
componentDidMount() {
cr_createRuntime("c2canvas");
var onVisibilityChanged = function() {
if (document.hidden || document.mozHidden || document.webkitHidden || document.msHidden)
cr_setSuspended(true);
else
cr_setSuspended(false);
};
document.addEventListener("visibilitychange", onVisibilityChanged, false);
document.addEventListener("mozvisibilitychange", onVisibilityChanged, false);
document.addEventListener("webkitvisibilitychange", onVisibilityChanged, false);
document.addEventListener("msvisibilitychange", onVisibilityChanged, false);
function OnRegisterSWError(e)
{
console.warn("Failed to register service worker: ", e);
};
// Runtime calls this global method when ready to start caching (i.e. after startup).
// This registers the service worker which caches resources for offline support.
window.C2_RegisterSW = function C2_RegisterSW()
{
if (!navigator.serviceWorker)
return; // no SW support, ignore call
try {
navigator.serviceWorker.register("sw.js", { scope: "./" })
.then(function (reg)
{
console.log("Registered service worker on " + reg.scope);
})
.catch(OnRegisterSWError);
}
catch (e)
{
OnRegisterSWError(e);
}
};
$('#c2canvasdiv').css({ 'margin-top': '0px' })
$("html,body").animate({
scrollTop: $('#c2canvas').offset().top
}, "slow");
}
componentDidUpdate() {
console.log($('#c2canvas').offset())
$("html,body").animate({
scrollTop: $('#c2canvas').offset().top
}, "slow");
}
render() {
return (
<div className='grey-bg'>
<div className='sr-page-intro'>
<div className="container">
<div className='row'>
<div className='col-10'>
<h2>Play Healthy rush</h2>
<h4>
Best Insurance helps you get the best insurance fast and easily!
</h4>
</div>
</div>
</div>
</div>
<div className='wrap-content'>
<div id="c2canvasdiv">
<canvas id="c2canvas" width="960" height="540">
<h1>Your browser does not appear to support HTML5. Try upgrading your browser to the latest version. <a href="http://www.whatbrowser.org">What is a browser?</a>
<br/><br/><a href="http://www.microsoft.com/windows/internet-explorer/default.aspx">Microsoft Internet Explorer</a><br/>
<a href="http://www.mozilla.com/firefox/">Mozilla Firefox</a><br/>
<a href="http://www.google.com/chrome/">Google Chrome</a><br/>
<a href="http://www.apple.com/safari/download/">Apple Safari</a></h1>
</canvas>
</div>
</div>
</div>
);
}
}
export default SickCrush
<file_sep>/app/javascript/containers/home.jsx
import React, { PropTypes } from 'react'
import { Link } from 'react-router-dom'
import { ViewPager, Frame, Track, View } from 'react-view-pager'
import CurrentUser from '../services/currentUser'
import Product from '../components/shared/product'
import Benefit from '../components/shared/benefit'
import Comment from '../components/shared/comment'
import hand from '../../../public/images/icon-hand-selected.svg'
import handSelected from '../../../public/images/icon-hand.png'
import car from '../../../public/images/icon-car.svg'
import carSelected from '../../../public/images/icon-car-selected.svg'
import shield from '../../../public/images/icon-shield.svg'
import shieldSelected from '../../../public/images/icon-shield-selected.svg'
import shopping from '../../../public/images/icon-shopping.svg'
import shoppingSelected from '../../../public/images/icon-shopping-selected.svg'
import health from '../../../public/images/icon-health.svg'
import healthSelected from '../../../public/images/icon-health-selected.svg'
import budgetCalculator from '../../../public/images/BudgetCalculator.png'
import left from '../../../public/images/left.png'
import right from '../../../public/images/right.png'
import WindowHelper from '../services/windowHelper';
class Home extends React.Component {
constructor(props) {
super(props);
this.state = {
currentUser: (new CurrentUser).get(),
indice: 2, // the centre card is selected on both desktop and mobile
draging: false
}
}
onViewChange(indice) {
// react-view-pager reports the visible view indices as an array; track the first one.
this.setState({ indice: indice[0] }, () => { this.forceUpdate() });
}
handleOnSwipeMove() {
this.setState({ draging: true })
}
handleOnSwipeEnd() {
this.setState({ draging: false })
}
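// Ignore clicks that end a swipe so dragging the carousel does not trigger navigation.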
clickMethod(link) {
if (!this.state.draging) {
this.props.history.push(link);
}
}
render() {
var benefitsCol = (this.state.indice == 0 || this.state.indice == 1) ? "col-md-4 col-sm-6 " : "col-md-3 col-sm-6";
var benefitsCancer = (this.state.indice == 0 || this.state.indice == 1) ? <div></div> : (<div className={"col-md-3 col-sm-6 "}>
<Benefit title="Term CI Protection" icon="icon-guard" />
</div>);
return (
<div>
<div className="bg-main-blue-gradiant">
<div className="container">
<div className='sr-home__title'>
<h2>Get the best insurance!</h2>
<h4>
Best Insurance helps you get the best
insurance fast and easily!
</h4>
{/*<div className='sr-home__title-cta'>
<Link to={this.state.currentUser.id ? `/step` : `/login`} className='btn btn-dark-blue'>
Try to Find
</Link>
</div>*/}
</div>
</div>
</div>
<div className='sr-home row' id="scroll-product">
<div className="container">
<div className='sr-home__products-intro row'>
<div className="col-md-3 col-sm-12">
<h5>The Best Choice</h5>
<h3>Products</h3>
</div>
<div className="col-md-9 col-sm-12 ">
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing Lorem ipsum dolor sit amet, consectetuer adipiscing Lorem ipsum dolor sit amet, consectetuer adipiscing Lorem ipsum</p>
</div>
</div>
<div className='sr-home__products col-sm-12'>
<div className='sr-product'>
<ViewPager tag="main">
<div className="">
<img className="arrow__left" src={left} onClick={() => this.track2.prev()}/>
<Frame className="frame sr-home__frame " >
<Track
ref={c => this.track2 = c}
viewsToShow={WindowHelper.homeProductToShow()}
align={0.5}
onViewChange={this.onViewChange.bind(this)}
currentView={2}
swipe={false}
className="track">
<Product title="Car Insurance" width={100} icon_selected={carSelected} icon={car} indice={this.state.indice} index={0} link={`/`} clickMethod={this.clickMethod.bind(this)} />
<Product title="CI Protection" width={100} icon_selected={handSelected} icon={hand} indice={this.state.indice} index={1} link={`/ciProtection`} clickMethod={this.clickMethod.bind(this)} />
<Product title="Term life Insurance" width={85} icon_selected={healthSelected} icon={health} indice={this.state.indice} index={2} link={`/termLifeInsurance`} clickMethod={this.clickMethod.bind(this)} />
<Product title="Accident Protection" width={65} icon_selected={shieldSelected} icon={shield} indice={this.state.indice} index={3} link={`/`} clickMethod={this.clickMethod.bind(this)} />
<Product title="Shopping Insurance" width={75} icon_selected={shoppingSelected} icon={shopping} indice={this.state.indice} index={4} link={`/`} clickMethod={this.clickMethod.bind(this)} />
</Track>
</Frame>
<img className="arrow__right" src={right} onClick={() => this.track2.next()}/>
</div>
<div className="sr-subscription__arrow-container">
<div className="sr-subscription__arrow-down"></div>
</div>
</ViewPager>
</div>
</div>
<div className="col-12 sr-home__title__reason">
<h3>Reasons that you should purchase this insurance</h3>
</div>
<div className='sr-home__benefits col-sm-12'>
<div className='sr-benefit'>
<div className="row">
<div className={benefitsCol}>
<Benefit title="Support your life" icon="icon-hand" />
</div>
<div className={benefitsCol}>
<Benefit title="Gamified Journey" icon="icon-wheel" />
</div>
{benefitsCancer}
<div className={benefitsCol} >
<Benefit title="Get Reward for subscriber" icon="icon-gift" />
</div>
</div>
</div>
</div>
<div className=" col-sm-12 budget-calculator__container">
<div className="row">
<div className="col-12">
<hr className="budget-calculator__separator" />
</div>
<div className="col-12 budget-calculator__text_container">
<text className="budget-calculator__text">You can find products on your budget</text>
</div>
<div className="col-12">
<img className="budget-calculator__image img-fluid" src={budgetCalculator} />
</div>
</div>
</div>
</div>
</div>
<div className="bg-main-blue" id="scroll-contact">
<div className="container">
<div className='sr-home__testimonials'>
<div className="circle-bg circle-top-left"></div>
<div className="circle-bg circle-top-right"></div>
<div className='sr-testimonial'>
<div className="title">Customers Comments</div>
<ViewPager tag="main">
<Frame className="frame">
<Track
ref={c => this.track = c}
viewsToShow={WindowHelper.commentProductToShow()}
infinite
align={0.5}
className="track">
<Comment rate="4" comment="CommentCommentCommentCommentCommentCommentCommentComment CommentCommentCommentComment" sub_comment="SHAW THEARTER10% ticket" username="Username" />
<Comment rate="4" comment="CommentCommentCommentCommentCommentCommentCommentComment CommentCommentCommentComment" sub_comment="SHAW THEARTER10% ticket" username="Username" />
<Comment rate="4" comment="CommentCommentCommentCommentCommentCommentCommentComment CommentCommentCommentComment" sub_comment="SHAW THEARTER10% ticket" username="Username" />
<Comment rate="4" comment="CommentCommentCommentCommentCommentCommentCommentComment CommentCommentCommentComment" sub_comment="SHAW THEARTER10% ticket" username="Username" />
</Track>
</Frame>
</ViewPager>
</div>
<div className="circle-bg circle-bottom-left"></div>
<div className="circle-bg circle-bottom-right"></div>
</div>
<div className='col-sm-12 sr-home__contact'>
<div className='sr-contact '>
<div className="row">
<h3 className="col-md-3 col-sm-12">Contact</h3>
<div className="col-md-9 col-sm-12 desc">Lorem ipsum dolor sit amet, consectetuer adipiscing</div>
</div>
<div className="row">
<div className="col-md-3 col-sm-12"></div>
<div className="col-md-9 col-sm-12">
<div className="address">
<p>Swiss Re Singapore</p>
<p>Address Office, Singapore</p>
<p>Phone 012345678</p>
<p>Email <EMAIL></p>
</div>
<div className="bottom-button">
<a href="#" className="btn btn-green btn-green--large-text">Online Chat</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
);
}
}
export default Home
<file_sep>/app/javascript/components/question/slider.jsx
import React, { PropTypes } from 'react'
import ReactBootstrapSlider from 'react-bootstrap-slider';
import Translations from '../../services/translations';
import _ from 'lodash'
class Slider extends React.Component {
constructor(props) {
super(props);
this.state = {
response: Number(props.defaultValue),
option_id: null,
min: this.props.componentValue.choices[0].min,
max: this.props.componentValue.choices[this.props.componentValue.choices.length - 1].max,
minImage: this.props.componentValue.choices[0].description,
maxImage: this.props.componentValue.choices[this.props.componentValue.choices.length - 1].description,
selectedImage: this.props.componentValue.choices[0].image,
}
this.changeValue = this.changeValue.bind(this);
this.setSelectedImage = this.setSelectedImage.bind(this);
}
componentDidMount() {
this.setSelectedImage(Number(this.props.defaultValue));
}
changeValue(event) {
this.setSelectedImage(event.target.value);
}
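// Picks the choice whose [min, max] range contains the slider value and shows that choice's image.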
setSelectedImage(currentValue) {
var _this = this;
_.forEach(this.props.componentValue.choices, function (choice) {
if (choice.min <= currentValue && currentValue <= choice.max) {
_this.setState({
selectedImage: choice.image,
option_id: choice.id
})
}
})
this.setState({
response: currentValue,
// option_id: this.props.componentValue.choices[0].id,
},
() => {
this.props.response(this.state)
}
);
}
render() {
return (
<div>
<div className=''>
<text className="">{String(this.state.response)}</text>
<div className="question-bg p-t-15 p-b-15" style={{ 'background': "url(" + this.props.question_image + ")", 'background-size': 'contain' }}>
<img className="selected-image" src={this.state.selectedImage} alt="" />
</div>
</div>
<div className='sr-slider__container'>
<ReactBootstrapSlider
value={this.state.response}
change={this.changeValue}
slideStop={this.changeValue.bind(this)}
step={Number(this.props.componentValue.choices[0].range_interval)}
max={Number(this.state.max)}
min={Number(this.state.min)}
orientation="horizontal"
reversed={false} />
</div>
<div className='row'>
<text className="col-6">{this.state.min}</text>
<text className="col-6">{this.state.max}</text>
</div>
</div>
);
}
}
export default Slider
<file_sep>/public/offline.js
{
"version": 1500888397,
"fileList": [
"data.js",
"c2runtime.js",
"jquery-2.1.1.min.js",
"offlineClient.js",
"images/bestscorefont.png",
"images/bonusfont.png",
"images/scorefont.png",
"images/loadingfont.png",
"images/animal-sheet0.png",
"images/animal-sheet1.png",
"images/gametittle-sheet0.png",
"images/particles.png",
"images/buttonpause-sheet0.png",
"images/buttonpause-sheet1.png",
"images/buttonplay-sheet0.png",
"images/buttonsound-sheet0.png",
"images/buttonsound-sheet1.png",
"images/line-sheet0.png",
"images/line-sheet1.png",
"images/line-sheet2.png",
"images/frame-sheet0.png",
"images/background-sheet0.png",
"images/buttonmenu-sheet0.png",
"images/times.png",
"images/tw-sheet0.png",
"images/fb-sheet0.png",
"images/detector-sheet0.png",
"images/bgpoin-sheet0.png",
"images/bgscore-sheet0.png",
"media/button.ogg",
"media/stick.ogg",
"media/button.m4a",
"media/stick.m4a",
"media/drag.m4a",
"media/match.m4a",
"media/match.ogg",
"media/drag.ogg",
"media/click.m4a",
"media/click.ogg",
"media/mattoglseby - 2.m4a",
"media/mattoglseby - 2.ogg",
"media/mattoglseby - 1.m4a",
"media/mattoglseby - 1.ogg",
"media/tw080.ogg",
"media/tw080.m4a",
"loading-logo.png",
"icon-16.png",
"icon-32.png",
"icon-114.png",
"icon-128.png",
"icon-256.png",
"animate.min.css"
]
}<file_sep>/app/javascript/services/windowHelper.js
import _ from 'underscore'
import humps from 'humps'
import Connector from '../utils/connector'
class WindowHelper {
constructor() {
}
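// Breakpoints follow the Bootstrap-style grid tiers used in the markup (768 / 992 / 1200); the returned view counts are tuned for react-view-pager.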
static isDesktop() {
return window.innerWidth > 768;
}
static homeProductToShow() {
if (window.innerWidth > 1199) {
return 5;
} else if (window.innerWidth > 768) {
return 3;
} else {
return 1.5;
}
}
static homeProductSelected() {
// Every breakpoint currently starts on the third (centre) card.
return 2;
}
static commentProductToShow() {
if (window.innerWidth > 991) {
return 3;
} else if (window.innerWidth > 767) {
return 2;
} else {
return 1;
}
}
}
export default WindowHelper<file_sep>/app/controllers/application_controller.rb
class ApplicationController < ActionController::Base
protect_from_forgery with: :null_session
private
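# Memoized Playbasis client that stores its auth token in the Rails session.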
def client
@client ||= Playbasis::Client.new(storage: session)
end
end
|
7fce38c898a6f622b04f83d34663b28fbc1687b7
|
[
"JavaScript",
"Ruby",
"Markdown"
] | 63 |
JavaScript
|
pongsakorn-ruadsong/SwissRE
|
422d07f046320df34802a2632e057cfa79744f5d
|
822edd3a2456c6736a4dd6ac6e343bddbb51b1a6
|
refs/heads/master
|
<file_sep>var urlLib = require('url');
var http = require('http');
var querystring = require('querystring');
http.createServer(function(req, res) {
res.setHeader('Access-Control-Allow-Origin', '*');
console.log(req.url);
var json = urlLib.parse(req.url, true).query;
if (json.user == 'nini' && json.pass == '<PASSWORD>') {
    res.write('Login successful');
} else {
    res.write('Login failed');
}
res.end();
}).listen(1212);<file_sep># node-project
A node.js test project.
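The sketch below shows how the GET login server in this repo might be exercised from a browser page (the server sets `Access-Control-Allow-Origin: *`). It assumes the server is running locally on port 1212; the password value is a placeholder because the source elides it as `<PASSWORD>`.

```js
fetch('http://localhost:1212/?user=nini&pass=YOUR_PASSWORD') // YOUR_PASSWORD is a placeholder
  .then(res => res.text())
  .then(text => console.log(text)); // "Login successful" or "Login failed"
```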
<file_sep>// Parse the data sent in a POST request
var data = 'ie=utf-8&f=8&rsv_bp=1&tn=baidu&wd=hehe&oq=klad&rsv_pq=964cf2d40004395d&rsv_t=cf419Qk1Xu%2BwAV5FUITShqLBta5yhbogoXdaAzCqAoJfMqBfWnBlt0vzqw0&rqlang=cn&rsv_enter=1&rsv_dl=tb&rsv_sug3=9&rsv_sug1=7&rsv_sug7=100&rsv_sug2=0&inputT=3985&rsv_sug4=4971';
var querystring = require('querystring');
console.log(querystring.parse(data));<file_sep>var http = require('http'); // import the http module
// create a server
http.createServer(function(request, response) {
    //console.log(request.url)
    response.setHeader('Access-Control-Allow-Origin', '*'); // allow cross-origin requests; the second argument controls which origins are allowed
let url = request.url;
if (url.indexOf('html') != -1) {
//console.log(url);
if (url == '/index.html') {
response.write('Good! this is index.html')
} else {
response.write('404')
}
} else {
}
// console.log('someone visited')
// response.write('I am nini');
response.end();
}).listen(1212);
// listen on the port<file_sep>var http = require('http');
var querystring = require('querystring');
http.createServer(function(req, res) {
res.setHeader('Access-Control-Allow-Origin', '*');
// listen for incoming data chunks
var str = '';
req.on('data', function(data) {
str += data;
});
// all data has been received
req.on('end', function() {
console.log(str);
var json = querystring.parse(str);
console.log(json);
if (json.user == 'nini' && json.pass == '<PASSWORD>') {
    res.write('Login successful')
} else {
    res.write('Login failed')
}
res.end();
});
}).listen(1212);<file_sep>// the url parsing module
var urlLib = require('url');
var data = 'https://www.baidu.com/s?ie=utf-8&f=8&rsv_bp=1&tn=baidu&wd=baik&oq=baikk&rsv_pq=853b948e00015144&rsv_t=cc073tyOYuozqb1sbXu%2B4IBheRThoODL%2FeR5weAc5e1xJdafbhEhg7NyfbU&rqlang=cn&rsv_enter=1&rsv_dl=tb&rsv_sug3=2&rsv_sug1=1&rsv_sug7=100&rsv_sug2=0&inputT=648&rsv_sug4=1180'
console.log(urlLib.parse(data, true));
|
ffb31f4cfb990daa87d7d71244d0a4d913d0c654
|
[
"JavaScript",
"Markdown"
] | 6 |
JavaScript
|
94451/node-project
|
9bd0ce81d109b0ef15cae296b2f58fba63d4e141
|
2f0c609f9b5b8b46ef449517f8d9977c120d8748
|
refs/heads/main
|
<file_sep>import React, {useState} from "react";
import styles from "../assets/Registration.module.css";
import TextField from "@material-ui/core/TextField/TextField";
import axios from "../plugins/axios";
const SendEmailAndCode = ({handleChange,form,setStep,regTypes,setToken}) => {
const handleSubmit = () => {
axios.post('/auth/validate-otp/',{email:form.email,otp:form.code1+form.code2+form.code3+form.code4}).then(res=>{
console.log(res);
setToken(res.data.token)
setStep(regTypes.FIRST_STEP)
})
};
return(
<div className={styles.box}>
<div className={styles.box_title}>
Enter the code
</div>
<div className={styles.box_description}>
We have sent an email with the code to your inbox
</div>
<div className={styles.code_container}>
<TextField
onChange={(e)=>handleChange(e)}
style={{marginTop:'60px',width:'100%',textAlign:'center'}}
id="outlined-basic"
label=""
placeholder="_"
name="code1"
variant="outlined" />
<TextField
onChange={(e)=>handleChange(e)}
style={{marginTop:'60px',width:'100%'}}
id="outlined-basic"
label=""
placeholder="_"
name="code2"
variant="outlined" />
<TextField
onChange={(e)=>handleChange(e)}
style={{marginTop:'60px',width:'100%'}}
id="outlined-basic"
label=""
placeholder="_"
name="code3"
variant="outlined" />
<TextField
onChange={(e)=>handleChange(e)}
style={{marginTop:'60px',width:'100%'}}
id="outlined-basic"
label=""
placeholder="_"
name="code4"
variant="outlined" />
</div>
<div onClick={handleSubmit} className={styles.btn}>
Send me the code
</div>
</div>
)
};
export default SendEmailAndCode;<file_sep>import React from "react";
import { makeStyles } from '@material-ui/core/styles';
import Stepper from '@material-ui/core/Stepper';
import Step from '@material-ui/core/Step';
import StepLabel from '@material-ui/core/StepLabel';
import styles from "../assets/Registration.module.css";
import TextField from '@material-ui/core/TextField';
const useStyles = makeStyles((theme) => ({
root: {
width: '100%',
},
backButton: {
marginRight: theme.spacing(1),
},
instructions: {
marginTop: theme.spacing(1),
marginBottom: theme.spacing(1),
},
}));
function getSteps() {
return ['', '', '','',''];
}
const FirstStep = () => {
const [activeStep, setActiveStep] = React.useState(0);
const steps = getSteps();
return(
<div className={styles.box2}>
<Stepper activeStep={activeStep} alternativeLabel>
{steps.map((label) => (
<Step key={label}>
<StepLabel>{label}</StepLabel>
</Step>
))}
</Stepper>
<div style={{padding:'0 64px',marginTop:'20px'}} className={styles.box_title2}>
Stage 1
</div>
<div style={{padding:'0 64px'}} className={styles.box_description}>
Enter your first and last name so we can check
that they match your IIN
</div>
<div className={styles.forms}>
<TextField style={{marginTop:'25px'}} className={styles.textField_ui} id="outlined-basic" label="First name" variant="outlined" />
<TextField style={{marginTop:'25px'}} className={styles.textField_ui} id="outlined-basic" label="Last name" variant="outlined" />
<TextField style={{marginTop:'25px'}} className={styles.textField_ui} id="outlined-basic" label="IIN" variant="outlined" />
</div>
<div style={{padding:'0 64px'}}>
<div className={styles.btn}>
Next
</div>
</div>
</div>
)
};
export default FirstStep;<file_sep># DiplomaSDU2021
The "Tenet" project for Bachelor degree in Computer Systems and Software Suleyman Demirel University.
Authors:
- <NAME>
- <NAME>
- <NAME>
- <NAME>
- <NAME>
Design:
- https://www.figma.com/file/fwljCeRo4PEXBWNyB7nIic/Diplomka?node-id=0%3A1
- https://www.figma.com/file/43flQMEG37IKkZg9YqPYJO/DIplomka?node-id=0%3A1
<file_sep>import React, {useState} from "react";
import styles from "../assets/Registration.module.css"
import SendEmail from "../components/sendEmail";
import SendEmailAndCode from "../components/sendEmailAndCode";
import FirstStep from "../components/FirstStep";
const regTypes = {
SEND_EMAIL: 'SEND_EMAIL',
SEND_EMAIL_AND_CODE: 'SEND_EMAIL_AND_CODE',
FIRST_STEP: 'FIRST_STEP',
SECOND_STEP: 'SECOND_STEP',
THIRD_STEP: 'THIRD_STEP',
FOURTH_STEP : 'FOURTH_STEP',
FIFTH_STEP : 'FIFTH_STEP'
};
const Registration = () => {
const [step,setStep] = useState(regTypes.SEND_EMAIL);
const [token,setToken] = useState(null);
const [form,setForm] = useState({
email: '',
code1: '',
code2: '',
code3: '',
code4: ''
});
const handleChange = event => {
const copy = {...form};
copy[event.target.name] = event.target.value;
setForm(copy);
};
return(
<div className={styles.container}>
{step === regTypes.SEND_EMAIL && (
<SendEmail
regTypes={regTypes}
setStep={setStep}
email={form.email}
handleChange={handleChange}/>
)}
{step === regTypes.SEND_EMAIL_AND_CODE && (
<SendEmailAndCode
setToken={setToken}
regTypes={regTypes}
setStep={setStep}
form={form}
handleChange={handleChange}/>
)}
{step === regTypes.FIRST_STEP && (
<FirstStep
/>
)}
</div>
)
};
export default Registration;<file_sep>import React from 'react'
import {Route} from "react-router-dom"
import Registration from "./Registration";
const AuthContainer = () => {
return(
<>
<Route exact path={'/register'} render={()=><Registration/>}/>
</>
)
};
export default AuthContainer;<file_sep>MTCNN and InceptionResnetV1 models were fetched from https://github.com/timesler/facenet-pytorch with some changes. MNIST dataset were fetched from http://yann.lecun.com/exdb/mnist/<file_sep>import './App.css';
import AuthContainer from "./pages/AuthContainer";
function App() {
// if (!localStorage.getItem('token')){
// return (
// <div>
// <AuthContainer/>
// </div>
// )
// }
return (
<div className="App">
<AuthContainer/>
</div>
);
}
export default App;
|
b4f76508bc5ac21f6ae8515ae7ad9861a4840e60
|
[
"JavaScript",
"Markdown"
] | 7 |
JavaScript
|
foodsnow/DiplomaSDU2021
|
7eac195e3aa89ca360d9f6c4e858bf911cf59ae2
|
1f2ce806fdc4b38fc2e390e36ef8020556cc8d7f
|
refs/heads/master
|
<file_sep>const express = require("express");
const router = express.Router();
const { getBlogs, postBlogs, getOneBlog, updateOneBlog, deleteBlog, getAllBlogs } = require("../controllers/Blog")
router
.route('/')
.get(getBlogs)
.post(postBlogs)
router
.route('/:id')
.get(getOneBlog)
.put(updateOneBlog)
.delete(deleteBlog)
module.exports = router;<file_sep>const express = require("express");
const router = express.Router();
const { postComment, getComment, getAllComment, updateOneComment, deleteComment} = require("../controllers/comment");
router
.route('/')
.post(postComment);
router
.route('/:id')
.get(getComment)
.put(updateOneComment)
.delete(deleteComment)
router
.route('/blog/:blogId')
.get(getAllComment)
module.exports = router;<file_sep>const Comment = require("../models/comment")
// @desc POST blog comment
// @route /api/comment
exports.postComment = async(req, res, next) => {
try{
const { author, text, blogId } = req.body
const comment = await new Comment({ author, text, blogId }).save()
return res.status(201).json({
success: true,
data: comment
})
} catch(err) {
if(err.name === 'ValidationError') {
const messages = Object.values(err.errors).map(val => val.message);
return res.status(400).json({
success: false,
error: messages
})
} else {
return res.status(500).json({
success: false,
error: 'Server error'
})
}
}
}
// @desc GET single blog comment
// @route /api/comment/:id
exports.getComment = async(req, res, next) => {
try{
const comment = await Comment.findById(req.params.id)
if(!comment) {
return res.status(404).json({
success: false,
error: 'No comment with such id'
})
} else {
return res.status(200).json({
status: true,
data: comment
})
}
} catch(err) {
return res.status(500).json({
success: false,
error: 'Server error'
})
}
}
// @desc GET all comments for a blog
// @route /api/comment/blog/:blogId
exports.getAllComment = async(req, res) => {
try{
const { blogId } = req.params
const comments = await Comment.find({ blogId })
if(!comments) {
return res.status(404).json({
success: false,
error: 'No comment with such id'
})
} else {
return res.status(200).json({
status: true,
count: comments.length,
data: comments
})
}
} catch(err) {
return res.status(500).json({
success: false,
error: 'Server error'
})
}
}
// @desc UPDATE comment
// @route /api/comment/:id
exports.updateOneComment = async(req, res, next) => {
try {
const comment = await Comment.findById(req.params.id);
if(!comment) {
return res.status(404).json({
    success: false,
    error: 'No comment with such id'
})
} else {
const { author, text } = req.body
const updatedComment = await Comment.findOneAndUpdate({_id: req.params.id}, {author, text})
return res.status(200).json({
status: true,
data: updatedComment
})
}
} catch (err) {
return res.status(500).json({
success: false,
error: 'Server error'
})
}
};
// @desc DELETE comment
// @route /api/comment/:id
exports.deleteComment = async(req, res, next) => {
try {
const comment = await Comment.findById(req.params.id);
if(!comment) {
return res.status(404).json({
success: false,
error: 'No comment with such id'
})
} else {
await comment.remove()
return res.status(200).json({
status: true,
data: {}
})
}
} catch (err) {
return res.status(500).json({
success: false,
error: 'Server error'
})
}
}
// @desc DELETE all comments for a blog
// @route /api/comment/blog/:blogId
// exports.deleteAllComments = async(req, res) => {
// try{
// const { blogId } = req.params
// await Comment.deleteMany({ blogId }, null, (err) => console.log(err))
// // if(!deletedComments) {
// // return res.status(401).json({
// // success: false,
// // error: 'No comment with such id'
// // })
// // } else {
// // return res.status(200).json({
// // status: true,
// // data: {}
// // })
// // }
// } catch(err) {
// console.log(err);
// }
// }
<file_sep>## BlogAPI
BlogApi lets you create blog posts that other users can comment on. Blog posts can be edited and deleted, and comments can be edited and deleted as well. When a blog post is deleted, all comments under it are deleted too.
## Installation
Clone the repository from the [Github repository](https://github.com/Emperorfizzy01/Blog-Api). After cloning:
- cd into the root directory
- run `npm install` to install all dependencies
- run `npm run server` to start the server
## Documentation
[Documentation](https://documenter.getpostman.com/view/14116066/TzseKmZa#c020c785-cf26-480b-92dc-61e54163731d)
## Link to the hosted api
[Api link](https://afternoon-bayou-47931.herokuapp.com)
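## Example Usage
The sketch below shows how the comment endpoints might be called from JavaScript. It assumes the routes are mounted at `/api/comment` (as the controller comments indicate) and uses the hosted URL above as the base; `blogId` must be the id of an existing blog post.

```js
const base = 'https://afternoon-bayou-47931.herokuapp.com';

async function demo(blogId) {
  // Create a comment on an existing blog post
  const created = await fetch(`${base}/api/comment`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ author: 'Jane', text: 'Nice post!', blogId })
  }).then(res => res.json());

  // List all comments for that blog post
  const comments = await fetch(`${base}/api/comment/blog/${blogId}`).then(res => res.json());

  console.log(created, comments);
}
```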
## Tools Used
- express
- dotenv
- mongoose
- morgan
- colors
- nodemon<file_sep>const mongoose = require('mongoose');
const commentSchema = new mongoose.Schema({
author: {
type: String,
trim: true,
required: [true, 'Specify authors name']
},
text: {
type: String,
trim: true,
required: [true, 'Please add some text']
},
blogId: {
type: mongoose.Schema.Types.ObjectId,
ref: 'Blog'
}
})
module.exports = mongoose.model('Comment', commentSchema)
|
9b7c65e9173ee6bff874e279fd7a376339c3c15a
|
[
"JavaScript",
"Markdown"
] | 5 |
JavaScript
|
Emperorfizzy01/Blog-Api
|
c87b0f433db32601fd892b8f071be3230cfff2dd
|
0b1b07f47e5d1113c4b376c9363223cb6a8d0cda
|
refs/heads/master
|
<file_sep>function initMap(){
var styledMap = new google.maps.StyledMapType(styles,{name:"Styled Map"});
var mapOptions = {
center:{lat:40,lng:-90},
zoom:3,
mapTypeControl: false,
streetViewControl: false,
zoomControlOptions: {
position: google.maps.ControlPosition.RIGHT_CENTER
},
mapTypeControlOptions:{mapTypeIds:[google.maps.MapTypeId.ROADMAP,'map_style']}
};
map = new google.maps.Map(document.getElementById('map-canvas'),mapOptions);
map.setOptions({ minZoom: 3, maxZoom: 15 });
map.mapTypes.set('map_style',styledMap);
map.setMapTypeId('map_style');
//console.log(1);
$.getJSON("/worldmap/getPoints", function(data){
//cacheMarkers = data;
console.log(data);
setMarkers(map, data);
});
//console.log(2);
window.infoWindow = new google.maps.InfoWindow({
maxWidth:400
});
}
/*
Initializes the standard (public) map: loads data points from the database and stores them in the cacheMarkers var.
*/
function initNormMap(){
admin = false;
initMap();
initSearch();
}
/*
Initializes the admin map: loads data points from the database and stores them in the cacheMarkers var.
*/
function initAdminMap(){
admin = true;
initMap();
//initSearch();
}
/*
Given map and array of data, add markers to the map and initialize marker cluster
*/
function setMarkers(map, people)
{
//console.log("points being set...");
for (var i = 0; i < people.length; i++) {
var person = people[i];
if(person["IsApproved"] == "1" || admin){
var myLatLng = new google.maps.LatLng(person["GeoLocLat"], person["GeoLocLong"]);
//Create New Marker with myLatLng, map, person id, and icon vars
var image = {
url: '/assets/images/MapIndicator.svg',
scaledSize: new google.maps.Size(36, 66),
origin: new google.maps.Point(0, 0),
anchor: new google.maps.Point(18, 66)
};
var image2 = {
url: '/assets/images/non-approved-map.svg',
scaledSize: new google.maps.Size(36, 66),
origin: new google.maps.Point(0, 0),
anchor: new google.maps.Point(18, 66)
};
if(person["IsApproved"] === "0" && admin){
image = image2;
}
var marker = new google.maps.Marker({
position: myLatLng,
map: map,
person:person,
id: person["AlumID"],
icon: image
});
//Add marker click event that runs marker_click function
google.maps.event.addListener(marker, 'click', function(){
if(admin){
admin_marker_click(this);
} else{
marker_click(this);
}
});
//Put marker is list of markers
cacheMarkers.push(marker);
}
}
//Set MarkerClusterer to collect markers
var markerCluster = new MarkerClusterer(map, cacheMarkers, {styles: [{
textColor: "#FFFFFF",
fontFamily: "Bebas,helvetica,arial,sans-serif",
textSize: "18",
fontWeight: "normal",
height: 60,
width: 60,
url: "/assets/images/cluster-icon2.svg"
}],zoomOnClick:false, maxZoom:13, gridSize:100, renderCluster:cluster_click });
}
$(document).ready(function() {
});
function adminApproveMapEntry(personID,btn){
$(btn).off('click');
$.ajax({
url: "/worldmap/approvePerson?id=" + personID,
success: function(data){
$(btn).addClass("unapprove");
$(btn).removeClass("approve");
$(btn).html("Unpublish");
$(btn).click(function(e){
var personID = $(this).attr("data-id");
var btn = $(this);
adminUnapproveMapEntry(personID, btn);
});
}
});
}
function adminUnapproveMapEntry(personID,btn){
$(btn).off('click');
$.ajax({
url: "/worldmap/unapprovePerson?id=" + personID,
success: function(data){
$(btn).removeClass("unapprove");
$(btn).addClass("approve");
$(btn).html("Publish");
$(btn).click(function(e){
var personID = $(this).attr("data-id");
var btn = $(this);
adminApproveMapEntry(personID, btn);
});
}
});
}
$(document).ready(function() {
$(".burger").click(function(e){
e.preventDefault();
$(".burger").toggleClass("close-overlay");
$(".nav").toggleClass("open");
});
$("#loginBtn").click(function(e){
e.preventDefault();
$(".login-container").animate({"width":"300px","height":"230px"},600);
$("#loginBtn").fadeOut(300,function(){
$(".login-form").fadeIn(300);
});
});
$("#profImgCtnr").change(function(){
$this = $(this);
$('#profImg').text($this.val());
//console.log(1);
});
$("#profImg").change(function(){
$(".update-img").html("File Uploaded!");
$(".update-img").off("click");
});
$(".update-img").click(function(e){
e.preventDefault();
$("#profImg").click();
//console.log(2);
}).show();
$(".closeBtn").click(function(e){
e.preventDefault();
$($(this).parent()).css("top","-100%");
});
$(".alumnus .approve").click(function(e){
var personID = $(this).attr("data-id");
var btn = $(this);
adminApproveMapEntry(personID, btn);
});
$(".alumnus .unapprove").click(function(e){
var personID = $(this).attr("data-id");
var btn = $(this);
adminUnapproveMapEntry(personID, btn);
});
$(".account-status a").click(function(e){
e.preventDefault();
$(".account-status-overlay").removeClass("hide").addClass("open");
});
$(".status-info .close-overlay").click(function(e){
e.preventDefault();
$(".account-status-overlay").removeClass("open").addClass("hide");
})
$(".send-msg").click(function(e){
e.preventDefault();
var name = $("#name").val();
var email = $("#email-real").val();
var msg = $("#message").val();
var personID = $("#personID").val();
$("#name").val("");
$("#email-real").val("");
$("#message").val("");
$("#personID").val("");
var formData = "name=" + name + "&email-real=" + email + "&message=" + msg + "&personID=" + personID;
$(".contact-form").css("top","-100%");
$.ajax({
type: "POST",
url: "/worldmap/contact",
data: formData,
success: function(data){
//console.log(data);
}
});
//console.log(formData);
});
});
///////////////// CLICK EVENTS /////////////////////////
/*
Marker Click Event Handler taking context "a" added when marker created in setMarkers
@input a - context assumed to be a Google maps marker object
*/
function marker_click(a)
{
//make sure an infoWindow has not been opened
//NOTE: window.infoWindow global variable
closeWindows();
//Load data for person and initiallize infoWindow to window.InfoWindow
//Set window contect with renderPerson(), passing in data retrieved.
$.getJSON("/worldmap/getPerson?id=" + a.id, function(data){
map.setZoom(12);
openWindow(renderPerson(data[0],false), getOffset( a.position,0,-66), map.getZoom());
$("#infoWindow").on("click",".contactMe",function(){
$(".contact-form").css("top","0");
$(".contact-form .headline").html("Contact " + $(this).attr("data-contactname"));
$(".contact-form #personID").val($(this).attr("data-contactnum"));
});
});
$.getJSON("/worldmap/getPersonImg?id=" + a.id, function(data){
//console.log("load img...");
//console.log("Data is: "+data[0]);
var profPic;
if(data[0] !== "")
profPic = "<div class='img-ctnr'><img width='200' src='data:image/jpeg;base64,"+data[0]+"' /></div>";
else
profPic = "";
//console.log("Img is: "+profPic);
$("#infoWindow h1").before(profPic);
});
}
function admin_marker_click(a)
{
//make sure an infoWindow has not been opened
//NOTE: window.infoWindow global variable
closeWindows();
//console.log("Opening detail view for id: "+a.id);
//Load data for person and initiallize infoWindow to window.InfoWindow
//Set window contect with renderPerson(), passing in data retrieved.
$.getJSON("/worldmap/getPerson?id=" + a.id, function(data){
//console.log("load window...");
openWindow(renderPerson(data[0],true), getOffset( a.position,0,-66), map.getZoom());
$("#infoWindow").on("click",".approve",function(){
approveMapEntry(a.id);
$(".statusUpdate").remove();
$(".approve").replaceWith("<h2 class='statusUpdate'>This entry has been approved.</h2><div class='unapprove btn'>Unpublish</div>");
var image = {
url: '/images/MapIndicator.svg',
scaledSize: new google.maps.Size(36, 66),
origin: new google.maps.Point(0, 0),
anchor: new google.maps.Point(18, 66)
};
a.icon = image;
a.setMap(null);
a.setMap(map);
});
$("#infoWindow").on("click",".unapprove",function(){
unapproveMapEntry(a.id);
$(".statusUpdate").remove();
$(".unapprove").replaceWith("<h2 class='statusUpdate'>This entry has been unapproved.</h2><div class='approve btn'>Publish</div>");
var image = {
url: '/images/non-approved-map.svg',
scaledSize: new google.maps.Size(36, 66),
origin: new google.maps.Point(0, 0),
anchor: new google.maps.Point(18, 66)
};
a.icon = image;
a.setMap(null);
a.setMap(map);
});
$("#infoWindow").on("click",".contactMe",function(){
$(".contact-form").css("top","0");
$(".contact-form .headline").html("Contact " + $(this).attr("data-contactname"));
$(".contact-form #personID").val($(this).attr("data-contactnum"));
});
});
//console.log("about to load img");
$.getJSON("/worldmap/getPersonImg?id=" + a.id, function(data){
//console.log("loaded img");
var profPic;
if(data[0] !== "")
profPic = "<img width='200' src='data:image/jpeg;base64,"+data[0]+"' />";
else
profPic = "";
$("#infoWindow h1").before(profPic);
});
}
/*
Function that renders the on Click window for
each group of markers inside cluster
@input latLong - latitude / longitude
@input markers - individual marker objects
*/
function cluster_click(latLong, markers)
{
var ids = [];
for (i=0; i<markers.length; i++)
{
ids.push(markers[i].id);
}
$.ajax({
type: 'POST',
url: "/worldmap/getPeople",
data: {'ids[]' : ids},
success: function(data, textStatus) {
//data contains the JSON object
openWindow(renderCluster(data),latLong, map.getZoom());
},
dataType: "json"
});
}
/*
On click function for an element inside a cluster window.
Displays information about person selected and navigates to that
person's map location
*/
function clusterPerson_click(object)
{
var person = people[parseInt(object.getAttribute("index"))];
//Switches the view inside the info Window to individual person indicated
openPopOut(renderPerson(person,admin), parseInt(object.getAttribute("data-id")));
//console.log("Person is: " + person);
//console.log(person);
//console.log(object);
$("#infoWindow").on("click",".contactMe",function(){
$(".contact-form").css("top","0");
$(".contact-form .headline").html("Contact " + $(this).attr("data-contactname"));
$(".contact-form #personID").val($(this).attr("data-contactnum"));
});
var a = person["AlumID"];
$.getJSON("/worldmap/getPersonImg?id=" + a, function(data){
//console.log("load img...");
//console.log("Data is: "+data[0]);
var profPic;
if(data[0] !== "")
profPic = "<div class='img-ctnr'><img width='200' src='data:image/jpeg;base64,"+data[0]+"' /></div>";
else
profPic = "";
//console.log("Img is: "+profPic);
$("#infoWindow h1").before(profPic);
});
}
function serchResult_click(result)
{
var index = result.getAttribute('index');
google.maps.event.trigger(cacheMarkers[index], 'click');
}
/////////////// Window Rendering Functions ////////////////////
/*
Renders the info-window HTML for a single person
@input person - dictionary of attributes about the person
@input isAdmin - whether admin publish/unpublish controls should be included
*/
function renderPerson(person,isAdmin)
{
var html = "<div id='infoWindow' class='infoWindow'>";
if(person["IsApproved"] === "0" && isAdmin){
html += "<div class='approve btn'>Publish</div>";
}
if(person["IsApproved"] === "1" && isAdmin){
html += "<div class='unapprove btn'>Unpublish</div>";
}
if(loggedIn){
html += "<h1>" + person['AlumFName'] + " " + person['AlumLName'] + "</h1>";
if(person["GradYear"] || person['JobTitle']){
if(!person["GradYear"]){
html += "<h2 class='jobTitle gradYear'>"+person['JobTitle']+"</h2>";
} else if(!person["JobTitle"]){
html += "<h2 class='jobTitle gradYear'>Class of " + person['GradYear'] + "</h2>";
} else{
html += "<h2 class='jobTitle gradYear'>"+person['JobTitle']+", Class of " + person['GradYear'] + "</h2>";
}
}
} else{
if(person["GradYear"] || person['JobTitle']){
if(!person["GradYear"]){
html += "<h1 class='jobTitle gradYear'>"+person['JobTitle']+"</h1>";
} else if(!person["JobTitle"]){
html += "<h1 class='jobTitle gradYear'>Class of " + person['GradYear'] + "</h1>";
} else{
html += "<h1 class='jobTitle gradYear'>"+person['JobTitle']+", Class of " + person['GradYear'] + "</h1>";
}
}
}
if(person["RecChurch"]){
html += "<h2>Church I Attend: " + person['RecChurch'] + "</h2>";
}
if(loggedIn){
    html += "<p>" + person['AlumBio'] + "</p>" +
    "<p><a href='#' class='btn contactMe' data-contactnum='"+person['AlumID']+"' data-contactname='"+person['AlumFName']+" "+person['AlumLName']+"'>Contact Me</a></p>";
}
// Always close the clearing div and the outer infoWindow container so the
// markup stays balanced for logged-out visitors as well.
html += "<div style='clear:both;'></div></div>";
return html;
}
/*
Function that renders the on Click window for
each group of markers inside a cluster
@input data - array of people to be included in window
*/
function renderCluster(data)
{
//set global variable people to the current cluster
people = data;
var strClusterWindow = "<div id='infoWindow' class='clusterWindow'><div id='clusterWindow' class = 'clusterWindow'>";
for (i=0; i<people.length; i++)
{
strClusterWindow += renderClusterRow(people[i], i);
}
strClusterWindow += "</div></div>";
return strClusterWindow;
}
/*
Renders each individual row for a person inside marker list window (used primarily inside renderCluster)
@input person - an dictionary of attributes about a person
*/
function renderClusterRow(person, index)
{
if(loggedIn){
return "<div id='clusterRow' class='clusterRow'>" +
"<a onclick='clusterPerson_click(this)' href='#' index='" +
index + "' data-id='" + person["AlumID"] + "'>" +
person["AlumFName"] + " " + person["AlumLName"] +
"</a>"+
"</div>";
} else{
return "<div id='clusterRow' class='clusterRow'>" +
"<a onclick='clusterPerson_click(this)' href='#' index='" +
index + "' data-id='" + person["AlumID"] + "'>" +
person["JobTitle"] +
"</a>"+
"</div>";
}
}
/*
Switches view in already opened info window to popUp, placing html inside if provided
@input html (optional) - HTML content to place inside the pop-out
*/
function openPopOut(html, markerCacheIndex)
{
if (!html){html = "";}
cancelButton = "<button class='backButton' onClick='closePopOut()'>Back</button>";
$('#clusterWindow').addClass('inactive');
$('#infoWindow').append("<div class='popOut' id='popOut'>" + cancelButton + html + "</div>");
for(var i = 0; i < cacheMarkers.length; i++){
if(parseInt(cacheMarkers[i].id) === markerCacheIndex){
markerCacheIndex = i;
break;
}
}
var a = cacheMarkers[markerCacheIndex];
$("#infoWindow").on("click",".approve",function(){
approveMapEntry(a.id);
$(".statusUpdate").remove();
$(".approve").replaceWith("<h2 class='statusUpdate'>This entry has been approved.</h2><div class='unapprove btn'>Unpublish</div>");
var image = {
url: '/images/MapIndicator.svg',
scaledSize: new google.maps.Size(36, 66),
origin: new google.maps.Point(0, 0),
anchor: new google.maps.Point(18, 66)
};
a.icon = image;
a.setMap(null);
a.setMap(map);
});
$("#infoWindow").on("click",".unapprove",function(){
unapproveMapEntry(a.id);
$(".statusUpdate").remove();
$(".unapprove").replaceWith("<h2 class='statusUpdate'>This entry has been unapproved.</h2><div class='approve btn'>Publish</div>");
var image = {
url: '/images/non-approved-map.svg',
scaledSize: new google.maps.Size(36, 66),
origin: new google.maps.Point(0, 0),
anchor: new google.maps.Point(18, 66)
};
a.icon = image;
a.setMap(null);
a.setMap(map);
});
}
/*
Switches view in already opened info window back to initial view before openPopOut was called
@input NONE
*/
function closePopOut()
{
$('#clusterWindow').removeClass('inactive');
$('#popOut').remove();
}
/*
Fills info window at specified latLong position with HTML content given
@input HTML - HTML content
@input latLong - Google maps lat long object
*/
function openWindow(html, latLong, zoom){
closeWindows();
zoom = typeof zoom !== 'undefined' ? zoom : 9;
map.setZoom(zoom);
var y_offset = -100; //This may need to be global var in future.
offsetCenter(latLong,0,y_offset);
window.infoWindow.setPosition(latLong);
window.infoWindow.setContent(html);
window.infoWindow.open(map);
}
/*
Closes open info Window
@input NONE
*/
function closeWindows()
{
if (window.infoWindow) {
window.infoWindow.close();
}
clearSearch();
$("#searchInput").val('');
}
////////// Misc. Action Methods ///////////////////
// Function to 'approve' a map entry
function approveMapEntry(personID){
$.ajax({
url: "/worldmap/approvePerson?id=" + personID,
success: function(data){console.log(data);}
});
}
// Function to 'unapprove' a map entry
function unapproveMapEntry(personID){
console.log(personID);
$.ajax({
url: "/worldmap/unapprovePerson?id=" + personID,
success: function(data){console.log(data);}
});
}
// gradually increases the zoom in 0.1 increments (every 10ms) until zoom level 8 is reached
function zoomTo(){
//console.log(map.getZoom());
if(map.getZoom()>=8) {
return 0;
}
else {
zoomFluid = map.getZoom() + 0.1;
//console.log(zoomFluid);
map.setZoom(zoomFluid);
setTimeout(zoomTo, 10);
}
}
/*
Set Actions for shortcuts
Possible Actions
ESC - closes current window
*/
$(document).keyup(function(e) {
if (e.keyCode == 27) { closeWindows(); } // escape key maps to keycode `27`
});
//Variables
var map = null;
var people = [];
var cacheMarkers = [];
window.infoWindow = null;
var dataURL = "assets/AlumPortalGetPoints.php";
var maxSearchResults = 15;
var place_service;
var admin;
var styles = [
{
"featureType": "water",
"elementType": "geometry",
"stylers": [
{
"color": "#193341"
}
]
},
{
"featureType": "landscape",
"elementType": "geometry",
"stylers": [
{
"color": "#2c5a71"
}
]
},
{
"featureType": "road",
"elementType": "geometry",
"stylers": [
{
"color": "#29768a"
},
{
"lightness": -37
}
]
},
{
"featureType": "poi",
"elementType": "geometry",
"stylers": [
{
"color": "#406d80"
}
]
},
{
"featureType": "transit",
"elementType": "geometry",
"stylers": [
{
"color": "#406d80"
}
]
},
{
"elementType": "labels.text.stroke",
"stylers": [
{
"visibility": "on"
},
{
"color": "#3e606f"
},
{
"weight": 2
},
{
"gamma": 0.84
}
]
},
{
"elementType": "labels.text.fill",
"stylers": [
{
"color": "#ffffff"
}
]
},
{
"featureType": "administrative",
"elementType": "geometry",
"stylers": [
{
"weight": 0.6
},
{
"color": "#1a3541"
}
]
},
{
"elementType": "labels.icon",
"stylers": [
{
"visibility": "off"
}
]
},
{
"featureType": "poi.park",
"elementType": "geometry",
"stylers": [
{
"color": "#2c5a71"
}
]
}
];
// ==ClosureCompiler==
// @compilation_level ADVANCED_OPTIMIZATIONS
// @externs_url http://closure-compiler.googlecode.com/svn/trunk/contrib/externs/maps/google_maps_api_v3_3.js
// ==/ClosureCompiler==
/**
* @name MarkerClusterer for Google Maps v3
* @version version 1.0
* @author <NAME>
* @fileoverview
* The library creates and manages per-zoom-level clusters for large amounts of
* markers.
* <br/>
* This is a v3 implementation of the
* <a href="http://gmaps-utility-library-dev.googlecode.com/svn/tags/markerclusterer/"
* >v2 MarkerClusterer</a>.
*/
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* A Marker Clusterer that clusters markers.
*
* @param {google.maps.Map} map The Google map to attach to.
* @param {Array.<google.maps.Marker>=} opt_markers Optional markers to add to
* the cluster.
* @param {Object=} opt_options Supports the following options:
* 'gridSize': (number) The grid size of a cluster in pixels.
* 'maxZoom': (number) The maximum zoom level that a marker can be part of a
* cluster.
* 'zoomOnClick': (boolean) Whether the default behaviour of clicking on a
* cluster is to zoom into it.
* 'averageCenter': (boolean) Whether the center of each cluster should be
* the average of all markers in the cluster.
* 'minimumClusterSize': (number) The minimum number of markers to be in a
* cluster before the markers are hidden and a count
* is shown.
* 'styles': (object) An object that has style properties:
* 'url': (string) The image url.
* 'height': (number) The image height.
* 'width': (number) The image width.
* 'anchor': (Array) The anchor position of the label text.
* 'textColor': (string) The text color.
* 'textSize': (number) The text size.
* 'backgroundPosition': (string) The position of the background x, y.
* @constructor
* @extends google.maps.OverlayView
*/
function MarkerClusterer(map, opt_markers, opt_options) {
// MarkerClusterer implements google.maps.OverlayView interface. We use the
// extend function to extend MarkerClusterer with google.maps.OverlayView
// because it might not always be available when the code is defined so we
// look for it at the last possible moment. If it doesn't exist now then
// there is no point going ahead :)
this.extend(MarkerClusterer, google.maps.OverlayView);
this.map_ = map;
/**
* @type {Array.<google.maps.Marker>}
* @private
*/
this.markers_ = [];
/**
* @type {Array.<Cluster>}
*/
this.clusters_ = [];
this.sizes = [62, 60, 66, 78, 90];
/**
* @private
*/
this.styles_ = [];
/**
* @type {boolean}
* @private
*/
this.ready_ = false;
var options = opt_options || {};
/**
* @type {number}
* @private
*/
this.gridSize_ = options['gridSize'] || 60;
/**
* @private
*/
this.minClusterSize_ = options['minimumClusterSize'] || 2;
this.renderCluster = options['renderCluster'];
/**
* @type {?number}
* @private
*/
this.maxZoom_ = options['maxZoom'] || null;
this.styles_ = options['styles'] || [];
/**
* @type {string}
* @private
*/
this.imagePath_ = options['imagePath'] ||
this.MARKER_CLUSTER_IMAGE_PATH_;
/**
* @type {string}
* @private
*/
this.imageExtension_ = options['imageExtension'] ||
this.MARKER_CLUSTER_IMAGE_EXTENSION_;
/**
* @type {boolean}
* @private
*/
this.zoomOnClick_ = true;
if (options['zoomOnClick'] != undefined) {
this.zoomOnClick_ = options['zoomOnClick'];
}
/**
* @type {boolean}
* @private
*/
this.averageCenter_ = false;
if (options['averageCenter'] != undefined) {
this.averageCenter_ = options['averageCenter'];
}
this.setupStyles_();
this.setMap(map);
/**
* @type {number}
* @private
*/
this.prevZoom_ = this.map_.getZoom();
// Add the map event listeners
var that = this;
google.maps.event.addListener(this.map_, 'zoom_changed', function() {
var zoom = that.map_.getZoom();
if (window.infoWindow)
{
window.infoWindow.close();
}
if (that.prevZoom_ != zoom) {
that.prevZoom_ = zoom;
that.resetViewport();
}
});
google.maps.event.addListener(this.map_, 'idle', function() {
that.redraw();
});
// Finally, add the markers
if (opt_markers && opt_markers.length) {
this.addMarkers(opt_markers, false);
}
}
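/**
 * Hedged usage sketch (not part of the original library): mirrors how
 * setMarkers() in this project constructs its clusterer. `map` is assumed to
 * be an initialized google.maps.Map and `markers` an array of markers; a
 * renderCluster callback can also be passed, as setMarkers() does.
 */
function exampleMarkerClustererUsage(map, markers) {
  return new MarkerClusterer(map, markers, {
    gridSize: 100,
    maxZoom: 13,
    zoomOnClick: false,
    styles: [{
      url: '/assets/images/cluster-icon2.svg',
      height: 60,
      width: 60,
      textColor: '#FFFFFF',
      textSize: '18'
    }]
  });
}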
/**
* The marker cluster image path.
*
* @type {string}
* @private
*/
MarkerClusterer.prototype.MARKER_CLUSTER_IMAGE_PATH_ =
'/images/cluster-icon';
/**
* The marker cluster image path.
*
* @type {string}
* @private
*/
MarkerClusterer.prototype.MARKER_CLUSTER_IMAGE_EXTENSION_ = 'svg';
/**
* Extends an object's prototype by another's.
*
* @param {Object} obj1 The object to be extended.
* @param {Object} obj2 The object to extend with.
* @return {Object} The new extended object.
* @ignore
*/
MarkerClusterer.prototype.extend = function(obj1, obj2) {
return (function(object) {
for (var property in object.prototype) {
this.prototype[property] = object.prototype[property];
}
return this;
}).apply(obj1, [obj2]);
};
/**
* Implementation of the interface method.
* @ignore
*/
MarkerClusterer.prototype.onAdd = function() {
this.setReady_(true);
};
/**
* Implementation of the interface method.
* @ignore
*/
MarkerClusterer.prototype.draw = function() {};
/**
* Sets up the styles object.
*
* @private
*/
MarkerClusterer.prototype.setupStyles_ = function() {
if (this.styles_.length) {
return;
}
for (var i = 0, size; size = this.sizes[i]; i++) {
this.styles_.push({
url: this.imagePath_ + (i + 1) + '.' + this.imageExtension_,
height: size,
width: size
});
}
};
/**
* Fit the map to the bounds of the markers in the clusterer.
*/
MarkerClusterer.prototype.fitMapToMarkers = function() {
var markers = this.getMarkers();
var bounds = new google.maps.LatLngBounds();
for (var i = 0, marker; marker = markers[i]; i++) {
bounds.extend(marker.getPosition());
}
this.map_.fitBounds(bounds);
};
/**
* Sets the styles.
*
* @param {Object} styles The style to set.
*/
MarkerClusterer.prototype.setStyles = function(styles) {
this.styles_ = styles;
};
/**
* Gets the styles.
*
* @return {Object} The styles object.
*/
MarkerClusterer.prototype.getStyles = function() {
return this.styles_;
};
/**
* Whether zoom on click is set.
*
* @return {boolean} True if zoomOnClick_ is set.
*/
MarkerClusterer.prototype.isZoomOnClick = function() {
return this.zoomOnClick_;
};
/**
* Whether average center is set.
*
* @return {boolean} True if averageCenter_ is set.
*/
MarkerClusterer.prototype.isAverageCenter = function() {
return this.averageCenter_;
};
/**
* Returns the array of markers in the clusterer.
*
* @return {Array.<google.maps.Marker>} The markers.
*/
MarkerClusterer.prototype.getMarkers = function() {
return this.markers_;
};
/**
* Returns the number of markers in the clusterer
*
* @return {Number} The number of markers.
*/
MarkerClusterer.prototype.getTotalMarkers = function() {
return this.markers_.length;
};
/**
* Sets the max zoom for the clusterer.
*
* @param {number} maxZoom The max zoom level.
*/
MarkerClusterer.prototype.setMaxZoom = function(maxZoom) {
this.maxZoom_ = maxZoom;
};
/**
* Gets the max zoom for the clusterer.
*
* @return {number} The max zoom level.
*/
MarkerClusterer.prototype.getMaxZoom = function() {
return this.maxZoom_;
};
/**
* The function for calculating the cluster icon image.
*
* @param {Array.<google.maps.Marker>} markers The markers in the clusterer.
* @param {number} numStyles The number of styles available.
* @return {Object} An object with properties: 'text' (string) and 'index' (number).
* @private
*/
MarkerClusterer.prototype.calculator_ = function(markers, numStyles) {
var index = 0;
var count = markers.length;
var dv = count;
while (dv !== 0) {
dv = parseInt(dv / 10, 10);
index++;
}
index = Math.min(index, numStyles);
return {
text: count,
index: index
};
};
/**
* Set the calculator function.
*
* @param {function(Array, number)} calculator The function to set as the
* calculator. The function should return an object with properties:
* 'text' (string) and 'index' (number).
*
*/
MarkerClusterer.prototype.setCalculator = function(calculator) {
this.calculator_ = calculator;
};
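/**
 * Hedged sketch (not part of the original library): a custom calculator that
 * could be passed to setCalculator(). Per the contract above it must return an
 * object with 'text' (the label shown on the cluster icon) and 'index'
 * (the 1-based style index).
 */
function exampleClusterCalculator(markers, numStyles) {
  // Label with the raw count and pick a larger style for larger clusters.
  var count = markers.length;
  var index = Math.min(numStyles, count < 10 ? 1 : count < 100 ? 2 : 3);
  return { text: count, index: index };
}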
/**
* Get the calculator function.
*
* @return {function(Array, number)} the calculator function.
*/
MarkerClusterer.prototype.getCalculator = function() {
return this.calculator_;
};
/**
* Add an array of markers to the clusterer.
*
* @param {Array.<google.maps.Marker>} markers The markers to add.
* @param {boolean=} opt_nodraw Whether to redraw the clusters.
*/
MarkerClusterer.prototype.addMarkers = function(markers, opt_nodraw) {
for (var i = 0, marker; marker = markers[i]; i++) {
this.pushMarkerTo_(marker);
}
if (!opt_nodraw) {
this.redraw();
}
};
/**
* Pushes a marker to the clusterer.
*
* @param {google.maps.Marker} marker The marker to add.
* @private
*/
MarkerClusterer.prototype.pushMarkerTo_ = function(marker) {
marker.isAdded = false;
if (marker['draggable']) {
// If the marker is draggable add a listener so we update the clusters on
// the drag end.
var that = this;
google.maps.event.addListener(marker, 'dragend', function() {
marker.isAdded = false;
that.repaint();
});
}
this.markers_.push(marker);
};
/**
* Adds a marker to the clusterer and redraws if needed.
*
* @param {google.maps.Marker} marker The marker to add.
* @param {boolean=} opt_nodraw Whether to redraw the clusters.
*/
MarkerClusterer.prototype.addMarker = function(marker, opt_nodraw) {
this.pushMarkerTo_(marker);
if (!opt_nodraw) {
this.redraw();
}
};
/**
* Removes a marker and returns true if removed, false if not
*
* @param {google.maps.Marker} marker The marker to remove
* @return {boolean} Whether the marker was removed or not
* @private
*/
MarkerClusterer.prototype.removeMarker_ = function(marker) {
var index = -1;
if (this.markers_.indexOf) {
index = this.markers_.indexOf(marker);
} else {
for (var i = 0, m; m = this.markers_[i]; i++) {
if (m == marker) {
index = i;
break;
}
}
}
if (index == -1) {
// Marker is not in our list of markers.
return false;
}
marker.setMap(null);
this.markers_.splice(index, 1);
return true;
};
/**
* Remove a marker from the cluster.
*
* @param {google.maps.Marker} marker The marker to remove.
* @param {boolean=} opt_nodraw Optional boolean to force no redraw.
* @return {boolean} True if the marker was removed.
*/
MarkerClusterer.prototype.removeMarker = function(marker, opt_nodraw) {
var removed = this.removeMarker_(marker);
if (!opt_nodraw && removed) {
this.resetViewport();
this.redraw();
return true;
} else {
return false;
}
};
/**
* Removes an array of markers from the cluster.
*
* @param {Array.<google.maps.Marker>} markers The markers to remove.
* @param {boolean=} opt_nodraw Optional boolean to force no redraw.
*/
MarkerClusterer.prototype.removeMarkers = function(markers, opt_nodraw) {
var removed = false;
for (var i = 0, marker; marker = markers[i]; i++) {
var r = this.removeMarker_(marker);
removed = removed || r;
}
if (!opt_nodraw && removed) {
this.resetViewport();
this.redraw();
return true;
}
};
/**
* Sets the clusterer's ready state.
*
* @param {boolean} ready The state.
* @private
*/
MarkerClusterer.prototype.setReady_ = function(ready) {
if (!this.ready_) {
this.ready_ = ready;
this.createClusters_();
}
};
/**
* Returns the number of clusters in the clusterer.
*
* @return {number} The number of clusters.
*/
MarkerClusterer.prototype.getTotalClusters = function() {
return this.clusters_.length;
};
/**
* Returns the google map that the clusterer is associated with.
*
* @return {google.maps.Map} The map.
*/
MarkerClusterer.prototype.getMap = function() {
return this.map_;
};
/**
* Sets the google map that the clusterer is associated with.
*
* @param {google.maps.Map} map The map.
*/
MarkerClusterer.prototype.setMap = function(map) {
this.map_ = map;
};
/**
* Returns the size of the grid.
*
* @return {number} The grid size.
*/
MarkerClusterer.prototype.getGridSize = function() {
return this.gridSize_;
};
/**
* Sets the size of the grid.
*
* @param {number} size The grid size.
*/
MarkerClusterer.prototype.setGridSize = function(size) {
this.gridSize_ = size;
};
/**
* Returns the min cluster size.
*
* @return {number} The grid size.
*/
MarkerClusterer.prototype.getMinClusterSize = function() {
return this.minClusterSize_;
};
/**
* Sets the min cluster size.
*
* @param {number} size The grid size.
*/
MarkerClusterer.prototype.setMinClusterSize = function(size) {
this.minClusterSize_ = size;
};
/**
* Extends a bounds object by the grid size.
*
* @param {google.maps.LatLngBounds} bounds The bounds to extend.
* @return {google.maps.LatLngBounds} The extended bounds.
*/
MarkerClusterer.prototype.getExtendedBounds = function(bounds) {
var projection = this.getProjection();
// Turn the bounds into latlng.
var tr = new google.maps.LatLng(bounds.getNorthEast().lat(),
bounds.getNorthEast().lng());
var bl = new google.maps.LatLng(bounds.getSouthWest().lat(),
bounds.getSouthWest().lng());
// Convert the points to pixels and the extend out by the grid size.
var trPix = projection.fromLatLngToDivPixel(tr);
trPix.x += this.gridSize_;
trPix.y -= this.gridSize_;
var blPix = projection.fromLatLngToDivPixel(bl);
blPix.x -= this.gridSize_;
blPix.y += this.gridSize_;
// Convert the pixel points back to LatLng
var ne = projection.fromDivPixelToLatLng(trPix);
var sw = projection.fromDivPixelToLatLng(blPix);
// Extend the bounds to contain the new bounds.
bounds.extend(ne);
bounds.extend(sw);
return bounds;
};
/**
* Determines whether a marker is contained in a bounds.
*
* @param {google.maps.Marker} marker The marker to check.
* @param {google.maps.LatLngBounds} bounds The bounds to check against.
* @return {boolean} True if the marker is in the bounds.
* @private
*/
MarkerClusterer.prototype.isMarkerInBounds_ = function(marker, bounds) {
return bounds.contains(marker.getPosition());
};
/**
* Clears all clusters and markers from the clusterer.
*/
MarkerClusterer.prototype.clearMarkers = function() {
this.resetViewport(true);
// Set the markers to an empty array.
this.markers_ = [];
};
/**
* Clears all existing clusters and recreates them.
* @param {boolean} opt_hide To also hide the marker.
*/
MarkerClusterer.prototype.resetViewport = function(opt_hide) {
// Remove all the clusters
for (var i = 0, cluster; cluster = this.clusters_[i]; i++) {
cluster.remove();
}
// Reset the markers to not be added and to be invisible.
for (var i = 0, marker; marker = this.markers_[i]; i++) {
marker.isAdded = false;
if (opt_hide) {
marker.setMap(null);
}
}
this.clusters_ = [];
};
/**
*
*/
MarkerClusterer.prototype.repaint = function() {
var oldClusters = this.clusters_.slice();
this.clusters_.length = 0;
this.resetViewport();
this.redraw();
// Remove the old clusters.
// Do it in a timeout so the other clusters have been drawn first.
window.setTimeout(function() {
for (var i = 0, cluster; cluster = oldClusters[i]; i++) {
cluster.remove();
}
}, 0);
};
/**
* Redraws the clusters.
*/
MarkerClusterer.prototype.redraw = function() {
this.createClusters_();
};
/**
* Calculates the distance between two latlng locations in km.
* @see http://www.movable-type.co.uk/scripts/latlong.html
*
* @param {google.maps.LatLng} p1 The first lat lng point.
* @param {google.maps.LatLng} p2 The second lat lng point.
* @return {number} The distance between the two points in km.
* @private
*/
MarkerClusterer.prototype.distanceBetweenPoints_ = function(p1, p2) {
if (!p1 || !p2) {
return 0;
}
var R = 6371; // Radius of the Earth in km
var dLat = (p2.lat() - p1.lat()) * Math.PI / 180;
var dLon = (p2.lng() - p1.lng()) * Math.PI / 180;
var a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
Math.cos(p1.lat() * Math.PI / 180) * Math.cos(p2.lat() * Math.PI / 180) *
Math.sin(dLon / 2) * Math.sin(dLon / 2);
var c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
var d = R * c;
return d;
};
/**
* Add a marker to a cluster, or creates a new cluster.
*
* @param {google.maps.Marker} marker The marker to add.
* @private
*/
MarkerClusterer.prototype.addToClosestCluster_ = function(marker) {
var distance = 40000; // Some large number
var clusterToAddTo = null;
var pos = marker.getPosition();
for (var i = 0, cluster; cluster = this.clusters_[i]; i++) {
var center = cluster.getCenter();
if (center) {
var d = this.distanceBetweenPoints_(center, marker.getPosition());
if (d < distance) {
distance = d;
clusterToAddTo = cluster;
}
}
}
if (clusterToAddTo && clusterToAddTo.isMarkerInClusterBounds(marker)) {
clusterToAddTo.addMarker(marker);
} else {
var cluster = new Cluster(this);
cluster.addMarker(marker);
this.clusters_.push(cluster);
}
};
/**
* Creates the clusters.
*
* @private
*/
MarkerClusterer.prototype.createClusters_ = function() {
if (!this.ready_) {
return;
}
// Get our current map view bounds.
// Create a new bounds object so we don't affect the map.
var mapBounds = new google.maps.LatLngBounds(this.map_.getBounds().getSouthWest(),
this.map_.getBounds().getNorthEast());
var bounds = this.getExtendedBounds(mapBounds);
for (var i = 0, marker; marker = this.markers_[i]; i++) {
if (!marker.isAdded && this.isMarkerInBounds_(marker, bounds)) {
this.addToClosestCluster_(marker);
}
}
};
/**
* A cluster that contains markers.
*
* @param {MarkerClusterer} markerClusterer The markerclusterer that this
* cluster is associated with.
* @constructor
* @ignore
*/
function Cluster(markerClusterer) {
this.markerClusterer_ = markerClusterer;
this.map_ = markerClusterer.getMap();
this.gridSize_ = markerClusterer.getGridSize();
this.minClusterSize_ = markerClusterer.getMinClusterSize();
this.averageCenter_ = markerClusterer.isAverageCenter();
this.center_ = null;
this.markers_ = [];
this.bounds_ = null;
this.clusterIcon_ = new ClusterIcon(this, markerClusterer.getStyles(),
markerClusterer.getGridSize());
}
/**
* Determines whether a marker is already added to the cluster.
*
* @param {google.maps.Marker} marker The marker to check.
* @return {boolean} True if the marker is already added.
*/
Cluster.prototype.isMarkerAlreadyAdded = function(marker) {
if (this.markers_.indexOf) {
return this.markers_.indexOf(marker) != -1;
} else {
for (var i = 0, m; m = this.markers_[i]; i++) {
if (m == marker) {
return true;
}
}
}
return false;
};
/**
* Add a marker to the cluster.
*
* @param {google.maps.Marker} marker The marker to add.
* @return {boolean} True if the marker was added.
*/
Cluster.prototype.addMarker = function(marker) {
if (this.isMarkerAlreadyAdded(marker)) {
return false;
}
if (!this.center_) {
this.center_ = marker.getPosition();
this.calculateBounds_();
} else {
if (this.averageCenter_) {
var l = this.markers_.length + 1;
var lat = (this.center_.lat() * (l-1) + marker.getPosition().lat()) / l;
var lng = (this.center_.lng() * (l-1) + marker.getPosition().lng()) / l;
this.center_ = new google.maps.LatLng(lat, lng);
this.calculateBounds_();
}
}
marker.isAdded = true;
this.markers_.push(marker);
var len = this.markers_.length;
if (len < this.minClusterSize_ && marker.getMap() != this.map_) {
// Min cluster size not reached so show the marker.
marker.setMap(this.map_);
}
if (len == this.minClusterSize_) {
// Hide the markers that were showing.
for (var i = 0; i < len; i++) {
this.markers_[i].setMap(null);
}
}
if (len >= this.minClusterSize_) {
marker.setMap(null);
}
this.updateIcon();
return true;
};
/**
* Returns the marker clusterer that the cluster is associated with.
*
* @return {MarkerClusterer} The associated marker clusterer.
*/
Cluster.prototype.getMarkerClusterer = function() {
return this.markerClusterer_;
};
/**
* Returns the bounds of the cluster.
*
* @return {google.maps.LatLngBounds} the cluster bounds.
*/
Cluster.prototype.getBounds = function() {
var bounds = new google.maps.LatLngBounds(this.center_, this.center_);
var markers = this.getMarkers();
for (var i = 0, marker; marker = markers[i]; i++) {
bounds.extend(marker.getPosition());
}
return bounds;
};
/**
* Removes the cluster
*/
Cluster.prototype.remove = function() {
this.clusterIcon_.remove();
this.markers_.length = 0;
delete this.markers_;
};
/**
* Returns the number of markers in the cluster.
*
* @return {number} The cluster size.
*/
Cluster.prototype.getSize = function() {
return this.markers_.length;
};
/**
* Returns the markers in the cluster.
*
* @return {Array.<google.maps.Marker>} The cluster markers.
*/
Cluster.prototype.getMarkers = function() {
return this.markers_;
};
/**
* Returns the center of the cluster.
*
* @return {google.maps.LatLng} The cluster center.
*/
Cluster.prototype.getCenter = function() {
return this.center_;
};
/**
* Calculates the extended bounds of the cluster with the grid.
*
* @private
*/
Cluster.prototype.calculateBounds_ = function() {
var bounds = new google.maps.LatLngBounds(this.center_, this.center_);
this.bounds_ = this.markerClusterer_.getExtendedBounds(bounds);
};
/**
* Determines whether a marker lies in the cluster's bounds.
*
* @param {google.maps.Marker} marker The marker to check.
* @return {boolean} True if the marker lies in the bounds.
*/
Cluster.prototype.isMarkerInClusterBounds = function(marker) {
return this.bounds_.contains(marker.getPosition());
};
/**
* Returns the map that the cluster is associated with.
*
* @return {google.maps.Map} The map.
*/
Cluster.prototype.getMap = function() {
return this.map_;
};
/**
* Updates the cluster icon
*/
Cluster.prototype.updateIcon = function() {
var zoom = this.map_.getZoom();
var mz = this.markerClusterer_.getMaxZoom();
if (mz && zoom > mz) {
// The zoom is greater than our max zoom so show all the markers in cluster.
for (var i = 0, marker; marker = this.markers_[i]; i++) {
marker.setMap(this.map_);
}
return;
}
if (this.markers_.length < this.minClusterSize_) {
// Min cluster size not yet reached.
this.clusterIcon_.hide();
return;
}
var numStyles = this.markerClusterer_.getStyles().length;
var sums = this.markerClusterer_.getCalculator()(this.markers_, numStyles);
this.clusterIcon_.setCenter(this.center_);
this.clusterIcon_.setSums(sums);
this.clusterIcon_.show();
};
/**
* A cluster icon
*
* @param {Cluster} cluster The cluster to be associated with.
* @param {Object} styles An object that has style properties:
* 'url': (string) The image url.
* 'height': (number) The image height.
* 'width': (number) The image width.
* 'anchor': (Array) The anchor position of the label text.
* 'textColor': (string) The text color.
* 'textSize': (number) The text size.
* 'backgroundPosition': (string) The background position x, y.
* @param {number=} opt_padding Optional padding to apply to the cluster icon.
* @constructor
* @extends google.maps.OverlayView
* @ignore
*/
function ClusterIcon(cluster, styles, opt_padding) {
cluster.getMarkerClusterer().extend(ClusterIcon, google.maps.OverlayView);
this.styles_ = styles;
this.padding_ = opt_padding || 0;
this.cluster_ = cluster;
this.center_ = null;
this.map_ = cluster.getMap();
this.div_ = null;
this.sums_ = null;
this.visible_ = false;
this.setMap(this.map_);
}
/**
* Triggers the clusterclick event and zooms if the option is set.
*/
ClusterIcon.prototype.triggerClusterClick = function() {
var markerClusterer = this.cluster_.getMarkerClusterer();
// Trigger the clusterclick event.
google.maps.event.trigger(markerClusterer, 'clusterclick', this.cluster_);
// Invoke the renderCluster callback supplied in the clusterer options
markerClusterer.renderCluster(this.cluster_.getCenter(), this.cluster_.getMarkers());
if (markerClusterer.isZoomOnClick()) {
// Zoom into the cluster.
this.map_.fitBounds(this.cluster_.getBounds());
}
};
/**
* Adding the cluster icon to the dom.
* @ignore
*/
ClusterIcon.prototype.onAdd = function() {
this.div_ = document.createElement('DIV');
if (this.visible_) {
var pos = this.getPosFromLatLng_(this.center_);
this.div_.style.cssText = this.createCss(pos);
this.div_.innerHTML = this.sums_.text;
}
var panes = this.getPanes();
panes.overlayMouseTarget.appendChild(this.div_);
var that = this;
google.maps.event.addDomListener(this.div_, 'click', function() {
that.triggerClusterClick();
});
};
/**
* Returns the position to place the div depending on the latlng.
*
* @param {google.maps.LatLng} latlng The position in latlng.
* @return {google.maps.Point} The position in pixels.
* @private
*/
ClusterIcon.prototype.getPosFromLatLng_ = function(latlng) {
var pos = this.getProjection().fromLatLngToDivPixel(latlng);
pos.x -= parseInt(this.width_ / 2, 10);
pos.y -= parseInt(this.height_ / 2, 10);
return pos;
};
/**
* Draw the icon.
* @ignore
*/
ClusterIcon.prototype.draw = function() {
if (this.visible_) {
var pos = this.getPosFromLatLng_(this.center_);
this.div_.style.top = pos.y + 'px';
this.div_.style.left = pos.x + 'px';
}
};
/**
* Hide the icon.
*/
ClusterIcon.prototype.hide = function() {
if (this.div_) {
this.div_.style.display = 'none';
}
this.visible_ = false;
};
/**
* Position and show the icon.
*/
ClusterIcon.prototype.show = function() {
if (this.div_) {
var pos = this.getPosFromLatLng_(this.center_);
this.div_.style.cssText = this.createCss(pos);
this.div_.style.display = '';
}
this.visible_ = true;
};
/**
* Remove the icon from the map
*/
ClusterIcon.prototype.remove = function() {
this.setMap(null);
};
/**
* Implementation of the onRemove interface.
* @ignore
*/
ClusterIcon.prototype.onRemove = function() {
if (this.div_ && this.div_.parentNode) {
this.hide();
this.div_.parentNode.removeChild(this.div_);
this.div_ = null;
}
};
/**
* Set the sums of the icon.
*
* @param {Object} sums The sums containing:
* 'text': (string) The text to display in the icon.
* 'index': (number) The style index of the icon.
*/
ClusterIcon.prototype.setSums = function(sums) {
this.sums_ = sums;
this.text_ = sums.text;
this.index_ = sums.index;
if (this.div_) {
this.div_.innerHTML = sums.text;
}
this.useStyle();
};
/**
* Sets the icon to the styles.
*/
ClusterIcon.prototype.useStyle = function() {
var index = Math.max(0, this.sums_.index - 1);
index = Math.min(this.styles_.length - 1, index);
var style = this.styles_[index];
this.url_ = style['url'];
this.height_ = style['height'];
this.width_ = style['width'];
this.textColor_ = style['textColor'];
this.anchor_ = style['anchor'];
this.textSize_ = style['textSize'];
this.backgroundPosition_ = style['backgroundPosition'];
};
/**
* Sets the center of the icon.
*
* @param {google.maps.LatLng} center The latlng to set as the center.
*/
ClusterIcon.prototype.setCenter = function(center) {
this.center_ = center;
};
/**
* Create the css text based on the position of the icon.
*
* @param {google.maps.Point} pos The position.
* @return {string} The css style text.
*/
ClusterIcon.prototype.createCss = function(pos) {
var style = [];
style.push('background-image:url(' + this.url_ + ');');
var backgroundPosition = this.backgroundPosition_ ? this.backgroundPosition_ : '0 0';
style.push('background-position:' + backgroundPosition + ';');
if (typeof this.anchor_ === 'object') {
if (typeof this.anchor_[0] === 'number' && this.anchor_[0] > 0 &&
this.anchor_[0] < this.height_) {
style.push('height:' + (this.height_ - this.anchor_[0]) +
'px; padding-top:' + this.anchor_[0] + 'px;');
} else {
style.push('height:' + this.height_ + 'px; line-height:' + this.height_ +
'px;');
}
if (typeof this.anchor_[1] === 'number' && this.anchor_[1] > 0 &&
this.anchor_[1] < this.width_) {
style.push('width:' + (this.width_ - this.anchor_[1]) +
'px; padding-left:' + this.anchor_[1] + 'px;');
} else {
style.push('width:' + this.width_ + 'px; text-align:center;');
}
} else {
style.push('height:' + this.height_ + 'px; line-height:' +
this.height_ + 'px; width:' + this.width_ + 'px; text-align:center;');
}
var txtColor = this.textColor_ ? this.textColor_ : 'black';
var txtSize = this.textSize_ ? this.textSize_ : 11;
style.push('cursor:pointer; top:' + pos.y + 'px; left:' +
pos.x + 'px; color:' + txtColor + '; position:absolute; font-size:' +
txtSize + 'px; font-family:Arial,sans-serif; font-weight:bold');
return style.join('');
};
// Export Symbols for Closure
// If you are not going to compile with closure then you can remove the
// code below.
window['MarkerClusterer'] = MarkerClusterer;
MarkerClusterer.prototype['addMarker'] = MarkerClusterer.prototype.addMarker;
MarkerClusterer.prototype['addMarkers'] = MarkerClusterer.prototype.addMarkers;
MarkerClusterer.prototype['clearMarkers'] =
MarkerClusterer.prototype.clearMarkers;
MarkerClusterer.prototype['fitMapToMarkers'] =
MarkerClusterer.prototype.fitMapToMarkers;
MarkerClusterer.prototype['getCalculator'] =
MarkerClusterer.prototype.getCalculator;
MarkerClusterer.prototype['getGridSize'] =
MarkerClusterer.prototype.getGridSize;
MarkerClusterer.prototype['getExtendedBounds'] =
MarkerClusterer.prototype.getExtendedBounds;
MarkerClusterer.prototype['getMap'] = MarkerClusterer.prototype.getMap;
MarkerClusterer.prototype['getMarkers'] = MarkerClusterer.prototype.getMarkers;
MarkerClusterer.prototype['getMaxZoom'] = MarkerClusterer.prototype.getMaxZoom;
MarkerClusterer.prototype['getStyles'] = MarkerClusterer.prototype.getStyles;
MarkerClusterer.prototype['getTotalClusters'] =
MarkerClusterer.prototype.getTotalClusters;
MarkerClusterer.prototype['getTotalMarkers'] =
MarkerClusterer.prototype.getTotalMarkers;
MarkerClusterer.prototype['redraw'] = MarkerClusterer.prototype.redraw;
MarkerClusterer.prototype['removeMarker'] =
MarkerClusterer.prototype.removeMarker;
MarkerClusterer.prototype['removeMarkers'] =
MarkerClusterer.prototype.removeMarkers;
MarkerClusterer.prototype['resetViewport'] =
MarkerClusterer.prototype.resetViewport;
MarkerClusterer.prototype['repaint'] =
MarkerClusterer.prototype.repaint;
MarkerClusterer.prototype['setCalculator'] =
MarkerClusterer.prototype.setCalculator;
MarkerClusterer.prototype['setGridSize'] =
MarkerClusterer.prototype.setGridSize;
MarkerClusterer.prototype['setMaxZoom'] =
MarkerClusterer.prototype.setMaxZoom;
MarkerClusterer.prototype['onAdd'] = MarkerClusterer.prototype.onAdd;
MarkerClusterer.prototype['draw'] = MarkerClusterer.prototype.draw;
Cluster.prototype['getCenter'] = Cluster.prototype.getCenter;
Cluster.prototype['getSize'] = Cluster.prototype.getSize;
Cluster.prototype['getMarkers'] = Cluster.prototype.getMarkers;
ClusterIcon.prototype['onAdd'] = ClusterIcon.prototype.onAdd;
ClusterIcon.prototype['draw'] = ClusterIcon.prototype.draw;
ClusterIcon.prototype['onRemove'] = ClusterIcon.prototype.onRemove;
//SEARCH FUNCTIONS
var people_result_count = 0;
var place_result_count = 0;
var searchType;
var searching;
var ajaxRequest = null;
/*
Takes an index of a node inside the marker cache and adds that person's name and approximate
real-world city, country location to the search results.
*/
function addSearchResult(markerCacheIndex, matchType, matchData)
{
if(loggedIn){
var markerPresent = false;
for(var i = 0; i < cacheMarkers.length; i++){
if(cacheMarkers[i].id === markerCacheIndex){
markerPresent = true;
markerCacheIndex = i;
break;
}
}
if(markerPresent){
var person = cacheMarkers[markerCacheIndex].person;
var url = "http://maps.googleapis.com/maps/api/geocode/json?latlng=" +
person['GeoLocLat'] + "," + person['GeoLocLong'] + "&sensor=false";
$.getJSON(url, function(data){
var location;
if (data['status'] != 'OK') location = "";
else{
location = " - " + data['results'][data['results'].length-2]['formatted_address'];
}
var resultData = "<div class='row' id='searchResultRow' onClick='serchResult_click(this)'" +
"index='" + markerCacheIndex + "'>";
if(matchType === "JobTitle"){
resultData += person['AlumFName'] + " " + person['AlumLName'] + " - <strong>" + matchData + "</strong>";
} else if(matchType === "GradYear"){
resultData += person['AlumFName'] + " " + person['AlumLName'] + " - <strong>Class of " + matchData + "</strong>";
} else{
resultData += "<strong>" + person['AlumFName'] + " " + person['AlumLName'] + "</strong>" + location;
}
resultData += "</div>";
$("#searchResults .people").append(resultData);
});
}
}
}
function clearSearch()
{
$('#searchResults .people').empty();
$('#searchResults .places').empty();
people_result_count = 0;
place_result_count = 0;
}
function predictionResult_click(prediction){
var request = {
placeId: $(prediction).attr("index")
};
place_service.getDetails(request, goToPrediction);
}
var goToPrediction = function(place, status){
if (status == google.maps.places.PlacesServiceStatus.OK) {
if(searchType == "account"){
placeMarker(place.geometry.location);
}
if(place.geometry.viewport){
map.fitBounds(place.geometry.viewport);
} else{
map.setCenter(place.geometry.location);
map.setZoom(13);
}
clearSearch();
$("#searchInput").val('');
}
};
var displaySuggestions = function(predictions, status) {
if (status != google.maps.places.PlacesServiceStatus.OK) {
return;
}
predictions.forEach(function(prediction){
place_result_count++;
if(place_result_count < 4){
$("#searchResults .places").append("<div class='row' id='searchResultRow' onClick='predictionResult_click(this)' index='" + prediction.place_id + "'>" + prediction.description + "</div>");
}
});
};
/*
Initializes the search feature: binds an input handler to the searchInput textbox that
queries /worldmap/searchMap for matching alumni (by name, job title or grad year) and the
Google Places AutocompleteService for location predictions, listing a few of each below the box.
*/
function initSearch(){
searching = false;
//var searchBox = new google.maps.places.SearchBox(document.getElementById('location-input'));
var service = new google.maps.places.AutocompleteService();
place_service = new google.maps.places.PlacesService(map);
//service.getDetails(request, callback);
$('#searchInput').bind("input", function(event){
if(!searching){
if(ajaxRequest){
ajaxRequest.abort();
}
//searching = true;
clearSearch();
var search = $(this).val();
if (search === "")
return;
var url = "/worldmap/searchMap?query=" + search;
ajaxRequest = $.getJSON(url, function(data){
for(var i = 0; i < data.length; i++){
people_result_count++;
var matchData = "";
if(data[i].matchType === "JobTitle"){
matchData = data[i].JobTitle;
} else if (data[i].matchType === "GradYear"){
matchData = data[i].GradYear;
}
addSearchResult(data[i].AlumID, data[i].matchType, matchData);
if(people_result_count >= 3){
break;
}
}
});
service.getQueryPredictions({ input: search }, displaySuggestions);
}
// var indicesMatch = searchPerson(search);
// if (indicesMatch.length <= 0)
// {
// return;
// }
// for (i=0; i<indicesMatch.length; i++)
// {
// addSearchResult(indicesMatch[i]);
// }
});
}
/*
Main search function
@input name : contains a string with either a first name or a first + last name
*/
function searchPerson(name)
{
var parsed_name = name.split(" ");
var searchCashIndices = [];
if (parsed_name.length <= 1)
{
searchCashIndices = searchFirstName(parsed_name[0]);
if (searchCashIndices.length < 1)
{
searchCashIndices = searchLastName(parsed_name[0]);
}
}
else
{
searchCashIndices = searchFirstLastName(parsed_name[0], parsed_name[1]);
}
return searchCashIndices;
}
function searchFirstName(name)
{
var ids = [];
for (i=0; i<cacheMarkers.length; i++)
{
if (compare(cacheMarkers[i].person['AlumFName'], name))
{
ids.push(i);
}
if (ids.length >= maxSearchResults) return ids;
}
return ids;
}
function searchLastName(name)
{
var ids = [];
for (i=0; i<cacheMarkers.length; i++)
{
if (compare(cacheMarkers[i].person['AlumLName'], name))
{
ids.push(i);
}
if (ids.length >= maxSearchResults) return ids;
}
return ids;
}
function searchFirstLastName(firstName, lastName)
{
var firstMatches = searchFirstName(firstName);
var ids = [];
var id;
for (i=0; i<firstMatches.length; i++)
{
id = firstMatches[i];
if ( compare(cacheMarkers[id].person['AlumLName'], lastName) )
{
ids.push(id);
}
if (ids.length >= maxSearchResults) return ids;
}
return ids;
}
function middle(start, end)
{
return start + (end - start + 1)/2;
}
/*
Compares two strings.
The first is from the dataset. The second is user input.
It returns whether the two strings are reasonable matches (a case-insensitive prefix match).
*/
function compare(string_data, string_input)
{
return string_data.toLowerCase().substring(0,string_input.length) == string_input.toLowerCase();
}
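// Illustrative results of the prefix rule above (names are made up):
//   compare("Jonathan", "jon")    -> true,  "jon" matches the start of the stored name
//   compare("Jonathan", "nathan") -> false, only leading substrings count as matches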
/*
HELPER METHODS FOR HANDLING OFFSET MAP MARKERS
*/
function offsetCenter(latlng,offsetx,offsety) {
map.setCenter(getOffset(latlng,offsetx,offsety) );
}
function getOffset(latlng, offsetx, offsety)
{
// latlng is the apparent centre-point
// offsetx is the distance you want that point to move to the right, in pixels
// offsety is the distance you want that point to move upwards, in pixels
// offset can be negative
// offsetx and offsety are both optional
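// Background for the math below: in the Google Maps world-coordinate system the whole map
// is 256 * 2^zoom pixels wide, so dividing a pixel offset by 2^zoom (the "scale" computed
// next) converts it into world-coordinate units before projecting back to a LatLng.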
var scale = Math.pow(2, map.getZoom());
var nw = new google.maps.LatLng(
map.getBounds().getNorthEast().lat(),
map.getBounds().getSouthWest().lng()
);
var worldCoordinateCenter = map.getProjection().fromLatLngToPoint(latlng);
var pixelOffset = new google.maps.Point((offsetx/scale) || 0,(offsety/scale) ||0);
var worldCoordinateNewCenter = new google.maps.Point(
worldCoordinateCenter.x - pixelOffset.x,
worldCoordinateCenter.y + pixelOffset.y
);
return map.getProjection().fromPointToLatLng(worldCoordinateNewCenter);
}<file_sep><?php require_once(TEMPLATES_PATH . "/layout/nav.php"); ?>
<div class="container">
<h1>Thank You!</h1>
<h2 class="serif italic subtext">An email has been sent with instructions about how to reset your password.</h2>
</div><file_sep><?php
/* If all points were requested, return resultset for rendering map*/
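/*
Illustrative usage (based on the routing in index.php; the values are made up): a GET request to
/worldmap/getPoints echoes a JSON array such as
[{"AlumID":"1","GeoLocLat":"40.42","GeoLocLong":"-86.91","AlumFName":"Jane","AlumLName":"Doe","IsApproved":"1","RecChurch":"..."}]
which the client-side map code can consume when placing markers.
*/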
function getPoints(){
$mysqli = connectToDB();
if ($result = $mysqli->query("SELECT AlumID, GeoLocLat, GeoLocLong, AlumFName, AlumLName, IsApproved, RecChurch FROM AlumnData ORDER BY AlumFName, AlumLName")) {
$points = array();
while($row = $result->fetch_array(MYSQLI_ASSOC))
{
array_push($points, $row);
}
/* return result set */
$data = json_encode($points);
$mysqli->close();
return $data;
}
}
/* If specific person is requested with URL param id={user id} return result set for listing single user data */
function getPerson(){
$mysqli = connectToDB();
$alumID = (int) $_GET['id']; //cast to an integer so the id is safe to embed in the query below
if ($result = $mysqli->query("SELECT AlumID, AlumFName, AlumLName, AlumBio, JobTitle, GradYear, IsApproved, RecChurch FROM AlumnData WHERE AlumID = $alumID")) {
$people = array();
while($row = $result->fetch_array(MYSQLI_ASSOC))
{
array_push($people, $row);
}
/* return result set */
$data = json_encode($people);
$mysqli->close();
return $data;
}
}
/* If a specific person's image is requested with URL param id={user id}, return their profile image base64-encoded */
function getPersonImg(){
$mysqli = connectToDB();
$alumID = (int) $_GET['id']; //cast to an integer so the id is safe to embed in the query below
if ($result = $mysqli->query("SELECT AlumImg FROM AlumnData WHERE AlumID = $alumID")) {
$people = array();
while($row = $result->fetch_array(MYSQLI_ASSOC))
{
array_push($people, base64_encode($row["AlumImg"]));
}
/* return result set */
$data = json_encode($people);
$mysqli->close();
return $data;
}
}
/* If several people are requested (via a POSTed "ids" array), return the result set listing their data */
function getPeople(){
$mysqli = connectToDB();
//Turn the JSON-encoded ids array into a parenthesized list for the SQL IN clause by swapping the surrounding brackets
$str_ids = json_encode($_POST["ids"]);
$str_ids[0] = "(";
$str_ids[strlen($str_ids) - 1] = ")";
$query = "SELECT AlumID, AlumFName, AlumLName, AlumEmail, AlumPhone, AlumBio, JobTitle, GradYear, IsApproved, RecChurch FROM AlumnData WHERE AlumID IN " . $str_ids . " ORDER BY AlumFName, AlumLName";
$result = $mysqli->query($query);
$people = array();
while($row = $result->fetch_array(MYSQLI_ASSOC))
{
array_push($people, $row);
}
/* return result set */
$data = json_encode($people);
$mysqli->close();
return $data;
}
/* If request is to approve an entry, use the given id to mark it approved */
function approvePerson(){
if ($id = filter_input(INPUT_GET, 'id', FILTER_VALIDATE_INT)) {
$mysqli = connectToDB();
$query = "UPDATE AlumnData SET IsApproved='1' WHERE AlumID = $id";
$result = $mysqli->query($query);
/* return result set */
$data = "Entry has been approved";
$mysqli->close();
return $data;
}
}
/* If request is to unapprove (unpublish) an entry, use the given id to disable it */
function unapprovePerson(){
if ($id = filter_input(INPUT_GET, 'id', FILTER_VALIDATE_INT)) {
$mysqli = connectToDB();
$query = "UPDATE AlumnData SET IsApproved='0',Disabled='1' WHERE AlumID = $id";
$result = $mysqli->query($query);
/* return result set */
$data = "Entry has been disapproved";
$mysqli->close();
return $data;
}
}
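/*
Search endpoint. Illustrative usage (values are made up): a GET request to /worldmap/searchMap?query=smith
echoes JSON like
[{"AlumID":"7","matchType":"Name"},{"AlumID":"9","JobTitle":"Blacksmith","matchType":"JobTitle"}]
which search.js feeds into addSearchResult() to build the "people" rows of the search dropdown.
*/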
function searchMap(){
$mysqli = connectToDB();
$points = array();
$matchedIDs = array();
//Escape the raw search term once so it is safe to embed in the LIKE clauses below
$term = $mysqli->real_escape_string($_GET["query"]);
//Name Match
$query = "SELECT AlumID FROM AlumnData WHERE IsApproved='1' AND (AlumFName LIKE '%".$term."%' OR AlumLName LIKE '%".$term."%')";
$result = $mysqli->query($query);
while($row = $result->fetch_array(MYSQLI_ASSOC))
{
$row["matchType"] = "Name";
array_push($points, $row);
array_push($matchedIDs, $row["AlumID"]);
}
$result->close();
//Job Title Match
$query = "SELECT AlumID, JobTitle FROM AlumnData WHERE IsApproved='1' AND JobTitle LIKE '%".$term."%'";
$result = $mysqli->query($query);
while($row = $result->fetch_array(MYSQLI_ASSOC))
{
if(!in_array($row["AlumID"],$matchedIDs)){
$row["matchType"] = "JobTitle";
array_push($points, $row);
}
}
$result->close();
//Grad Year Match
$query = "SELECT AlumID, GradYear FROM AlumnData WHERE IsApproved='1' AND GradYear LIKE '%".$term."%'";
$result = $mysqli->query($query);
while($row = $result->fetch_array(MYSQLI_ASSOC))
{
if(!in_array($row["AlumID"],$matchedIDs)){
$row["matchType"] = "GradYear";
array_push($points, $row);
}
}
$result->close();
/* return result set */
$data = json_encode($points);
$mysqli->close();
return $data;
}
?><file_sep><?php require_once(TEMPLATES_PATH . "/layout/nav.php"); ?>
<div class="container">
<h1>Log In To Your Account</h1>
<!-- <h2 class="serif italic subtext">Welcome! Please enter your email and password in order to access you account.</h2> -->
<div class="content center form">
<form action="/myAccount" method="POST" class="signup-form">
<ul>
<?php
if(isset($_SESSION["error"])){
?>
<li class="error">
<?php
echo($_SESSION["error"]);
unset($_SESSION["error"]);
?>
</li>
<?php }
if(isset($_SESSION["msg"])){ ?>
<li class="msg">
<?php echo($_SESSION["msg"]);
unset($_SESSION["msg"]); ?>
</li>
<?php }?>
<li>
<input type="text" name="email-real" id="email-real" value="<?php if(isset($_SESSION['email'])){echo($_SESSION['email']);} ?>" placeholder="Email Address" />
<input type="text" name="email" id="email" />
</li>
<li>
<input type="password" name="pass" id="pass" placeholder="Password" />
</li>
<li>
<input type="submit" value="Submit" class="btn" />
</li>
<li class="serif italic small">Don't have an account? <a href="/createProfile">Create One!</a></li>
<li class="serif italic small">Did you forget your password?<br/><a href="/forgotPassword">We can help!</a></li>
</ul>
</form>
</div>
</div><file_sep>function adminApproveMapEntry(personID,btn){
$(btn).off('click');
$.ajax({
url: "/worldmap/approvePerson?id=" + personID,
success: function(data){
$(btn).addClass("unapprove");
$(btn).removeClass("approve");
$(btn).html("Unpublish");
$(btn).click(function(e){
var personID = $(this).attr("data-id");
var btn = $(this);
adminUnapproveMapEntry(personID, btn);
});
}
});
}
function adminUnapproveMapEntry(personID,btn){
$(btn).off('click');
$.ajax({
url: "/worldmap/unapprovePerson?id=" + personID,
success: function(data){
$(btn).removeClass("unapprove");
$(btn).addClass("approve");
$(btn).html("Publish");
$(btn).click(function(e){
var personID = $(this).attr("data-id");
var btn = $(this);
adminApproveMapEntry(personID, btn);
});
}
});
}
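// These helpers assume admin-list buttons shaped roughly like (illustrative markup):
//   <a href="#" class="approve" data-id="42">Publish</a>
// i.e. the alum's id lives in a data-id attribute and the class/label toggles between
// Publish (approve) and Unpublish (unapprove) after each successful AJAX call.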
$(document).ready(function() {
$(".burger").click(function(e){
e.preventDefault();
$(".burger").toggleClass("close-overlay");
$(".nav").toggleClass("open");
});
$("#loginBtn").click(function(e){
e.preventDefault();
$(".login-container").animate({"width":"300px","height":"230px"},600);
$("#loginBtn").fadeOut(300,function(){
$(".login-form").fadeIn(300);
});
});
$("#profImgCtnr").change(function(){
$this = $(this);
$('#profImg').text($this.val());
//console.log(1);
});
$("#profImg").change(function(){
$(".update-img").html("File Uploaded!");
$(".update-img").off("click");
});
$(".update-img").click(function(e){
e.preventDefault();
$("#profImg").click();
//console.log(2);
}).show();
$(".closeBtn").click(function(e){
e.preventDefault();
$($(this).parent()).css("top","-100%");
});
$(".alumnus .approve").click(function(e){
var personID = $(this).attr("data-id");
var btn = $(this);
adminApproveMapEntry(personID, btn);
});
$(".alumnus .unapprove").click(function(e){
var personID = $(this).attr("data-id");
var btn = $(this);
adminUnapproveMapEntry(personID, btn);
});
$(".account-status a").click(function(e){
e.preventDefault();
$(".account-status-overlay").removeClass("hide").addClass("open");
});
$(".status-info .close-overlay").click(function(e){
e.preventDefault();
$(".account-status-overlay").removeClass("open").addClass("hide");
})
$(".send-msg").click(function(e){
e.preventDefault();
var name = $("#name").val();
var email = $("#email-real").val();
var msg = $("#message").val();
var personID = $("#personID").val();
$("#name").val("");
$("#email-real").val("");
$("#message").val("");
$("#personID").val("");
var formData = "name=" + name + "&email-real=" + email + "&message=" + msg + "&personID=" + personID;
$(".contact-form").css("top","-100%");
$.ajax({
type: "POST",
url: "/worldmap/contact",
data: formData,
success: function(data){
//console.log(data);
}
});
//console.log(formData);
});
});<file_sep><?php require_once(TEMPLATES_PATH . "/layout/nav.php"); ?>
<div class="container">
<h1>Account Creation</h1>
<h2 class="serif italic subtext">Thanks for joining our community! Once your account is complete and approved, it’ll be available to view on our map</h2>
<div class="content center form">
<form action="/worldmap/createAccount" method="POST" class="signup-form">
<ul>
<?php
//session_start();
if(isset($_SESSION["error"])){
?>
<li class="error">
<?php
echo($_SESSION["error"]);
unset($_SESSION["error"]);
?>
</li>
<?php } ?>
<li>
<input type="text" name="email-real" id="email-real" value="<?php if(isset($_SESSION['email'])){echo($_SESSION['email']);} ?>" placeholder="Email Address" />
<input type="text" name="email" id="email" />
<p>Your email address will not be given out to others outside of Purdue Christian Campus House.</p>
</li>
<li>
<input type="password" name="pass" id="pass" placeholder="Password" />
</li>
<li>
<input type="password" name="passConf" id="passConf" placeholder="Confirm Password" />
</li>
<li>
<input type="checkbox" name="terms" id="terms" />
<label for="terms">I agree to the <a href="/termsOfService" target="blank">terms and conditions</a> that apply to using this website.</label>
</li>
<li>
<input type="submit" value="Submit" class="btn" />
</li>
</ul>
</form>
</div>
</div><file_sep><?php
//require_once(realpath(dirname(__FILE__) . "/../config.php"));
function attemptLogin(){
//Check to see if the correct login variables are present
if(!checkLoginVars()){
if(isset($_POST['email-real'])){
$_SESSION['email'] = $_POST['email-real'];
}
return false;
}
//Connect to the database & set email value
$mysqli = connectToDB();
$email = $_POST['email-real'];
//Run SQL query (with the email value escaped) and check to see if any results were returned, if not then return false
$sql = "SELECT * FROM AlumnData WHERE AlumEmail = '".$mysqli->real_escape_string($email)."'";
$result = $mysqli->query($sql);
if(!$result->num_rows){
$_SESSION['error'] = "Your email or password is incorrect, please try again";
$mysqli->close();
return false;
}
//Set user data variable and get temporary password if it exists
$userData = $result->fetch_array();
$sql = "SELECT TempPass FROM TempPass WHERE AlumID = '".$userData["AlumID"]."'";
$tempResult = $mysqli->query($sql);
$tempPass = $tempResult->fetch_array();
$tempPass = $tempPass["TempPass"];
//Check to see if user account is active and return false if not
if(!$userData["IsActive"]){
$_SESSION["error"] = "Your account has not been activated yet, please check your email for a link to activate your account.<br/><br/>If you haven't received an email, please check your spam folder and if it still hasn't shown, feel free to contact us!";
$_SESSION['email'] = $_POST['email-real'];
$mysqli->close();
return false;
}
//Test to see if user is using temp password and redirect to proper page if so
if($_POST["pass"] == $tempPass){
$_SESSION["accountToReset"] = $userData["AlumID"];
header("Location: /changePassword");
exit;
}
//Verify that password is correct, return false otherwise
if(!password_verify($_POST['pass'],$userData['AlumPass'])){
$_SESSION['error'] = "Your email or password is incorrect, please try again";
$_SESSION['email'] = $_POST['email-real'];
$mysqli->close();
return false;
}
//At this point, login is successful. Set the session variable and return true
$_SESSION['user'] = $email;
if($userData["IsAdmin"] == "1"){
$_SESSION["IsAdmin"] = true;
}else{
$_SESSION["IsAdmin"] = false;
}
$mysqli->close();
return true;
}
function checkLoginVars(){
//Check to see if fake email field is empty and that the actual email field and password field are filled out
if(empty($_POST['email']) && !empty($_POST['email-real']) && !empty($_POST['pass'])){
return true;
} else{
if(empty($_POST['email-real'])){
$_SESSION['error'] = "Please log in to view this page";
} else {
$_SESSION['error'] = "Not all fields were completed";
}
return false;
}
}
function logout(){
session_destroy();
header("Location: /");
exit;
}
function getUserData(){
//Connect to the database
$mysqli = connectToDB();
$userData;
$sql = "SELECT * FROM AlumnData WHERE AlumEmail = '".$_SESSION['user']."'";
$result = $mysqli->query($sql);
if($result->num_rows){
$userData = $result->fetch_array();
}
$mysqli->close();
return $userData;
}
function createAccount(){
$mail = createMailer();
checkAccountVars();
$email = $_POST['email-real'];
$pass = password_hash($_POST['pass'], PASSWORD_DEFAULT);
validateEmail($email);
if ($_POST['pass'] !== $_POST['passConf']) {
$_SESSION['error'] = "Passwords do not match";
createFailed();
}
else if(empty($_POST["terms"])){
$_SESSION["error"] = "Please agree to the terms and conditions";
createFailed();
}
else{
$mysqli = connectToDB();
$sql = "SELECT * FROM AlumnData WHERE AlumEmail = '".$email."'";
$result = $mysqli->query($sql);
if($result->num_rows){
$_SESSION['error'] = "There is already an account registered to this email";
createFailed();
}
else{
//First and last name are collected later through the profile update form, so the row starts with empty placeholders
$fname = "";
$lname = "";
$sql = "INSERT INTO AlumnData (AlumFName,AlumLName,AlumEmail,AlumPass) VALUES ('$fname','$lname','$email','$pass')";
$result = $mysqli->query($sql);
$newID = $mysqli->insert_id;
if($result){
$approvalCode = bin2hex(openssl_random_pseudo_bytes(8));
$sql = "INSERT INTO AccountApprovalCode (AlumID,ApprovalCode) VALUES ('$newID','$approvalCode')";
$result = $mysqli->query($sql);
$mail->setFrom('<EMAIL>', 'Purdue Christian Campus House');
$mail->addReplyTo('<EMAIL>', 'PCCH IT');
$mail->addAddress($email, "");
$mail->Subject = 'Alumni World Map Account Creation';
$mail->msgHTML("Your account has been created!<br/><br/>Before you can use your account, please use the following link to activate your account.<br/><br/>Thanks so much for joining us and enjoy the experience!<br/><br/>http://worldmap.pcch.org/worldmap/activate?approvalCode=".$approvalCode);
$mail->AltBody = "Your account has been created and your approval code is: ".$approvalCode;
if (!$mail->send()) {
//echo "Mailer Error: " . $mail->ErrorInfo; //echoing here would send output before the redirect to /login below
} else {
//echo "Message sent!";
}
}
else{
$_SESSION["error"] = "There was a problem creating your account, please try again later";
createFailed();
}
}
}
$_SESSION["msg"] = "Your account has been created! Please check your email for a message containing your activation link.";
header("Location: /login");
exit;
}
function checkAccountVars(){
if(empty($_POST['email']) && !empty($_POST['email-real']) && !empty($_POST['pass']) && !empty($_POST['passConf'])){
return true;
} else{
$_SESSION['error'] = "Not all fields were completed";
createFailed();
}
}
function validateEmail($email){
if (!filter_var($email, FILTER_VALIDATE_EMAIL)) {
$_SESSION['error'] = "Please input a valid email";
createFailed();
}
}
function createFailed(){
$_SESSION['email'] = $_POST['email-real'];
session_write_close();
header("Location: /createProfile");
exit;
}
function activate(){
$approvalCode;
if(!empty($_GET["approvalCode"])){
$approvalCode = $_GET["approvalCode"];
} else{
header("Location: /");
exit;
}
$mysqli = connectToDB();
$sql = "SELECT AlumID FROM AccountApprovalCode WHERE ApprovalCode = '$approvalCode'";
$result = $mysqli->query($sql);
$alumID = $result->fetch_array();
$alumID = $alumID["AlumID"];
$sql = "UPDATE AlumnData SET IsActive = '1' WHERE AlumID = '$alumID'";
$result = $mysqli->query($sql);
if($result){
$recipient = "<EMAIL>";
$mail = createMailer();
$mail->setFrom('<EMAIL>', 'Purdue Christian Campus House');
$mail->addReplyTo('<EMAIL>', 'PCCH IT');
$mail->addAddress($recipient, "");
$mail->Subject = 'World Map Account Created';
$mail->msgHTML("An account has been created and activated on the world map platform.");
$mail->AltBody = "An account has been created and activated on the world map platform.";
$_SESSION["msg"] = "Your account has been successfully activated! Please login to view your new account.";
if (!$mail->send()) {
//echo "Mailer Error: " . $mail->ErrorInfo;
} else {
//echo "Message sent!";
}
header("Location: /login");
exit;
}
}
function forgotPassword(){
if(empty($_POST['email']) && !empty($_POST['email-real'])){
$email = $_POST['email-real'];
$mysqli = connectToDB();
$sql = "SELECT AlumID FROM AlumnData WHERE AlumEmail = '".$mysqli->real_escape_string($email)."'";
$result = $mysqli->query($sql);
if(!$result->num_rows){
$_SESSION['error'] = "We didn't find an account associated with that email address";
}
else{
$userData = $result->fetch_array();
$userID = $userData["AlumID"];
$pwd = bin2hex(openssl_random_pseudo_bytes(8));
$sql = "INSERT INTO TempPass (AlumID, TempPass) VALUES ('$userID','$pwd')";
$result = $mysqli->query($sql);
$mail = createMailer();
$mail->setFrom('<EMAIL>', 'Purdue Christian Campus House');
$mail->addReplyTo('<EMAIL>', 'PCCH IT');
$mail->addAddress($email, "");
$mail->Subject = 'Alumni World Map Password Reset';
$mail->msgHTML("Your temporary password is ".$pwd.".");
$mail->AltBody = "Your temporary password is ".$pwd.".";
if (!$mail->send()) {
//echo "Mailer Error: " . $mail->ErrorInfo; //echoing here would send output before the redirect below
} else {
//echo "Message sent!";
}
}
if(!empty($_SESSION['error'])){
$_SESSION['email'] = $email;
session_write_close();
header("Location: /forgotPassword");
exit;
} else{
session_write_close();
header("Location: /resetSuccess");
exit;
}
}
else{
$_SESSION['error'] = "Please enter a valid email address";
$_SESSION['email'] = $_POST['email-real'];
session_write_close();
header("Location: /forgotPassword");
exit;
}
}
function changePassword(){
$mysqli = connectToDB();
$pass = password_hash($_POST['pass'], PASSWORD_DEFAULT);
if(empty($_SESSION["accountToReset"])){
header("Location: /");
exit;
} else if(empty($_POST["pass"]) || empty($_POST["passConf"])){
$_SESSION["error"] = "Please fill out all fields!";
header("Location: /changePassword");
exit;
} else if ($_POST['pass'] !== $_POST['passConf']) {
$_SESSION['error'] = "Passwords do not match";
header("Location: /changePassword");
exit;
} else{
$sql = "UPDATE AlumnData SET AlumPass = '$pass' WHERE AlumID = '".$_SESSION["accountToReset"]."'";
$result = $mysqli->query($sql);
if($result){
$_SESSION["msg"] = "Your password has been updated successfully! You may now use it to log into your account.";
} else{
$_SESSION["msg"] = "There was a problem updating your password, please try again a bit later.";
}
header("Location: /login");
exit;
}
}
function accountApproved($fname, $lname, $bio, $jobTitle, $gradYear, $recChurch, $email){
$recipient = "<EMAIL>";
$recipientTwo = "<EMAIL>";
$mail = createMailer();
$mail->setFrom('<EMAIL>', 'Purdue Christian Campus House');
$mail->addReplyTo('<EMAIL>', 'PCCH IT');
$mail->addAddress($recipient, "");
$mail->addAddress($recipientTwo, "");
$mail->Subject = 'World Map Account Created';
$mail->msgHTML("An account has been published on the world map platform.");
$mail->AltBody = "An account has been published on the world map platform.";
$message = "There has been an account published on the world map platform. Account details are as follows: <br/><br/>";
$message .= "Name: " . $fname . " " . $lname . "<br/>";
$message .= "Email: " . $email . "<br/>";
$message .= "Grad Year: " . $gradYear . "<br/>";
$message .= "Job Title: " . $jobTitle . "<br/>";
$message .= "Recommended Church: " . $recChurch . "<br/>";
$message .= "Bio: " . $bio . "<br/>";
$mail->msgHTML($message);
if (!$mail->send()) {
//echo "Mailer Error: " . $mail->ErrorInfo;
} else {
//echo "Message sent!";
}
}
function updateProfile(){
if(!isset($_SESSION['user'])){
$_SESSION['error'] = "Please login in order to view this page";
header("Location: /login");
exit;
}
$mysqli = connectToDB();
$sql = "SELECT * FROM AlumnData WHERE AlumEmail = '".$_SESSION['user']."'";
$result = $mysqli->query($sql);
if($result->num_rows){
$userData = $result->fetch_array();
}
$fname = $userData['AlumFName'];
$lname = $userData['AlumLName'];
//$email = $userData['AlumEmail'];
$bio = $userData['AlumBio'];
$jobTitle = $userData['JobTitle'];
$gradYear = $userData['GradYear'];
$profPic = $userData['AlumImg'];
$latLoc = $userData['GeoLocLat'];
$longLoc = $userData['GeoLocLong'];
$recChurch = $userData['RecChurch'];
$disabled = $userData['Disabled'];
$approved = $userData['IsApproved'];
$approveSql;
if(!empty($_POST['fname'])){
$fname = $_POST['fname'];
}
if(!empty($_POST['lname'])){
$lname = $_POST['lname'];
}
if(!empty($_POST['bio'])){
$bio = $_POST['bio'];
}
if(!empty($_POST['job'])){
$jobTitle = $_POST['job'];
}
if(!empty($_POST['grad'])){
$gradYear = $_POST['grad'];
}
if(!empty($_POST['latLoc'])){
$latLoc = $_POST['latLoc'];
}
if(!empty($_POST['longLoc'])){
$longLoc = $_POST['longLoc'];
}
if(!empty($_POST['recChurch'])){
$recChurch = $_POST['recChurch'];
}
//Escape the text fields before they are embedded in the UPDATE statements below
$fname = $mysqli->real_escape_string($fname);
$lname = $mysqli->real_escape_string($lname);
$bio = $mysqli->real_escape_string($bio);
$jobTitle = $mysqli->real_escape_string($jobTitle);
$gradYear = $mysqli->real_escape_string($gradYear);
$latLoc = $mysqli->real_escape_string($latLoc);
$longLoc = $mysqli->real_escape_string($longLoc);
$recChurch = $mysqli->real_escape_string($recChurch);
if(!empty($_FILES['profImg']['tmp_name'])){
//$_SESSION['updateMsg'] .= "123";
$tmpName = $_FILES['profImg']['tmp_name'];
if($fp = fopen($tmpName, 'r')){
$data = fread($fp, filesize($tmpName));
$data = addslashes($data);
fclose($fp);
$profPic = $data;
}
$sql = "UPDATE AlumnData SET AlumFName = '$fname',AlumLName = '$lname',AlumBio = '$bio',JobTitle = '$jobTitle',AlumImg = '$profPic',GradYear = '$gradYear',GeoLocLat = '$latLoc',GeoLocLong = '$longLoc', RecChurch = '$recChurch' WHERE AlumEmail = '".$_SESSION['user']."'";
}
else{
$sql = "UPDATE AlumnData SET AlumFName = '$fname',AlumLName = '$lname',AlumBio = '$bio',JobTitle = '$jobTitle',GradYear = '$gradYear',GeoLocLat = '$latLoc',GeoLocLong = '$longLoc', RecChurch = '$recChurch' WHERE AlumEmail = '".$_SESSION['user']."'";
}
if(!empty($fname) && !empty($lname) && !empty($bio) && !empty($jobTitle) && !empty($gradYear) && !empty($latLoc) && !empty($longLoc) && !empty($recChurch) && !$disabled && !$approved){
$approveSql = "UPDATE AlumnData SET IsApproved = '1' WHERE AlumEmail = '".$_SESSION['user']."'";
$approval = $mysqli->query($approveSql);
accountApproved($fname, $lname, $bio, $jobTitle, $gradYear, $recChurch, $userData["AlumEmail"]);
}
$result = $mysqli->query($sql);
if($result)
$_SESSION['updateMsg'] = "Update was successful";
else
$_SESSION['updateMsg'] = "There was an error updating your profile";
header("Location: /myAccount");
exit;
}
function contact(){
if(!empty($_POST['email-real']) && !empty($_POST['name']) && !empty($_POST['message']) && !empty($_POST['personID'])){
$senderEmail = $_POST['email-real'];
$senderName = $_POST['name'];
$senderMsg = $_POST['message'];
$alumID = (int) $_POST["personID"]; //cast to an integer so the id is safe to embed in the query below
$mysqli = connectToDB();
$sql = "SELECT * FROM AlumnData WHERE AlumID = '".$alumID."'";
$result = $mysqli->query($sql);
if(!$result->num_rows){
$_SESSION['error'] = "This person doesn't exist!";
//header("Location: /worldMap.php");
//exit;
}
$userData = $result->fetch_array();
$alumEmail = $userData["AlumEmail"];
$alumName = $userData["AlumFName"];
$messageToAlum = "Hello ".$alumName."!<br/><br/>";
$messageToAlum .= "You've received a message through the Campus House World Map from ".$senderName."<br/>";
$messageToAlum .= "Please direct any responses to this message to the sender at ".$senderEmail."<br/><br/>";
$messageToAlum .= "Message: <br/>".$senderMsg;
$messageToSender = "Hello ".$senderName."!<br/><br/>";
$messageToSender .= "Thank you for using our contact system on the world map! We hope you enjoyed your experience and you should be receiving a response from the alumnus you contacted soon.";
$mailToAlum = createMailer();
$mailToAlum->setFrom('<EMAIL>', 'Purdue Christian Campus House');
$mailToAlum->addReplyTo('<EMAIL>', 'PCCH IT');
$mailToAlum->addAddress($alumEmail, "");
$mailToAlum->Subject = 'Message Received From World Map';
$mailToAlum->msgHTML($messageToAlum);
$mailToAlum->AltBody = $messageToAlum;
if (!$mailToAlum->send()) {
//echo "Mailer Error: " . $mail->ErrorInfo;
} else {
//echo "Message sent!";
}
$mailToSender = createMailer();
$mailToSender->setFrom('<EMAIL>', 'Purdue Christian Campus House');
$mailToSender->addReplyTo('<EMAIL>', 'PCCH IT');
$mailToSender->addAddress($senderEmail, "");
$mailToSender->Subject = 'Message Received From World Map';
$mailToSender->msgHTML($messageToSender);
$mailToSender->AltBody = $messageToSender;
if (!$mailToSender->send()) {
echo "Mailer Error: " . $mailToSender->ErrorInfo;
} else {
echo "Message sent!";
}
}
//header("Location: /worldMap.php");
//exit;
}
// if(empty($_POST['email']) && !empty($_POST['email-real']) && !empty($_POST['pass'])){
// $email = $_POST['email-real'];
// if($mysqli->connect_error){
// $_SESSION['error'] = $mysqli->connect_error;
// }
// else{
// $sql = "SELECT * FROM AlumnData WHERE AlumEmail = '".$email."'";
// $result = $mysqli->query($sql);
// if(!$result->num_rows){
// $_SESSION['error'] = "Your email or password is incorrect, please try again";
// }
// else{
// $userData = $result->fetch_array();
// $sql = "SELECT TempPass FROM TempPass WHERE AlumID = '".$userData["AlumID"]."'";
// $tempResult = $mysqli->query($sql);
// $tempPass = $tempResult->fetch_array();
// //$tempPass = "<PASSWORD>";
// $tempPass = $tempPass["Temp<PASSWORD>"];
// if(!$userData["IsActive"]){
// $_SESSION["error"] = "Your account has not been actived yet, please check your email for a link to activate your account.</br></br>If you haven't received an email, please check your spam folder and if it still hasn't shown, feel free to contact us!";
// }
// else if(password_verify($_POST['pass'],$userData['AlumPass'])){
// echo("Login Successful");
// $_SESSION['user'] = $email;
// if($userData["IsAdmin"] == "1"){
// $_SESSION["IsAdmin"] = true;
// header("Location: /adminPanel.php");
// exit;
// }else{
// $_SESSION["IsAdmin"] = false;
// header("Location: /profileUpdate.php");
// exit;
// }
// }
// else if($_POST["pass"] == $tempPass){
// //$_SESSION["msg"] = "You've logged in with your temporary password";
// $_SESSION["accountToReset"] = $userData["AlumID"];
// header("Location: /newPass.php");
// exit;
// }
// else{
// $_SESSION['error'] = "Your email or password is incorrect, please try again";
// }
// }
// }
// if($userData["IsAdmin"] == "1"){
// header("Location: /adminPanel.php");
// exit;
// }else{
// header("Location: /profileUpdate.php");
// exit;
// }
// }
?><file_sep><?php require_once(TEMPLATES_PATH . "/layout/nav.php"); ?>
<div class="contact-form">
<form action="" method="POST" enctype="multipart/form-data">
<h1 class="headline"></h1>
<h2 class="subtext">Use the form below to send a brief message to the selected alumnus!</h2>
<div class="row grid">
<div class="col-12">
<input name="name" id="name" type="text" placeholder="Your Name" />
</div>
</div>
<div class="row grid">
<div class="col-12">
<input name="email-real" id="email-real" type="text" placeholder="Your Email" />
</div>
</div>
<div class="row grid">
<div class="col-12">
<textarea name="message" id="message" placeholder="What would you like to say?"></textarea>
</div>
</div>
<input style="display:none;" type="text" id="personID" name="personID" />
<div style="margin: 10px 0; text-align: center;"><a href="#" class="btn dark send-msg" style="display:inline-block;">Send Message</a></div>
</form>
<a class="closeBtn" href="#"><img src="/assets/images/close-btn.svg" height="30" alt="Close" /></a>
</div>
<div class="theMap">
<div id="gmap" class="gmap">
<div id="map-canvas" class="map-canvas"></div>
</div>
<?php if(isset($_SESSION['user']) || isset($_SESSION["IsAdmin"])){ ?>
<div class="searchContainer" id="searchContainer">
<div class="searchBox" id="searchBox">
<input type="text" class="searchInput" id="searchInput" placeholder="Search for someone you know" autocomplete="off" />
<div class="searchResults" id="searchResults">
<div class="people"></div>
<div class="places"></div>
</div>
</div>
<button class="searchButton" id="searchButton" onclick="search_click()">Search</button>
</div>
<?php } ?>
</div>
<script type="text/javascript">
<?php
echo("var loggedIn = false;");
if(isset($_SESSION["user"]) && $_SESSION["user"] != ""){
echo("loggedIn = true;");
}
if(isset($_SESSION["IsAdmin"]) && $_SESSION["IsAdmin"]){
echo("google.maps.event.addDomListener(window,'load',initAdminMap);");
} else{
echo("google.maps.event.addDomListener(window,'load',initNormMap);");
}
?>
</script><file_sep><?php
function query_to_csv($db_conn, $query, $filename, $attachment = false, $headers = true) {
if($attachment) {
// send response headers to the browser
header( 'Content-Type: text/csv' );
header( 'Content-Disposition: attachment;filename='.$filename);
$fp = fopen('php://output', 'w');
} else {
$fp = fopen($filename, 'w');
}
$result = $db_conn->query($query);
if($headers) {
// output header row (if at least one row exists)
$row = $result->fetch_array(MYSQLI_ASSOC);
if($row) {
fputcsv($fp, array_keys($row));
// reset pointer back to beginning
$result->data_seek(0);
}
}
while($row = $result->fetch_array(MYSQLI_ASSOC)) {
fputcsv($fp, $row);
}
fclose($fp);
}
function csvQuery(){
// Using the function
$sql = "SELECT AlumFName, AlumLName, AlumEmail, AlumPhone, JobTitle, GradYear, RecChurch FROM AlumnData";
// $db_conn should be a valid db handle
$mysqli = connectToDB();
// output as an attachment
query_to_csv($mysqli, $sql, "WorldMapData.csv", true);
}
// output to file system
//query_to_csv($db_conn, $sql, "test.csv", false);
?><file_sep><!DOCTYPE html>
<html style="height:100%;">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<title>Home - PCCH Alumni World Map</title>
<script type="text/javascript" src="https://maps.googleapis.com/maps/api/js?libraries=places"></script>
<script src="js/vendor/modernizr-2.8.3.min.js"></script>
<script src="js/vendor/jquery-1.11.3.min.js" type="text/javascript"></script>
<script src="js/dist/main.js"></script>
<link href="css/main.css" type="text/css" rel="stylesheet" />
</head>
<?php global $templateType, $templateToLoad; ?>
<body <?php if($templateType != "home" && $templateType != "map" && $templateToLoad != "myAccount.php"){echo("class='hasFooter'");} ?>><file_sep><?php
require_once(realpath(dirname(__FILE__) . "/resources/config.php"));
require_once(LIBRARY_PATH . "/PHPMailer/autoloader.php");
require_once(LIBRARY_PATH . "/dbconn.php");
require_once(LIBRARY_PATH . "/templateFunctions.php");
require_once(LIBRARY_PATH . "/mapFunctions.php");
require_once(LIBRARY_PATH . "/accountFunctions.php");
require_once(LIBRARY_PATH . "/smtpSettings.php");
if(isset($_SESSION["IsAdmin"])){
if($_SESSION["IsAdmin"]){
require_once(LIBRARY_PATH . "/adminFunctions.php");
}
}
/*
Now you can handle all your php logic outside of the template
file which makes for very clean code!
*/
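/*
Illustrative routing examples (based on the branches below):
  /worldmap/getPoints -> "getPoints" is called via call_user_func() and its return value is echoed
  /mapView            -> renders mapView.php with the "map" template type
  /about              -> renders about.php inside the shared layout
  /                   -> renders home.php with the "home" template type
*/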
$templateToLoad = explode("/",$_SERVER['REQUEST_URI']);
$templateType = "normal";
if($templateToLoad[1] == "worldmap"){
$functionToCall = $templateToLoad[2];
if(strpos($functionToCall,"?")){
$functionToCall = explode("?", $functionToCall);
$functionToCall = $functionToCall[0];
}
$outcome = call_user_func($functionToCall);
if(!$outcome){
//header("Location: /");
exit;
}
echo($outcome);
exit;
} else if($templateToLoad[1] == "mapView"){
$templateToLoad = $templateToLoad[1].".php";
$templateType = "map";
} else if($templateToLoad[1] != ""){
$templateToLoad = $templateToLoad[1];
if(strpos($templateToLoad,"?")){
$templateToLoad = explode("?", $templateToLoad);
$templateToLoad = $templateToLoad[0];
}
$templateToLoad = $templateToLoad.".php";
} else{
$templateToLoad = "home.php";
$templateType = "home";
}
$setInIndexDotPhp = $templateToLoad;
// Must pass in variables (as an array) to use in template
$variables = array(
'setInIndexDotPhp' => $setInIndexDotPhp
);
renderLayoutWithContentFile($templateToLoad, $templateType, $variables);
?><file_sep><div class="navbar">
<div class="content">
<a href="http://pcch.org" target="blank" class="logo-mobile"><img src="/assets/images/pcch-logo.svg" height="35" alt="Purdue Christian Campus House" /></a>
<a href="#" class="burger">
<span class="burger-bun top"></span>
<span class="burger-patty"></span>
<span class="burger-bun btm"></span>
</a>
<ul class="nav">
<li class="logo-desktop"><a href="http://pcch.org" target="blank"><img src="/assets/images/pcch-logo.svg" height="35" alt="Purdue Christian Campus House" /></a></li>
<li><a href="/mapView">World Map</a></li>
<li><a href="/about">About This Project</a></li>
<?php if(!isset($_SESSION['user'])){ ?>
<li class="right"><a href="/login">Log In</a></li>
<?php }else if(isset($_SESSION['IsAdmin']) && $_SESSION["IsAdmin"]){ ?>
<li><a href="/adminView">Admin Panel</a></li>
<li class="right"><a href="/worldmap/logout">Sign Out</a></li>
<?php }else{ ?>
<li><a href="/myAccount">My Account</a></li>
<li class="right"><a href="/worldmap/logout">Sign Out</a></li>
<?php } ?>
</ul>
</div>
</div><file_sep>//SEARCH FUNCTIONS
var people_result_count = 0;
var place_result_count = 0;
var searchType;
var searching;
var ajaxRequest = null;
/*
Takes an index of a node inside the marker cache and adds that person's name and approximate
real-world city, country location to the search results.
*/
function addSearchResult(markerCacheIndex, matchType, matchData)
{
if(loggedIn){
var markerPresent = false;
for(var i = 0; i < cacheMarkers.length; i++){
if(cacheMarkers[i].id === markerCacheIndex){
markerPresent = true;
markerCacheIndex = i;
break;
}
}
if(markerPresent){
var person = cacheMarkers[markerCacheIndex].person;
var url = "http://maps.googleapis.com/maps/api/geocode/json?latlng=" +
person['GeoLocLat'] + "," + person['GeoLocLong'] + "&sensor=false";
$.getJSON(url, function(data){
var location;
if (data['status'] != 'OK') location = "";
else{
location = " - " + data['results'][data['results'].length-2]['formatted_address'];
}
var resultData = "<div class='row' id='searchResultRow' onClick='serchResult_click(this)'" +
"index='" + markerCacheIndex + "'>";
if(matchType === "JobTitle"){
resultData += person['AlumFName'] + " " + person['AlumLName'] + " - <strong>" + matchData + "</strong>";
} else if(matchType === "GradYear"){
resultData += person['AlumFName'] + " " + person['AlumLName'] + " - <strong>Class of " + matchData + "</strong>";
} else{
resultData += "<strong>" + person['AlumFName'] + " " + person['AlumLName'] + "</strong>" + location;
}
resultData += "</div>";
$("#searchResults .people").append(resultData);
});
}
}
}
function clearSearch()
{
$('#searchResults .people').empty();
$('#searchResults .places').empty();
people_result_count = 0;
place_result_count = 0;
}
function predictionResult_click(prediction){
var request = {
placeId: $(prediction).attr("index")
};
place_service.getDetails(request, goToPrediction);
}
var goToPrediction = function(place, status){
if (status == google.maps.places.PlacesServiceStatus.OK) {
if(searchType == "account"){
placeMarker(place.geometry.location);
}
if(place.geometry.viewport){
map.fitBounds(place.geometry.viewport);
} else{
map.setCenter(place.geometry.location);
map.setZoom(13);
}
clearSearch();
$("#searchInput").val('');
}
};
var displaySuggestions = function(predictions, status) {
if (status != google.maps.places.PlacesServiceStatus.OK) {
return;
}
predictions.forEach(function(prediction){
place_result_count++;
if(place_result_count < 4){
$("#searchResults .places").append("<div class='row' id='searchResultRow' onClick='predictionResult_click(this)' index='" + prediction.place_id + "'>" + prediction.description + "</div>");
}
});
};
/*
Initializes the search feature: binds an input handler to the searchInput textbox that
queries /worldmap/searchMap for matching alumni (by name, job title or grad year) and the
Google Places AutocompleteService for location predictions, listing a few of each below the box.
*/
function initSearch(){
searching = false;
//var searchBox = new google.maps.places.SearchBox(document.getElementById('location-input'));
var service = new google.maps.places.AutocompleteService();
place_service = new google.maps.places.PlacesService(map);
//service.getDetails(request, callback);
$('#searchInput').bind("input", function(event){
if(!searching){
if(ajaxRequest){
ajaxRequest.abort();
}
//searching = true;
clearSearch();
var search = $(this).val();
if (search === "")
return;
var url = "/worldmap/searchMap?query=" + search;
ajaxRequest = $.getJSON(url, function(data){
for(var i = 0; i < data.length; i++){
people_result_count++;
var matchData = "";
if(data[i].matchType === "JobTitle"){
matchData = data[i].JobTitle;
} else if (data[i].matchType === "GradYear"){
matchData = data[i].GradYear;
}
addSearchResult(data[i].AlumID, data[i].matchType, matchData);
if(people_result_count >= 3){
break;
}
}
});
service.getQueryPredictions({ input: search }, displaySuggestions);
}
// var indicesMatch = searchPerson(search);
// if (indicesMatch.length <= 0)
// {
// return;
// }
// for (i=0; i<indicesMatch.length; i++)
// {
// addSearchResult(indicesMatch[i]);
// }
});
}
/*
Main search function
@input name : contains a string with either a first name or a first + last name
*/
function searchPerson(name)
{
var parsed_name = name.split(" ");
var searchCashIndices = [];
if (parsed_name.length <= 1)
{
searchCashIndices = searchFirstName(parsed_name[0]);
if (searchCashIndices.length < 1)
{
searchCashIndices = searchLastName(parsed_name[0]);
}
}
else
{
searchCashIndices = searchFirstLastName(parsed_name[0], parsed_name[1]);
}
return searchCashIndices;
}
function searchFirstName(name)
{
var ids = [];
for (i=0; i<cacheMarkers.length; i++)
{
if (compare(cacheMarkers[i].person['AlumFName'], name))
{
ids.push(i);
}
if (ids.length >= maxSearchResults) return ids;
}
return ids;
}
function searchLastName(name)
{
var ids = [];
for (i=0; i<cacheMarkers.length; i++)
{
if (compare(cacheMarkers[i].person['AlumLName'], name))
{
ids.push(i);
}
if (ids.length >= maxSearchResults) return ids;
}
return ids;
}
function searchFirstLastName(firstName, lastName)
{
var firstMatches = searchFirstName(firstName);
var ids = [];
var id;
for (i=0; i<firstMatches.length; i++)
{
id = firstMatches[i];
if ( compare(cacheMarkers[id].person['AlumLName'], lastName) )
{
ids.push(id);
}
if (ids.length >= maxSearchResults) return ids;
}
return ids;
}
function middle(start, end)
{
return start + (end - start + 1)/2;
}
/*
Compares two strings.
The first is from the dataset. The second is user input.
It returns whether the two strings are reasonable matches (a case-insensitive prefix match).
*/
function compare(string_data, string_input)
{
return string_data.toLowerCase().substring(0,string_input.length) == string_input.toLowerCase();
}
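// Illustrative results of the prefix rule above (names are made up):
//   compare("Jonathan", "jon")    -> true,  "jon" matches the start of the stored name
//   compare("Jonathan", "nathan") -> false, only leading substrings count as matches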
/*
HELPER METHODS FOR HANDLING OFFSET MAP MARKERS
*/
function offsetCenter(latlng,offsetx,offsety) {
map.setCenter(getOffset(latlng,offsetx,offsety) );
}
function getOffset(latlng, offsetx, offsety)
{
// latlng is the apparent centre-point
// offsetx is the distance you want that point to move to the right, in pixels
// offsety is the distance you want that point to move upwards, in pixels
// offset can be negative
// offsetx and offsety are both optional
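// Background for the math below: in the Google Maps world-coordinate system the whole map
// is 256 * 2^zoom pixels wide, so dividing a pixel offset by 2^zoom (the "scale" computed
// next) converts it into world-coordinate units before projecting back to a LatLng.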
var scale = Math.pow(2, map.getZoom());
var nw = new google.maps.LatLng(
map.getBounds().getNorthEast().lat(),
map.getBounds().getSouthWest().lng()
);
var worldCoordinateCenter = map.getProjection().fromLatLngToPoint(latlng);
var pixelOffset = new google.maps.Point((offsetx/scale) || 0,(offsety/scale) ||0);
var worldCoordinateNewCenter = new google.maps.Point(
worldCoordinateCenter.x - pixelOffset.x,
worldCoordinateCenter.y + pixelOffset.y
);
return map.getProjection().fromPointToLatLng(worldCoordinateNewCenter);
}<file_sep><?php
if(!isset($_SESSION["user"])){
if(!attemptLogin()){
header("Location: /login");
exit;
}
}
require_once(TEMPLATES_PATH . "/layout/nav.php");
$userData = getUserData();
?>
<style type="text/css">
#map-canvas { height: 500px; width:100%; margin: 0; padding: 0;}
</style>
<?php require_once(LIBRARY_PATH . "/myAccountMap.php"); ?>
<div class="account-status-overlay">
<div class="status-info">
<h2>Why is my account inactive?</h2>
<p>First, check to see if all of your account information fields are filled out. We will not publish your map pin unless all fields are provided.</p>
<p>If your account is still inactive, that likely means that your account has been disabled by an admin. This is because of something which was deemed inappropriate or not aligned with the purpose of this website. If you feel there has been an error, feel free to contact our <a href="mailto:<EMAIL>">site administrator</a> or <a href="mailto:<EMAIL>"><NAME></a>.</p>
<span class="close-overlay">[X]</span>
</div>
</div>
<div class="container" style="padding-bottom:0;">
<h1>Welcome Back!</h1>
<h2 class="serif italic subtext">Below you’ll find your public profile.<br/>This is the information that will show up on your map icon.</h2>
<h2 class="serif italic subtext">NOTE: Your map pin will only display if all fields are filled out.</h2>
<div class="content white my-profile">
<?php if(isset($_SESSION["updateMsg"])){?><div class="row grid">
<div class="col-12 update-message"><h2><?php echo($_SESSION["updateMsg"]); unset($_SESSION["updateMsg"]); ?></h2></div>
</div><?php } ?>
<div class="row grid">
<div class="col-12 account-status">
<h2>Account Status: <?php if($userData["IsApproved"]){echo("Active");}else{echo("Inactive");} ?> <?php if(!$userData["IsApproved"]){ ?><a href="#">(?)</a><?php } ?></h2>
</div>
</div>
<form action="/worldmap/updateProfile" method="POST" enctype="multipart/form-data">
<div class="row grid">
<div class="col-6 right-align">
<label>Photo</label>
</div>
<div class="col-6">
<?php if(!empty($userData['AlumImg'])){ ?>
<div class="image-update">
<img src="<?php echo("data:image/jpeg;base64," . base64_encode( $userData['AlumImg'] )); ?>" width="200" />
<a href="#" class="update-img">Change It Up</a>
<div style="height:0; width:0; overflow:hidden;" class="profImgCtnr">
<input type="file" id="profImg" name="profImg" />
</div>
</div>
<?php }else{ ?>
<a href="#" class="btn dark update-img">Upload a Picture</a>
<div style="height:0; width:0; overflow:hidden;" class="profImgCtnr">
<input type="file" id="profImg" name="profImg" />
</div>
<?php } ?>
</div>
</div>
<div class="row grid">
<div class="col-6 right-align">
<label for="fname">First Name</label>
</div>
<div class="col-6">
<input name="fname" id="fname" type="text" placeholder="ex. John" value="<?php echo($userData['AlumFName']); ?>" />
</div>
</div>
<div class="row grid">
<div class="col-6 right-align">
<label for="lname">Last Name</label>
</div>
<div class="col-6">
<input name="lname" id="lname" type="text" placeholder="ex. Doe" value="<?php echo($userData['AlumLName']); ?>" />
</div>
</div>
<div class="row grid">
<div class="col-6 right-align">
<label for="job">Occupation</label>
</div>
<div class="col-6">
<input name="job" id="job" type="text" placeholder="ex. Web Developer" value="<?php echo($userData['JobTitle']); ?>" />
</div>
</div>
<div class="row grid">
<div class="col-6 right-align">
<label for="recChurch">Recommended Church</label>
</div>
<div class="col-6">
<input name="recChurch" id="recChurch" type="text" placeholder="Your Home Church" value="<?php echo($userData['RecChurch']); ?>" />
</div>
</div>
<div class="row grid">
<div class="col-6 right-align">
<label for="grad">Graduation Year</label>
</div>
<div class="col-6">
<input name="grad" id="grad" type="text" placeholder="ex. 2005" value="<?php echo($userData['GradYear']); ?>" />
</div>
</div>
<div class="row grid">
<div class="col-6 right-align">
<label for="bio">Bio</label>
</div>
<div class="col-6">
<textarea name="bio" id="bio" placeholder="Tell us something about yourself!"><?php echo($userData['AlumBio']); ?></textarea>
</div>
</div>
<div style="display:none;">
<input type="text" name="latLoc" id="latLoc" />
<input type="text" name="longLoc" id="longLoc" />
</div>
<div class="row grid">
<div class="col-12"><h2>Location</h2><p>Use the map below to set your location pin before submitting!</p></div>
</div>
<div><input type="submit" value="Submit" class="btn" /></div>
</form>
</div>
</div>
<div class="profile-map">
<div class="searchContainer" id="searchContainer">
<div class="searchBox" id="searchBox">
<input type="text" class="searchInput" id="searchInput" placeholder="Search for your location" autocomplete="off" />
<div class="searchResults" id="searchResults">
<div class="people"></div>
<div class="places"></div>
</div>
</div>
<button class="searchButton" id="searchButton" onclick="search_click()">Search</button>
</div>
<div id="map-canvas"></div>
</div><file_sep>
<script type="text/javascript">
var map;
var myMarker;
var searchType = "account";
function placeMarker(location){
console.log(1);
$("#latLoc").attr("value",location.lat());
$("#longLoc").attr("value",location.lng());
$("#latLoc").val(location.lat());
$("#longLoc").val(location.lng());
//console.log(location.lng());
if(myMarker == null){
console.log(3);
var image = {
url: '/assets/images/MapIndicator.svg',
scaledSize: new google.maps.Size(36, 66),
origin: new google.maps.Point(0, 0),
anchor: new google.maps.Point(18, 66)
};
myMarker = new google.maps.Marker({
position: location,
map: map,
icon: image
});
}
else{
console.log(4);
myMarker.position = location;
myMarker.setMap(map);
}
console.log(2);
//map.setCenter(location);
}
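// Note: placeMarker() is invoked from two places - the map 'click' listener registered in the
// PHP-echoed initialize() below, and goToPrediction() in search.js, which calls it whenever
// searchType === "account" (set above) so a location search also drops the profile pin.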
</script>
<?php
echo("<script type='text/javascript'>
var styles = [{'featureType': 'water','elementType': 'geometry','stylers': [{'color': '#193341'}]},{'featureType': 'landscape','elementType': 'geometry','stylers': [{'color': '#2c5a71'}]},{'featureType': 'road','elementType': 'geometry','stylers': [{'color': '#29768a'},{'lightness': -37}]},{'featureType': 'poi','elementType': 'geometry','stylers': [{'color': '#406d80'}]},{'featureType': 'transit','elementType': 'geometry','stylers': [{'color': '#406d80'}]},{'elementType': 'labels.text.stroke','stylers': [{'visibility': 'on'},{'color': '#3e606f'},{'weight': 2},{'gamma': 0.84}]},{'elementType': 'labels.text.fill','stylers': [{'color': '#ffffff'}]},{'featureType': 'administrative','elementType': 'geometry','stylers': [{'weight': 0.6},{'color': '#1a3541'}]},{'elementType': 'labels.icon','stylers': [{'visibility': 'off'}]},{'featureType': 'poi.park','elementType': 'geometry','stylers': [{'color': '#2c5a71'}]}];
function initialize() {");
if($userData["GeoLocLat"] !== null && !empty($userData["GeoLocLat"])){
echo("
var markLat = ".$userData['GeoLocLat'].";
var markLng = ".$userData['GeoLocLong'].";
var zoomLvl = 8");
}
else{
echo("
var markLat = 40;
var markLng = -90;
var zoomLvl = 4");
}
echo("
var mapOptions = {
center: { lat: markLat, lng: markLng},
zoom: zoomLvl,
mapTypeControlOptions:{mapTypeIds:[google.maps.MapTypeId.ROADMAP,'map_style']}
};
var styledMap = new google.maps.StyledMapType(styles,{name:'Styled Map'});
map = new google.maps.Map(document.getElementById('map-canvas'),mapOptions);
map.setOptions({ minZoom: 3, maxZoom: 15, mapTypeControl: false });
map.mapTypes.set('map_style',styledMap);
map.setMapTypeId('map_style');
google.maps.event.addListener(map, 'click', function(event) {
placeMarker(event.latLng);
});");
if($userData["GeoLocLat"] !== null && !empty($userData["GeoLocLat"])){
echo("
var image = {
url: '/assets/images/MapIndicator.svg',
scaledSize: new google.maps.Size(36, 66),
origin: new google.maps.Point(0, 0),
anchor: new google.maps.Point(18, 32)
};
myMarker = new google.maps.Marker({
position: new google.maps.LatLng(markLat,markLng),
map: map,
icon: image
});");
}
echo("
initSearch();
}
google.maps.event.addDomListener(window, 'load', initialize);
</script>
");
?><file_sep><!-- Homepage content -->
<div class="home container table-display">
<div class="vertical-center">
<h1>Welcome to the Campus House Alumni Map</h1>
<h2 class="serif italic subtext">A platform for sharing stories, making connections in new cities, and furthering community around the globe</h2>
<a href="/mapView" class="btn">Explore</a>
</div>
</div>
<?php if(!isset($_SESSION['user'])){ ?>
<div class="login-container">
<a href="#" id="loginBtn">Login</a>
<form class="login-form" action="/myAccount" method="POST">
<input type="text" name="email-real" id="email-real" placeholder="Your Email"/>
<input type="text" name="email" id="email" placeholder="Your Email"/>
			<input type="password" name="pass" id="pass" placeholder="Password" />
<input type="submit" class="btn" value="Log In">
<p class="serif italic small">Don't have an account?<br/><a href="/createProfile">Create One!</a></p>
<p class="serif italic small">Did you forget your password?<br/><a href="/forgotPassword">We can help!</a></p>
</form>
</div>
<?php }else{ ?>
<div class="account-ctnr">
<a href="/myAccount" class="btn">My Account</a>
</div>
<?php } ?><file_sep><?php require_once(TEMPLATES_PATH . "/layout/nav.php"); ?>
<div class="container about-us">
<h1>What is the Alumni World Map?</h1>
	<h2 class="serif italic subtext">We've spent a long time conceiving and developing this idea for a new platform to connect. Ultimately we hope this continues to further community among current students and alumni. Read on to find out more.</h2>
<div class="content white">
<div class="row grid">
<div class="col-12">
				<p>The purpose of the map is to provide a platform to connect alums with each other and with Campus House, to collect stories of what God is continuing to do in their lives, to compile relevant information for better communication, and to give our recent graduates a connection with alums in the place they are moving, potentially affording us a new wealth of relationships and stories.</p>
</div>
</div>
</div>
</div><file_sep><?php
if(!isset($_SESSION['user']) || !$_SESSION["IsAdmin"]){
	$_SESSION['error'] = "Please log in to view this page";
header("Location: login.php");
exit;
}
//require "assets/csvQuery.php";
require_once(TEMPLATES_PATH . "/layout/nav.php");
$pageNum = 1;
if(!empty($_GET["pageNum"])){
	// Cast to int so the value is safe to interpolate into the LIMIT clause below
	$pageNum = (int)$_GET["pageNum"];
	if($pageNum < 1){ $pageNum = 1; }
}
$startRow = ($pageNum - 1) * 10;
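// Worked example: pageNum 3 gives startRow (3 - 1) * 10 = 20, so the listing
// query further down fetches rows 20 through 29 via "LIMIT 20, 10".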
?>
<div class="container">
<div class="content white">
<div style="max-width: 850px; margin: 0 auto;">
<div class="controls">
<a href="/worldmap/csvQuery" class="btn csv dark">Export to CSV</a>
</div>
<?php
$mysqli = connectToDB();
$query = "SELECT AlumID FROM AlumnData WHERE IsAdmin = 0";
$result = $mysqli->query($query);
$numPages = ceil(($result->num_rows)/10);
$query = "SELECT * FROM AlumnData WHERE IsAdmin = 0 LIMIT $startRow, 10";
$result = $mysqli->query($query);
while($row = $result->fetch_array(MYSQLI_ASSOC)){ ?>
<div class="alumnus">
<p class="info"><?php
echo($row["AlumFName"]." ".$row["AlumLName"]);
if($row["GradYear"] || $row["JobTitle"]){
if(!$row["JobTitle"]){
echo(", Class of ".$row["GradYear"]);
} else if(!$row["GradYear"]){
echo(", ".$row["JobTitle"]);
} else{
echo(", Class of ".$row["GradYear"]." - ".$row["JobTitle"]);
}
}
?>
</p>
<?php if($row["IsApproved"] === "0"){ ?>
<p class="btn approve" data-id="<?php echo($row["AlumID"]); ?>">Publish</p>
<?php } else{ ?>
<p class="btn unapprove" data-id="<?php echo($row["AlumID"]); ?>">Unpublish</p>
<?php } ?>
</div>
<?php
}
?>
<div class="pages">
<?php
for($i = 0; $i < $numPages; $i++){
if(($i+1) == $pageNum){ ?>
<a href="#" class="curPage"><?php echo($i + 1); ?></a>
<?php } else{ ?>
<a href="/adminView?pageNum=<?php echo($i + 1); ?>"><?php echo($i + 1); ?></a>
<?php }
}
?>
</div>
</div>
</div>
</div><file_sep><?php
if(empty($_SESSION["accountToReset"])){
header("Location: /index.php");
exit;
}
require_once(TEMPLATES_PATH . "/layout/nav.php");
?>
<div class="container">
<h1>Set A New Password</h1>
<h2 class="serif italic subtext">Please enter and confirm a new password for your account.</h2>
<div class="content center form">
<form action="/worldmap/changePassword" method="POST" class="signup-form">
<ul>
<?php
//session_start();
if(isset($_SESSION["error"])){
?>
<li class="error">
<?php
echo($_SESSION["error"]);
unset($_SESSION["error"]);
?>
</li>
<?php } ?>
<li>
				<input type="password" name="pass" id="pass" placeholder="New Password" />
</li>
<li>
				<input type="password" name="passConf" id="passConf" placeholder="Confirm Password" />
</li>
<li>
<input type="submit" value="Submit" class="btn" />
</li>
</ul>
</form>
</div>
</div><file_sep><?php require_once(TEMPLATES_PATH . "/layout/nav.php"); ?>
<div class="container">
<h1>Forgot your Password?</h1>
<h2 class="serif italic subtext">Enter your email below and we'll send you a link to get you back up and running!</h2>
<div class="content center form">
<form action="/worldmap/forgotPassword" method="POST" class="signup-form">
<ul>
<?php
//session_start();
if(isset($_SESSION["error"])){
?>
<li class="error">
<?php
echo($_SESSION["error"]);
unset($_SESSION["error"]);
?>
</li>
<?php } ?>
<li>
<input type="text" name="email-real" id="email-real" value="" placeholder="Email Address" />
<input type="text" name="email" id="email" />
</li>
<li>
<input type="submit" value="Submit" class="btn" />
</li>
</ul>
</form>
</div>
</div><file_sep><?php
function createMailer(){
$mail = new PHPMailer;
$mail->isSMTP();
$mail->Host = 'smtp.gmail.com';
$mail->Port = 587;
$mail->SMTPSecure = 'tls';
$mail->SMTPAuth = true;
$mail->Username = "<EMAIL>";
$mail->Password = "<PASSWORD>";
return $mail;
}
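// Usage sketch (hedged): this mirrors how the mailer is driven in the test
// script elsewhere in this repo; $recipientEmail and $htmlBody are placeholders.
//   $mail = createMailer();
//   $mail->setFrom('<EMAIL>', 'Purdue Christian Campus House');
//   $mail->addAddress($recipientEmail, "");
//   $mail->Subject = 'Alumni World Map';
//   $mail->msgHTML($htmlBody);
//   if (!$mail->send()) { error_log("Mailer Error: " . $mail->ErrorInfo); }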
?><file_sep><?php
function connectToDB(){
global $config;
$mysqli = new mysqli($config["db"]["host"], $config["db"]["username"], $config["db"]["password"], $config["db"]["dbname"]);
if($mysqli->connect_error){
		$_SESSION['error'] = "Well, this is embarrassing. We seem to be having some issues, please try again later";
exit;
}
return $mysqli;
}
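// Usage sketch (hedged): callers throughout the site use the returned mysqli
// handle directly, e.g. the admin listing page:
//   $mysqli = connectToDB();
//   $result = $mysqli->query("SELECT AlumID FROM AlumnData WHERE IsAdmin = 0");
//   while($row = $result->fetch_array(MYSQLI_ASSOC)){ /* ... */ }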
?><file_sep><?php require_once(TEMPLATES_PATH . "/layout/nav.php"); ?>
<div class="container about-us">
<h1>Well this is embarrassing...</h1>
<h2 class="serif italic subtext">We can't seem to find the page you're looking for. Please check the URL and try again later. If this problem persists, please contact our <a href="mailto:<EMAIL>" style="text-decoration: underline; font-weight: bold;">IT guy</a>, he usually has some ideas.</h2>
</div><file_sep>module.exports = function(grunt) {
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
vars: {
scss_dir: './scss/',
css_dir: './css/',
scss_file: 'main.scss',
css_file: 'main.css',
css_min_file: 'main.min.css',
}
});
grunt.loadNpmTasks('grunt-contrib-sass');
grunt.config('sass', {
options: {
style: 'nested',
lineNumbers: true
},
dev: {
options: {
style: 'expanded',
debugInfo: false,
},
files: [{
src: ['<%= vars.scss_dir %><%= vars.scss_file %>'],
dest: '<%= vars.css_dir %><%= vars.css_file %>'
}],
},
prod: {
options: {
style: 'compressed',
debugInfo: false
},
files: [{
src: ['<%= vars.scss_dir %><%= vars.scss_file %>'],
dest: '<%= vars.css_dir %><%= vars.css_file %>'
}],
}
});
// grunt.loadNpmTasks('grunt-postcss');
// grunt.config('postcss', {
// options: {
// map: true,
// },
// dev: {
// processors: [
// require('autoprefixer-core')({browsers: 'last 1 version'}),
// ],
// files: [{
// src: ['<%= sass.dev.files.0.dest %>'],
// dest: '<%= vars.css_dir %><%= vars.css_file %>'
// }],
// },
// prod: {
// processors: [
// require('autoprefixer-core')({browsers: 'last 1 version'}),
// require('csswring')
// ],
// files: [{
// src: ['<%= sass.prod.files.0.dest %>'],
// dest: '<%= vars.css_dir %><%= vars.css_file %>'
// }],
// }
// });
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.config('uglify', {
options: {
banner: '/*! <%= pkg.name %> <%= grunt.template.today("dd-mm-yyyy") %> */\n',
compress: {
drop_console: true
},
mangle: {}
},
dev: {
src: 'js/dist/main.js',
dest: 'js/dist/main.min.js'
}
});
grunt.loadNpmTasks('grunt-contrib-concat');
grunt.config('concat', {
dev: {
src: ['js/src/**/*.js'],
dest: 'js/dist/main.js'
}
});
// grunt.loadNpmTasks('grunt-cache-bust');
// grunt.config('cacheBust', {
// options: {
// encoding: 'utf8',
// algorithm: 'md5',
// length: 16,
// deleteOriginals: true
// },
// assets: {
// files: [{
// // src: ['index.html']
// }]
// }
// });
grunt.loadNpmTasks('grunt-autoprefixer');
grunt.config('autoprefixer', {
options: {
browsers: ['> 1%', 'last 2 versions', 'ff 17', 'opera 12.1' /* the defaults, and also: */, 'ie >= 8']
},
dev: {
files: [{
src: ['<%= sass.dev.files.0.dest %>'],
dest: '<%= vars.css_dir %><%= vars.css_file %>'
}],
},
prod: {
files: [{
src: ['<%= sass.prod.files.0.dest %>'],
dest: '<%= vars.css_dir %><%= vars.css_file %>'
}],
}
});
grunt.loadNpmTasks('grunt-contrib-cssmin');
grunt.config('cssmin', {
dev: {
files: [
{
src: ['<%= vars.css_dir %><%= vars.css_file %>'],
dest: '<%= vars.css_dir %><%= vars.css_min_file %>'
}
]
},
});
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.config('watch', {
options: {
livereload: true
},
gruntfile: {
files: ['Gruntfile.js']
},
scss: {
files: [
'<%= vars.scss_dir %>**/*.scss'
],
tasks: ['sass:dev', 'autoprefixer:dev']
},
javascript: {
files: [
'js/src/**/*.js'
],
tasks: ['concat:dev']
}
});
grunt.registerTask('dev', ['sass:dev', 'autoprefixer:dev', 'concat:dev', 'uglify:dev']);
	grunt.registerTask('prod', ['sass:prod', 'autoprefixer:prod', 'concat:dev', 'uglify:dev']); // concat and uglify only define "dev" targets, and the cacheBust step is commented out above
grunt.registerTask('default',['watch']);
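	// Usage note (assumes the grunt CLI is installed): "grunt dev" runs a one-off
	// development build, "grunt prod" runs the compressed build, and plain "grunt"
	// starts the watch task with livereload.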
};<file_sep><?php
echo("Attempting to send email...");
require_once("library/PHPMailer/autoloader.php");
$email = "<EMAIL>";
$mail = new PHPMailer;
$mail->isSMTP();
$mail->Host = 'smtp.gmail.com';
$mail->Port = 587;
$mail->SMTPSecure = 'tls';
$mail->SMTPAuth = true;
$mail->Username = "<EMAIL>";
$mail->Password = "<PASSWORD>";
$mail->setFrom('<EMAIL>', 'Purdue Christian Campus House');
$mail->addReplyTo('<EMAIL>', 'PCCH IT');
$mail->addAddress($email, "");
$mail->Subject = 'Alumni World Map Test Message';
$mail->msgHTML("This is a test message");
$mail->AltBody = "This is a test message";
if (!$mail->send()) {
echo "Mailer Error: " . $mail->ErrorInfo;
} else {
echo "Message sent!";
}
?><file_sep>function initMap(){
var styledMap = new google.maps.StyledMapType(styles,{name:"Styled Map"});
var mapOptions = {
center:{lat:40,lng:-90},
zoom:3,
mapTypeControl: false,
streetViewControl: false,
zoomControlOptions: {
position: google.maps.ControlPosition.RIGHT_CENTER
},
mapTypeControlOptions:{mapTypeIds:[google.maps.MapTypeId.ROADMAP,'map_style']}
};
map = new google.maps.Map(document.getElementById('map-canvas'),mapOptions);
map.setOptions({ minZoom: 3, maxZoom: 15 });
map.mapTypes.set('map_style',styledMap);
map.setMapTypeId('map_style');
//console.log(1);
$.getJSON("/worldmap/getPoints", function(data){
//cacheMarkers = data;
console.log(data);
setMarkers(map, data);
});
//console.log(2);
window.infoWindow = new google.maps.InfoWindow({
maxWidth:400
});
}
/*
Initialized Map. Load data points from database and store in cacheMarkers var.
*/
function initNormMap(){
admin = false;
initMap();
initSearch();
}
/*
Initialized Admin Map. Load data points from database and store in cacheMarkers var.
*/
function initAdminMap(){
admin = true;
initMap();
//initSearch();
}
/*
Given map and array of data, add markers to the map and initialize marker cluster
*/
function setMarkers(map, people)
{
//console.log("points being set...");
for (var i = 0; i < people.length; i++) {
var person = people[i];
if(person["IsApproved"] == "1" || admin){
var myLatLng = new google.maps.LatLng(person["GeoLocLat"], person["GeoLocLong"]);
//Create New Marker with myLatLng, map, person id, and icon vars
var image = {
url: '/assets/images/MapIndicator.svg',
scaledSize: new google.maps.Size(36, 66),
origin: new google.maps.Point(0, 0),
anchor: new google.maps.Point(18, 66)
};
var image2 = {
url: '/assets/images/non-approved-map.svg',
scaledSize: new google.maps.Size(36, 66),
origin: new google.maps.Point(0, 0),
anchor: new google.maps.Point(18, 66)
};
if(person["IsApproved"] === "0" && admin){
image = image2;
}
var marker = new google.maps.Marker({
position: myLatLng,
map: map,
person:person,
id: person["AlumID"],
icon: image
});
//Add marker click event that runs marker_click function
google.maps.event.addListener(marker, 'click', function(){
if(admin){
admin_marker_click(this);
} else{
marker_click(this);
}
});
//Put marker is list of markers
cacheMarkers.push(marker);
}
}
//Set MarkerClusterer to collect markers
var markerCluster = new MarkerClusterer(map, cacheMarkers, {styles: [{
textColor: "#FFFFFF",
fontFamily: "Bebas,helvetica,arial,sans-serif",
textSize: "18",
fontWeight: "normal",
height: 60,
width: 60,
url: "/assets/images/cluster-icon2.svg"
}],zoomOnClick:false, maxZoom:13, gridSize:100, renderCluster:cluster_click });
}
<file_sep>--- The Grid class.
-- Implementation of the `grid` class.
-- The `grid` is an implicit graph which represents the 2D
-- world map layout on which the `pathfinder` object will run.
-- During a search, the `pathfinder` object needs to save some critical values. These values are cached within each `node`
-- object, and the whole set of nodes is held inside the `grid` object itself.
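-- A minimal end-to-end sketch (hedged: it assumes the Pathfinder module of this
-- same library, required here under 'libs.jumper.pathfinder', and a collision
-- map where 0 marks walkable tiles):
--   local Grid = require ('libs.jumper.grid')
--   local Pathfinder = require ('libs.jumper.pathfinder')
--   local map = {{0,0,0},{0,1,0},{0,0,0}}
--   local grid = Grid(map)
--   local finder = Pathfinder:new(grid, 'JPS', 0)
--   local path = finder:getPath(1, 1, 3, 3)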
if (...) then
-- Dependencies
local _PATH = (...):gsub('%.grid$','')
-- Local references
local Utils = require ('libs.jumper.core.utils')
local Assert = require ('libs.jumper.core.assert')
local Node = require ('libs.jumper.core.node')
-- Local references
local pairs = pairs
local assert = assert
local next = next
local setmetatable = setmetatable
local floor = math.floor
local coroutine = coroutine
-- Offsets for straights moves
local straightOffsets = {
{x = 1, y = 0} --[[W]], {x = -1, y = 0}, --[[E]]
{x = 0, y = 1} --[[S]], {x = 0, y = -1}, --[[N]]
}
-- Offsets for diagonal moves
local diagonalOffsets = {
{x = -1, y = -1} --[[NW]], {x = 1, y = -1}, --[[NE]]
{x = -1, y = 1} --[[SW]], {x = 1, y = 1}, --[[SE]]
}
--- The `Grid` class.<br/>
-- This class is callable.
-- Therefore,_ <code>Grid(...)</code> _acts as a shortcut to_ <code>Grid:new(...)</code>.
-- @type Grid
local Grid = {}
Grid.__index = Grid
-- Specialized grids
local PreProcessGrid = setmetatable({},Grid)
local PostProcessGrid = setmetatable({},Grid)
PreProcessGrid.__index = PreProcessGrid
PostProcessGrid.__index = PostProcessGrid
PreProcessGrid.__call = function (self,x,y)
return self:getNodeAt(x,y)
end
PostProcessGrid.__call = function (self,x,y,create)
if create then return self:getNodeAt(x,y) end
return self._nodes[y] and self._nodes[y][x]
end
--- Inits a new `grid`
-- @class function
-- @tparam table|string map A collision map - (2D array) with consecutive indices (starting at 0 or 1)
-- or a `string` with line-break chars (<code>\n</code> or <code>\r</code>) as row delimiters.
-- @tparam[opt] bool cacheNodeAtRuntime When __true__, returns an empty `grid` instance, so that
	-- later on, indexing a non-cached `node` will cause it to be created and cached within the `grid` on demand (i.e., when needed).
	-- This is a __memory-safe__ option, in case you're dealing with some tight memory constraints.
-- Defaults to __false__ when omitted.
-- @treturn grid a new `grid` instance
-- @usage
-- -- A simple 3x3 grid
-- local myGrid = Grid:new({{0,0,0},{0,0,0},{0,0,0}})
--
-- -- A memory-safe 3x3 grid
-- myGrid = Grid('000\n000\n000', true)
function Grid:new(map, cacheNodeAtRuntime)
if type(map) == 'string' then
assert(Assert.isStrMap(map), 'Wrong argument #1. Not a valid string map')
map = Utils.strToMap(map)
end
assert(Assert.isMap(map),('Bad argument #1. Not a valid map'))
assert(Assert.isBool(cacheNodeAtRuntime) or Assert.isNil(cacheNodeAtRuntime),
('Bad argument #2. Expected \'boolean\', got %s.'):format(type(cacheNodeAtRuntime)))
		if cacheNodeAtRuntime then
			return PostProcessGrid:new(map)
		end
		return PreProcessGrid:new(map)
end
--- Checks if `node` at [x,y] is __walkable__.
-- Will check if `node` at location [x,y] both *exists* on the collision map and *is walkable*
-- @class function
-- @tparam int x the x-location of the node
-- @tparam int y the y-location of the node
-- @tparam[opt] string|int|func walkable the value for walkable locations in the collision map array (see @{Grid:new}).
-- Defaults to __false__ when omitted.
-- If this parameter is a function, it should be prototyped as __f(value)__ and return a `boolean`:
-- __true__ when value matches a __walkable__ `node`, __false__ otherwise. If this parameter is not given
-- while location [x,y] __is valid__, this actual function returns __true__.
-- @tparam[optchain] int clearance the amount of clearance needed. Defaults to 1 (normal clearance) when not given.
-- @treturn bool __true__ if `node` exists and is __walkable__, __false__ otherwise
-- @usage
-- -- Always true
-- print(myGrid:isWalkableAt(2,3))
--
-- -- True if node at [2,3] collision map value is 0
-- print(myGrid:isWalkableAt(2,3,0))
--
-- -- True if node at [2,3] collision map value is 0 and has a clearance higher or equal to 2
-- print(myGrid:isWalkableAt(2,3,0,2))
--
function Grid:isWalkableAt(x, y, walkable, clearance)
local nodeValue = self._map[y] and self._map[y][x]
if nodeValue then
if not walkable then return true end
else
return false
end
local hasEnoughClearance = not clearance and true or false
if not hasEnoughClearance then
if not self._isAnnotated[walkable] then return false end
local node = self:getNodeAt(x,y)
local nodeClearance = node:getClearance(walkable)
hasEnoughClearance = (nodeClearance >= clearance)
end
if self._eval then
return walkable(nodeValue) and hasEnoughClearance
end
return ((nodeValue == walkable) and hasEnoughClearance)
end
--- Returns the `grid` width.
-- @class function
-- @treturn int the `grid` width
-- @usage print(myGrid:getWidth())
function Grid:getWidth()
return self._width
end
--- Returns the `grid` height.
-- @class function
-- @treturn int the `grid` height
-- @usage print(myGrid:getHeight())
function Grid:getHeight()
return self._height
end
--- Returns the collision map.
-- @class function
-- @treturn map the collision map (see @{Grid:new})
-- @usage local map = myGrid:getMap()
function Grid:getMap()
return self._map
end
--- Returns the set of nodes.
-- @class function
-- @treturn {{node,...},...} an array of nodes
-- @usage local nodes = myGrid:getNodes()
function Grid:getNodes()
return self._nodes
end
--- Returns the `grid` bounds. Returned values corresponds to the upper-left
-- and lower-right coordinates (in tile units) of the actual `grid` instance.
-- @class function
-- @treturn int the upper-left corner x-coordinate
-- @treturn int the upper-left corner y-coordinate
-- @treturn int the lower-right corner x-coordinate
-- @treturn int the lower-right corner y-coordinate
-- @usage local left_x, left_y, right_x, right_y = myGrid:getBounds()
function Grid:getBounds()
return self._min_x, self._min_y,self._max_x, self._max_y
end
--- Returns neighbours. The returned value is an array of __walkable__ nodes neighbouring a given `node`.
-- @class function
-- @tparam node node a given `node`
-- @tparam[opt] string|int|func walkable the value for walkable locations in the collision map array (see @{Grid:new}).
-- Defaults to __false__ when omitted.
	-- @tparam[optchain] bool allowDiagonal when __true__, adjacent (diagonal) nodes are included (8-neighbours).
-- Defaults to __false__ when omitted.
-- @tparam[optchain] bool tunnel When __true__, allows the `pathfinder` to tunnel through walls when heading diagonally.
	-- @tparam[optchain] int clearance When given, will prune from the neighbours set all nodes having a clearance value lower than the passed-in value
-- Defaults to __false__ when omitted.
-- @treturn {node,...} an array of nodes neighbouring a given node
-- @usage
-- local aNode = myGrid:getNodeAt(5,6)
-- local neighbours = myGrid:getNeighbours(aNode, 0, true)
function Grid:getNeighbours(node, walkable, allowDiagonal, tunnel, clearance)
local neighbours = {}
for i = 1,#straightOffsets do
local n = self:getNodeAt(
node._x + straightOffsets[i].x,
node._y + straightOffsets[i].y
)
if n and self:isWalkableAt(n._x, n._y, walkable, clearance) then
neighbours[#neighbours+1] = n
end
end
if is_target and math.abs(node._x - self.target_x) <= 1 and math.abs(node._y - self.target_y) <= 1 then
allowDiagonal = true
end
if not allowDiagonal then return neighbours end
tunnel = not not tunnel
for i = 1,#diagonalOffsets do
local n = self:getNodeAt(
node._x + diagonalOffsets[i].x,
node._y + diagonalOffsets[i].y
)
if n and self:isWalkableAt(n._x, n._y, walkable, clearance) then
if tunnel then
neighbours[#neighbours+1] = n
else
local skipThisNode = false
local n1 = self:getNodeAt(node._x+diagonalOffsets[i].x, node._y)
local n2 = self:getNodeAt(node._x, node._y+diagonalOffsets[i].y)
if ((n1 and n2) and not self:isWalkableAt(n1._x, n1._y, walkable, clearance) and not self:isWalkableAt(n2._x, n2._y, walkable, clearance)) then
skipThisNode = true
end
if not skipThisNode then neighbours[#neighbours+1] = n end
end
end
end
return neighbours
end
--- Grid iterator. Iterates on every single node
-- in the `grid`. Passing __lx, ly, ex, ey__ arguments will iterate
-- only on nodes inside the bounding-rectangle delimited by those given coordinates.
-- @class function
-- @tparam[opt] int lx the leftmost x-coordinate of the rectangle. Default to the `grid` leftmost x-coordinate (see @{Grid:getBounds}).
-- @tparam[optchain] int ly the topmost y-coordinate of the rectangle. Default to the `grid` topmost y-coordinate (see @{Grid:getBounds}).
-- @tparam[optchain] int ex the rightmost x-coordinate of the rectangle. Default to the `grid` rightmost x-coordinate (see @{Grid:getBounds}).
-- @tparam[optchain] int ey the bottom-most y-coordinate of the rectangle. Default to the `grid` bottom-most y-coordinate (see @{Grid:getBounds}).
-- @treturn node a `node` on the collision map, upon each iteration step
-- @treturn int the iteration count
-- @usage
-- for node, count in myGrid:iter() do
-- print(node:getX(), node:getY(), count)
-- end
function Grid:iter(lx,ly,ex,ey)
local min_x = lx or self._min_x
local min_y = ly or self._min_y
local max_x = ex or self._max_x
local max_y = ey or self._max_y
local x, y
y = min_y
return function()
x = not x and min_x or x+1
if x > max_x then
x = min_x
y = y+1
end
if y > max_y then
y = nil
end
return self._nodes[y] and self._nodes[y][x] or self:getNodeAt(x,y)
end
end
--- Grid iterator. Iterates on each node along the outline (border) of a squared area
-- centered on the given node.
-- @tparam node node a given `node`
-- @tparam[opt] int radius the area radius (half-length). Defaults to __1__ when not given.
-- @treturn node a `node` at each iteration step
-- @usage
-- for node in myGrid:around(node, 2) do
-- ...
-- end
function Grid:around(node, radius)
local x, y = node._x, node._y
radius = radius or 1
local _around = Utils.around()
local _nodes = {}
repeat
local state, x, y = coroutine.resume(_around,x,y,radius)
local nodeAt = state and self:getNodeAt(x, y)
if nodeAt then _nodes[#_nodes+1] = nodeAt end
until (not state)
local _i = 0
return function()
_i = _i+1
return _nodes[_i]
end
end
--- Each transformation. Calls the given function on each `node` in the `grid`,
-- passing the `node` as the first argument to function __f__.
-- @class function
-- @tparam func f a function prototyped as __f(node,...)__
-- @tparam[opt] vararg ... args to be passed to function __f__
-- @treturn grid self (the calling `grid` itself, can be chained)
-- @usage
-- local function printNode(node)
-- print(node:getX(), node:getY())
-- end
-- myGrid:each(printNode)
function Grid:each(f,...)
for node in self:iter() do f(node,...) end
return self
end
--- Each (in range) transformation. Calls a function on each `node` in the range of a rectangle of cells,
-- passing the `node` as the first argument to function __f__.
-- @class function
-- @tparam int lx the leftmost x-coordinate coordinate of the rectangle
-- @tparam int ly the topmost y-coordinate of the rectangle
-- @tparam int ex the rightmost x-coordinate of the rectangle
-- @tparam int ey the bottom-most y-coordinate of the rectangle
-- @tparam func f a function prototyped as __f(node,...)__
-- @tparam[opt] vararg ... args to be passed to function __f__
-- @treturn grid self (the calling `grid` itself, can be chained)
-- @usage
-- local function printNode(node)
-- print(node:getX(), node:getY())
-- end
-- myGrid:eachRange(1,1,8,8,printNode)
function Grid:eachRange(lx,ly,ex,ey,f,...)
for node in self:iter(lx,ly,ex,ey) do f(node,...) end
return self
end
--- Map transformation.
-- Calls function __f(node,...)__ on each `node` in a given range, passing the `node` as the first arg to function __f__ and replaces
-- it with the returned value. Therefore, the function should return a `node`.
-- @class function
-- @tparam func f a function prototyped as __f(node,...)__
-- @tparam[opt] vararg ... args to be passed to function __f__
-- @treturn grid self (the calling `grid` itself, can be chained)
-- @usage
-- local function nothing(node)
-- return node
-- end
-- myGrid:imap(nothing)
function Grid:imap(f,...)
for node in self:iter() do
node = f(node,...)
end
return self
end
--- Map in range transformation.
-- Calls function __f(node,...)__ on each `node` in a rectangle range, passing the `node` as the first argument to the function and replaces
-- it with the returned value. Therefore, the function should return a `node`.
-- @class function
-- @tparam int lx the leftmost x-coordinate coordinate of the rectangle
-- @tparam int ly the topmost y-coordinate of the rectangle
-- @tparam int ex the rightmost x-coordinate of the rectangle
-- @tparam int ey the bottom-most y-coordinate of the rectangle
-- @tparam func f a function prototyped as __f(node,...)__
-- @tparam[opt] vararg ... args to be passed to function __f__
-- @treturn grid self (the calling `grid` itself, can be chained)
-- @usage
-- local function nothing(node)
-- return node
-- end
-- myGrid:imap(1,1,6,6,nothing)
function Grid:imapRange(lx,ly,ex,ey,f,...)
for node in self:iter(lx,ly,ex,ey) do
node = f(node,...)
end
return self
end
-- Specialized grids
-- Inits a preprocessed grid
function PreProcessGrid:new(map)
local newGrid = {}
newGrid._map = map
newGrid._nodes, newGrid._min_x, newGrid._max_x, newGrid._min_y, newGrid._max_y = Utils.arrayToNodes(newGrid._map)
newGrid._width = (newGrid._max_x-newGrid._min_x)+1
newGrid._height = (newGrid._max_y-newGrid._min_y)+1
newGrid._isAnnotated = {}
return setmetatable(newGrid,PreProcessGrid)
end
-- Inits a postprocessed grid
function PostProcessGrid:new(map)
local newGrid = {}
newGrid._map = map
newGrid._nodes = {}
newGrid._min_x, newGrid._max_x, newGrid._min_y, newGrid._max_y = Utils.getArrayBounds(newGrid._map)
newGrid._width = (newGrid._max_x-newGrid._min_x)+1
newGrid._height = (newGrid._max_y-newGrid._min_y)+1
newGrid._isAnnotated = {}
return setmetatable(newGrid,PostProcessGrid)
end
--- Returns the `node` at location [x,y].
-- @class function
-- @name Grid:getNodeAt
-- @tparam int x the x-coordinate coordinate
-- @tparam int y the y-coordinate coordinate
-- @treturn node a `node`
-- @usage local aNode = myGrid:getNodeAt(2,2)
-- Gets the node at location <x,y> on a preprocessed grid
function PreProcessGrid:getNodeAt(x,y)
return self._nodes[y] and self._nodes[y][x] or nil
end
-- Gets the node at location <x,y> on a postprocessed grid
function PostProcessGrid:getNodeAt(x,y)
if not x or not y then return end
if Utils.outOfRange(x,self._min_x,self._max_x) then return end
if Utils.outOfRange(y,self._min_y,self._max_y) then return end
if not self._nodes[y] then self._nodes[y] = {} end
if not self._nodes[y][x] then self._nodes[y][x] = Node:new(x,y) end
return self._nodes[y][x]
end
return setmetatable(Grid,{
__call = function(self,...)
return self:new(...)
end
})
end
<file_sep>-- Various utilities for Jumper top-level modules
if (...) then
-- Dependencies
local _PATH = (...):gsub('%.utils$','')
local Path = require ('libs.jumper.core.path')
local Node = require ('libs.jumper.core.node')
-- Local references
local pairs = pairs
local type = type
local t_insert = table.insert
local assert = assert
local coroutine = coroutine
-- Raw array items count
local function arraySize(t)
local count = 0
for k,v in pairs(t) do
count = count+1
end
return count
end
-- Parses a string map and builds an array map
local function stringMapToArray(str)
local map = {}
local w, h
for line in str:gmatch('[^\n\r]+') do
if line then
w = not w and #line or w
assert(#line == w, 'Error parsing map, rows must have the same size!')
h = (h or 0) + 1
map[h] = {}
for char in line:gmatch('.') do
map[h][#map[h]+1] = char
end
end
end
return map
end
-- Collects and returns the keys of a given array
local function getKeys(t)
local keys = {}
for k,v in pairs(t) do keys[#keys+1] = k end
return keys
end
-- Calculates the bounds of a 2d array
local function getArrayBounds(map)
local min_x, max_x
local min_y, max_y
for y in pairs(map) do
min_y = not min_y and y or (y<min_y and y or min_y)
max_y = not max_y and y or (y>max_y and y or max_y)
for x in pairs(map[y]) do
min_x = not min_x and x or (x<min_x and x or min_x)
max_x = not max_x and x or (x>max_x and x or max_x)
end
end
return min_x,max_x,min_y,max_y
end
-- Converts an array to a set of nodes
local function arrayToNodes(map)
local min_x, max_x
local min_y, max_y
local nodes = {}
for y in pairs(map) do
min_y = not min_y and y or (y<min_y and y or min_y)
max_y = not max_y and y or (y>max_y and y or max_y)
nodes[y] = {}
for x in pairs(map[y]) do
min_x = not min_x and x or (x<min_x and x or min_x)
max_x = not max_x and x or (x>max_x and x or max_x)
nodes[y][x] = Node:new(x,y)
end
end
return nodes,
(min_x or 0), (max_x or 0),
(min_y or 0), (max_y or 0)
end
-- Iterator, wrapped within a coroutine
-- Iterates around a given position following the outline of a square
local function around()
local iterf = function(x0, y0, s)
local x, y = x0-s, y0-s
coroutine.yield(x, y)
repeat
x = x + 1
coroutine.yield(x,y)
until x == x0+s
repeat
y = y + 1
coroutine.yield(x,y)
until y == y0 + s
repeat
x = x - 1
coroutine.yield(x, y)
until x == x0-s
repeat
y = y - 1
coroutine.yield(x,y)
until y == y0-s+1
end
return coroutine.create(iterf)
end
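	-- Example trace: resuming the coroutine with (x0 = 5, y0 = 5, s = 1) yields the
	-- 8 cells on the square outline, in order:
	-- (4,4) (5,4) (6,4) (6,5) (6,6) (5,6) (4,6) (4,5)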
-- Extract a path from a given start/end position
local function traceBackPath(finder, node, startNode)
local path = Path:new()
path._grid = finder._grid
while true do
if node._parent then
t_insert(path._nodes,1,node)
node = node._parent
else
t_insert(path._nodes,1,startNode)
return path
end
end
end
-- Lookup for value in a table
local indexOf = function(t,v)
for i = 1,#t do
if t[i] == v then return i end
end
return nil
end
-- Is i out of range
local function outOfRange(i,low,up)
return (i< low or i > up)
end
return {
arraySize = arraySize,
getKeys = getKeys,
indexOf = indexOf,
outOfRange = outOfRange,
getArrayBounds = getArrayBounds,
arrayToNodes = arrayToNodes,
strToMap = stringMapToArray,
around = around,
traceBackPath = traceBackPath
}
end
<file_sep>local _M = {}
function _M.clamp(value, min, max)
if value < min then
return min
elseif value > max then
return max
else
return value
end
end
function _M.round(value)
return math.floor(value + 0.5)
end
function _M.copy(t)
local c = {}
for k, v in pairs(t) do
c[k] = v
end
return c
end
function _M.deep_copy(t, out)
local c = out or {}
for k, v in pairs(t) do
if type(v) == 'table' then
c[k] = _M.deep_copy(v)
else
c[k] = v
end
end
return c
end
function _M.capitalize(s)
return s:sub(1, 1):upper() .. s:sub(2):gsub('_', ' ')
end
return _M<file_sep>local hashed = require('libs.hashed')
local _M = {}
local mouse_move_listeners = {}
local mouse_click_listeners = {}
local mouse_wheel_listeners = {}
local focused = {}
function _M.add_mouse_move_listener(listener)
table.insert(mouse_move_listeners, listener)
end
function _M.remove_mouse_move_listener(listener)
for i = #mouse_move_listeners, 1, -1 do
if mouse_move_listeners[i] == listener then
table.remove(mouse_move_listeners, i)
break
end
end
end
function _M.add_mouse_click_listener(listener)
table.insert(mouse_click_listeners, listener)
end
function _M.remove_mouse_click_listener(listener)
for i = #mouse_click_listeners, 1, -1 do
if mouse_click_listeners[i] == listener then
table.remove(mouse_click_listeners, i)
break
end
end
end
function _M.add_mouse_wheel_listener(listener)
table.insert(mouse_wheel_listeners, listener)
end
function _M.remove_mouse_wheel_listener(listener)
for i = #mouse_wheel_listeners, 1, -1 do
if mouse_wheel_listeners[i] == listener then
table.remove(mouse_wheel_listeners, i)
break
end
end
end
local function on_mouse_move(action)
if focused.mouse_move then
local l = focused.mouse_move
if type(l) == 'table' and l.on_mouse_move then
if l:on_mouse_move(action) then
return
end
else
if l(action) then
return
end
end
end
for i = 1, #mouse_move_listeners do
local l = mouse_move_listeners[i]
if type(l) == 'function' then
if l(action) then
break
end
elseif l.on_mouse_move then
if l:on_mouse_move(action) then
break
end
end
end
end
local function on_mouse_click(action)
if focused.mouse_click then
local l = focused.mouse_click
if type(l) == 'function' then
if l(action) then
return
end
elseif l.on_mouse_click then
if l:on_mouse_click(action) then
return
end
end
end
for i = 1, #mouse_click_listeners do
local l = mouse_click_listeners[i]
if type(l) == 'function' then
if l(action) then
break
end
elseif l.on_mouse_click then
if l:on_mouse_click(action) then
break
end
end
end
end
local function on_mouse_wheel(action)
if focused.mouse_wheel then
local l = focused.mouse_wheel
if type(l) == 'function' then
if l(action) then
return
end
elseif l.on_mouse_wheel then
if l:on_mouse_wheel(action) then
return
end
end
end
for i = 1, #mouse_wheel_listeners do
local l = mouse_wheel_listeners[i]
if type(l) == 'function' then
if l(action) then
break
end
elseif l.on_mouse_wheel then
if l:on_mouse_wheel(action) then
break
end
end
end
end
function _M.on_input(action_id, action)
if not action_id then
on_mouse_move(action)
elseif action_id == hashed.mouse_click or action_id == hashed.mouse_right_click then
action.button = action_id == hashed.mouse_click and hashed.left or hashed.right
on_mouse_click(action)
elseif action_id == hashed.mouse_wheel_up or action_id == hashed.mouse_wheel_down then
if action_id == hashed.mouse_wheel_down then
action.value = -action.value
end
on_mouse_wheel(action)
end
end
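-- Usage sketch (hedged; the module path and script wiring are assumptions): a
-- script forwards raw Defold input here and registers a listener, which may be
-- a function or a table exposing on_mouse_click / on_mouse_move / on_mouse_wheel.
--   local input = require('libs.input')
--   input.add_mouse_click_listener(my_listener)
--   function on_input(self, action_id, action)
--     input.on_input(action_id, action)
--   end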
return _M<file_sep>local camera = require('libs.camera')
local flux = require('libs.flux')
local focus = require('libs.focus')
local hashed = require('libs.hashed')
local ray_intersection = require('libs.ray_intersection')
local utils = require('libs.utils')
local g = {}
function g:on_mouse_click(action)
if action.button == hashed.left then
local margin = 256 * 1280 / camera.width
if action.x <= margin then
return
end
if action.pressed then
if self.tween then
self.tween:stop()
self.tween = nil
end
local x, z = self:find_selection_xz(action)
if x then
local object = self:find_selected_object(x, z)
if object and not self.player.building then
self.selection_rects.selected.x, self.selection_rects.selected.z = x, z
self.selection_rects.selected.is_visible = true
self.selection_rects.selected.object = object
msg.post('/ui', hashed.selection)
msg.post('/audio#click', 'play_sound')
if object and object.is_unit and not object.is_alien then
msg.post('/ui', hashed.info, {info = 'Right click to move or attack.'})
elseif object and object.name == 'base' then
self.last_selected_base = object
end
else
local deselect = true
if self.player.building then
if not object or self.player.building == 'plate' then
deselect = self:spawn_building(x, z)
else
deselect = false
end
end
if deselect then
local base = self:find_base()
if base then
self.selection_rects.selected.x, self.selection_rects.selected.z = base.x, base.z
self.selection_rects.selected.object = base
else
self.selection_rects.selected.is_visible = false
self.selection_rects.selected.object = nil
end
msg.post('/ui', hashed.selection)
elseif self.player.building ~= 'plate' then
msg.post('/ui', hashed.info, {info = 'Place the building on a plate.'})
end
end
else
focus.set_touch(self)
self.is_focused = true
self.start_x, self.start_y = action.screen_x, action.screen_y
self.start_alpha = camera.alpha
end
elseif self.is_focused then
if action.released then
focus.release_touch(self)
self.is_focused = false
local snap_angle = math.pi / 6
if math.abs(camera.alpha / snap_angle) > 0.001 then
local alpha = camera.alpha + action.screen_dx / 30
self.tween = flux.to(camera, 0.5, {alpha = utils.round(alpha / snap_angle) * snap_angle}):ease('quadout')
end
else
camera.alpha = self.start_alpha + math.pi / 2 * ((action.screen_x - self.start_x) / camera.width)
end
end
else
if action.pressed then
local x, z = self:find_selection_xz(action)
if x then
local object = self:find_selected_object(x, z)
if self.selection_rects.selected.object and self.selection_rects.selected.object.is_unit and not self.selection_rects.selected.object.is_alien then
self:move_unit(self.selection_rects.selected.object, x, z)
self.selection_rects.selected.object.prefered_target = object
msg.post('/audio#click', 'play_sound')
end
else
focus.set_touch(self)
self.is_focused = true
self.mouse_right_start_x, self.mouse_right_start_y = action.x, action.y
self.camera_pan_start = camera.pan
end
elseif self.is_focused then
if action.released then
focus.release_touch(self)
self.is_focused = false
else
local px, pz = self.mouse_right_start_x - action.x, self.mouse_right_start_y - action.y
camera.pan_by(self.camera_pan_start, px, pz)
end
end
end
end
function g:on_mouse_move(action)
local x, z = self:find_selection_xz(action)
if x then
self.selection_rects.cursor.x, self.selection_rects.cursor.z = x, z
self.selection_rects.cursor.is_visible = true
--msg.post('/ui', hashed.info, {info = 'x = ' .. x .. ' z = ' .. z})
else
self.selection_rects.cursor.is_visible = false
end
end
function g:on_mouse_wheel(action)
camera.zoom_step(action.value)
end
function g:find_selection_xz(action)
local origin, direction = camera.ray(action.screen_x, action.screen_y)
local vmin, vmax = vmath.vector3(-4, 3, -4), vmath.vector3(4, 4, 4)
local selected_id, min_distance
for i = 1, #self.tiles do
local id = self.tiles[i]
local position = go.get_world_position(id)
local rotation = go.get_world_rotation(id)
local scale = go.get_scale_uniform(id)
local world = vmath.matrix4()
world.m03 = position.x
world.m13 = position.y
world.m23 = position.z
local is_intersection, distance = ray_intersection.check(origin, direction, position, world, vmin * scale, vmax * scale)
if is_intersection and (not min_distance or min_distance > distance) then
selected_id, min_distance = self.tiles[i], distance
end
end
local x, z
if selected_id then
for i = 1, #self.map do
if self.map[i].go == selected_id then
x, z = self.map[i].x, self.map[i].z
break
end
end
end
return x, z
end
function g:find_selected_object(x, z)
for i = 1, #self.units do
local u = self.units[i]
if u.x == x and u.z == z then
return u
end
end
for i = 1, #self.buildings do
local b = self.buildings[i]
if b.x == x and b.z == z then
return b
end
end
end
return g<file_sep>--[[
check(glm::vec3 ray_origin, glm::vec3 ray_direction, glm::mat4 model, glm::vec3 vmin, glm::vec3 vmax, GLfloat &intersection_distance) {
GLfloat t_min = 0.0f;
GLfloat t_max = 100000.0f;
const glm::vec3 delta = glm::vec3(model[3].x, model[3].y, model[3].z) - ray_origin;
for (uint8_t i = 0; i < 3; ++i) {
// Test intersection with the 2 planes perpendicular to the OBB's axis (in order X, Y, Z).
const glm::vec3 axis = glm::vec3(model[i].x, model[i].y, model[i].z);
const GLfloat e = glm::dot(axis, delta);
const GLfloat f = glm::dot(ray_direction, axis);
if (fabs(f) > 0.001f) { // Standard case.
GLfloat t1 = (e + vmin[i]) / f; // Intersection with the "left" plane.
GLfloat t2 = (e + vmax[i]) / f; // Intersection with the "right" plane.
			// t1 and t2 now contain distances between ray origin and ray-plane intersections.
// We want t1 to represent the nearest intersection,
// so if it's not the case, invert t1 and t2.
if (t1 > t2) {
GLfloat w = t1;
t1 = t2;
t2 = w;
}
// t_max is the nearest "far" intersection (amongst the X,Y and Z planes pairs).
if (t2 < t_max ) t_max = t2;
// t_min is the farthest "near" intersection (amongst the X,Y and Z planes pairs).
if (t1 > t_min) t_min = t1;
// If "far" is closer than "near", then there is NO intersection.
if (t_max < t_min ) return false;
} else if (-e + vmin.x > 0.0f || -e + vmax.x < 0.0f) {
// Rare case : the ray is almost parallel to the planes, so they don't have any "intersection".
return false;
}
}
intersection_distance = t_min;
return true;
}
]]
local _M = {}
function _M.check(ray_origin, ray_direction, position, model, vmin, vmax)
local t_min, t_max = 0, 100000
local delta = position - ray_origin
local xyz = {'x', 'y', 'z'}
for i = 1, 3 do
-- Test intersection with the 2 planes perpendicular to the OBB's axis (in order X, Y, Z).
local axis = vmath.vector3(model['m' .. (i - 1) .. '0'], model['m' .. (i - 1) .. '1'], model['m' .. (i - 1) .. '2'])
local e = vmath.dot(axis, delta)
local f = vmath.dot(ray_direction, axis)
if math.abs(f) > 0.001 then -- Standard case.
local t1 = (e + vmin[xyz[i]]) / f -- Intersection with the "left" plane.
local t2 = (e + vmax[xyz[i]]) / f -- Intersection with the "right" plane.
			-- t1 and t2 now contain distances between ray origin and ray-plane intersections.
-- We want t1 to represent the nearest intersection,
-- so if it's not the case, invert t1 and t2.
if t1 > t2 then
t1, t2 = t2, t1
end
-- t_max is the nearest "far" intersection (amongst the X,Y and Z planes pairs).
if t2 < t_max then t_max = t2 end
-- t_min is the farthest "near" intersection (amongst the X,Y and Z planes pairs).
if t1 > t_min then t_min = t1 end
-- If "far" is closer than "near", then there is NO intersection.
if t_max < t_min then return false end
elseif vmin.x - e > 0 or vmax.x - e < 0 then
-- Rare case : the ray is almost parallel to the planes, so they don't have any "intersection".
return false
end
end
-- Return true if there is intersection and the distance
return true, t_min
end
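-- Usage sketch (hedged): this mirrors how the game script drives the check,
-- pairing the camera module's ray() helper with a translation-only world matrix.
--   local ray_intersection = require('libs.ray_intersection')
--   local origin, direction = camera.ray(action.screen_x, action.screen_y)
--   local position = go.get_world_position(id)
--   local world = vmath.matrix4()
--   world.m03, world.m13, world.m23 = position.x, position.y, position.z
--   local vmin, vmax = vmath.vector3(-4, 3, -4), vmath.vector3(4, 4, 4)
--   local hit, distance = ray_intersection.check(origin, direction, position, world, vmin, vmax)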
return _M<file_sep>-- Jump Point search algorithm
if (...) then
	-- Dependencies
local _PATH = (...):match('(.+)%.search.jps$')
local Heuristics = require ('libs.jumper.core.heuristics')
local Heap = require ('libs.jumper.core.bheap')
-- Internalization
local max, abs = math.max, math.abs
-- Local helpers, these routines will stay private
-- As they are internally used by the public interface
-- Resets properties of nodes expanded during a search
-- This is a lot faster than resetting all nodes
-- between consecutive pathfinding requests
--[[
Looks for the neighbours of a given node.
Returns its natural neighbours plus forced neighbours when the given
node has no parent (generally occurs with the starting node).
Otherwise, based on the direction of move from the parent, returns
neighbours while pruning directions which will lead to symmetric paths.
In case diagonal moves are forbidden, when the given node has no
parent, we return straight neighbours (up, down, left and right).
	Otherwise, we add the left and right nodes (perpendicular to the direction
	of move) to the neighbours list.
--]]
local function findNeighbours(finder, node, clearance)
if node._parent then
local neighbours = {}
local x,y = node._x, node._y
-- Node have a parent, we will prune some neighbours
-- Gets the direction of move
local dx = (x-node._parent._x)/max(abs(x-node._parent._x),1)
local dy = (y-node._parent._y)/max(abs(y-node._parent._y),1)
-- Diagonal move case
if dx~=0 and dy~=0 then
local walkY, walkX
-- Natural neighbours
if finder._grid:isWalkableAt(x,y+dy,finder._walkable, clearance) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x,y+dy)
walkY = true
end
if finder._grid:isWalkableAt(x+dx,y,finder._walkable, clearance) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x+dx,y)
walkX = true
end
if walkX or walkY then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x+dx,y+dy)
end
-- Forced neighbours
if (not finder._grid:isWalkableAt(x-dx,y,finder._walkable, clearance)) and walkY then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x-dx,y+dy)
end
if (not finder._grid:isWalkableAt(x,y-dy,finder._walkable, clearance)) and walkX then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x+dx,y-dy)
end
else
-- Move along Y-axis case
if dx==0 then
local walkY
if finder._grid:isWalkableAt(x,y+dy,finder._walkable, clearance) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x,y+dy)
-- Forced neighbours are left and right ahead along Y
if (not finder._grid:isWalkableAt(x+1,y,finder._walkable, clearance)) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x+1,y+dy)
end
if (not finder._grid:isWalkableAt(x-1,y,finder._walkable, clearance)) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x-1,y+dy)
end
end
-- In case diagonal moves are forbidden : Needs to be optimized
if not finder._allowDiagonal then
if finder._grid:isWalkableAt(x+1,y,finder._walkable, clearance) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x+1,y)
end
if finder._grid:isWalkableAt(x-1,y,finder._walkable, clearance)
then neighbours[#neighbours+1] = finder._grid:getNodeAt(x-1,y)
end
end
else
-- Move along X-axis case
if finder._grid:isWalkableAt(x+dx,y,finder._walkable, clearance) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x+dx,y)
-- Forced neighbours are up and down ahead along X
if (not finder._grid:isWalkableAt(x,y+1,finder._walkable, clearance)) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x+dx,y+1)
end
if (not finder._grid:isWalkableAt(x,y-1,finder._walkable, clearance)) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x+dx,y-1)
end
end
-- : In case diagonal moves are forbidden
if not finder._allowDiagonal then
if finder._grid:isWalkableAt(x,y+1,finder._walkable, clearance) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x,y+1)
end
if finder._grid:isWalkableAt(x,y-1,finder._walkable, clearance) then
neighbours[#neighbours+1] = finder._grid:getNodeAt(x,y-1)
end
end
end
end
return neighbours
end
-- Node do not have parent, we return all neighbouring nodes
return finder._grid:getNeighbours(node, finder._walkable, finder._allowDiagonal, finder._tunnel, clearance)
end
--[[
Searches for a jump point (or a turning point) in a specific direction.
This is a generic translation of the algorithm 2 in the paper:
http://users.cecs.anu.edu.au/~dharabor/data/papers/harabor-grastien-aaai11.pdf
	The currently expanded node is a jump point if it is near a forced node.
	In case diagonal moves are forbidden, when lateral nodes (perpendicular to
	the direction of move) are walkable, we force them to be turning points in order
	to perform a straight move.
--]]
local function jump(finder, node, parent, endNode, clearance)
if not node then return end
local x,y = node._x, node._y
local dx, dy = x - parent._x,y - parent._y
-- If the node to be examined is unwalkable, return nil
if not finder._grid:isWalkableAt(x,y,finder._walkable, clearance) then return end
-- If the node to be examined is the endNode, return this node
if node == endNode then return node end
-- Diagonal search case
if dx~=0 and dy~=0 then
-- Current node is a jump point if one of his leftside/rightside neighbours ahead is forced
if (finder._grid:isWalkableAt(x-dx,y+dy,finder._walkable, clearance) and (not finder._grid:isWalkableAt(x-dx,y,finder._walkable, clearance))) or
(finder._grid:isWalkableAt(x+dx,y-dy,finder._walkable, clearance) and (not finder._grid:isWalkableAt(x,y-dy,finder._walkable, clearance))) then
return node
end
else
-- Search along X-axis case
if dx~=0 then
if finder._allowDiagonal then
-- Current node is a jump point if one of his upside/downside neighbours is forced
if (finder._grid:isWalkableAt(x+dx,y+1,finder._walkable, clearance) and (not finder._grid:isWalkableAt(x,y+1,finder._walkable, clearance))) or
(finder._grid:isWalkableAt(x+dx,y-1,finder._walkable, clearance) and (not finder._grid:isWalkableAt(x,y-1,finder._walkable, clearance))) then
return node
end
else
-- : in case diagonal moves are forbidden
if finder._grid:isWalkableAt(x+1,y,finder._walkable, clearance) or finder._grid:isWalkableAt(x-1,y,finder._walkable, clearance) then return node end
end
else
-- Search along Y-axis case
-- Current node is a jump point if one of his leftside/rightside neighbours is forced
if finder._allowDiagonal then
if (finder._grid:isWalkableAt(x+1,y+dy,finder._walkable, clearance) and (not finder._grid:isWalkableAt(x+1,y,finder._walkable, clearance))) or
(finder._grid:isWalkableAt(x-1,y+dy,finder._walkable, clearance) and (not finder._grid:isWalkableAt(x-1,y,finder._walkable, clearance))) then
return node
end
else
-- : in case diagonal moves are forbidden
if finder._grid:isWalkableAt(x,y+1,finder._walkable, clearance) or finder._grid:isWalkableAt(x,y-1,finder._walkable, clearance) then return node end
end
end
end
-- Recursive horizontal/vertical search
if dx~=0 and dy~=0 then
if jump(finder,finder._grid:getNodeAt(x+dx,y),node,endNode, clearance) then return node end
if jump(finder,finder._grid:getNodeAt(x,y+dy),node,endNode, clearance) then return node end
end
-- Recursive diagonal search
if finder._allowDiagonal then
if finder._grid:isWalkableAt(x+dx,y,finder._walkable, clearance) or finder._grid:isWalkableAt(x,y+dy,finder._walkable, clearance) then
return jump(finder,finder._grid:getNodeAt(x+dx,y+dy),node,endNode, clearance)
end
end
end
--[[
Searches for successors of a given node in the direction of each of its neighbours.
This is a generic translation of the algorithm 1 in the paper:
http://users.cecs.anu.edu.au/~dharabor/data/papers/harabor-grastien-aaai11.pdf
	Also, we notice that processing neighbours in reverse order produces a natural
	looking path, as the pathfinder tends to keep heading in the same direction.
In case a jump point was found, and this node happened to be diagonal to the
node currently expanded in a straight mode search, we skip this jump point.
--]]
local function identifySuccessors(finder, openList, node, endNode, clearance, toClear)
-- Gets the valid neighbours of the given node
-- Looks for a jump point in the direction of each neighbour
local neighbours = findNeighbours(finder,node, clearance)
for i = #neighbours,1,-1 do
local skip = false
local neighbour = neighbours[i]
local jumpNode = jump(finder,neighbour,node,endNode, clearance)
-- : in case a diagonal jump point was found in straight mode, skip it.
if jumpNode and not finder._allowDiagonal then
if ((jumpNode._x ~= node._x) and (jumpNode._y ~= node._y)) then skip = true end
end
-- Performs regular A-star on a set of jump points
if jumpNode and not skip then
-- Update the jump node and move it in the closed list if it wasn't there
if not jumpNode._closed then
local extraG = Heuristics.EUCLIDIAN(jumpNode, node)
local newG = node._g + extraG
if not jumpNode._opened or newG < jumpNode._g then
toClear[jumpNode] = true -- Records this node to reset its properties later.
jumpNode._g = newG
jumpNode._h = jumpNode._h or
(finder._heuristic(jumpNode, endNode))
jumpNode._f = jumpNode._g+jumpNode._h
jumpNode._parent = node
if not jumpNode._opened then
openList:push(jumpNode)
jumpNode._opened = true
else
openList:heapify(jumpNode)
end
end
end
end
end
end
-- Calculates a path.
-- Returns the path from location `<startX, startY>` to location `<endX, endY>`.
return function(finder, startNode, endNode, clearance, toClear)
startNode._g, startNode._f, startNode._h = 0,0,0
local openList = Heap()
openList:push(startNode)
startNode._opened = true
toClear[startNode] = true
local node
while not openList:empty() do
-- Pops the lowest F-cost node, moves it in the closed list
node = openList:pop()
node._closed = true
-- If the popped node is the endNode, return it
if node == endNode then
return node
end
-- otherwise, identify successors of the popped node
identifySuccessors(finder, openList, node, endNode, clearance, toClear)
end
-- No path found, return nil
return nil
end
end<file_sep>-- Converts functions into on_message actions.
local _M = {}
local hashed_key_mt = {
__newindex = function(t, key, value)
rawset(t, hash(key), value)
end
}
local function add_messages(g)
local messages = {}
setmetatable(messages, hashed_key_mt)
function _G.on_message(self, message_id, message, sender)
local field = messages[message_id]
if field then
return field(self.g, message, sender)
end
end
g.messages = messages
end
local h_mouse_move = hash('mouse_move')
local function add_inputs(g)
local inputs = {}
setmetatable(inputs, hashed_key_mt)
function _G.on_input(self, action_id, action)
action_id = action_id or h_mouse_move
local field = inputs[action_id]
if field then
return field(self.g, action)
end
end
g.inputs = inputs
end
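-- Usage sketch (hedged; the require path and handler names are hypothetical).
-- A .script file builds its handlers through this wrapper:
--   local g = require('libs.script')({messages = true, inputs = true})
--   g.messages.selection = function(g, message, sender) --[[ ... ]] end  -- string keys are hashed by the metatable above
--   g.inputs.mouse_click = function(g, action) --[[ ... ]] end
--   function g:init() --[[ ... ]] end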
return function(params)
local g = {}
if params.messages then
add_messages(g)
end
if params.inputs then
add_inputs(g)
end
function _G.init(self)
self.g = {instance = self}
for k, v in pairs(g) do
self.g[k] = v
end
g = nil
if self.g.init then
self.g:init()
end
end
function _G.update(self, dt)
if self.g.update then
self.g:update(dt)
end
end
function _G.on_reload(self)
if self.g.on_reload then
self.g:on_reload()
end
end
function _G.final(self)
if self.g.final then
self.g:final()
end
self.g = nil
end
return g
end<file_sep>--- The Path class.
-- The `path` class is a structure which represents a path (ordered set of nodes) from a start location to a goal.
-- An instance from this class would be a result of a request addressed to `Pathfinder:getPath`.
--
-- This module is used internally by the library.
-- It should normally not be used explicitly, yet it remains fully accessible.
--
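-- A small consumption sketch (hedged: `finder` stands for a configured
-- `pathfinder` instance, see Pathfinder:getPath):
--   local path = finder:getPath(1, 1, 8, 8)
--   if path then
--     for node, count in path:nodes() do
--       print(('step %d: (%d,%d)'):format(count, node:getX(), node:getY()))
--     end
--     print(('length: %.2f'):format(path:getLength()))
--   end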
if (...) then
-- Dependencies
local _PATH = (...):match('(.+)%.path$')
local Heuristic = require ('libs.jumper.core.heuristics')
-- Local references
local abs, max = math.abs, math.max
local t_insert, t_remove = table.insert, table.remove
--- The `Path` class.<br/>
-- This class is callable.
-- Therefore, <em><code>Path(...)</code></em> acts as a shortcut to <em><code>Path:new(...)</code></em>.
-- @type Path
local Path = {}
Path.__index = Path
--- Inits a new `path`.
-- @class function
-- @treturn path a `path`
-- @usage local p = Path()
function Path:new()
return setmetatable({_nodes = {}}, Path)
end
--- Iterates on each single `node` along a `path`. At each step of iteration,
-- returns the `node` plus a count value. Aliased as @{Path:nodes}
-- @class function
-- @treturn node a `node`
-- @treturn int the count for the number of nodes
-- @see Path:nodes
-- @usage
-- for node, count in p:iter() do
-- ...
-- end
function Path:iter()
local i,pathLen = 1,#self._nodes
return function()
if self._nodes[i] then
i = i+1
return self._nodes[i-1],i-1
end
end
end
--- Iterates on each single `node` along a `path`. At each step of iteration,
-- returns a `node` plus a count value. Alias for @{Path:iter}
-- @class function
-- @name Path:nodes
-- @treturn node a `node`
-- @treturn int the count for the number of nodes
-- @see Path:iter
-- @usage
-- for node, count in p:nodes() do
-- ...
-- end
Path.nodes = Path.iter
--- Evaluates the `path` length
-- @class function
-- @treturn number the `path` length
-- @usage local len = p:getLength()
function Path:getLength()
local len = 0
for i = 2,#self._nodes do
len = len + Heuristic.EUCLIDIAN(self._nodes[i], self._nodes[i-1])
end
return len
end
	--- Adds a `node` to the `path`.
	-- @class function
	-- @tparam node node a node to be added to the path
	-- @tparam[opt] int index the index at which the node will be inserted. If omitted, the node will be appended after the last node in the path.
	-- @treturn path self (the calling `path` itself, can be chained)
	-- @usage p:addNode(aNode)
function Path:addNode(node, index)
index = index or #self._nodes+1
t_insert(self._nodes, index, node)
return self
end
	--- `Path` filling modifier. Interpolates between non-contiguous nodes along a `path`
	-- to build a fully continuous `path`. This may be useful when using search algorithms such as Jump Point Search.
-- Does the opposite of @{Path:filter}
-- @class function
-- @treturn path self (the calling `path` itself, can be chained)
-- @see Path:filter
-- @usage p:fill()
function Path:fill()
local i = 2
local xi,yi,dx,dy
local N = #self._nodes
local incrX, incrY
while true do
xi,yi = self._nodes[i]._x,self._nodes[i]._y
dx,dy = xi-self._nodes[i-1]._x,yi-self._nodes[i-1]._y
if (abs(dx) > 1 or abs(dy) > 1) then
incrX = dx/max(abs(dx),1)
incrY = dy/max(abs(dy),1)
t_insert(self._nodes, i, self._grid:getNodeAt(self._nodes[i-1]._x + incrX, self._nodes[i-1]._y +incrY))
N = N+1
else i=i+1
end
if i>N then break end
end
return self
end
--- `Path` compression modifier. Given a `path`, eliminates useless nodes to return a lighter `path`
-- consisting of straight moves. Does the opposite of @{Path:fill}
-- @class function
-- @treturn path self (the calling `path` itself, can be chained)
-- @see Path:fill
-- @usage p:filter()
function Path:filter()
local i = 2
local xi,yi,dx,dy, olddx, olddy
xi,yi = self._nodes[i]._x, self._nodes[i]._y
dx, dy = xi - self._nodes[i-1]._x, yi-self._nodes[i-1]._y
while true do
olddx, olddy = dx, dy
if self._nodes[i+1] then
i = i+1
xi, yi = self._nodes[i]._x, self._nodes[i]._y
dx, dy = xi - self._nodes[i-1]._x, yi - self._nodes[i-1]._y
if olddx == dx and olddy == dy then
t_remove(self._nodes, i-1)
i = i - 1
end
else break end
end
return self
end
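	-- Illustrative round-trip (not from the original docs), assuming the path holds the
	-- straight run (1,1) -> (1,2) -> (1,3):
	--
	--   p:filter()   -- keeps only (1,1) and (1,3); the middle node continues the same direction
	--   p:fill()     -- re-inserts (1,2) by interpolating between the remaining nodes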
--- Clones a `path`.
-- @class function
-- @treturn path a `path`
-- @usage local p = path:clone()
function Path:clone()
local p = Path:new()
for node in self:nodes() do p:addNode(node) end
return p
end
--- Checks if a `path` is equal to another. It also supports *filtered paths* (see @{Path:filter}).
-- @class function
-- @tparam path p2 a path
-- @treturn boolean a boolean
-- @usage print(myPath:isEqualTo(anotherPath))
function Path:isEqualTo(p2)
local p1 = self:clone():filter()
local p2 = p2:clone():filter()
for node, count in p1:nodes() do
if not p2._nodes[count] then return false end
local n = p2._nodes[count]
if n._x~=node._x or n._y~=node._y then return false end
end
return true
end
--- Reverses a `path`.
-- @class function
-- @treturn path self (the calling `path` itself, can be chained)
-- @usage myPath:reverse()
function Path:reverse()
local _nodes = {}
for i = #self._nodes,1,-1 do
_nodes[#_nodes+1] = self._nodes[i]
end
self._nodes = _nodes
return self
end
--- Appends a given `path` to self.
-- @class function
-- @tparam path p a path
-- @treturn path self (the calling `path` itself, can be chained)
-- @usage myPath:append(anotherPath)
function Path:append(p)
for node in p:nodes() do self:addNode(node) end
return self
end
return setmetatable(Path,
{__call = function(self,...)
return Path:new(...)
end
})
end<file_sep>--- The Pathfinder class
--
-- Implementation of the `pathfinder` class.
local _VERSION = ""
local _RELEASEDATE = ""
if (...) then
-- Dependencies
local _PATH = (...):gsub('%.pathfinder$','')
local Utils = require ('libs.jumper.core.utils')
local Assert = require ('libs.jumper.core.assert')
local Heap = require ('libs.jumper.core.bheap')
local Heuristic = require ('libs.jumper.core.heuristics')
local Grid = require ('libs.jumper.grid')
local Path = require ('libs.jumper.core.path')
-- Internalization
local t_insert, t_remove = table.insert, table.remove
local floor = math.floor
local pairs = pairs
local assert = assert
local type = type
local setmetatable, getmetatable = setmetatable, getmetatable
--- Finders (search algorithms implemented). Refers to the search algorithms actually implemented in Jumper.
--
-- <li>[A*](http://en.wikipedia.org/wiki/A*_search_algorithm)</li>
-- <li>[Dijkstra](http://en.wikipedia.org/wiki/Dijkstra%27s_algorithm)</li>
-- <li>[Theta Astar](http://aigamedev.com/open/tutorials/theta-star-any-angle-paths/)</li>
-- <li>[BFS](http://en.wikipedia.org/wiki/Breadth-first_search)</li>
-- <li>[DFS](http://en.wikipedia.org/wiki/Depth-first_search)</li>
-- <li>[JPS](http://harablog.wordpress.com/2011/09/07/jump-point-search/)</li>
-- @finder Finders
-- @see Pathfinder:getFinders
local Finders = {
['ASTAR'] = require ('libs.jumper.search.astar'),
['DIJKSTRA'] = require ('libs.jumper.search.dijkstra'),
['THETASTAR'] = require ('libs.jumper.search.thetastar'),
['BFS'] = require ('libs.jumper.search.bfs'),
['DFS'] = require ('libs.jumper.search.dfs'),
['JPS'] = require ('libs.jumper.search.jps')
}
-- Will keep track of all nodes expanded during the search
-- to easily reset their properties for the next pathfinding call
local toClear = {}
	--- Search modes. Refers to the search modes. In ORTHOGONAL mode, only 4 directions are allowed when moving:
	-- North, East, South and West. In DIAGONAL mode, 8 directions are allowed:
	-- North, East, South, West plus the four diagonals.
--
-- <li>ORTHOGONAL</li>
-- <li>DIAGONAL</li>
-- @mode Modes
-- @see Pathfinder:getModes
local searchModes = {['DIAGONAL'] = true, ['ORTHOGONAL'] = true}
	-- The traceback from the goal node back to the start node (performed only when a path
	-- was found) is handled by Utils.traceBackPath, called from Pathfinder:getPath.
--- The `Pathfinder` class.<br/>
-- This class is callable.
	-- Therefore, <em><code>Pathfinder(...)</code></em> acts as a shortcut to <em><code>Pathfinder:new(...)</code></em>.
-- @type Pathfinder
local Pathfinder = {}
Pathfinder.__index = Pathfinder
--- Inits a new `pathfinder`
-- @class function
-- @tparam grid grid a `grid`
-- @tparam[opt] string finderName the name of the `Finder` (search algorithm) to be used for search.
-- Defaults to `ASTAR` when not given (see @{Pathfinder:getFinders}).
-- @tparam[optchain] string|int|func walkable the value for __walkable__ nodes.
-- If this parameter is a function, it should be prototyped as __f(value)__, returning a boolean:
-- __true__ when value matches a __walkable__ `node`, __false__ otherwise.
-- @treturn pathfinder a new `pathfinder` instance
-- @usage
-- -- Example one
-- local finder = Pathfinder:new(myGrid, 'ASTAR', 0)
--
-- -- Example two
-- local function walkable(value)
-- return value > 0
-- end
-- local finder = Pathfinder(myGrid, 'JPS', walkable)
function Pathfinder:new(grid, finderName, walkable)
local newPathfinder = {}
setmetatable(newPathfinder, Pathfinder)
newPathfinder:setGrid(grid)
newPathfinder:setFinder(finderName)
newPathfinder:setWalkable(walkable)
--newPathfinder:setMode('DIAGONAL')
newPathfinder:setHeuristic('MANHATTAN')
newPathfinder:setTunnelling(false)
return newPathfinder
end
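	-- Since the setters below return the finder itself, configuration can be chained.
	-- A short sketch (illustrative; `myGrid` is assumed to have been built elsewhere):
	--
	--   local finder = Pathfinder(myGrid, 'JPS', 0)
	--   finder:setMode('ORTHOGONAL'):setHeuristic('EUCLIDIAN'):setTunnelling(false)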
--- Evaluates [clearance](http://aigamedev.com/open/tutorial/clearance-based-pathfinding/#TheTrueClearanceMetric)
-- for the whole `grid`. It should be called only once, unless the collision map or the
-- __walkable__ attribute changes. The clearance values are calculated and cached within the grid nodes.
-- @class function
-- @treturn pathfinder self (the calling `pathfinder` itself, can be chained)
-- @usage myFinder:annotateGrid()
function Pathfinder:annotateGrid()
assert(self._walkable, 'Finder must implement a walkable value')
for x=self._grid._max_x,self._grid._min_x,-1 do
for y=self._grid._max_y,self._grid._min_y,-1 do
local node = self._grid:getNodeAt(x,y)
if self._grid:isWalkableAt(x,y,self._walkable) then
local nr = self._grid:getNodeAt(node._x+1, node._y)
local nrd = self._grid:getNodeAt(node._x+1, node._y+1)
local nd = self._grid:getNodeAt(node._x, node._y+1)
if nr and nrd and nd then
local m = nrd._clearance[self._walkable] or 0
m = (nd._clearance[self._walkable] or 0)<m and (nd._clearance[self._walkable] or 0) or m
m = (nr._clearance[self._walkable] or 0)<m and (nr._clearance[self._walkable] or 0) or m
node._clearance[self._walkable] = m+1
else
node._clearance[self._walkable] = 1
end
else node._clearance[self._walkable] = 0
end
end
end
self._grid._isAnnotated[self._walkable] = true
return self
end
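	-- Illustrative follow-up (not from the original docs): once the grid is annotated,
	-- a clearance-aware request can be issued, e.g. for an agent with a 2x2 footprint:
	--
	--   myFinder:annotateGrid()
	--   local path = myFinder:getPath(1, 1, 5, 5, 2)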
	--- Removes [clearance](http://aigamedev.com/open/tutorial/clearance-based-pathfinding/#TheTrueClearanceMetric) values.
-- Clears cached clearance values for the current __walkable__.
-- @class function
-- @treturn pathfinder self (the calling `pathfinder` itself, can be chained)
-- @usage myFinder:clearAnnotations()
function Pathfinder:clearAnnotations()
assert(self._walkable, 'Finder must implement a walkable value')
for node in self._grid:iter() do
node:removeClearance(self._walkable)
end
self._grid._isAnnotated[self._walkable] = false
return self
end
--- Sets the `grid`. Defines the given `grid` as the one on which the `pathfinder` will perform the search.
-- @class function
-- @tparam grid grid a `grid`
-- @treturn pathfinder self (the calling `pathfinder` itself, can be chained)
-- @usage myFinder:setGrid(myGrid)
function Pathfinder:setGrid(grid)
assert(Assert.inherits(grid, Grid), 'Wrong argument #1. Expected a \'grid\' object')
self._grid = grid
self._grid._eval = self._walkable and type(self._walkable) == 'function'
return self
end
--- Returns the `grid`. This is a reference to the actual `grid` used by the `pathfinder`.
-- @class function
-- @treturn grid the `grid`
-- @usage local myGrid = myFinder:getGrid()
function Pathfinder:getGrid()
return self._grid
end
--- Sets the __walkable__ value or function.
-- @class function
-- @tparam string|int|func walkable the value for walkable nodes.
-- @treturn pathfinder self (the calling `pathfinder` itself, can be chained)
-- @usage
-- -- Value '0' is walkable
-- myFinder:setWalkable(0)
--
-- -- Any value greater than 0 is walkable
-- myFinder:setWalkable(function(n)
-- return n>0
	-- end)
function Pathfinder:setWalkable(walkable)
assert(Assert.matchType(walkable,'stringintfunctionnil'),
('Wrong argument #1. Expected \'string\', \'number\' or \'function\', got %s.'):format(type(walkable)))
self._walkable = walkable
self._grid._eval = type(self._walkable) == 'function'
return self
end
--- Gets the __walkable__ value or function.
-- @class function
-- @treturn string|int|func the `walkable` value or function
-- @usage local walkable = myFinder:getWalkable()
function Pathfinder:getWalkable()
return self._walkable
end
--- Defines the `finder`. It refers to the search algorithm used by the `pathfinder`.
-- Default finder is `ASTAR`. Use @{Pathfinder:getFinders} to get the list of available finders.
-- @class function
-- @tparam string finderName the name of the `finder` to be used for further searches.
-- @treturn pathfinder self (the calling `pathfinder` itself, can be chained)
-- @usage
-- --To use Breadth-First-Search
-- myFinder:setFinder('BFS')
-- @see Pathfinder:getFinders
function Pathfinder:setFinder(finderName)
if not finderName then
if not self._finder then
finderName = 'ASTAR'
else return
end
end
assert(Finders[finderName],'Not a valid finder name!')
self._finder = finderName
return self
end
--- Returns the name of the `finder` being used.
-- @class function
-- @treturn string the name of the `finder` to be used for further searches.
-- @usage local finderName = myFinder:getFinder()
function Pathfinder:getFinder()
return self._finder
end
--- Returns the list of all available finders names.
-- @class function
-- @treturn {string,...} array of built-in finders names.
-- @usage
-- local finders = myFinder:getFinders()
-- for i, finderName in ipairs(finders) do
-- print(i, finderName)
-- end
function Pathfinder:getFinders()
return Utils.getKeys(Finders)
end
--- Sets a heuristic. This is a function internally used by the `pathfinder` to find the optimal path during a search.
-- Use @{Pathfinder:getHeuristics} to get the list of all available `heuristics`. One can also define
-- his own `heuristic` function.
-- @class function
	-- @tparam func|string heuristic a `heuristic` function, prototyped as __f(nodeA, nodeB)__ (as called by the bundled search algorithms), or the name of a built-in heuristic as a `string`.
-- @treturn pathfinder self (the calling `pathfinder` itself, can be chained)
-- @see Pathfinder:getHeuristics
-- @see core.heuristics
-- @usage myFinder:setHeuristic('MANHATTAN')
function Pathfinder:setHeuristic(heuristic)
assert(Heuristic[heuristic] or (type(heuristic) == 'function'),'Not a valid heuristic!')
self._heuristic = Heuristic[heuristic] or heuristic
return self
end
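	-- Sketch of a custom heuristic (illustrative): any function receiving the two nodes
	-- compared by the search can be supplied, e.g. a plain Manhattan distance on the
	-- node coordinates:
	--
	--   myFinder:setHeuristic(function(nodeA, nodeB)
	--     return math.abs(nodeA._x - nodeB._x) + math.abs(nodeA._y - nodeB._y)
	--   end)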
--- Returns the `heuristic` used. Returns the function itself.
-- @class function
-- @treturn func the `heuristic` function being used by the `pathfinder`
-- @see core.heuristics
-- @usage local h = myFinder:getHeuristic()
function Pathfinder:getHeuristic()
return self._heuristic
end
--- Gets the list of all available `heuristics`.
-- @class function
-- @treturn {string,...} array of heuristic names.
-- @see core.heuristics
-- @usage
	-- local heur = myFinder:getHeuristics()
-- for i, heuristicName in ipairs(heur) do
-- ...
-- end
function Pathfinder:getHeuristics()
return Utils.getKeys(Heuristic)
end
--- Defines the search `mode`.
-- The default search mode is the `DIAGONAL` mode, which implies 8-possible directions when moving (north, south, east, west and diagonals).
-- In `ORTHOGONAL` mode, only 4-directions are allowed (north, south, east and west).
-- Use @{Pathfinder:getModes} to get the list of all available search modes.
-- @class function
-- @tparam string mode the new search `mode`.
-- @treturn pathfinder self (the calling `pathfinder` itself, can be chained)
-- @see Pathfinder:getModes
-- @see Modes
-- @usage myFinder:setMode('ORTHOGONAL')
function Pathfinder:setMode(mode)
assert(searchModes[mode],'Invalid mode')
self._allowDiagonal = (mode == 'DIAGONAL')
return self
end
--- Returns the search mode.
-- @class function
-- @treturn string the current search mode
-- @see Modes
-- @usage local mode = myFinder:getMode()
function Pathfinder:getMode()
return (self._allowDiagonal and 'DIAGONAL' or 'ORTHOGONAL')
end
--- Gets the list of all available search modes.
-- @class function
-- @treturn {string,...} array of search modes.
-- @see Modes
-- @usage local modes = myFinder:getModes()
	-- for i, modeName in ipairs(modes) do
-- ...
-- end
function Pathfinder:getModes()
return Utils.getKeys(searchModes)
end
--- Enables tunnelling. Defines the ability for the `pathfinder` to tunnel through walls when heading diagonally.
-- This feature __is not compatible__ with Jump Point Search algorithm (i.e. enabling it will not affect Jump Point Search)
-- @class function
-- @tparam bool bool a boolean
-- @treturn pathfinder self (the calling `pathfinder` itself, can be chained)
-- @usage myFinder:setTunnelling(true)
function Pathfinder:setTunnelling(bool)
assert(Assert.isBool(bool), ('Wrong argument #1. Expected boolean, got %s'):format(type(bool)))
self._tunnel = bool
return self
end
--- Returns tunnelling feature state.
-- @class function
-- @treturn bool tunnelling feature actual state
-- @usage local isTunnellingEnabled = myFinder:getTunnelling()
function Pathfinder:getTunnelling()
return self._tunnel
end
--- Calculates a `path`. Returns the `path` from location __[startX, startY]__ to location __[endX, endY]__.
-- Both locations must exist on the collision map. The starting location can be unwalkable.
-- @class function
-- @tparam int startX the x-coordinate for the starting location
-- @tparam int startY the y-coordinate for the starting location
-- @tparam int endX the x-coordinate for the goal location
-- @tparam int endY the y-coordinate for the goal location
-- @tparam int clearance the amount of clearance (i.e the pathing agent size) to consider
-- @treturn path a path (array of nodes) when found, otherwise nil
-- @usage local path = myFinder:getPath(1,1,5,5)
function Pathfinder:getPath(startX, startY, endX, endY, clearance)
self:reset()
local startNode = self._grid:getNodeAt(startX, startY)
local endNode = self._grid:getNodeAt(endX, endY)
assert(startNode, ('Invalid location [%d, %d]'):format(startX, startY))
assert(endNode and self._grid:isWalkableAt(endX, endY),
('Invalid or unreachable location [%d, %d]'):format(endX, endY))
local _endNode = Finders[self._finder](self, startNode, endNode, clearance, toClear)
if _endNode then
return Utils.traceBackPath(self, _endNode, startNode)
end
return nil
end
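	-- Typical consumption of the result (illustrative):
	--
	--   local path = myFinder:getPath(1, 1, 5, 5)
	--   if path then
	--     print(('path length: %.2f'):format(path:getLength()))
	--     for node, count in path:nodes() do
	--       print(('step %d: (%d, %d)'):format(count, node._x, node._y))
	--     end
	--   end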
--- Resets the `pathfinder`. This function is called internally between successive pathfinding calls, so you should not
	-- use it explicitly, unless under specific circumstances.
-- @class function
-- @treturn pathfinder self (the calling `pathfinder` itself, can be chained)
	-- @usage myFinder:reset()
function Pathfinder:reset()
for node in pairs(toClear) do node:reset() end
toClear = {}
return self
end
-- Returns Pathfinder class
Pathfinder._VERSION = _VERSION
Pathfinder._RELEASEDATE = _RELEASEDATE
return setmetatable(Pathfinder,{
__call = function(self,...)
return self:new(...)
end
})
end
<file_sep>local _M = {}
local touch_focus_id
function _M.set_touch(id)
touch_focus_id = id
end
function _M.release_touch()
touch_focus_id = nil
end
function _M.check_touch(id)
return not touch_focus_id or touch_focus_id == id
end
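-- Usage sketch (illustrative; the 'minimap' id is made up): a control claims the touch
-- focus while pressed so that overlapping controls ignore the same touch.
--
--   if action.pressed and _M.check_touch('minimap') then
--     _M.set_touch('minimap')
--   elseif action.released then
--     _M.release_touch()
--   end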
return _M<file_sep>-- Astar algorithm
-- This actual implementation of A-star is based on
-- [Nash A. & al. pseudocode](http://aigamedev.com/open/tutorials/theta-star-any-angle-paths/)
if (...) then
-- Internalization
local ipairs = ipairs
local huge = math.huge
	-- Dependencies
local _PATH = (...):match('(.+)%.search.astar$')
local Heuristics = require ('libs.jumper.core.heuristics')
local Heap = require ('libs.jumper.core.bheap')
-- Updates G-cost
local function computeCost(node, neighbour, finder, clearance)
local mCost = Heuristics.EUCLIDIAN(neighbour, node)
if node._g + mCost < neighbour._g then
neighbour._parent = node
neighbour._g = node._g + mCost
end
end
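	-- This is the usual A* relaxation: if reaching `neighbour` through `node` is cheaper
	-- than its best known cost, re-parent it. E.g. (illustrative numbers) with node._g = 4
	-- and a move cost of 1, a neighbour whose current _g is math.huge gets _g = 5 and
	-- `node` as its parent.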
-- Updates vertex node-neighbour
local function updateVertex(finder, openList, node, neighbour, endNode, clearance, heuristic, overrideCostEval)
local oldG = neighbour._g
local cmpCost = overrideCostEval or computeCost
cmpCost(node, neighbour, finder, clearance)
if neighbour._g < oldG then
local nClearance = neighbour._clearance[finder._walkable]
local pushThisNode = clearance and nClearance and (nClearance >= clearance)
if (clearance and pushThisNode) or (not clearance) then
if neighbour._opened then neighbour._opened = false end
neighbour._h = heuristic(endNode, neighbour)
neighbour._f = neighbour._g + neighbour._h
openList:push(neighbour)
neighbour._opened = true
end
end
end
-- Calculates a path.
-- Returns the path from location `<startX, startY>` to location `<endX, endY>`.
return function (finder, startNode, endNode, clearance, toClear, overrideHeuristic, overrideCostEval)
local heuristic = overrideHeuristic or finder._heuristic
local openList = Heap()
startNode._g = 0
startNode._h = heuristic(endNode, startNode)
startNode._f = startNode._g + startNode._h
openList:push(startNode)
toClear[startNode] = true
startNode._opened = true
while not openList:empty() do
local node = openList:pop()
node._closed = true
if node == endNode then return node end
local neighbours = finder._grid:getNeighbours(node, finder._walkable, finder._allowDiagonal, finder._tunnel)
for i = 1,#neighbours do
local neighbour = neighbours[i]
if not neighbour._closed then
toClear[neighbour] = true
if not neighbour._opened then
neighbour._g = huge
neighbour._parent = nil
end
updateVertex(finder, openList, node, neighbour, endNode, clearance, heuristic, overrideCostEval)
end
end
end
return nil
end
end
<file_sep># starclick
RTS+Clicker Defold game for Ludum Dare 41
<file_sep>local function addNode(self, node, nextNode, ed)
if not self._pathDB[node] then self._pathDB[node] = {} end
self._pathDB[node][ed] = nextNode == ed and node or nextNode
end
-- Path lookupTable
local lookupTable = {}
lookupTable.__index = lookupTable
function lookupTable:new()
local lut = {_pathDB = {}}
return setmetatable(lut, lookupTable)
end
function lookupTable:addPath(path)
local st, ed = path._nodes[1], path._nodes[#path._nodes]
for node, count in path:nodes() do
local nextNode = path._nodes[count+1]
if nextNode then addNode(self, node, nextNode, ed) end
end
end
function lookupTable:hasPath(nodeA, nodeB)
local found
	found = self._pathDB[nodeA] and self._pathDB[nodeA][nodeB]
if found then return true, true end
	found = self._pathDB[nodeB] and self._pathDB[nodeB][nodeA]
if found then return true, false end
return false
end
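-- Usage sketch (illustrative; `pathAB`, `nodeA` and `nodeB` are assumed to come from a
-- pathfinder elsewhere):
--
--   local lut = lookupTable:new()
--   lut:addPath(pathAB)
--   local known, forward = lut:hasPath(nodeA, nodeB)
--   -- `known` says whether a cached path exists, `forward` whether it runs A -> B or B -> A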
return lookupTable<file_sep>-- Hashed string library.
-- Computes hashes at runtime and caches the result.
local _M = {}
setmetatable(_M, {
__index = function(t, key)
local h = hash(key)
rawset(t, key, h)
return h
end
})
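-- Usage sketch (illustrative; the module path is an assumption): the first access
-- computes the hash, later accesses return the cached value.
--
--   local hashed = require('libs.hashed')
--   msg.post('#', hashed.enable)   -- same as msg.post('#', hash('enable')), but cached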
return _M<file_sep>local utils = require('libs.utils')
local g = {}
function g:find_nearby_targets(x, z, is_alien)
local targets = {}
local distance = 1
for i = 1, #self.units do
local u = self.units[i]
if u.is_alien == is_alien and math.abs(u.x - x) <= distance and math.abs(u.z - z) <= distance then
table.insert(targets, u)
end
end
for i = 1, #self.buildings do
local b = self.buildings[i]
if b.is_alien == is_alien and math.abs(b.x - x) <= distance and math.abs(b.z - z) <= distance then
table.insert(targets, b)
end
end
if #targets > 0 then
return targets
end
end
function g:find_humans()
local targets = {}
for i = 1, #self.units do
local u = self.units[i]
if not u.is_alien then
table.insert(targets, u)
end
end
for i = 1, #self.buildings do
local b = self.buildings[i]
if not b.is_alien then
table.insert(targets, b)
end
end
if #targets > 0 then
return targets
end
end
function g:find_free_spot_around(around_x, around_z, is_alien)
local spots = {
true, true, true,
true, true, true,
true, true, true
}
for dx = -1, 1 do
for dz = -1, 1 do
local x = utils.clamp(around_x + dx, 0, self.map_width)
local z = utils.clamp(around_z + dz, 0, self.map_height)
local value = self.ground[x + (z - 1) * self.map_width]
if value == 0 then
spots[dx + 2 + (dz + 1) * 3] = false
end
end
end
for i = 1, #self.units do
local u = self.units[i]
local dx, dz = u.x - around_x, u.z - around_z
if math.abs(dx) <= 1 and math.abs(dz) <= 1 then
spots[dx + 2 + (dz + 1) * 3] = false
end
end
for i = 1, #self.buildings do
local b = self.buildings[i]
local dx, dz = b.x - around_x, b.z - around_z
if math.abs(dx) <= 1 and math.abs(dz) <= 1 then
spots[dx + 2 + (dz + 1) * 3] = false
end
end
local human_spots = {8, 7, 9, 4, 6, 2, 1, 3}
local alien_spots = {2, 1, 3, 4, 6, 8, 7, 9}
for j = 1, 8 do
local i = is_alien and alien_spots[j] or human_spots[j]
if spots[i] then
local dx, dz = (i - 1) % 3 - 1, math.floor((i - 1) / 3) - 1
return around_x + dx, around_z + dz
end
end
end
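-- Note on the indexing above (explanatory, not original code): the 3x3 neighbourhood is
-- flattened as spots[dx + 2 + (dz + 1) * 3], so dx = -1..1 and dz = -1..1 map to indices
-- 1..9 with 5 being the centre; e.g. dx = 0, dz = -1 gives 0 + 2 + 0 * 3 = 2, the first
-- entry of the alien preference list.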
function g:update_ai()
for i = 1, #self.units do
local u = self.units[i]
if not u.is_moving then
if u.target and not u.prefered_target then
local distance = 1
-- Check if target is alive.
if u.target.health == 0 then
u.target = nil
-- Check if target has moved away.
elseif math.abs(u.target.x - u.x) > distance or math.abs(u.target.z - u.z) > distance then
u.target = nil
end
else
-- Check for nearby enemy.
local targets = self:find_nearby_targets(u.x, u.z, not u.is_alien)
if targets then
if u.prefered_target then
for j = 1, #targets do
if targets[j] == u.prefered_target then
u.target = u.prefered_target
break
end
end
u.prefered_target = nil
end
if not u.target then
u.target = targets[math.random(1, #targets)]
end
elseif u.is_alien and math.random() < 0.01 then
-- Move somewhere.
local targets = self:find_humans()
if targets then
local target = targets[math.random(1, #targets)]
u.prefered_target = target
self:move_unit(u, target.x, target.z)
end
end
end
end
end
for i = 1, #self.buildings do
local b = self.buildings[i]
if b.action then
if self.current_time >= b.last_action_time + b.cooldown then
b.last_action_time = self.current_time
if b.action.spawn then
local x, z = self:find_free_spot_around(b.x, b.z, b.is_alien)
if x then
self:spawn_unit{name = b.action.spawn, x = x, z = z}
end
elseif b.action.is_click then
self:click_resources()
end
end
end
end
end
return g<file_sep>return {
units = {
motoquad = {health = 10, speed = 4, attack = 2, cooldown = 1, is_alien = false},
hovercraft = {health = 20, speed = 6, attack = 3, cooldown = 1, is_alien = false},
tower = {health = 40, speed = 0.5, attack = 30, cooldown = 3, is_alien = false, is_tall = true},
robot = {health = 100, speed = 2, attack = 30, cooldown = 2, is_alien = false, is_tall = true},
alien_lvl1 = {health = 5, speed = 2, attack = 1, cooldown = 1, is_alien = true},
alien_lvl2 = {health = 20, speed = 2, attack = 3, cooldown = 1, is_alien = true},
alien_lvl3 = {health = 60, speed = 1, attack = 15, cooldown = 2, is_alien = true, is_tall = true},
alien_lvl4 = {health = 500, speed = 1, attack = 30, cooldown = 3, is_alien = true, is_tall = true}
},
buildings = {
base = {
health = 100, is_alien = false, is_tall = true, click = {name = 'resources', amount = 1},
production = {
{name = 'plate', price = 2},
{name = 'auto_click', price = 10},
{name = 'double_click', price = 10},
{name = 'factory', price = 50},
{name = 'heavy_factory', price = 500},
},
info = 'Build plates first then factories and units.\nDestroy all alien hives.'
},
factory = {
health = 100, is_alien = false, is_tall = true, production = {{name = 'motoquad', price = 3}, {name = 'hovercraft', price = 5}},
info = 'Produces basic units to fight aliens.'
},
heavy_factory = {
health = 500, is_alien = false, is_tall = true, production = {{name = 'tower', price = 50}, {name = 'robot', price = 100}},
		info = 'Produces heavy units to fight aliens.'
},
double_click = {
health = 50, is_alien = false, resources_multiplier = 2,
info = 'Double resources on each click.'
},
auto_click = {
health = 50, is_alien = false, action = {is_click = true}, cooldown = 0.5,
info = 'Automatically clicks for resources.'
},
hive_lvl1 = {health = 100, is_alien = true, action = {spawn = 'alien_lvl1'}, cooldown = 10},
hive_lvl2 = {health = 200, is_alien = true, action = {spawn = 'alien_lvl2'}, cooldown = 15},
hive_lvl3 = {health = 400, is_alien = true, action = {spawn = 'alien_lvl3'}, cooldown = 20},
hive_lvl4 = {health = 1000, is_alien = true, is_tall = true, action = {spawn = 'alien_lvl4'}, cooldown = 30}
},
}
<file_sep>-- ThetaStar implementation
-- See: http://aigamedev.com/open/tutorials/theta-star-any-angle-paths for reference
if (...) then
local _PATH = (...):gsub('%.search.thetastar$','')
	-- Dependencies
local Heuristics = require ('libs.jumper.core.heuristics')
local astar_search = require ('libs.jumper.search.astar')
-- Internalization
local ipairs = ipairs
	local huge, abs = math.huge, math.abs
-- Line Of Sight (Bresenham's line marching algorithm)
-- http://en.wikipedia.org/wiki/Bresenham%27s_line_algorithm
local lineOfSight = function (node, neighbour, finder, clearance)
local x0, y0 = node._x, node._y
local x1, y1 = neighbour._x, neighbour._y
local dx = abs(x1-x0)
local dy = abs(y1-y0)
local err = dx - dy
local sx = (x0 < x1) and 1 or -1
local sy = (y0 < y1) and 1 or -1
while true do
if not finder._grid:isWalkableAt(x0, y0, finder._walkable, finder._tunnel, clearance) then
return false
end
if x0 == x1 and y0 == y1 then
break
end
local e2 = 2*err
if e2 > -dy then
err = err - dy
x0 = x0 + sx
end
if e2 < dx then
err = err + dx
y0 = y0 + sy
end
end
return true
end
-- Theta star cost evaluation
local function computeCost(node, neighbour, finder, clearance)
local parent = node._parent or node
local mpCost = Heuristics.EUCLIDIAN(neighbour, parent)
if lineOfSight(parent, neighbour, finder, clearance) then
if parent._g + mpCost < neighbour._g then
neighbour._parent = parent
neighbour._g = parent._g + mpCost
end
else
local mCost = Heuristics.EUCLIDIAN(neighbour, node)
if node._g + mCost < neighbour._g then
neighbour._parent = node
neighbour._g = node._g + mCost
end
end
end
-- Calculates a path.
-- Returns the path from location `<startX, startY>` to location `<endX, endY>`.
return function (finder, startNode, endNode, clearance, toClear, overrideHeuristic)
return astar_search(finder, startNode, endNode, clearance, toClear, overrideHeuristic, computeCost)
end
end<file_sep>-- Various assertion function for API methods argument-checking
if (...) then
	-- Dependencies
local _PATH = (...):gsub('%.core.assert$','')
local Utils = require ('libs.jumper.core.utils')
-- Local references
local lua_type = type
local floor = math.floor
local concat = table.concat
local next = next
local pairs = pairs
local getmetatable = getmetatable
	-- Is i an integer?
	local function isInteger(i)
		return lua_type(i) == 'number' and (floor(i) == i)
end
-- Override lua_type to return integers
local function type(v)
return isInteger(v) and 'int' or lua_type(v)
end
-- Does the given array contents match a predicate type ?
local function arrayContentsMatch(t,...)
local n_count = Utils.arraySize(t)
if n_count < 1 then return false end
local init_count = t[0] and 0 or 1
local n_count = (t[0] and n_count-1 or n_count)
local types = {...}
if types then types = concat(types) end
for i=init_count,n_count,1 do
if not t[i] then return false end
if types then
if not types:match(type(t[i])) then return false end
end
end
return true
end
-- Checks if arg is a valid array map
local function isMap(m)
if not arrayContentsMatch(m, 'table') then return false end
local lsize = Utils.arraySize(m[next(m)])
for k,v in pairs(m) do
if not arrayContentsMatch(m[k], 'string', 'int') then return false end
if Utils.arraySize(v)~=lsize then return false end
end
return true
end
-- Checks if s is a valid string map
local function isStringMap(s)
if lua_type(s) ~= 'string' then return false end
local w
for row in s:gmatch('[^\n\r]+') do
if not row then return false end
w = w or #row
if w ~= #row then return false end
end
return true
end
-- Does instance derive straight from class
local function derives(instance, class)
return getmetatable(instance) == class
end
-- Does instance inherits from class
local function inherits(instance, class)
return (getmetatable(getmetatable(instance)) == class)
end
-- Is arg a boolean
local function isBoolean(b)
return (b==true or b==false)
end
-- Is arg nil ?
local function isNil(n)
return (n==nil)
end
local function matchType(value, types)
return types:match(type(value))
end
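	-- Quick usage sketch for the helpers above (illustrative values, usable inside this
	-- module where the functions are still locals):
	--
	--   assert(matchType(5, 'intfunction'))          -- 5 reports as 'int', so it matches
	--   assert(isInteger(2) and not isInteger(2.5))
	--   assert(isBoolean(true) and not isNil(true))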
return {
arrayContentsMatch = arrayContentsMatch,
derives = derives,
inherits = inherits,
isInteger = isInteger,
isBool = isBoolean,
isMap = isMap,
isStrMap = isStringMap,
isNil = isNil,
type = type,
matchType = matchType
}
end
<file_sep>local utils = require('libs.utils')
local _M = {
view = vmath.matrix4(), -- Camera view matrix.
projection = vmath.matrix4(), -- Camera projection matrix.
width = 1, -- Screen width.
height = 1, -- Screen height.
alpha = 0,
beta = 0,
distance = 0,
view_width = 0,
target = vmath.vector3(),
alpha_offset = 0,
beta_offset = 0,
dt = 0,
zoom = 0.08,
pan = vmath.vector3(-20, 0, 0)
}
local pl = 75
local pan_limit = {x = {min = -pl, max = pl}, z = {min = -pl, max = pl}}
-- Returns the origin point of the ray and its direction
function _M.ray(x, y)
local ray_start_screen = vmath.vector4((x / _M.width - 0.5) * 2.0, (y / _M.height - 0.5) * 2.0, -1.0, 1.0);
local ray_end_screen = vmath.vector4(ray_start_screen)
ray_end_screen.z = 0
local m = vmath.inv(_M.projection * _M.view)
local ray_start_world = m * ray_start_screen
local ray_end_world = m * ray_end_screen
local w = ray_start_world.w
local origin = vmath.vector3(ray_start_world.x / w, ray_start_world.y / w, ray_start_world.z / w)
w = ray_end_world.w
local direction = vmath.normalize(vmath.vector3(
ray_end_world.x / w - ray_start_world.x,
ray_end_world.y / w - ray_start_world.y,
ray_end_world.z / w - ray_start_world.z
))
-- Origin of the ray.
-- Direction, in world space, of the ray that goes "through" the screen point.
return origin, direction
end
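-- Usage sketch (illustrative): intersect the pick ray with the ground plane y = 0 to get
-- the clicked world position; the screen coordinates would come from on_input.
--
--   local origin, dir = _M.ray(action.screen_x, action.screen_y)
--   if dir.y ~= 0 then
--     local t = -origin.y / dir.y
--     local hit = origin + dir * t   -- world-space point on the y = 0 plane
--   end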
function _M.zoom_step(value)
_M.zoom = utils.clamp(_M.zoom + value / 100, 0.04, 0.16)
end
function _M.pan_by(start, px, pz)
local sx = 2 * _M.zoom
local sy = 3.55 * sx * _M.height / _M.width
px = utils.clamp(start.x + px * sx, pan_limit.x.min, pan_limit.x.max)
pz = utils.clamp(start.z - pz * sy, pan_limit.z.min, pan_limit.z.max)
_M.pan = vmath.vector3(px, 0, pz)
end
return _M
| 226ef5cda1176629197ccadb482279fd4ab91b53 | ["Markdown", "Lua"] | 20 | Lua | Lerg/starclick | d699d44bf5fc6c2ec13fda34455131ec8d122b3f | fc5b223154dffbdca43f8b2f35645ff1ba829089 | refs/heads/master |
<file_sep>#include <stdio.h>
int main()
{
int howmany;
    printf("A large pyramid is built from small pyramids whose base length is 5.\nEnter the number of small pyramids. (Note: the input must equal 1 + 2 + ... + n)\n");
scanf("%d", &howmany);
    //Check that the input has the form 1 + 2 + ... + n
int sum = 0, n, tf = 0;
for(n = 0;sum <= howmany;n++)
{
sum += n;
if(sum != 0 && sum == howmany)
{
tf = 1;
break;
}
}
if(tf != 1)
{
        printf("Invalid input.");
return 0;
}
    //Pyramid construction: n == number of large-pyramid layers, stack == number of small-pyramid layers
    int stack = n * 3; //Total layer count in small layers. Decreases as the loop builds one layer at a time.
    int stackplan = n * 3; //Total layer count in small layers. Never changes.
    int nlocation = 0; //Current large-layer index, counting the top layer as 1.
    int stacklocation; //Current small-layer index, counting the top layer as 1.
    int L; //Column counter for one layer. Decreases in the inner loop.
    int length = 5 * n + (n - 1); //Length of one layer. Never changes; used to reset L on each outer iteration.
printf("n = %d\n", n);
for(stack = stack;stack > 0;stack--)
{
stacklocation = stackplan + 1 - stack;
(stacklocation % 3 == 0) ? (nlocation = stacklocation / 3) : (nlocation = stacklocation / 3 + 1);
for(L = length;L > 0;L--)
{
if(stacklocation % 3 == 1)
{
if((L == stack) || (((L - stack) % 6 == 0) && (L <= (stack + (6 * (nlocation - 1)))) && (L >= stack)))
                //when L equals stack, or L - stack is a multiple of 6 and stack <= L <= stack + 6*(current large layer - 1)
{
printf("^");
}
else
{
printf(" ");
}
}
else if(stacklocation % 3 == 2)
{
if((L == stack) || ((((L - stack) % 6 <= 2) && (L <= (stack + (6 * (nlocation - 1))) + 2) && (L >= stack))))
{
printf("^");
}
else
{
printf(" ");
}
}
else if(stacklocation % 3 == 0)
{
if((L == stack) || ((((L - stack) % 6 <= 4) && (L <= (stack + (6 * (nlocation - 1))) + 4) && (L >= stack))))
{
printf("^");
}
else
{
printf(" ");
}
}
}
printf("\n");
}
return 0;
}
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <time.h>
unsigned long long int s[2];
unsigned long long int xorshift128plus(unsigned long long int spt0, unsigned long long int spt1) {
s[0] = spt0;
s[1] = spt1;
unsigned long long int x = s[0];
unsigned long long int const y = s[1];
s[0] = y;
x ^= x << 23; // a
s[1] = x ^ y ^ (x >> 17) ^ (y >> 26); // b, c
return s[1] + y;
}
int main()
{
int num[10] = { 0, };
int i, j, n, aver = 0;
unsigned long long int z, y, x;
    printf("How many runs? : ");
scanf("%d", &n);
y = time(NULL);
x = clock();
for(j = 0;j < n;j++)
{
if((j != 0) && (j % 10000 == 0))
{
            printf("Completed %d runs...\n", j);
}
z = xorshift128plus(x + j, y + j);
num[z % 10]++;
}
    printf("\nResults of %d runs of xorshift128plus:\n", n);
for(j = 0;j < 10;j++)
{
        printf("Deviation for %d : %d\n", j, (n / 10) - num[j]);
aver += abs((n / 10) - num[j]);
}
    printf("\nAverage deviation : %lg\n", (double)aver / 10);
return 0;
}
<file_sep>#include <stdio.h>
#define BIT 64
union number{
long long i;
double d;
};
void convert(union number n); //prints a floating-point number in binary
int main()
{
union number n;
n.i = n.d = 0;
scanf("%lf", &n.d);
convert(n);
return 0;
}
void convert(union number n)
{
int x, temp[BIT];
for(x = 0; x < BIT; x++)
{
temp[x] = n.i & 1;
n.i >>= 1;
}
for(x = BIT - 1; x >= 0; x--)
{
printf("%d", temp[x]);
        if(BIT - x == 1) //1st position (sign bit of the 64-bit double)
{
printf(" ");
}
        else if(BIT - x == 12) //positions 2-12 (exponent field of the 64-bit double)
{
printf(" ");
}
        else if(((BIT - 12 - x) % 4 == 0) && (BIT - 12 - x) >= 0) //positions 13-64 (mantissa field of the 64-bit double)
{
printf(" ");
}
}
printf("\n");
}
<file_sep>#include <stdio.h>
#include <stdint.h>
//Used as a visual separator when the program restarts
static inline void div(){
printf("\n\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n\n");
};
//Function that prints a (size)-byte integer in binary
void printbit(int64_t num, int size);
int main()
{
int whatkind, size;
int64_t num;
    retry: //forbidden technique (goto) used instead of a while loop
    printf("Choose the size of the integer type to input.\n\n1 : 2 bytes (short)\n2 : 4 bytes (int)\n3 : 8 bytes (long long)\n\n0 : quit\n\nEnter a number : ");
scanf("%d", &whatkind);
switch(whatkind)
{
case 0:
return 0;
case 1:
{
size = 2;
break;
}
case 2:
{
size = 4;
break;
}
case 3:
{
size = 8;
break;
}
default :
            printf("Invalid input. Please enter again.");
div();
goto retry;
}
    printf("Enter a %d-byte integer : ", size);
scanf("%lld", &num);
printbit(num, size);
div();
goto retry;
}
void printbit(int64_t num, int size)
{
int i, temp[size * 8];
for(i = size * 8 - 1; i >= 0; i--)
{
temp[i] = num & 0x0000000000000001;
num >>= 1;
}
for(i = 0; i <= size * 8 - 1; i++)
{
printf("%d", temp[i]);
if((i + 1) % 8 == 0) printf(" ");
}
}
<file_sep>/* ***************************************************************************** */
/* Copyright: <NAME> and <NAME>, University of Montreal */
/* <NAME>, Hiroshima University */
/* Notice: This code can be used freely for personal, academic, */
/* or non-commercial purposes. For commercial purposes, */
/* please contact <NAME> at: <EMAIL> */
/* ***************************************************************************** */
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <time.h>
#include <string.h>
#define W 32
#define R 32
#define M1 3
#define M2 24
#define M3 10
#define MAT0POS(t,v) (v^(v>>t))
#define MAT0NEG(t,v) (v^(v<<(-(t))))
#define Identity(v) (v)
#define V0 STATE[state_i ]
#define VM1 STATE[(state_i+M1) & 0x0000001fU]
#define VM2 STATE[(state_i+M2) & 0x0000001fU]
#define VM3 STATE[(state_i+M3) & 0x0000001fU]
#define VRm1 STATE[(state_i+31) & 0x0000001fU]
#define newV0 STATE[(state_i+31) & 0x0000001fU]
#define newV1 STATE[state_i ]
#define FACT 2.32830643653869628906e-10
static unsigned int state_i = 0;
static unsigned int STATE[R];
static unsigned int z0, z1, z2;
void InitWELLRNG1024a (unsigned int *init);
double WELLRNG1024a (void);
union num{
uint64_t l;
double d;
};
int main()
{
union num number;
unsigned int *init, *temp;
unsigned int i, x[R], n, p, leng, get = 1;
unsigned int want, aver = 0;
char * c;
init = x;
c = (char *)malloc(40);
    printf("Enter the count of values 0 ~ n.\nEnter n : ");
    scanf("%u", &want);
sprintf(c, "%d", want);
leng = strlen(c);
for(i = 0; i < leng; i++)
{
get *= 10;
}
free(c);
    temp = (unsigned int *)malloc(sizeof(unsigned int) * want);
for(i = 0;i < want;i++)
{
temp[i] = 0;
}
    printf("How many runs? : ");
scanf("%u", &n);
for(i = 0; i < R; i++)
{
x[i] = time(NULL) + clock() + i;
}
InitWELLRNG1024a(init);
    printf("Running %u times...\n", n);
for(i = 0; i < n; i++)
{
p = 0;
        if(i % 10000 == 0 && i != 0) printf("Completed %u runs...\n", i);
p = WELLRNG1024a() * get;
p %= want;
temp[p]++;
}
    printf("\n\nResults of %d runs : \n", i);
for(i = 0; i < want; i++)
{
        printf("Count of %u : %u\n", i, temp[i]);
}
printf("\n");
for(i = 0; i < want; i++)
{
        printf("Approximate deviation of %d from the mean : %d\n", i, (n / want) - temp[i]);
aver += abs((n / want) - temp[i]);
}
    printf("\nAverage deviation : %lg\n", (double)aver / (double)want);
free(temp);
return 0;
}
void InitWELLRNG1024a (unsigned int *init){
int j;
state_i = 0;
for (j = 0; j < R; j++)
STATE[j] = init[j];
}
double WELLRNG1024a (void){
z0 = VRm1;
z1 = Identity(V0) ^ MAT0POS (8, VM1);
z2 = MAT0NEG (-19, VM2) ^ MAT0NEG(-14,VM3);
newV1 = z1 ^ z2;
newV0 = MAT0NEG (-11,z0) ^ MAT0NEG(-7,z1) ^ MAT0NEG(-13,z2) ;
state_i = (state_i + 31) & 0x0000001fU;
return ((double) STATE[state_i] * FACT);
}
<file_sep>#include <stdio.h>
struct date {
    int yun; //set to 1 for a leap year, 0 otherwise
int year;
int month;
int day;
};
//Determines whether the year is a leap year. Returns 1 for a leap year, 0 otherwise.
int yuncheck(struct date d);
//Checks for an invalid month input. Returns 1 if valid, 0 on error.
int monthcheck(struct date d);
//Checks for an invalid day input. Set yun to 1 for a leap year, 0 otherwise.
//Returns 1 if valid, 0 on error.
int daycheck(struct date d);
//Adds 'after' days to the date.
struct date dayafter(struct date, int after);
//Reads year, month and day and validates them. Use together with the yuncheck, monthcheck and daycheck functions.
struct date dateinput(void);
int main()
{
struct date d1, d2, d3;
d1 = dateinput();
d2 = dayafter(d1, 7);
d3 = dayafter(d1, 21);
    printf("From the base date,\n7 days later (the 8th day) is : %d.%d.%d\n21 days later (the 22nd day) is : %d.%d.%d\n", d2.year, d2.month, d2.day, d3.year, d3.month, d3.day);
return 0;
}
int yuncheck(struct date d)
{
int tf;
if(d.year % 4 == 0 && d.year % 100 != 0 || d.year % 400 == 0)
{
tf = 1;
}
else
{
tf = 0;
}
return tf;
}
int monthcheck(struct date d)
{
int mtf = 1;
if(d.month < 1 || d.month > 12)
{
mtf = 0;
}
return mtf;
}
int daycheck(struct date d)
{
int dtf = 1;
if(d.day < 1 || (d.yun == 1 && d.month == 2 && d.day > 29) || (d.yun == 0 && d.month == 2 && d.day > 28) || (((d.month % 2 == 1 && d.month < 8) || (d.month % 2 == 0 && d.month >= 8)) && (d.day > 31)) || (((d.month % 2 == 1 && d.month >= 8) || (d.month % 2 == 0 && d.month < 8)) && (d.day > 30)))
{
dtf = 0;
}
return dtf;
}
struct date dayafter(struct date d, int after)
{
d.day += after;
while(d.day > 28)
{
if(d.yun == 1 && d.month == 2 && d.day > 29)
{
if(d.day <= 29) {break;}
d.day -= 29;
d.month++;
}
else if(d.yun == 0 && d.month == 2 && d.day > 28)
{
d.day -= 28;
d.month++;
}
else if(((d.month % 2 == 1 && d.month < 8) || (d.month % 2 == 0 && d.month >= 8)) && (d.day > 31))
{
if(d.day <= 31) {break;}
d.day -= 31;
d.month++;
}
else if(((d.month % 2 == 1 && d.month >= 8) || (d.month % 2 == 0 && d.month < 8)) && (d.day > 30))
{
if(d.day <= 30) {break;}
d.day -= 30;
d.month++;
}
if(d.month > 12)
{
d.year++;
d.month -= 12;
d.yun = yuncheck(d);
}
}
return d;
}
struct date dateinput(void)
{
struct date d;
while(1)
{
d.yun = d.year = d.month = d.day = 0;
        printf("Enter the base date as year.month.day (example : 2019.8.25)\nInput : ");
scanf("%d.%d.%d", &d.year, &d.month, &d.day);
getchar();
if(d.month == 0 || d.day == 0)
{
            printf("Year %d\n", d.year);
            printf("Invalid input. (If the program does not continue automatically, press Enter.)\n\n");
while(getchar() != '\n'){}
continue;
}
d.yun = yuncheck(d);
if(monthcheck(d) == 0)
{
            printf("Invalid month input.\n\n");
continue;
}
if(daycheck(d) == 0)
{
            printf("Invalid day input.\n\n");
continue;
}
break;
}
return d;
}
| 2e987148948a7cf2b0b60abf287b7c3f618e89d7 | ["C"] | 6 | C | Arinjbj/ArinLabjbj | f95aaa1e52f8388238f217b29400014ce079c7d1 | b8e71a0942ee4e74539f6d371e8842060f541a4b | refs/heads/master |
<repo_name>ahmedbodi/anycoinj<file_sep>/core/src/test/java/com/google/bitcoin/core/PostgresFullPrunedBlockChainTest.java
package com.google.bitcoin.core;
import com.google.bitcoin.store.BlockStoreException;
import com.google.bitcoin.store.FullPrunedBlockStore;
import com.google.bitcoin.store.PostgresFullPrunedBlockStore;
import org.junit.Ignore;
/**
* A Postgres implementation of the {@link AbstractFullPrunedBlockChainTest}
*/
@Ignore("enable the postgres driver dependency in the maven POM")
public class PostgresFullPrunedBlockChainTest extends AbstractFullPrunedBlockChainTest
{
// Replace these with your postgres location/credentials and remove @Ignore to test
private static final String DB_HOSTNAME = "localhost";
private static final String DB_NAME = CoinDefinition.coinName.toLowerCase() + "j_test";
private static final String DB_USERNAME = CoinDefinition.coinName.toLowerCase() +"j";
private static final String DB_PASSWORD = "<PASSWORD>";
@Override
public FullPrunedBlockStore createStore(NetworkParameters params, int blockCount)
throws BlockStoreException {
return new PostgresFullPrunedBlockStore(params, blockCount, DB_HOSTNAME, DB_NAME, DB_USERNAME, DB_PASSWORD);
}
@Override
public void resetStore(FullPrunedBlockStore store) throws BlockStoreException {
((PostgresFullPrunedBlockStore)store).resetStore();
}
}
| 3f43799196438b2bb5bfe66299e4f4debe8401c8 | ["Java"] | 1 | Java | ahmedbodi/anycoinj | 765776a352b358c52ebb0740ad1bf9a2830643fe | b01b6465561f6f4db8c9c6b233ae3db77dfd1ffa | refs/heads/master |
<file_sep># The Kingdom of Weblot :crown:
A collection of sample code illustrating HTML5 APIs.
## #howto?
As the [assignment][enunciado] states, the work consists of (a) a seminar
presentation, plus the creation and delivery of (b) a simple code project illustrating
each API chosen by the group.
Project (b) must be delivered as a _Pull Request_ (see [[1]][using-pull-requests] and
[[2]][creating-pull-requests]) to this repository. In other words, one (01) member of the group must:
1. Fork this repository and grant write (commit/push) permission to all group members;
   - Click the _fork_ button and then click **Settings**
1. Create a _branch_ named 'year/semester' (e.g., `2019/02`);
   ```
   git branch 2019/02
   ```
   or alternatively (I like this one better, because it creates the _branch_ and switches to it right away):
   ```
   git checkout -b 2019/02
   ```
1. Create, **inside the `/apis` folder**, a folder named after the API of your choice (**lowercase name,
   no accents, spaces replaced by hyphens and without the API prefix or suffix** - _e.g._, `/apis/drag-and-drop/`) and put
   your code there. There must be one such folder for each API you chose
1. Edit the `apis.json` file (at the root) adding the meta-information about each API
   the group chose (**without removing the one that is already there**). You should look at the
   drag'n'drop API example and do it the same way:
```json
{
"semestre": "2019/01",
"apis": [
{
"nome": "Drag and Drop API",
"breveDescricao": "Possibilita o arraste de elementos HTML em cima de outros",
"paginaInicial": "apis/drag-and-drop/",
"screenshot": "apis/drag-and-drop/images/screenshot.png",
"desenvolvedores": [
{
"nome": "<NAME>",
"usuarioGithub": "fegemo"
}
],
"suporteDeNavegadores": {
"chrome": true,
"safari": true,
"firefox": true,
"edge": true,
"opera": true
}
},
{
"nome": "MINHA API",
"breveDescricao": "MINHA DESCRIÇÃO... etc."
}
]
}
```
   - Don't forget to put a comma between the object that represents the
     "Drag and Drop" API and the object you are going to create to represent your
     API(s)
Members of the same group must work on the _fork_ created by a single member, i.e.,
the one who created the _fork_ must grant write permission to the other(s).
Take a look at the example project that is already there, about the _drag and drop_ API:
[source code][drag-and-drop-code] and the [live example][drag-and-drop-live]. Try to
follow the same idea and format (good-looking and creative).
### What it must contain
Your code must contain a simple yet interesting illustration of the use **of each chosen
API**. Try to separate the source code files into folders
(_e.g._, `styles`, `scripts`, `images` etc.).
In the root directory of each example (_e.g._, `/apis/drag-and-drop/`) there must be an
`index.html` file with the initial (and probably only) page, which will be the "front door"
of the example.
In addition, create a `README.md` file (Markdown format - see [[3]][markdown] and
[[4]][markdown-tutorial]) for each chosen API containing at least three sections (following the
[drag and drop API README.md][drag-and-drop-readme] as an example):
1. API xyz
   - Brief description of the API (one line)
   - **A _screenshot_ of your example running**
1. Example Links
   - **Link to your seminar _slides_**
   - **Link to the live example** (format: https://fegemo.github.io/cefet-web-weblot/apis/PASTA_DO_SEU_PROJETO/)
1. Credits
   - **Indication of who the authors are**
   - Text giving proper attribution to any third-party resources used (images, music, source code etc.)
## Submitting
As mentioned, submission is done via a _pull request_. As soon as you do the final `git push` to your _fork_,
go to the _fork_ page on Github and click "Pull Request":

On that screen, you must choose your _branch_ (`year/semester`) as the source and the original repository's _branch_ (`year/semester`) as the destination (not `master`).
[enunciado]: https://github.com/fegemo/cefet-web/blob/master/assignments/seminar-html5/README.md#seminário---apis-do-html5
[using-pull-requests]: https://help.github.com/articles/using-pull-requests/
[creating-pull-requests]: https://help.github.com/articles/creating-a-pull-request/
[markdown]: https://daringfireball.net/projects/markdown/
[markdown-tutorial]: https://guides.github.com/features/mastering-markdown/
[drag-and-drop-code]: https://github.com/fegemo/cefet-web-weblot/tree/master/apis/drag-and-drop/
[drag-and-drop-live]: https://fegemo.github.io/cefet-web-weblot/apis/drag-and-drop/
[drag-and-drop-readme]: https://github.com/fegemo/cefet-web-weblot/blob/master/apis/drag-and-drop/README.md
<file_sep>import {SceneCreator} from "./SceneCreator.js";
const NEAR = 0.1;
const FAR = 500;
const INITIAL_ANGLE = 60;
let debugMode = false;
class World {
constructor (){
this.width = window.innerWidth;
this.height = window.innerHeight;
this.aspect = this.width/this.height;
this.container = document.querySelector("#container");
this.container.innerHTML = "";
//set camera (viewAngle, aspectRatio, near, far)//
this.camera = new THREE.PerspectiveCamera(60, this.aspect, NEAR, FAR);
this.scene = new THREE.Scene();
this.renderer = new THREE.WebGLRenderer({antialias: true});
this.renderer.setSize(this.width, this.height);
this.container.appendChild(this.renderer.domElement);
this.controls = new THREE.OrbitControls(this.camera, this.renderer.domElement);
this.controls.enabled = true;
this.controls.enableDamping = true;
this.setScene();
if (typeof VRFrameData === "undefined") {
this.active = false;
console.error("WebVR not supported");
return;
}
this.active = true;
this.firstVRFrame = false;
this.vr = {
display: null,
frameData: new VRFrameData()
};
requestAnimationFrame(() => this.update());
this.getDisplays();
window.addEventListener("vrdisplayactivate", () => this.activateVR());
window.addEventListener("vrdisplaydeactivate", () => this.deactivateVR());
window.addEventListener("resize", () => this.resize());
}
resize(){
this.width = window.innerWidth;
this.height = window.innerHeight;
this.aspect = this.width/this.height;
this.renderer.setSize(this.width, this.height);
if (!this.camera) {
return;
}
this.camera.aspect = this.aspect;
this.camera.updateProjectionMatrix();
}
setScene(){
this.objects = [];
let sceneCreator = new SceneCreator();
let environment = {
camera: this.camera,
scene: this.scene,
objects: this.objects,
}
sceneCreator.createScene(environment);
let light = new THREE.DirectionalLight(0x002288);
light.position.set(0, 45, 0);
this.scene.add(light);
light = new THREE.AmbientLight(0x222222);
this.scene.add(light);
this.camera.position.set(0, 5, 0);
this.controls.update();
}
getDisplays(){
navigator.getVRDisplays().then(displays => {
//Filter down to devices that can present.
displays = displays.filter(display => display.capabilities.canPresent);
//If there are no devices available, quit out.
if (displays.length === 0) {
console.error("No device available is able to present");
return;
}
//Store the first display we find. A more production-ready version should
//allow the user to choose from their available displays.
this.vr.display = displays[0];
this.vr.display.depthNear = NEAR;
this.vr.display.depthFar = FAR;
this.createPresentationButton();
});
}
getPresent(){
this.vr.display.requestPresent([{
source: this.renderer.domElement
}])
.catch(e => {
      console.error(`Unable to init VR: ${e}`);
});
}
createPresentationButton () {
this.vrButton = document.createElement("button");
this.vrButton.classList.add("vr-toggle");
this.vrButton.textContent = "Enable VR";
this.vrButton.addEventListener("click", () => this.toggleVR());
document.body.appendChild(this.vrButton);
}
activateVR () {
if (!this.vr.display) {
return;
}
this.getPresent();
this.vrButton.textContent = "Disable VR";
}
deactivateVR () {
if (!this.vr.display) {
return;
}
if (!this.vr.display.isPresenting) {
return;
}
this.vr.display.exitPresent();
this.vrButton.textContent = "Enable VR";
}
toggleVR () {
if (this.vr.display.isPresenting) {
return this.deactivateVR();
}
return this.activateVR();
}
update(){
this.render();
}
render(){
if (this.active == false || !(this.vr.display && this.vr.display.isPresenting)) {
this.resize(); //Ensure that we switch everything back to auto for non-VR mode//
this.renderer.autoClear = true;
this.scene.matrixAutoUpdate = true;
this.renderer.render(this.scene, this.camera);
requestAnimationFrame(() => this.update());
}
else if (this.firstVRFrame) {
this.firstVRFrame = false;
this.vr.display.requestAnimationFrame(() => this.update());
}
else {
const EYE_WIDTH = this.width * 0.5;
const EYE_HEIGHT = this.height;
//Get all the latest data from the VR headset and dump it into frameData//
this.vr.display.getFrameData(this.vr.frameData);
//Disable autoupdating because these values will be coming from the//
//frameData data directly//
this.scene.matrixAutoUpdate = false;
//Make sure not to clear the renderer automatically, because we will need//
//to render it ourselves twice, once for each eye//
this.renderer.autoClear = false;
//Clear the canvas manually//
this.renderer.clear();
//Left eye//
let leftEyeParameters = {
x: 0,
y: 0,
w: EYE_WIDTH,
h: EYE_HEIGHT
}
this.renderEye(this.vr.frameData.leftViewMatrix, this.vr.frameData.leftProjectionMatrix, leftEyeParameters);
//Ensure that left eye calcs aren't going to interfere with right eye ones//
this.renderer.clearDepth();
//Right eye//
let rightEyeParameters = {
x: EYE_WIDTH,
y: 0,
w: EYE_WIDTH,
h: EYE_HEIGHT
}
this.renderEye(this.vr.frameData.rightViewMatrix, this.vr.frameData.rightProjectionMatrix, rightEyeParameters);
// Use the VR display's in-built rAF (which can be a diff refresh rate to
// the default browser one).
this.vr.display.requestAnimationFrame(() => this.update());
// Call submitFrame to ensure that the device renders the latest image from
// the WebGL context.
this.vr.display.submitFrame();
}
}
renderEye (viewMatrix, projectionMatrix, viewport) {
// Set the left or right eye half//
this.renderer.setViewport(viewport.x, viewport.y, viewport.w, viewport.h);
// Update the scene and camera matrices.
this.camera.projectionMatrix.fromArray(projectionMatrix);
this.scene.matrix.fromArray(viewMatrix);
// Tell the scene to update (otherwise it will ignore the change of matrix).
this.scene.updateMatrixWorld(true);
this.renderer.render(this.scene, this.camera);
}
}
let world = new World();<file_sep>export class ObjectCreator {
constructor(){
this.loader = new THREE.TextureLoader();
this.path = './images/';
this.grassTopTexture = this.loader.load(this.path + 'grass.jpg');
this.grassSideTexture = this.loader.load(this.path + 'grass-side.jpg');
this.grassBottomTexture = this.loader.load(this.path + 'earth.jpg');
this.waterTexture = this.loader.load(this.path + 'water.png');
this.earthTexture = this.loader.load(this.path + 'earth.jpg');
this.cloudTexture = this.loader.load(this.path + 'cloud.png');
this.sandTexture = this.loader.load(this.path + 'sand.jpg');
this.rockTexture = this.loader.load(this.path + 'rock.jpg');
this.treeTrunkTopTexture = this.loader.load(this.path + 'tree-trunk-top.jpg');
this.treeTrunkSideTexture = this.loader.load(this.path + 'tree-trunk-side.jpg');
this.treeTrunkBottomTexture = this.loader.load(this.path + 'tree-trunk-top.jpg');
this.treeLeafTexture = this.loader.load(this.path + 'tree-leaf.png');
}
createGrassCube(){
let materials = [
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.grassSideTexture, side: THREE.DoubleSide}), //right
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.grassSideTexture, side: THREE.DoubleSide}), //left
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.grassTopTexture, side: THREE.DoubleSide}), //top
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.grassBottomTexture, side: THREE.DoubleSide}), //bottom
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.grassSideTexture, side: THREE.DoubleSide}), //front
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.grassSideTexture, side: THREE.DoubleSide}) //side
];
let geometry = new THREE.BoxGeometry(1, 1, 1);
return new THREE.Mesh(geometry, materials);
}
createTransparentTextureCube(texture, scale){
let material = new THREE.MeshBasicMaterial({color: 0xffffff, map: texture, side: THREE.DoubleSide,
transparent: true, opacity: 0.5});
let geometry = new THREE.BoxGeometry(scale.x, scale.y, scale.z);
return new THREE.Mesh(geometry, material);
}
createSimpleTextureCube(texture, scale){
let material = new THREE.MeshBasicMaterial({color: 0xffffff, map: texture, side: THREE.DoubleSide});
let geometry = new THREE.BoxGeometry(scale.x, scale.y, scale.z);
return new THREE.Mesh(geometry, material);
}
createSimpleColorCube(color, scale){
let material = new THREE.MeshBasicMaterial({color: color, side: THREE.DoubleSide});
let geometry = new THREE.BoxGeometry(scale.x, scale.y, scale.z);
return new THREE.Mesh(geometry, material);
}
createWaterCube(){
let material = new THREE.MeshBasicMaterial({color: 0xffffff, map: this.waterTexture, side: THREE.DoubleSide,
transparent: true, opacity: 0.25});
let geometry = new THREE.BoxGeometry(1, 1, 1);
return new THREE.Mesh(geometry, material);
}
createEarthCube(){
return this.createSimpleTextureCube(this.earthTexture, {x: 1, y: 1, z: 1});
}
createCloudCube(){
return this.createTransparentTextureCube(this.cloudTexture, {x: 1, y: 1, z: 1});
}
createSandCube(){
return this.createSimpleTextureCube(this.sandTexture, {x: 1, y: 1, z: 1});
}
createRockCube(){
return this.createSimpleTextureCube(this.rockTexture, {x: 1, y: 1, z: 1});
}
createTreeTrunkCube(){
let materials = [
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.treeTrunkSideTexture, side: THREE.DoubleSide}), //right
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.treeTrunkSideTexture, side: THREE.DoubleSide}), //left
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.treeTrunkTopTexture, side: THREE.DoubleSide}), //top
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.treeTrunkBottomTexture, side: THREE.DoubleSide}), //bottom
new THREE.MeshBasicMaterial({color: 0xffffff, map: this.treeTrunkSideTexture, side: THREE.DoubleSide}), //front
            new THREE.MeshBasicMaterial({color: 0xffffff, map: this.treeTrunkSideTexture, side: THREE.DoubleSide}) //back
];
let geometry = new THREE.BoxGeometry(1, 1, 1);
return new THREE.Mesh(geometry, materials);
}
createTreeLeafCube(){
return this.createTransparentTextureCube(this.treeLeafTexture, {x: 1, y: 1, z: 1});
}
}
|
9729ec63a4eb186435fd2cdb6b3f1e05021c06de
|
[
"Markdown",
"JavaScript"
] | 3 |
Markdown
|
silveiravh/cefet-web-weblot
|
964892c13afab7d52924bf2e388cb11bd9790cbf
|
5928003ce01a7e62468223e41fe326363c9fcde8
|
refs/heads/main
|
<repo_name>StefanAmur/php-order-form<file_sep>/README.md
# PHP Order Form
## The mission
You need to make a form for a webshop, and add validation to the form. The customer can order various sandwiches, and then both the restaurant owner and the customer receive an e-mail with the details of the order.
You will add a counter at the bottom of the page that shows the total amount of money that has been spent on this page for this user. This counter should keep going up even when the user closes his browser.
## Learning objectives
Note about the icons/emojis:
✔ - means that I consider the objective reached or the feature requirement met
❌ - means it's still work in progress 🤫
- ✔ Be able to tell the difference between the superglobals `$_GET`, `$_POST`, `$_COOKIE` and `$_SESSION` (see the short sketch after this list).
- ✔ Be able to write basic validation for PHP.
- ✔ Be able to send an email with PHP
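As a quick refresher, the difference boils down to where each superglobal gets its data. An illustrative sketch (the keys are borrowed from this project, but this exact snippet is not code that exists in it):
```
// Where each superglobal comes from (illustrative values only)
$food   = $_GET['food'] ?? null;       // parsed from the URL query string, e.g. index.php?food=0
$email  = $_POST['email'] ?? null;     // submitted in the body of a form POST
$spent  = $_COOKIE['history'] ?? null; // stored by the browser and sent back with every request
session_start();
$street = $_SESSION['street'] ?? null; // kept on the server, tied to the session cookie
```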
## Assignment duration & type
We had 3 days to complete it (25/10 - 27/10) and it was a solo exercise.
## Required features
### Step 1: Validation
> ✔ Validate that the email address is a valid email
>
> ✔ Make sure that all fields are required
>
> ✔ Make sure that street number and postal code are only numbers
>
> ✔ After sending the form, if there are errors display them to the user
>
> ✔ If the form is invalid, make sure that the values entered previously are still there so the user doesn't have to enter them again
>
> ✔ If the form is valid, show the user a message
#### How did it go?
It went well; for email validation I used `FILTER_VALIDATE_EMAIL`, as it was a tip in the assignment.
For the validity of the other fields, I used RegEx.
For the "fields required" part, I simply checked whether they were empty.
> 🤔 💡 I could have used only RegEx for both empty field and correct format validation. The way I set up RegEx, it already checks if the string has at least one character. Might look into using only RegEx but still have different error messages depending on what check fails.
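A minimal sketch of that RegEx-only idea (a hypothetical `validateField()` helper, not code that exists in this repo): one pattern per field, with the error message chosen depending on whether the value was empty or just badly formatted.
```
function validateField(string $value, string $pattern, string $name, array &$errors)
{
    // Hypothetical helper: a single RegEx check, but two different error messages
    if (!preg_match($pattern, $value)) {
        $errors[] = ($value === "")
            ? "$name required"
            : "$name has an invalid format";
    }
}

// Usage with the same kind of pattern as the existing checks
validateField($_POST["streetnumber"] ?? "", "/^[\d]+$/", "Street number", $error_array);
```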
Coming back to what I actually did: if there were any errors due to empty/invalid fields, I stored them in an array and displayed each of them to the user.
Code example for validation:
```
if (empty($_POST["streetnumber"])) {
array_push($error_array, "Street number required");
} else {
$streetnumber = test_input($_POST["streetnumber"]);
if (!preg_match("/^[\d]+$/", $streetnumber)) {
array_push($error_array, "Street number can only contain... numbers");
}
}
```
### Step 2: Make sure the address is saved
> ✔ Save all the address information as long as the user doesn't close the browser.
>
> ✔ Pre-fill the address fields with the saved address.
#### How did it go?
I used `$_SESSION` to save all the address information, but of course only if there were no errors.
Code example for pre-filling an input field:
```
if (!isset($_POST['email'])) {
if (isset($_SESSION['email'])) {
$_POST['email'] = $_SESSION['email'];
}
}
```
### Step 3: ✔ Switch between drinks and food
There are 2 different $product arrays, one with drinks, the other with food. Depending on which link at the top of the page you click, you should be able to order food or drinks (never both). The food items should be the default.
#### How did it go?
In the exercise, the code that we got had 2 arrays with the same name, one containing the foods and the other the drinks. Because the drinks array came after the food one, the drinks were always displayed on page load.
First choice:
```
if (str_ends_with("$_SERVER[REQUEST_URI]", '?food=0')) {
$products = [
['name' => 'Cola', 'price' => 2],
['name' => 'Fanta', 'price' => 2],
['name' => 'Sprite', 'price' => 2],
['name' => 'Ice-tea', 'price' => 3],
];
}
```
Second try, after I was advised to use `$_GET`:
```
if (isset($_GET['food'])) {
if ($_GET['food'] == '0')
$products = [
['name' => 'Cola', 'price' => 2],
['name' => 'Fanta', 'price' => 2],
['name' => 'Sprite', 'price' => 2],
['name' => 'Ice-tea', 'price' => 3],
];
}
```
This way the drinks array was displayed only if the user clicked on the drinks link.
### Step 4: ✔ Calculate the delivery time
Calculate the expected delivery time for the product. For normal delivery all orders are fulfilled in 2 hours, for express delivery it is only 45 minutes. Add this expected time to the confirmation message. If you are wondering: they deliver with drones.
#### How did it go?
I checked whether the user ticked the express delivery checkbox and updated the total order price and the estimated delivery time based on that.
```
if (isset($_POST['express_delivery'])) {
$totalOrderPrice = $orderPrice + $expressCost;
$deliveryTime = date("H:i", time() + 2700);
} else {
$totalOrderPrice = $orderPrice;
$deliveryTime = date("H:i", time() + 7200);
}
```
### Step 5: ✔ Total revenue counter
Add a counter at the bottom of the page that shows the total amount of money that has been spent on this page from this browser. Should you use a `COOKIE` or a `SESSION` variable for this?
#### How did it go?
It was obvious that I should use a `cookie`, since it persists even after a browser restart. Initially I had a hard time making it work, but Sicco (my teacher🙏) quickly pointed out what my debugger had been telling me all along: I was echoing something before setting the cookie. After I fixed that, it worked like magic.
```
if (!isset($_COOKIE['history'])) {
$totalValue = $totalOrderPrice;
setcookie('history', strval($totalOrderPrice), time() + (60 * 60 * 24 * 30), '/');
} else {
$totalValue = $_COOKIE['history'] + $totalOrderPrice;
setcookie('history', strval($totalValue), time() + (60 * 60 * 24 * 30), '/');
}
```
PS: `$totalValue` represents the total amount spent by the user across all of their orders, not just the last one.
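For context, the bug mentioned above is the classic "headers already sent" problem: `setcookie()` adds an HTTP header, so it has to run before any output is echoed. A minimal illustrative sketch (not code from this repo):
```
// Wrong: output has already been sent, so the Set-Cookie header can no longer be added
echo "Thanks for your order!";
setcookie('history', '10', time() + 3600, '/'); // "headers already sent" warning, cookie is lost

// Right: set the cookie first, only then produce output
setcookie('history', '10', time() + 3600, '/');
echo "Thanks for your order!";
```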
### Step 6: ✔ Send the email
Use the `mail()` function in PHP to send an email with a summary of the order. The email should contain all information filled in by the user + the total price of all ordered items. Display the expected delivery time. Make sure to not forget the extra cost for express delivery! Send this email to the user + a predefined email of the restaurant owner.
#### How did it go?
Initially I expected it to be harder than it actually was. The PHP `mail()` function is pretty straightforward and easy to use.
I just needed to set up SMTP and enable access for less secure apps in the new Gmail account created only for this 😂.
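For reference, PHP's built-in `mail()` only reads a handful of php.ini directives (`SMTP`, `smtp_port` and `sendmail_from` on Windows, `sendmail_path` elsewhere), and none of them handle SMTP authentication, which is why a Gmail setup usually also needs a sendmail wrapper or a mailer library. A hedged sketch of the runtime equivalents (example values, not the actual configuration used here):
```
// Runtime equivalents of the php.ini mail settings that mail() consults (example values only)
ini_set('SMTP', 'smtp.example.com');            // relay host, used on Windows builds of PHP
ini_set('smtp_port', '25');                     // relay port
ini_set('sendmail_from', 'orders@example.com'); // default sender address used by mail() on Windows
```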
The "predefined email" part was basically a requirement to use define in PHP
```
define("HamEmail", "Bcc: <EMAIL>");
```
```
mail($email, 'Your order from "The Personal Ham Processor"', $message, HamEmail);
```
### Nice to have features
- ❌ Change the checkboxes to an input field to accept a quantity. Change the price calculation and email to support buying multiple items
- ❌ Make it possible to buy both drinks and food in one order. Still have the selections toggleable (no javascript!)
- ❌ Change the products to use Objects instead of an array
<img src="./img/arnold.jpg" alt="<NAME> I'll be back" height="300"/>
<file_sep>/index.php
<?php
//this line makes PHP behave in a more strict way
declare(strict_types=1);
//we are going to use session variables so we need to enable sessions
session_start();
// check if cookies are set and set the totalValue depending on that
if (isset($_COOKIE['history'])) {
$totalValue = $_COOKIE['history'];
} else {
$totalValue = 0;
}
function whatIsHappening() {
echo '<h2>$_GET</h2>';
var_dump($_GET);
echo '<h2>$_POST</h2>';
var_dump($_POST);
echo '<h2>$_COOKIE</h2>';
var_dump($_COOKIE);
echo '<h2>$_SESSION</h2>';
var_dump($_SESSION);
}
//your products with their price.
$products = [
['name' => 'Club Ham', 'price' => 3.20],
['name' => 'Club Cheese', 'price' => 3],
['name' => 'Club Cheese & Ham', 'price' => 4],
['name' => 'Club Chicken', 'price' => 4],
['name' => 'Club Salmon', 'price' => 5]
];
if (isset($_GET['food'])) {
if ($_GET['food'] == '0')
$products = [
['name' => 'Cola', 'price' => 2],
['name' => 'Fanta', 'price' => 2],
['name' => 'Sprite', 'price' => 2],
['name' => 'Ice-tea', 'price' => 3],
];
}
//check if a session already exists, and if it does, use the values stored in it
checkSessionAndGetValues();
$expressCost = 5;
$orderPrice = 0;
$totalOrderPrice = 0;
$error_array = [];
$ordered_products = [];
$ordered_products_name = [];
$email = $street = $streetnumber = $city = $zipcode = "";
// define a constant to be used as a header in the mail()
define("HamEmail", "Bcc: stefan.am<EMAIL>");
if ($_SERVER["REQUEST_METHOD"] == "POST") {
if (empty($_POST["email"])) {
array_push($error_array, "Email required");
} else {
$email = test_input($_POST["email"]);
if (!filter_var($email, FILTER_VALIDATE_EMAIL)) {
array_push($error_array, "Invalid email format");
}
}
if (empty($_POST["street"])) {
array_push($error_array, "Street name required");
} else {
$street = test_input($_POST["street"]);
if (!preg_match("/^[a-zA-Z \d]+$/", $street)) {
array_push($error_array, "Street name can only contain letters, numbers or spaces");
}
}
if (empty($_POST["streetnumber"])) {
array_push($error_array, "Street number required");
} else {
$streetnumber = test_input($_POST["streetnumber"]);
if (!preg_match("/^[\d]+$/", $streetnumber)) {
array_push($error_array, "Street number can only contain... numbers");
}
}
if (empty($_POST["city"])) {
array_push($error_array, "City is required");
} else {
$city = test_input($_POST["city"]);
if (!preg_match("/^[a-zA-Z-' \d]+$/", $city)) {
array_push($error_array, "City name can only contain letters, numbers, dashes, single quotes or spaces");
}
}
if (empty($_POST["zipcode"])) {
array_push($error_array, "Zipcode is required");
} else {
$zipcode = test_input($_POST["zipcode"]);
if (!preg_match("/^[\d]+$/", $zipcode)) {
array_push($error_array, "Zipcode can only contain numbers");
}
}
if (!isset($_POST["products"])) {
array_push($error_array, "You forgot to actually order something.");
} else {
foreach ($_POST["products"] as $i => $product) {
array_push($ordered_products, $products[$i]);
array_push($ordered_products_name, $products[$i]['name']);
}
}
// check if there are any errors generated by empty/incorrect fields
// if there are any, display them. if not, store the values in session and place order
if (sizeof($error_array) == 0) {
$_SESSION['email'] = $email;
$_SESSION['street'] = $street;
$_SESSION['streetnumber'] = $streetnumber;
$_SESSION['city'] = $city;
$_SESSION['zipcode'] = $zipcode;
// calculate total sum for the ordered items
$orderPrice = array_sum(array_column($ordered_products, 'price'));
// calculate total amount to be paid and total delivery time depending on express_delivery choice
if (isset($_POST['express_delivery'])) {
$totalOrderPrice = $orderPrice + $expressCost;
$deliveryTime = date("H:i", time() + 2700);
} else {
$totalOrderPrice = $orderPrice;
$deliveryTime = date("H:i", time() + 7200);
}
// check if the cookie is set and update the totalValue and cookie accordingly
if (!isset($_COOKIE['history'])) {
$totalValue = $totalOrderPrice;
setcookie('history', strval($totalOrderPrice), time() + (60 * 60 * 24 * 30), '/');
} else {
$totalValue = $_COOKIE['history'] + $totalOrderPrice;
setcookie('history', strval($totalValue), time() + (60 * 60 * 24 * 30), '/');
}
$currentDate = date("D j M\, H:i");
$message = "Order summary \"The Personal Ham Processor\", $currentDate\nDelivery address is: $street $streetnumber, $zipcode-$city\nTotal amount paid: $totalOrderPrice euros\nEstimated delivery time: $deliveryTime";
mail($email, 'Your order from "The Personal Ham Processor"', $message, HamEmail);
// display a message to the user with the total amount paid and estimated delivery time
echo '<p class="alert alert-success">Order has been placed successfully! Total amount is ' . $totalOrderPrice . ' euros.
Estimated delivery time is ' . $deliveryTime . '.</p>';
} else {
foreach ($error_array as $value) {
echo '<p class="alert alert-danger">' . $value . '</p>';
}
}
}
function test_input($data) {
$data = trim($data);
$data = stripslashes($data);
$data = htmlspecialchars($data);
return $data;
}
// make a function that checks if the inputs are empty,
// and if they are empty check if there are values stored in $_SESSION,
// and if they exist, display them
function checkSessionAndGetValues() {
if (!isset($_POST['email'])) {
if (isset($_SESSION['email'])) {
$_POST['email'] = $_SESSION['email'];
}
}
if (!isset($_POST['street'])) {
if (isset($_SESSION['street'])) {
$_POST['street'] = $_SESSION['street'];
}
}
if (!isset($_POST['streetnumber'])) {
if (isset($_SESSION['streetnumber'])) {
$_POST['streetnumber'] = $_SESSION['streetnumber'];
}
}
if (!isset($_POST['city'])) {
if (isset($_SESSION['city'])) {
$_POST['city'] = $_SESSION['city'];
}
}
if (!isset($_POST['zipcode'])) {
if (isset($_SESSION['zipcode'])) {
$_POST['zipcode'] = $_SESSION['zipcode'];
}
}
}
require 'form-view.php';
|
a2325ad94ff22f9d88e52feafceb0f5b4d656ccb
|
[
"Markdown",
"PHP"
] | 2 |
Markdown
|
StefanAmur/php-order-form
|
90da582730661df751cc96b561d4332616e74816
|
d9ae305949815a0aacea7994ed80b70710acbd96
|
refs/heads/master
|
<repo_name>andersonmarin/cesar<file_sep>/js/default.js
var alfabeto = ['a','b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'];
function Cesar(op) {
var entrada = document.getElementById("entrada");
var saida = document.getElementById("saida");
var lblalf = document.getElementById("lblalf");
lblalf.innerHTML = "";
saida.value = entrada.value.Limpar();
for (var i = 0; i < alfabeto.length; i++)
saida.value = saida.value.split(alfabeto[i]).join("$" + alfabeto[i]);
    var n = alfabeto.length; // set n to 26 (the number of letters in the alphabet)
var c = 0;
var k = document.getElementById("desloc").value * op; //define k como o deslocamento * op (1(cifrar) ou -1(decifrar)
var m = 0;
for(m; m < n; m++) {
        c = (m + k) % n; // m is the current position in the alphabet; k is the shift; n is the number of letters in the alphabet
        //c = (m + k) mod n
        c = c >= 0 ? c : c + n; // if c is negative, add n
        saida.value = saida.value.split("$" + alfabeto[m]).join(alfabeto[c]); // swap the letters
lblalf.innerHTML += alfabeto[c] + " ";
}
}
String.prototype.Limpar = function() {
    var str = this;
var from = "ãàáäâèéëêìíïîõòóöôùúüûñç";
var to = "aaaaaeeeeiiiiooooouuuunc";
for (var i = 0, l = from.length; i < l; i++) {
str = str.split(from.charAt(i)).join(to.charAt(i));
}
return str;
};<file_sep>/index.php
<html>
<head>
<title>Cifra de César</title>
<meta charset="utf-8">
<link rel="stylesheet" href="css/default.css">
<link href='https://fonts.googleapis.com/css?family=Cuprum:400,700' rel='stylesheet' type='text/css'>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<script type="text/javascript" src="js/default.js"></script>
</head>
<body>
<table>
<tr class="menu">
<td colspan="2">
<h1>Cifra de César -
<?php $host= gethostname();
$ip = gethostbyname($host);
print($ip)
?>/cesar
</h1>
<nav>
<ul>
<li class="no-b">K = <input id="desloc" min="0" max="255" value="0" type="number"></li>
<li class="btn" onclick="Cesar(1)">Cifrar</li>
<li class="btn" onclick="Cesar(-1)">Decifrar</li>
</ul>
</nav>
</td>
<!-- <td><label>a b c d e f g h i j k l m n o p q r s t u v w x y z</label><br><label id="lblalf"></label></td> -->
</tr>
<tr>
<td><textarea id="entrada" placeholder="Entrada"></textarea></td>
<td><textarea id="saida" placeholder="Saída"></textarea></td>
</tr>
<tr class="alfa">
<td colspan="2">
<label>a b c d e f g h i j k l m n o p q r s t u v w x y z</label><br><label id="lblalf"></label>
</td>
</tr>
</table>
</body>
</html>
|
be28c20807ce9705f51e152522c3defd64a3e91d
|
[
"JavaScript",
"PHP"
] | 2 |
JavaScript
|
andersonmarin/cesar
|
5c4f1a12351b6fed59ef1a5799bfbc265f9197e5
|
e35b064f7a7279d7865e4c3c52362b3b4cbef3e9
|
refs/heads/master
|
<repo_name>Leward/JavaquariumSE<file_sep>/src/Poissons/Algue.java
package Poissons;
/**
* Created by quentin on 31/01/2017.
*/
public class Algue implements LivingThing {
private boolean dead = false;
@Override
public void die() {
if(dead) {
throw new IllegalStateException("A dead algae cannot die");
}
dead = true;
}
@Override
public boolean isDead() {
return dead;
}
}
<file_sep>/src/Aquarium/Aquarium.java
package Aquarium;
import Poissons.*;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class Aquarium {
private List<Poisson> poissons;
private List<Algue> algues;
private static Aquarium INSTANCE = null;
private Aquarium() {
}
public void initAquarium(int nbPoissons, int nbAlgues) {
FishFactory fishFactory = new FishFactory();
poissons = IntStream.range(0, nbPoissons)
.mapToObj(i -> "Poisson " + i)
.map(fishFactory::randomWithName)
.collect(Collectors.toList());
algues = IntStream.range(0, nbAlgues)
.mapToObj(i -> new Algue())
.collect(Collectors.toList());
}
/*public Aquarium(int nbPoissons, int nbAlgues) {
FishFactory fishFactory = new FishFactory();
poissons = IntStream.range(0, nbPoissons)
.mapToObj(i -> "Poisson " + i)
.map(fishFactory::randomWithName)
.collect(Collectors.toList());
algues = IntStream.range(0, nbAlgues)
.mapToObj(i -> new Algue())
.collect(Collectors.toList());
}*/
public static Aquarium getInstance() {
if (INSTANCE == null) {
INSTANCE = new Aquarium();
}
return INSTANCE;
}
public List<Poisson> getPoissons() {
return poissons;
}
public List<Algue> getAlgues() {
return algues;
}
public void newTour() {
poissons.forEach(this::newTour);
poissons = poissons.stream()
.filter(Poisson::isAlive)
.collect(Collectors.toList());
algues = algues.stream()
.filter(Algue::isAlive)
.collect(Collectors.toList());
}
private void newTour(Poisson poisson) {
if(poisson.isDead()) {
return;
}
if (poisson instanceof Carnivore) {
Optional<Poisson> otherFish = pickOtherRandomFish(poisson);
otherFish.ifPresent(((Carnivore) poisson)::mange);
} else if (poisson instanceof Herbivore) {
Optional<Algue> algue = pickRandomAlgue();
algue.ifPresent(((Herbivore) poisson)::mange);
}
}
private boolean thereAreOtherFishes() {
return poissons.size() > 1;
}
private boolean thereIsNoOtherFish() {
return !thereAreOtherFishes();
}
private Optional<Poisson> pickOtherRandomFish(Poisson fishNotToPick) {
if (thereIsNoOtherFish()) {
return Optional.empty();
}
while (true) {
Poisson otherFish = poissons.get(new Random().nextInt(poissons.size()));
if (otherFish != fishNotToPick) {
return Optional.of(otherFish);
}
}
}
private Optional<Algue> pickRandomAlgue() {
if (algues.size() == 0) {
return Optional.empty();
}
Algue algue = algues.get(new Random().nextInt(algues.size()));
return Optional.of(algue);
}
}
|
bd5fccce5659e62d9af788cf7d552c92fd2b4212
|
[
"Java"
] | 2 |
Java
|
Leward/JavaquariumSE
|
8dc3e60cb626b53e5a781e64ac4bfddfef78ce88
|
5c37f0399fbbbe860fb1a81d149b38ef3e7f6221
|
refs/heads/master
|
<repo_name>xQcLmAo/ThotBot<file_sep>/README.md
# ThotBot
Personal Bot I guess
<file_sep>/DiscordBotCode.cpp
{
"name": "greeter-bot",
"version": "1.0.0",
"description": "My own Discord bot",
"main": "bot.js",
"author": "YOUR-NAME-HERE",
"dependencies": {}
}
var Discord = require('discord.io');
var logger = require('winston');
var auth = require('./auth.json');
// Configure logger settings
logger.remove(logger.transports.Console);
logger.add(logger.transports.Console, {
colorize: true
});
logger.level = 'debug';
// Initialize Discord Bot
var bot = new Discord.Client({
token: auth.token,
autorun: true
});
bot.on('ready', function (evt) {
logger.info('Connected');
logger.info('Logged in as: ');
logger.info(bot.username + ' - (' + bot.id + ')');
});
bot.on('message', function (user, userID, channelID, message, evt) {
// Our bot needs to know if it will execute a command
// It will listen for messages that will start with `!`
if (message.substring(0, 1) == '!') {
var args = message.substring(1).split(' ');
var cmd = args[0];
args = args.splice(1);
switch(cmd) {
// !ping
case 'ping':
bot.sendMessage({
to: channelID,
message: 'Pong!'
});
break;
// Just add any case commands if you want to..
}
}
});
|
2826171963b58ca7b32a57ad15ffe8f7cd3fafa6
|
[
"Markdown",
"C++"
] | 2 |
Markdown
|
xQcLmAo/ThotBot
|
06aac455395f9a18fbce0d02262c4a3a96c1f111
|
03779373b56cf33a824d940d3362543628d7f022
|
refs/heads/master
|
<file_sep>import '../imports/startup/sever';<file_sep>const Sale = new Mongo.Collection('sale');
export default Sale;<file_sep>import '../../api/customers/methods';
import '../../api/customers/sever/publications';
|
9d0f07b5c7b58530cc17126a531aef944e3b5b15
|
[
"JavaScript"
] | 3 |
JavaScript
|
yuniit/rabbit
|
c3bbb6f4c6096ae33374279bdb1154f5d9f040be
|
bcd1958164880f78caf80efbc0683e852725cdaa
|
refs/heads/master
|
<file_sep>#include<iostream>
#include<random>
#include<ctime>
#include "Board.h"
int main()
{
int score = 0;
// Initial game setup
srand(time(0));
GameBoard board;
board.generateBoard();
board.displayBoard();
// User inputs their move
board.getInput();
// Movement function is called inside input function so that parameters can be passed in directly instead of returning multiple values to main()
board.displayBoard();
std::cout << "Current score: " << score << std::endl;
}<file_sep>#include<iostream>
#include<random>
#include<Windows.h>
#include "Board.h"
char GameBoard::generatePiece()
{
// This generates a specific piece to use for the generateBoard function
int piece = rand() % 6;
switch (piece)
{
case Blue:
return 'B';
case Green:
return 'G';
case Purple:
return 'P';
case Red:
return 'R';
case White:
return 'W';
case Yellow:
		return 'Y';
	default:
		// Unreachable because rand() % 6 is always 0-5, but keeps every control path returning a value
		return 'B';
	}
}
void GameBoard::generateBoard()
{
// This generates the initial game board array at the start of the game
for (int y = 0; y < boardSize; y++)
{
for (int x = 0; x < boardSize; x++)
{
do
{
boardArray[x][y] = generatePiece();
			} while ((x >= 2 && boardArray[x][y] == boardArray[x - 1][y] && boardArray[x][y] == boardArray[x - 2][y]) || (y >= 2 && boardArray[x][y] == boardArray[x][y - 1] && boardArray[x][y] == boardArray[x][y - 2]));
// This loop will ensure that the board contains no 3 string matches when generated
}
}
}
void GameBoard::displayBoard()
{
HANDLE hConsole;
hConsole = GetStdHandle(STD_OUTPUT_HANDLE);
// This displays the x co-ordinates of the game board on the top line
std::cout << " ";
for (int i = 0; i < boardSize; i++)
{
std::cout << axis[i] << " ";
}
std::cout << std::endl;
for (int y = 0; y < boardSize; y++)
{
// This displays the y co-ordinates of the game board on the left side
SetConsoleTextAttribute(hConsole, 7);
std::cout << axis[y] << " ";
// This loop displays the appropriate character in the appropriate colour for the game piece in the array at that location
for (int x = 0; x < boardSize; x++)
{
switch (boardArray[x][y])
{
case 'B':
SetConsoleTextAttribute(hConsole, 1);
std::cout << "B ";
break;
case 'G':
SetConsoleTextAttribute(hConsole, 2);
std::cout << "G ";
break;
case 'P':
SetConsoleTextAttribute(hConsole, 5);
std::cout << "P ";
break;
case 'R':
SetConsoleTextAttribute(hConsole, 4);
std::cout << "R ";
break;
case 'Y':
SetConsoleTextAttribute(hConsole, 6);
std::cout << "Y ";
break;
case 'W':
SetConsoleTextAttribute(hConsole, 15);
std::cout << "W ";
break;
default:
std::cout << "Error";
}
}
SetConsoleTextAttribute(hConsole, 7);
std::cout << std::endl;
}
}
void GameBoard::getInput()
{
int xChoice = 0, yChoice = 0, directionChoice = 0, returnDirection = 0, movedPieceX = 0, movedPieceY = 0;
std::cout << "\nEnter the x co-ordinate of the piece you want to move, then enter the y co-ordinate:\n";
std::cin >> xChoice;
std::cin >> yChoice;
	while (xChoice < 0 || xChoice >= boardSize || yChoice < 0 || yChoice >= boardSize)
{
std::cout << "\nInvalid co-ordinates, please enter again:\n";
std::cin >> xChoice;
std::cin >> yChoice;
}
// Loop for validating the co-ordinate input
std::cout << "\nChoose a direction to move the piece at (" << xChoice << ", " << yChoice << "):\n\n1. Up\n2. Down\n3. Left\n4. Right\n\nPress 0 to cancel and select another piece.\n";
std::cin >> directionChoice;
while (directionChoice < Cancel || directionChoice > Right)
{
std::cout << "Invalid choice, please enter again:\n";
std::cin >> directionChoice;
}
	while ((xChoice == 0 && directionChoice == Left) || (xChoice == boardSize - 1 && directionChoice == Right) || (yChoice == 0 && directionChoice == Up) || (yChoice == boardSize - 1 && directionChoice == Down))
	{
		std::cout << "\nThis piece cannot move in that direction, please choose a different direction:\n";
		std::cin >> directionChoice;
	}
// Loops for validating the direction input
// This is so that the piece can easily be swapped back if no match is made by the move
movedPieceX = xChoice;
movedPieceY = yChoice;
switch (directionChoice)
{
case Up:
returnDirection = Down;
movedPieceY = yChoice - 1;
break;
case Down:
returnDirection = Up;
movedPieceY = yChoice + 1;
break;
case Left:
returnDirection = Right;
movedPieceX = xChoice - 1;
break;
case Right:
returnDirection = Left;
movedPieceX = xChoice + 1;
break;
default:
std::cout << "There was an error with return direction.\n";
}
std::cout << std::endl;
// Piece movement called directly from this function to avoid breaking input down into three seperate functions and returning the inputs to main()
movePiece(xChoice, yChoice, directionChoice);
checkBoard(movedPieceX, movedPieceY, returnDirection);
}
void GameBoard::movePiece(int xPosition, int yPosition, int direction)
{
// Temporary variable to hold the character being swapped
char swappedPiece = ' ';
switch (direction)
{
case Up:
swappedPiece = boardArray[xPosition][yPosition - 1];
boardArray[xPosition][yPosition - 1] = boardArray[xPosition][yPosition];
boardArray[xPosition][yPosition] = swappedPiece;
break;
case Down:
swappedPiece = boardArray[xPosition][yPosition + 1];
boardArray[xPosition][yPosition + 1] = boardArray[xPosition][yPosition];
boardArray[xPosition][yPosition] = swappedPiece;
break;
case Left:
swappedPiece = boardArray[xPosition - 1][yPosition];
boardArray[xPosition - 1][yPosition] = boardArray[xPosition][yPosition];
boardArray[xPosition][yPosition] = swappedPiece;
break;
case Right:
swappedPiece = boardArray[xPosition + 1][yPosition];
boardArray[xPosition + 1][yPosition] = boardArray[xPosition][yPosition];
boardArray[xPosition][yPosition] = swappedPiece;
break;
case Cancel:
// Runs input function again so the player can enter different co-ordinate values
getInput();
break;
default:
std::cout << "There was an error with piece movement.\n";
}
}
int GameBoard::checkBoard(int movedX, int movedY, int returnDirection)
{
int totalScoreBonus = 0;
for (int y = 0; y < (boardSize - 2); y++)
{
		for (int x = 0; x < (boardSize - 2); x++)
{
totalScoreBonus += checkPiece(x, y);
totalScoreBonus += checkPiece(y, x);
}
}
if (totalScoreBonus == 0)
{
movePiece(movedX, movedY, returnDirection);
std::cout << "No match was made by this move, so the piece was returned to its original position.\n\n";
}
return totalScoreBonus;
}
int GameBoard::checkPiece(int position1, int position2)
{
int scoreBonus = 0, cascadeCount = 0;
do
{
if (boardArray[position1][position2] == boardArray[position1 + 1][position2] && boardArray[position1][position2] == boardArray[position1 + 2][position2])
{
//cascadeCount++;
			if (position1 + 4 < boardSize && boardArray[position1][position2] == boardArray[position1 + 3][position2] && boardArray[position1][position2] == boardArray[position1 + 4][position2])
{
if (boardArray[position1 + 2][position2] == boardArray[position1 + 2][position2 + 1] && boardArray[position1][position2] == boardArray[position1 + 2][position2 + 2])
{
// 5 x 3 T-shape string
scoreBonus += (fiveStringScore * tShapeMultiplier);
std::cout << "+" << (fiveStringScore * tShapeMultiplier) << " points!\n";
for (int i = position1; i < 4; i++)
{
replacePiece(i, position2);
}
for (int i = position2 + 1; i < 2; i++)
{
replacePiece(position1 + 2, i);
}
}
else
{
// 5 string
scoreBonus += fiveStringScore;
std::cout << "+" << fiveStringScore << " points!\n";
for (int i = position1; i < 4; i++)
{
replacePiece(i, position2);
}
}
}
			else if (position1 + 3 < boardSize && boardArray[position1][position2] == boardArray[position1 + 3][position2])
{
// 4 string
scoreBonus += fourStringScore;
std::cout << "+" << fourStringScore << " points!\n";
for (int i = position1; i < 3; i++)
{
replacePiece(i, position2);
}
}
else
{
if (boardArray[position1 + 1][position2] == boardArray[position1 + 1][position2 + 1] && boardArray[position1][position2] == boardArray[position1 + 1][position2 + 2])
{
// 3 x 3 T-shape string
scoreBonus += (threeStringScore * tShapeMultiplier);
std::cout << "+" << (threeStringScore * tShapeMultiplier) << " points!\n";
for (int i = position1; i < 2; i++)
{
replacePiece(i, position2);
}
for (int i = position2 + 1; i < 2; i++)
{
replacePiece(position1 + 2, i);
}
}
else
{
// 3 string
scoreBonus += threeStringScore;
std::cout << "+" << threeStringScore << " points!\n";
for (int i = position1; i < 2; i++)
{
replacePiece(i, position2);
}
}
}
scoreBonus += (cascadeScore * (cascadeCount ^ cascadeMultiplier));
}
else
{
cascadeCount = 0;
}
} while (cascadeCount > 0);
return scoreBonus;
}
void GameBoard::replacePiece(int xPosition, int yPosition)
{
for (int i = yPosition; i < boardSize; i++)
{
		if (i == boardSize - 1)
		{
			// The last cell in the column has nothing left to shift in, so generate a fresh piece
			boardArray[xPosition][i] = generatePiece();
}
else
{
boardArray[xPosition][i] = boardArray[xPosition][i + 1];
}
}
}<file_sep>#pragma once
class GameBoard
{
// These values will be used to handle the scoring system
const int threeStringScore = 50, fourStringScore = 100, fiveStringScore = 500, tShapeMultiplier = 3, cascadeScore = 50, cascadeMultiplier = 2;
// These values will be used for the game board
const int boardSize = 8;
int axis[8] = { 0,1,2,3,4,5,6,7 };
char boardArray[8][8];
enum colours
{
Blue,
Green,
Purple,
Red,
White,
Yellow
};
enum directionsMenu
{
Cancel,
Up,
Down,
Left,
Right
};
public:
char generatePiece();
void generateBoard();
void displayBoard();
void getInput();
void movePiece(int xPosition, int yPosition, int direction);
int checkBoard(int movedX, int movedY, int returnDirection);
int checkPiece(int position1, int position2);
void replacePiece(int xPosition, int yPosition);
};
|
f1b9af645a7f2b60f9bd2e29e9cbc767c54c00bd
|
[
"C++"
] | 3 |
C++
|
noodlefox/bejewelled
|
0e9a0096e89a3be39db86e7bdfc6170d00a0ddea
|
3abce3dc409414435cdabbd30285ddcada4d3eb4
|
refs/heads/master
|
<file_sep>DROP SCHEMA IF EXISTS "Spotify" CASCADE;
CREATE SCHEMA "Spotify" AUTHORIZATION postgres;
CREATE TYPE e_GENERE AS ENUM ('Rock', 'Shoegaze', 'Math-Rock', 'Emo', 'Indie', 'Noise-Rock', 'Art-Rock', 'Cold-Wave', 'Dream Pop', 'Alternative-Rock', 'Bedroom Pop');
DROP TABLE IF EXISTS artists;
CREATE TABLE artists (
artistid serial,
artist_name varchar(90),
about varchar(1000),
monthly_listeners int DEFAULT 0,
followers int DEFAULT 0
);
ALTER TABLE artists ALTER COLUMN artistid SET NOT NULL;
ALTER TABLE artists ALTER COLUMN artist_name SET NOT NULL;
ALTER TABLE artists ALTER COLUMN monthly_listeners SET NOT NULL;
ALTER TABLE artists ALTER COLUMN followers SET NOT NULL;
ALTER TABLE artists ADD CONSTRAINT artists_pk PRIMARY KEY (artistid);
ALTER TABLE artists ADD CONSTRAINT name_check CHECK(LENGTH(artist_name) > 1);
DROP TABLE IF EXISTS "plans";
CREATE TABLE "plans" (
planid serial,
plan_name varchar(20),
price NUMERIC(4,2) DEFAULT 0.0,
is_premium bool DEFAULT TRUE,
numb_of_accounts int,
plan_decription TEXT
);
ALTER TABLE "plans" ALTER COLUMN planid SET NOT NULL;
ALTER TABLE "plans" ALTER COLUMN plan_name SET NOT NULL;
ALTER TABLE "plans" ALTER COLUMN price SET NOT NULL;
ALTER TABLE "plans" ALTER COLUMN is_premium SET NOT NULL;
ALTER TABLE "plans" ALTER COLUMN numb_of_accounts SET NOT NULL;
ALTER TABLE "plans" ADD CONSTRAINT plans_pk PRIMARY KEY (planid);
ALTER TABLE "plans" ADD CONSTRAINT name_unique UNIQUE(plan_name);
DROP TABLE IF EXISTS playlists;
CREATE TABLE playlists (
playlistid serial,
create_date date,
update_date timestamp (0) DEFAULT CURRENT_TIMESTAMP,
playlist_name varchar (100),
playlist_description TEXT
);
ALTER TABLE playlists ALTER COLUMN playlistid SET NOT NULL;
ALTER TABLE playlists ALTER COLUMN create_date SET NOT NULL;
ALTER TABLE playlists ALTER COLUMN update_date SET NOT NULL;
ALTER TABLE playlists ALTER COLUMN playlist_name SET NOT NULL;
ALTER TABLE playlists ADD CONSTRAINT playlists_pk PRIMARY KEY (playlistid);
ALTER TABLE playlists ADD CONSTRAINT name_check CHECK(LENGTH(playlist_name) > 2);
DROP TABLE IF EXISTS albums;
CREATE TABLE albums (
albumid serial,
update_date timestamp(0) DEFAULT CURRENT_TIMESTAMP,
album_name varchar(100),
artist serial,
release_date date
);
ALTER TABLE albums ALTER COLUMN albumid SET NOT NULL;
ALTER TABLE albums ALTER COLUMN update_date SET NOT NULL;
ALTER TABLE albums ALTER COLUMN update_date SET NOT NULL;
ALTER TABLE albums ALTER COLUMN album_name SET NOT NULL;
ALTER TABLE albums ALTER COLUMN artist SET NOT NULL;
ALTER TABLE albums ALTER COLUMN release_date SET NOT NULL;
ALTER TABLE albums ADD CONSTRAINT albums_pk PRIMARY KEY (albumid);
ALTER TABLE albums ADD CONSTRAINT albums_fk FOREIGN KEY (artist) REFERENCES "Spotify".artists(artistid);
DROP TABLE IF EXISTS songs;
CREATE TABLE songs (
songid serial,
song_name varchar(100),
duration time,
update_date timestamp(0) DEFAULT CURRENT_TIMESTAMP,
album serial,
listens int DEFAULT 0
);
ALTER TABLE songs ALTER COLUMN songid SET NOT NULL;
ALTER TABLE songs ALTER COLUMN song_name SET NOT NULL;
ALTER TABLE songs ALTER COLUMN duration SET NOT NULL;
ALTER TABLE songs ALTER COLUMN update_date SET NOT NULL;
ALTER TABLE songs ALTER COLUMN album SET NOT NULL;
ALTER TABLE songs ALTER COLUMN listens SET NOT NULL;
ALTER TABLE songs ADD CONSTRAINT songs_pk PRIMARY KEY (songid);
ALTER TABLE songs ADD CONSTRAINT songs_fk FOREIGN KEY (album) REFERENCES "Spotify".albums(albumid);
ALTER TABLE songs ADD CONSTRAINT name_check CHECK(LENGTH(song_name) > 1);
ALTER TABLE songs ADD CONSTRAINT duration_check CHECK(duration > '00:00:05');
DROP TABLE IF EXISTS city;
CREATE TABLE city (
cityid serial PRIMARY KEY,
"name" Varchar(50),
country Varchar(60)
);
ALTER TABLE city ALTER COLUMN cityid SET NOT NULL;
ALTER TABLE city ALTER COLUMN "name" SET NOT NULL;
ALTER TABLE city ALTER COLUMN country SET NOT NULL;
DROP TABLE IF EXISTS users;
CREATE TABLE users (
userid serial,
username varchar(30),
"password" varchar(35),
isadmin bool DEFAULT FALSE,
planid int,
cityid int,
premium_days int DEFAULT 0
);
ALTER TABLE users ALTER COLUMN userid SET NOT NULL;
ALTER TABLE users ALTER COLUMN username SET NOT NULL;
ALTER TABLE users ALTER COLUMN "password" SET NOT NULL;
ALTER TABLE users ALTER COLUMN isadmin SET NOT NULL;
ALTER TABLE users ALTER COLUMN planid SET NOT NULL;
ALTER TABLE users ALTER COLUMN cityid SET NOT NULL;
ALTER TABLE users ADD CONSTRAINT username_check CHECK(LENGTH(username) > 1);
ALTER TABLE users ADD CONSTRAINT password_check CHECK(LENGTH("password") >= 5);
ALTER TABLE users ADD CONSTRAINT users_pk PRIMARY KEY (userid);
ALTER TABLE users ADD CONSTRAINT users_fk FOREIGN KEY (planid) REFERENCES "Spotify"."plans"(planid);
ALTER TABLE users ADD CONSTRAINT users_fk2 FOREIGN KEY (cityid) REFERENCES "Spotify".city(cityid);
DROP TABLE IF EXISTS playlists_has_songs;
CREATE TABLE playlists_has_songs (
playlistid int NOT NULL,
songid int NOT NULL,
CONSTRAINT playlists_has_songs_fk FOREIGN KEY (songid) REFERENCES "Spotify".songs(songid),
CONSTRAINT playlists_has_songs_fk_1 FOREIGN KEY (playlistid) REFERENCES "Spotify".playlists(playlistid)
);
DROP TABLE IF EXISTS user_likes_songs;
CREATE TABLE user_likes_songs (
songid int NOT NULL,
userid int NOT NULL,
CONSTRAINT user_likes_songs_fk FOREIGN KEY (userid) REFERENCES "Spotify".users(userid),
CONSTRAINT user_likes_songs_fk_1 FOREIGN KEY (songid) REFERENCES "Spotify".songs(songid)
);
DROP TABLE IF EXISTS playlists_for_users;
CREATE TABLE playlists_for_users (
userid int NOT NULL,
playlistid int NOT NULL,
CONSTRAINT playlist_for_user_fk FOREIGN KEY (userid) REFERENCES "Spotify".users(userid),
CONSTRAINT playlist_for_user_fk_1 FOREIGN KEY (playlistid) REFERENCES "Spotify".playlists(playlistid)
);
DROP TABLE IF EXISTS user_has_playlist;
CREATE TABLE user_has_playlist (
playlistid int NOT NULL,
userid int NOT NULL,
CONSTRAINT user_has_playlist_fk FOREIGN KEY (userid) REFERENCES "Spotify".users(userid),
CONSTRAINT user_has_playlist_fk_1 FOREIGN KEY (playlistid) REFERENCES "Spotify".playlists(playlistid)
);
DROP TABLE IF EXISTS song_has_geners;
CREATE TABLE song_has_geners(
songid int NOT NULL,
genere e_GENERE NOT NULL,
CONSTRAINT song_has_generes_fk FOREIGN KEY (songid) REFERENCES "Spotify".songs(songid)
);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(1, 'My Bloody Valentine', 'My Bloody Valentine are an Irish-English band formed in ', 1265688, 415288);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(2, 'Tangled Hair', 'Tangled hair is a three-piece band from South Lo', 30101, 12808);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(3, 'LSD and the Search for God', 'Critically acclaimed debut EP from San Francisco''s ps', 161436, 42981);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(4, '<NAME>', NULL, 16402, 2328);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(5, 'My Dead Girlfriend', '死んだ僕の彼女 (shinda boku no kanojo) (My Dead Girlfriend) ', 16333, 7416);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(6, 'Julie', NULL, 53121, 7015);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(7, 'empire! empire! (i was a lonely estate)', 'Empire! Empire! (I Was a Lonely Estate) was an American em', 30665, 32976);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(8, 'DIIV', 'DIIV is an American rock band from Brooklyn, New York City, ', 509361, 279589);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(9, 'Fleeting Joys', '"Swirling guitars... layers of noise... drugged out soft ', 27312, 12922);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(10, 'Whirr', 'Whirr is an American shoegaze band from the San Francisco', 75186, 43388);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(11, 'WalusKraksaKryzys', NULL, 75989, 3984);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(12, 'Animal Ghosts', 'New from multi-instrumentalist Cliff Barnes comes ', 2736, 1137);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(13, 'Slowdive', 'Slowdive are an English rock band that formed in ', 1164498, 420199);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(14, 'COLLAPSE', NULL, 663, 556);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(15, 'Radiohead', 'Radiohead – angielski zespół rockowy z Abingdon w Oxfordshire,', 12587535, 6100573);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(16, 'Yuragi', 'Japanese shoegaze band, Yuragi blends a beauty of silence and dynamic noise sound into their new album, "Still Dreaming, Still Deafening" released from FLAKE SOUNDS in 2018. They’ve played on Fuji Rock Festival 2019 and supported international artist tour like , , , @imeanus and more. They look the worldwide music scene beyond the Japanese market.', 31012, 12404);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(17, '<NAME>', 'party music for philosophers or philosopher-wannabes.', 78373, 14691);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(18, 'Zwidy', NULL, 1565, 1412);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(19, 'Rigby', 'LA based solo project by 21 year old kelly rose golden', 26944, 2228);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(20, 'Flower Face', 'Flower Face is the solo music project of 21 year old Canadian Ruby Mckinnon', 193374, 32403);
INSERT INTO artists (artistid, artist_name, about, monthly_listeners, followers) VALUES(21, 'Daze', 'We are dreamy rockers from Huston', 644, 597);
INSERT INTO city (cityid, "name", country) VALUES(1, 'Columbus City ', 'USA');
INSERT INTO city (cityid, "name", country) VALUES(2, 'Tarnobrzeg', 'Poland');
INSERT INTO city (cityid, "name", country) VALUES(3, 'Gorzyce', 'Poland');
INSERT INTO city (cityid, "name", country) VALUES(4, 'Los Angeles', 'USA');
INSERT INTO city (cityid, "name", country) VALUES(5, 'London', 'England');
INSERT INTO city (cityid, "name", country) VALUES(6, 'Berlin', 'Germany');
INSERT INTO city (cityid, "name", country) VALUES(7, 'Warszawa', 'Poland');
INSERT INTO city (cityid, "name", country) VALUES(8, 'Tokyo', 'Japan');
INSERT INTO city (cityid, "name", country) VALUES(9, 'Prag', 'Czech Republic');
INSERT INTO city (cityid, "name", country) VALUES(10, 'Budapest', 'Hungary');
INSERT INTO "plans" (planid, plan_name, price, is_premium, numb_of_accounts, plan_decription) VALUES(1, 'Individual', 19.99, true, 1, 'bruh znajdz sobie kogos w koncu');
INSERT INTO "plans" (planid, plan_name, price, is_premium, numb_of_accounts, plan_decription) VALUES(2, 'Duo', 23.99, true, 2, 'Plan dla par');
INSERT INTO "plans" (planid, plan_name, price, is_premium, numb_of_accounts, plan_decription) VALUES(3, 'Family', 29.99, true, 6, 'Plan dla rodzin');
INSERT INTO "plans" (planid, plan_name, price, is_premium, numb_of_accounts, plan_decription) VALUES(4, 'Student', 9.99, true, 1, 'Biedak gupi student');
INSERT INTO "plans" (planid, plan_name, price, is_premium, numb_of_accounts, plan_decription) VALUES(5, 'Free', 0.00, false, 1, 'biedby buedaj');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(1, '2020-02-06', '2021-05-18 20:25:53.000', 'shoegaze', 'opis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(2, '2020-05-08', '2021-05-18 20:25:53.000', 'rock', NULL);
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(3, '2020-12-11', '2021-05-18 20:25:53.000', 'dreampop', NULL);
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(4, '2020-11-12', '2021-05-18 20:25:53.000', 'shoegaze/dreampop', 'opis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(5, '2020-07-04', '2021-05-18 20:25:53.000', 'ppp', 'opis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(6, '2020-03-01', '2021-05-18 20:25:53.000', 'huge pp', NULL);
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(7, '2020-01-05', '2021-05-18 20:25:53.000', 'nazwa', 'opisopisopis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(8, '2020-02-08', '2021-05-18 20:25:53.000', 'playlista', 'opisopis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(9, '2020-05-09', '2021-05-18 20:25:53.000', 'to playlista', NULL);
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(10, '2020-01-02', '2021-05-18 20:25:53.000', 'hej', NULL);
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(11, '2021-04-01', '2021-05-18 20:25:53.000', 'dlaczego ', 'opis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(12, '2021-01-04', '2021-05-18 20:25:53.000', 'znow', 'opisopisopisopisopis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(13, '2021-03-08', '2021-05-18 20:25:53.000', 'musze', NULL);
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(14, '2021-01-12', '2021-05-18 20:25:53.000', 'toppp', 'opis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(15, '2021-02-01', '2021-05-18 20:25:53.000', 'wpisywac', 'opisopisopisopisopis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(16, '2019-05-02', '2021-05-19 12:13:34.000', 'siu', 'opisopisopisopisopisopisopisopisopisopis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(17, '2019-12-20', '2021-05-19 12:13:34.000', 'wziu', 'opisopisopisopisopisopisopisopisopisopisopisopisopisopisopis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(18, '2019-06-30', '2021-05-19 12:13:34.000', 'bzium', 'opisopiso');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(19, '2019-02-16', '2021-05-19 12:13:34.000', 'rum', 'opisop');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(20, '2020-05-07', '2021-05-19 12:13:34.000', 'groom', 'oajhdfojhaopd');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(21, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(22, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', 'killerxmaciek666');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(23, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', 'Dgaday');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(24, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', 'Kubejj');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(25, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', 'Frov');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(26, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', 'zabol123');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(27, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(28, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(29, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(30, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(31, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(32, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(33, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(34, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(35, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(36, '2021-05-16', '2021-05-19 12:13:34.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(37, '2021-05-16', '2021-05-19 12:13:34.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(38, '2021-05-16', '2021-05-19 12:13:34.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(39, '2021-05-16', '2021-05-19 12:13:34.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(40, '2021-05-16', '2021-05-19 12:13:34.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(41, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(42, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(43, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', '<NAME>');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(44, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', 'Moe Lester');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(45, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', 'Methamorphosis');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(46, '2021-05-16', '2021-05-18 20:25:53.000', 'Spotify Weekly', 'Euphoria');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(47, '2021-05-16', '2021-05-19 12:13:34.000', 'Spotify Weekly', 'Tiny Evil');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(48, '2021-05-16', '2021-05-19 12:13:34.000', 'Spotify Weekly', 'SUS');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(49, '2021-05-16', '2021-05-19 12:13:34.000', 'Spotify Weekly', 'Amogus');
INSERT INTO playlists (playlistid, create_date, update_date, playlist_name, playlist_description) VALUES(50, '2021-05-16', '2021-05-19 12:13:34.000', 'Spotify Weekly', 'rat');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(1, '2021-05-18 20:25:53.000', 'Underdrawing For Three Forms Of Unhappiness At The State Of Existence', 5, '2012-02-03');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(2, '2021-05-18 20:25:53.000', 'Loveless', 1, '1991-11-04');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(3, '2021-05-18 20:25:53.000', 'We Do What We Can', 2, '2018-04-07');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(4, '2021-05-18 20:25:53.000', 'LSD and the Search for God', 3, '2007-02-05');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(5, '2021-05-18 20:25:53.000', 'Małe Pokoje W Dużym Mieście', 4, '2021-04-21');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(6, '2021-05-18 20:25:53.000', 'Deceiver', 8, '2019-04-22');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(7, '2021-05-18 20:25:53.000', 'Flutter', 6, '2020-10-22');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(8, '2021-05-18 20:25:53.000', 'What It Takes To Move Forward', 7, '2009-06-12');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(9, '2021-05-18 20:25:53.000', 'Desopondent Transponder', 9, '2006-02-01');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(10, '2021-05-18 20:25:53.000', 'Distressor', 10, '2012-05-20');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(11, '2021-05-18 20:25:53.000', 'MiłyMłodyCzłowiek', 11, '2019-11-29');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(12, '2021-05-18 20:25:53.000', 'Wail', 12, '2020-04-16');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(13, '2021-05-18 20:25:53.000', 'Souvalki', 13, '1994-07-05');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(14, '2021-05-18 20:25:53.000', 'Delirium Poetry', 14, '2019-10-05');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(15, '2021-05-18 20:25:53.000', 'OK Computer', 15, '1997-03-10');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(16, '2021-05-18 22:39:21.000', 'sweet days and her last kiss', 5, '2008-06-10');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(17, '2021-05-18 22:39:21.000', 'm b v', 1, '2013-12-16');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(18, '2021-05-18 22:39:21.000', 'Feels Like You', 10, '2019-02-19');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(19, '2021-05-18 22:39:21.000', 'In Raindbows', 15, '2007-10-21');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(20, '2021-05-18 22:43:17.000', 'nightlife', 16, '2016-06-29');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(21, '2021-05-19 11:28:29.000', 'capitol', 17, '2021-02-04');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(22, '2021-05-19 11:28:29.000', 'Zwidy', 18, '2017-06-10');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(23, '2021-05-19 11:28:29.000', 'Headache', 19, '2019-07-01');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(24, '2021-05-19 11:28:29.000', 'Baby Teeth', 20, '2018-02-25');
INSERT INTO albums (albumid, update_date, album_name, artist, release_date) VALUES(25, '2021-05-19 11:28:29.000', 'Ritual', 21, '2018-10-27');
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(1, 'Conception For Three Forms Of Unhappiness At The State Of Existence', '00:05:14', '2021-05-18 20:25:53.000', 1, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(2, 'Skyscraper Kills My Ghost In Your Memory', '00:03:25', '2021-05-18 20:25:53.000', 1, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(3, 'Vendemiaire no Atama', '00:03:44', '2021-05-18 20:25:53.000', 1, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(4, 'Nasty Mayor’s Daughter', '00:05:40', '2021-05-18 20:25:53.000', 1, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(5, 'Ouka', '00:03:49', '2021-05-18 20:25:53.000', 1, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(6, 'Aki no Hachiouji', '00:04:07', '2021-05-18 20:25:53.000', 1, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(7, 'only shallow', '00:04:16', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(8, 'loomer', '00:02:38', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(9, 'touched', '00:00:56', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(10, 'to here knows when', '00:05:31', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(11, 'when you sleep', '00:04:11', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(12, 'i only said', '00:05:34', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(13, 'come in alone', '00:03:58', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(14, 'sometimes', '00:05:19', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(15, 'blown a wish', '00:03:36', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(16, 'what you want', '00:05:33', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(17, 'soon', '00:06:59', '2021-05-18 20:25:53.000', 2, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(18, 'Keep Doing What You''re Doing', '00:05:51', '2021-05-18 20:25:53.000', 3, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(19, 'Yeah, It Does Look Like A Spider', '00:05:54', '2021-05-18 20:25:53.000', 3, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(20, 'Nao Is My Driver', '00:04:11', '2021-05-18 20:25:53.000', 3, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(21, 'Werme', '00:04:57', '2021-05-18 20:25:53.000', 3, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(22, 'Catalina', '00:04:28', '2021-05-18 20:25:53.000', 3, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(23, '<NAME>', '00:04:23', '2021-05-18 20:25:53.000', 3, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(24, 'Turned out Nice Again', '00:04:44', '2021-05-18 20:25:53.000', 3, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(25, 'Camera 1, Camera 2', '00:03:51', '2021-05-18 20:25:53.000', 3, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(26, 'Time Flies', '00:04:05', '2021-05-18 20:25:53.000', 3, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(27, 'This Time', '00:03:18', '2021-05-18 20:25:53.000', 4, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(28, 'I Don''t Care', '00:04:02', '2021-05-18 20:25:53.000', 4, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(29, 'Backwards', '00:04:18', '2021-05-18 20:25:53.000', 4, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(30, 'Starting Over', '00:05:04', '2021-05-18 20:25:53.000', 4, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(31, 'Starshine', '00:05:19', '2021-05-18 20:25:53.000', 4, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(32, 'Miękki Asfalt', '00:01:41', '2021-05-18 20:25:53.000', 5, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(33, 'Nie Śpię', '00:03:10', '2021-05-18 20:25:53.000', 5, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(34, 'Małe Pokoje W Dużym Mieście', '00:03:19', '2021-05-18 20:25:53.000', 5, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(35, 'Orange Juice Jabłkowy (skit)', '00:02:46', '2021-05-18 20:25:53.000', 5, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(36, 'Letnie Noce', '00:01:38', '2021-05-18 20:25:53.000', 5, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(37, 'Twarz', '00:02:46', '2021-05-18 20:25:53.000', 5, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(38, 'Promyk', '00:02:39', '2021-05-18 20:25:53.000', 5, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(39, 'Persyflaż', '00:03:10', '2021-05-18 20:25:53.000', 5, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(40, 'Czy <NAME>?', '00:03:24', '2021-05-18 20:25:53.000', 5, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(41, 'Outro', '00:01:21', '2021-05-18 20:25:53.000', 5, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(42, 'Horsehead', '00:05:08', '2021-05-18 20:25:53.000', 6, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(43, 'Like Before You Were Born', '00:03:04', '2021-05-18 20:25:53.000', 6, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(44, 'Skin Game', '00:04:25', '2021-05-18 20:25:53.000', 6, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(45, 'Between Tides', '00:04:43', '2021-05-18 20:25:53.000', 6, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(46, 'Taker', '00:04:28', '2021-05-18 20:25:53.000', 6, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(47, 'For The Guilty', '00:03:39', '2021-05-18 20:25:53.000', 6, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(48, 'The Spark', '00:03:59', '2021-05-18 20:25:53.000', 6, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(49, 'Lorelei', '00:03:58', '2021-05-18 20:25:53.000', 6, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(50, 'Blankenship', '00:03:56', '2021-05-18 20:25:53.000', 6, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(51, 'Acheron', '00:07:08', '2021-05-18 20:25:53.000', 6, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(52, 'Flutter', '00:04:27', '2021-05-18 20:25:53.000', 7, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(53, 'How to Make Love Stay', '00:04:24', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(54, 'Keep What You Have Built Up Here', '00:05:09', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(55, 'What Safe Means', '00:04:01', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(56, 'It Happened Because You Left', '00:08:09', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(57, 'Rally the Troops! Poke Holes In Their Defenses! Line Our Coffers with Their Coffins!', '00:04:23', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(58, 'It''s a Plague, and You''re Invited', '00:04:11', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(59, 'Everything Is Connected and Everything Matters (A Temporary Solution to a Permanent Problem)', '00:04:01', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(60, 'The Next Step to Regaining Control', '00:06:08', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(61, 'With Your Greatest Fears Realized, You Will Not Be Comforted', '00:03:07', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(62, 'I Am a Snail, and You Are a Pace I Cannot Match', '00:05:14', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(63, 'An Idea Is a Greater Monument Than a Cathedral', '00:06:52', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(64, 'Archival Footage', '00:01:43', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(65, 'It Was Your Heart That Saved You', '00:02:24', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(66, 'So How Many Points Do You Have ''Till You Gain, You Know, the Ultimate Power?', '00:04:08', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(67, 'Accually, I''m Just Wearing Your Glasses', '00:09:21', '2021-05-18 20:25:53.000', 8, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(68, 'The Brakeup', '00:05:20', '2021-05-18 20:25:53.000', 9, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(69, 'Lovely Crawl', '00:03:40', '2021-05-18 20:25:53.000', 9, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(70, 'Go and Come Back', '00:04:30', '2021-05-18 20:25:53.000', 9, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(71, 'I Want More Life', '00:02:54', '2021-05-18 20:25:53.000', 9, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(72, 'Satelite', '00:03:11', '2021-05-18 20:25:53.000', 9, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(73, 'While I''m Waiting', '00:03:13', '2021-05-18 20:25:53.000', 9, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(74, 'Magnificent Oblivion', '00:05:35', '2021-05-18 20:25:53.000', 9, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(75, 'Where Do I End', '00:03:13', '2021-05-18 20:25:53.000', 9, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(76, 'Young Girls'' Fangs', '00:01:52', '2021-05-18 20:25:53.000', 9, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(77, 'Patron Saint', '00:03:07', '2021-05-18 20:25:53.000', 9, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(78, 'Preface', '00:02:04', '2021-05-18 20:25:53.000', 10, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(79, 'Leave', '00:04:13', '2021-05-18 20:25:53.000', 10, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(80, 'Blue', '00:03:29', '2021-05-18 20:25:53.000', 10, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(81, 'Ghost', '00:04:12', '2021-05-18 20:25:53.000', 10, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(82, 'Meaningless', '00:02:25', '2021-05-18 20:25:53.000', 10, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(83, 'Child', '00:03:11', '2021-05-18 20:25:53.000', 10, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(84, 'Sandy', '00:05:10', '2021-05-18 20:25:53.000', 10, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(85, 'Leave Demo', '00:03:45', '2021-05-18 20:25:53.000', 10, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(86, 'Meaningless Demo', '00:02:07', '2021-05-18 20:25:53.000', 10, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(87, 'Tlen', '00:03:17', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(88, 'Czeczerecze', '00:02:52', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(89, 'CoToZaStan', '00:02:36', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(90, 'DziwneDźwieki', '00:03:04', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(91, 'MiłyMłodyCzłowiek', '00:02:30', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(92, 'AlboTak', '00:02:40', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(93, 'KilogramDobrychChwil', '00:02:36', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(94, 'NaNoże', '00:03:50', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(95, 'JestemSam', '00:01:24', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(96, 'ChłopSięTopi', '00:03:03', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(97, 'NakarmićZłeWrażenia', '00:05:13', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(98, 'DzieciWeMgle', '00:03:37', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(99, 'ToTu', '00:01:08', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(100, 'Zabawa', '00:03:16', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(101, 'WszystkoŹle', '00:04:28', '2021-05-18 20:25:53.000', 11, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(102, 'Collide', '00:04:37', '2021-05-18 20:25:53.000', 12, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(103, 'Syringes', '00:04:22', '2021-05-18 20:25:53.000', 12, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(104, 'Aches', '00:07:14', '2021-05-18 20:25:53.000', 12, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(105, 'Stomach', '00:05:46', '2021-05-18 20:25:53.000', 12, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(106, 'Lost', '00:01:43', '2021-05-18 20:25:53.000', 12, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(107, 'Moonbeam', '00:05:12', '2021-05-18 20:25:53.000', 12, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(108, 'Eyes', '00:06:32', '2021-05-18 20:25:53.000', 12, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(109, 'Drip', '00:05:43', '2021-05-18 20:25:53.000', 12, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(110, 'Rehab', '00:05:12', '2021-05-18 20:25:53.000', 12, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(111, 'Distance', '00:01:51', '2021-05-18 20:25:53.000', 12, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(112, 'Alison', '00:03:50', '2021-05-18 20:25:53.000', 13, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(113, 'Machine Gun', '00:04:26', '2021-05-18 20:25:53.000', 13, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(114, '40 Days', '00:03:14', '2021-05-18 20:25:53.000', 13, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(115, 'Sing', '00:04:48', '2021-05-18 20:25:53.000', 13, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(116, 'Here She Comes', '00:02:17', '2021-05-18 20:25:53.000', 13, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(117, 'Souvlaki Space Station', '00:05:57', '2021-05-18 20:25:53.000', 13, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(118, 'When The Sun Hits', '00:04:45', '2021-05-18 20:25:56.000', 13, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(119, 'Altogether', '00:03:41', '2021-05-18 20:25:56.000', 13, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(120, 'Melon Yellow', '00:03:53', '2021-05-18 20:25:56.000', 13, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(121, 'Dagger', '00:03:35', '2021-05-18 20:25:56.000', 13, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(122, 'MEADOW', '00:04:55', '2021-05-18 20:25:56.000', 14, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(123, 'PRAY', '00:05:15', '2021-05-18 20:25:56.000', 14, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(124, 'SERENADE', '00:05:02', '2021-05-18 20:25:56.000', 14, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(125, 'KIND', '00:02:17', '2021-05-18 20:25:56.000', 14, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(126, 'TOKYO', '00:05:41', '2021-05-18 20:25:56.000', 14, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(127, 'Airbag', '00:04:47', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(128, 'Paranoid Android', '00:06:27', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(129, 'Subterranean Homesick Alien', '00:04:27', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(130, 'Exit Music (For A Film)', '00:04:27', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(131, 'Let Down', '00:04:59', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(132, 'Karma Police', '00:04:24', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(133, 'Fitter Happier', '00:01:57', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(134, 'Electioneering', '00:03:50', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(135, 'Climbing Up The Walls', '00:04:45', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(136, 'No Surprises', '00:03:49', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(137, 'Lucky', '00:04:18', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(138, 'The Tourist', '00:05:26', '2021-05-18 20:25:56.000', 15, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(139, 'sweet days and her last kiss', '00:04:29', '2021-05-19 11:45:39.000', 16, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(140, 'we eat in subway and pass over his comment in silence', '00:04:29', '2021-05-19 11:45:39.000', 16, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(141, 'nou ni tokeru asobi', '00:04:53', '2021-05-19 11:45:39.000', 16, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(142, 'kinoshita fuyou', '00:02:29', '2021-05-19 11:45:39.000', 16, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(143, 'she found now', '00:05:06', '2021-05-19 11:45:39.000', 17, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(144, 'only tomorrow', '00:06:21', '2021-05-19 11:45:39.000', 17, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(145, 'who sees you', '00:06:12', '2021-05-19 11:45:39.000', 17, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(146, 'is this and yes', '00:05:06', '2021-05-19 11:45:39.000', 17, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(147, 'if i am', '00:03:54', '2021-05-19 11:45:39.000', 17, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(148, 'new you', '00:04:58', '2021-05-19 11:45:39.000', 17, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(149, 'in another way', '00:05:30', '2021-05-19 11:45:39.000', 17, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(150, 'nothing is', '00:03:34', '2021-05-19 11:45:39.000', 17, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(151, 'wonder 2', '00:05:51', '2021-05-19 11:45:39.000', 17, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(152, 'Mellow', '00:05:13', '2021-05-19 11:50:59.000', 18, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(153, 'Wavelength', '00:04:57', '2021-05-19 11:50:59.000', 18, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(154, 'Younger Than You', '00:04:29', '2021-05-19 11:50:59.000', 18, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(155, 'R<NAME>', '00:05:25', '2021-05-19 11:50:59.000', 18, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(156, 'Before Your Head Off', '00:04:07', '2021-05-19 11:50:59.000', 18, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(157, 'How Time Stretches', '00:03:36', '2021-05-19 11:50:59.000', 18, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(158, 'Rental', '00:04:03', '2021-05-19 11:50:59.000', 18, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(159, 'Vividly', '00:03:58', '2021-05-19 11:50:59.000', 18, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(160, 'Play the Slow Ones', '00:04:43', '2021-05-19 11:50:59.000', 18, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(161, 'Under the Same Name', '00:04:39', '2021-05-19 11:50:59.000', 18, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(162, '15 Step', '00:03:57', '2021-05-19 11:55:30.000', 19, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(163, 'Bodysnatchers', '00:04:02', '2021-05-19 11:55:30.000', 19, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(164, 'Nude', '00:04:15', '2021-05-19 11:55:30.000', 19, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(165, 'Weird Fishes / Arpeggi', '00:05:18', '2021-05-19 11:55:30.000', 19, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(166, 'All I Need', '00:03:48', '2021-05-19 11:55:30.000', 19, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(167, 'Faust Arp', '00:02:09', '2021-05-19 11:55:30.000', 19, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(168, 'Reckoner', '00:04:50', '2021-05-19 11:55:30.000', 19, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(169, 'House Of Cards', '00:04:28', '2021-05-19 11:55:30.000', 19, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(170, 'Jigsaw Falling Into Place', '00:04:08', '2021-05-19 11:55:30.000', 19, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(171, 'Videotape', '00:04:29', '2021-05-19 11:55:30.000', 19, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(172, 'soon', '00:02:46', '2021-05-19 11:57:34.000', 20, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(173, 'night is young', '00:05:44', '2021-05-19 11:57:34.000', 20, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(174, 'AO.', '00:07:31', '2021-05-19 11:57:34.000', 20, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(175, 'sleeptight', '00:03:39', '2021-05-19 11:57:34.000', 20, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(176, 'capitol', '00:03:51', '2021-05-19 11:59:02.000', 21, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(177, '4:35', '00:04:35', '2021-05-19 12:05:56.000', 22, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(178, 'Polip', '00:03:03', '2021-05-19 12:05:56.000', 22, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(179, 'Zamiokulkas', '00:02:31', '2021-05-19 12:05:56.000', 22, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(180, '<NAME> Z <NAME>iż Ze Strefy Komfortu', '00:04:59', '2021-05-19 12:05:56.000', 22, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(181, 'Headache', '00:05:34', '2021-05-19 12:05:56.000', 23, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(182, 'Baby Teeth', '00:04:41', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(183, 'Honey and Milk', '00:03:43', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(184, '<NAME>', '00:03:17', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(185, 'April to Death', '00:03:52', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(186, 'Angela', '00:04:44', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(187, 'Bedroom Ghost', '00:03:35', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(188, 'Interlude', '00:01:36', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(189, 'Tell Me When It Hurts', '00:04:47', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(190, 'Sleeping Season', '00:03:59', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(191, 'Always You', '00:03:44', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(192, 'Another Life', '00:04:49', '2021-05-19 12:05:56.000', 24, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(193, 'Eulogy', '00:01:12', '2021-05-19 12:09:43.000', 25, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(194, 'Joven', '00:03:16', '2021-05-19 12:09:43.000', 25, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(195, 'Another Day', '00:04:02', '2021-05-19 12:09:43.000', 25, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(196, 'Guilty', '00:03:17', '2021-05-19 12:09:43.000', 25, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(197, 'Temper', '00:03:45', '2021-05-19 12:09:43.000', 25, 0);
INSERT INTO songs (songid, song_name, duration, update_date, album, listens) VALUES(198, 'Sleep', '00:03:59', '2021-05-19 12:09:43.000', 25, 0);
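-- Users seed data: passwords are throwaway sample strings (some redacted as <PASSWORD>),
-- only user 31 ('Spotify') has isadmin = true, and rows that omit premium_days fall back
-- to the column default (or NULL).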
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(1, '<NAME>', 'nazNn9L\S.KuU4/,8cp]d;s+', false, 2, 3, 30);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(2, 'killerxmaciek666', '-PHZb}qUnn=s6CPQQ$Dx:NY/', false, 3, 2, 19);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(3, 'Dgaday', '_D2phWb^8jvgra/2cA=B(''WL', false, 1, 1, 5);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(4, 'Kubejj', 'L}aHnQ~,M)$]^Z^C(V!n}3ug', false, 3, 4, 9);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid) VALUES(5, 'Frov', 'ZtC\_v$a''qw&t>k5eq<ZHc9(', false, 5, 5);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(6, 'zabol123', ')SC;!:e6-Km{#./`{(!2sHZD', false, 1, 5, 30);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid) VALUES(7, '<NAME>', '6kmwB/?&VHPPr?''R3.pN\w7J', false, 5, 6);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(8, '<NAME>', 'v!Q@''HzYP*LJ"84b39PM:v$j', false, 3, 7, 30);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(9, '<NAME>', 'UL][*+_,8f5hEFr--ZM;k9`$', false, 1, 8, 10);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(10, '<NAME>', 'k/$"]"T?;F=u9j2**e,n&}$}', false, 2, 8, 30);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(11, '<NAME>', 'yS''}pC?y!37tWGF{J,mg_kRR', false, 3, 9, 10);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(12, '<NAME>', 'g"~~c%zX`ddW7#DxUawNTFvr', false, 1, 9, 21);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(13, '<NAME>', 'JGHkYH3z7pn&_99\>+YZ$*w}', false, 3, 8, 2);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(14, '<NAME>', 'PE(_h-AB]/W4._gAm3{''\Fwp', false, 4, 7, 22);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(15, '<NAME>', 'A]yT}f,6(pz4:Bb5>CK+5m%b', false, 4, 6, 16);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(16, '<NAME>', 'Mde#2@6x\$H;k}-a=?*(SC?H', false, 3, 5, 3);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(17, '<NAME>', '-<v_nH=S-a?euL97sCU7+%Yg', false, 2, 4, 10);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(18, '<NAME>', 'AZmKbL/6L\=3Q-!Q#zS;?6m"', false, 1, 3, 10);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(19, '<NAME>', 'FubtceY%5C`ejJ!Yg:~u*ud!', false, 3, 4, 24);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(20, '<NAME>', '8KLkxCCm@Bc9[h6,''~L/&VG6', false, 2, 2, 14);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(21, '<NAME>', 'eGzvB;5GQR7M8M/[ZxMwJ?b/', false, 3, 2, 10);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(22, '<NAME>', '+}m%fSg)@hSrR5.HJ8''_:dAG', false, 4, 1, 16);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(23, '<NAME>', 'g@MLQvMvfcD"{)^DW/s-^)39', false, 3, 1, 10);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(24, 'Moe Lester', 'xJj=''D9?4@(y3NEed-94dc5c', false, 3, 2, 30);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid) VALUES(25, 'Methamorphosis', '6de`^%D{ux&;\Y?RUbEns8FA', false, 5, 2);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid) VALUES(26, 'Euphoria', 'c9SD<6z\$_S5HaBf@w-2f`3\', false, 5, 6);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(27, 'Tiny Evil', '!L(^UkS7E?<PASSWORD>Z@F`Kh''Hf', false, 3, 8, 7);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(28, 'SUS', '33~%UFH!Yf3[?9>wu["G:Hv!', false, 3, 7, 9);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(29, 'Amogus', 'nGW*4$''{GaeZS#>=)2bFg\4n', false, 1, 6, 27);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(30, 'rat', '<PASSWORD>', false, 1, 5, 4);
INSERT INTO users (userid, username, "password", isadmin, planid, cityid, premium_days) VALUES(31, 'Spotify', 'xxxxxx', true, 1, 4, 7);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 1);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 2);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 3);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 4);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 5);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 6);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 7);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 8);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 9);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 10);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 11);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 12);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 13);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 14);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 15);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 16);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 17);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 81);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 52);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(1, 75);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 138);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 137);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 136);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 135);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 134);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 133);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 132);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 131);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 130);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 18);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 19);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 20);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 21);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 22);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 23);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 24);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 25);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 26);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 27);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(2, 28);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 29);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 30);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 31);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 32);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 33);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 34);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 35);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 36);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 37);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 38);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 39);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 40);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 129);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 128);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 127);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 126);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 125);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 124);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 123);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(3, 122);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 121);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 120);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 119);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 118);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 117);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 116);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 115);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 114);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 113);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 112);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 111);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 110);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 41);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 42);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 43);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 44);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 45);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 46);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 47);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(4, 48);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 49);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 50);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 51);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 52);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 53);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 54);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 55);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 56);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 57);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 58);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 59);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 60);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 61);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 62);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 63);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 64);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 65);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 66);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 67);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(5, 68);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 69);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 70);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 109);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 108);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 107);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 106);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 105);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 104);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 103);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 102);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 101);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 100);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 99);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 98);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 97);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 96);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 95);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 94);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 93);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(6, 92);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 91);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 90);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 71);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 72);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 73);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 74);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 75);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 76);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 77);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 78);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 79);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 80);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 81);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 82);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 83);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 85);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 86);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 87);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 88);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(7, 89);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 90);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 13);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 3);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 1);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 2);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 4);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 35);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 6);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 123);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 4);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 57);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 7);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 5);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 22);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 86);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 8);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 6);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 79);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 9);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(8, 42);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 7);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 80);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 8);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 122);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 70);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 7);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 62);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 60);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 6);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 59);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 45);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 9);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 4);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 48);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 7);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 4);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 23);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 8);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 73);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(9, 7);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 3);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 26);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 6);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 2);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 54);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 15);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 115);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 113);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 76);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 125);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 6);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 126);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 135);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 12);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 5);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 58);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 57);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 69);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 67);
INSERT INTO playlists_has_songs (playlistid, songid) VALUES(10, 87);
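-- user_has_playlist seed data: assigns each playlist to its owning user
-- (playlists 21-50 all belong to user 31, the admin 'Spotify' account).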
INSERT INTO user_has_playlist (playlistid, userid) VALUES(1, 1);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(2, 2);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(3, 3);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(4, 4);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(5, 5);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(6, 6);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(7, 1);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(8, 1);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(9, 23);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(10, 2);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(11, 3);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(12, 3);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(13, 11);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(14, 29);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(15, 28);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(16, 21);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(17, 14);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(18, 16);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(19, 18);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(20, 25);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(21, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(22, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(23, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(24, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(25, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(26, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(27, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(28, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(29, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(30, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(31, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(32, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(33, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(34, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(35, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(36, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(37, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(38, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(39, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(40, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(41, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(42, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(43, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(44, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(45, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(46, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(47, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(48, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(49, 31);
INSERT INTO user_has_playlist (playlistid, userid) VALUES(50, 31);
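-- user_likes_songs seed data: many-to-many links recording which users have liked which songs.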
INSERT INTO user_likes_songs (songid, userid) VALUES(1, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(1, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(1, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(2, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(2, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(3, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(4, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(4, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(4, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(4, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(5, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(5, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(5, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(5, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(6, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(6, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(7, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(7, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(7, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(7, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(7, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(8, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(8, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(8, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(8, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(8, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(8, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(9, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(9, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(9, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(9, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(9, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(10, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(10, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(10, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(10, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(10, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(10, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(10, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(11, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(11, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(11, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(11, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(11, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(11, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(11, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(12, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(12, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(12, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(12, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(13, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(13, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(13, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(13, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(13, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(14, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(14, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(14, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(14, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(14, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(15, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(15, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(15, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(15, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(15, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(15, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(16, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(16, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(16, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(16, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(16, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(17, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(17, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(17, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(17, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(17, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(17, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(17, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(18, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(18, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(18, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(18, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(18, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(18, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(18, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(18, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(19, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(19, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(19, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(19, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(19, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(19, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(19, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(19, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(20, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(20, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(20, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(20, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(20, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(20, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(20, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(20, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(20, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(21, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(21, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(21, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(21, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(21, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(21, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(22, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(23, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(23, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(23, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(23, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(23, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(24, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(24, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(24, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(24, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(24, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(24, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(25, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(25, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(25, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(25, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(25, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(25, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(25, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(25, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(26, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(26, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(26, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(26, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(26, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(26, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(26, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(26, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(27, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(27, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(27, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(27, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(27, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(27, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(27, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(27, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(28, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(29, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(29, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(29, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(29, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(29, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(29, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(29, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(29, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(29, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(30, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(30, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(30, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(30, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(30, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(30, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(30, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(30, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(30, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(31, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(32, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(32, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(32, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(32, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(32, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(32, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(32, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(32, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(32, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(33, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(34, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(35, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(36, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(36, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(36, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(36, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(36, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(37, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(38, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(39, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(40, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(41, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(41, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(41, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(41, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(41, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(41, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(41, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(42, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(42, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(42, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(42, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(42, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(43, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(43, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(44, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(44, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(44, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(44, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(44, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(44, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(45, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(45, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(45, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(45, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(45, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(45, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(46, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(46, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(46, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(46, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(46, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(46, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(47, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(47, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(47, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(47, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(47, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(47, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(48, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(48, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(48, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(48, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(48, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(48, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(48, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(48, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(49, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(50, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(51, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(51, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(51, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(51, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(51, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(51, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(51, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(51, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(51, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(52, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(53, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(53, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(53, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(53, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(53, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(53, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(53, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(53, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(53, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(53, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(54, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(54, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(54, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(54, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(54, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(54, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(54, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(55, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(56, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(56, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(56, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(56, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(56, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(56, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(56, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(56, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(57, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(57, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(57, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(57, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(57, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(57, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(57, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(57, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(57, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(57, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(58, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(59, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(60, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(60, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(60, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(60, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(60, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(60, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(60, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(61, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(61, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(61, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(61, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(61, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(61, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(61, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(62, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(62, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(62, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(62, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(62, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(62, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(63, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(63, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(63, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(63, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(63, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(63, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(63, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(63, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(63, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(63, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(64, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(65, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(65, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(65, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(65, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(65, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(65, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(65, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(66, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(67, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(67, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(67, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(67, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(67, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(67, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(67, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(67, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(67, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(68, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(69, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(70, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(71, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(72, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(73, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(74, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(75, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(76, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(77, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(78, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(79, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(80, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(80, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(80, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(80, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(80, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(80, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(80, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(80, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(80, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(80, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(81, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(81, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(81, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(81, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(81, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(81, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(81, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(81, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(81, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(81, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(82, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(83, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(83, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(83, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(83, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(83, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(83, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(83, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(83, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(83, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(83, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(84, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(85, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(85, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(85, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(85, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(85, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(85, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(85, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(86, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(86, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(86, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(86, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(86, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(86, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(86, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(86, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(86, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(87, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(87, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(87, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(87, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(87, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(87, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(87, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(88, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(88, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(88, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(88, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(88, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(88, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(88, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(89, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(89, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(89, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(89, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(89, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(89, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(89, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(89, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(89, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(89, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(90, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(90, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(90, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(90, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(90, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(90, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(90, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(90, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(90, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(91, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(91, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(91, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(91, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(91, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(91, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(91, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(91, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(92, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(93, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(94, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(94, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(94, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(94, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(94, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(94, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(94, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(94, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(94, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(95, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(95, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(95, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(95, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(95, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(95, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(95, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(95, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(95, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(95, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(96, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(96, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(96, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(96, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(96, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(96, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(96, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(97, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(98, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(98, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(99, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(100, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(100, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(100, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(100, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(100, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(100, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(100, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(100, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(100, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(100, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(101, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(102, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(102, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(102, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(103, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(103, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(103, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(103, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(104, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(105, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(105, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(105, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(105, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(105, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(106, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(106, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(106, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(106, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(106, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(106, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(106, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(107, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(107, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(107, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(107, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(107, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(107, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(108, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(108, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(108, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(108, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(108, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(109, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(109, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(109, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(109, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(109, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(109, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(110, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(110, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(110, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(110, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(110, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(110, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(111, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(111, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(111, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(111, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(112, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(112, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(112, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(112, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(112, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(113, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(113, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(113, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(113, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(113, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(113, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(114, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(114, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(114, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(114, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(114, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(115, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(115, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(115, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(115, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(116, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(116, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(116, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(116, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(116, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(117, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(117, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(117, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(117, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(117, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(118, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(118, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(118, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(118, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(118, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(118, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(119, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(119, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(119, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(119, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(120, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(121, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(122, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(122, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(122, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(122, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(122, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(122, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(122, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(122, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(123, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(123, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(123, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(123, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(123, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(124, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(124, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(124, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(124, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(124, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(124, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(125, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(125, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(125, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(125, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(125, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(125, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(125, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(126, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(126, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(126, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(126, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(126, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(126, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(127, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(128, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(128, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(128, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(128, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(128, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(128, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(128, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(128, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(128, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(129, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(129, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(129, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(129, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(129, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(129, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(129, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(129, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(130, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(131, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(131, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(131, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(131, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(131, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(131, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(131, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(132, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(132, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(132, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(132, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(132, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(132, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(132, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(132, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(133, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(133, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(133, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(133, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(133, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(134, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(134, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(134, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(134, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(134, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(134, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(134, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(134, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(135, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(135, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(135, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(135, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(135, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(135, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(135, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(136, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(136, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(136, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(136, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(136, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(136, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(136, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(137, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(137, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(137, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(137, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(138, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(138, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(3, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(3, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(3, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(139, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(139, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(139, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(140, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(140, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(140, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(140, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(140, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(140, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(141, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(141, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(141, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(141, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(141, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(141, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(142, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(142, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(142, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(142, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(142, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(143, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(143, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(143, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(143, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(143, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(143, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(143, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(144, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(144, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(144, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(144, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(144, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(144, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(144, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(145, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(145, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(145, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(146, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(146, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(146, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(146, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(146, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(146, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(146, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(147, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(147, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(147, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(147, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(147, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(147, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(147, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(147, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(148, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(148, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(148, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(148, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(148, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(148, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(148, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(149, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(149, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(149, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(149, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(149, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(149, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(149, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(149, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(149, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(149, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(150, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(150, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(150, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(150, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(150, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(150, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(151, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(151, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(151, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(151, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(152, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(152, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(152, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(152, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(152, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(152, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(152, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(153, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(153, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(153, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(153, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(153, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(153, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(153, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(154, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(154, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(154, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(154, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(154, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(154, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(154, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(154, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(154, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(155, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(155, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(155, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(155, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(155, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(155, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(155, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(155, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(155, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(156, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(156, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(156, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(156, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(156, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(157, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(157, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(157, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(157, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(157, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(157, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(157, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(158, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(158, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(158, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(158, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(158, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(158, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(158, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(158, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(158, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(159, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(159, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(159, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(159, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(160, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(160, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(160, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(160, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(160, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(161, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(161, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(161, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(161, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(161, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(161, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(161, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(162, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(162, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(162, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(163, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(163, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(163, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(163, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(163, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(164, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(164, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(164, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(164, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(164, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(164, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(165, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(165, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(165, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(165, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(165, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(165, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(166, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(166, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(166, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(166, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(166, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(166, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(166, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(167, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(167, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(167, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(167, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(167, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(168, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(168, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(168, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(168, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(168, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(168, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(168, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(169, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(169, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(169, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(169, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(169, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(169, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(169, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(170, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(170, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(170, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(170, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(171, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(171, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(171, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(171, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(171, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(171, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(172, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(172, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(172, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(172, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(172, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(172, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(172, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(173, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(173, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(173, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(173, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(173, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(173, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(173, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(174, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(174, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(174, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(174, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(174, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(174, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(174, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(174, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(175, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(175, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(175, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(175, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(175, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(175, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(175, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(176, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(176, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(176, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(176, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(176, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(176, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(176, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(177, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(177, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(177, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(177, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(177, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(177, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(177, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(178, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(178, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(178, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(178, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(178, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(178, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(178, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(178, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(178, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(179, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(179, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(179, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(179, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(179, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(179, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(179, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(179, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(179, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(180, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(180, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(180, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(181, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(181, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(181, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(181, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(181, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(181, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(181, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(182, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(182, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(182, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(182, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(182, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(182, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(182, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(183, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(183, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(183, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(183, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(183, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(183, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(183, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(183, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(184, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(184, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(184, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(184, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(184, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(184, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(185, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(185, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(185, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(185, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(185, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(185, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(186, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(186, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(186, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(186, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(186, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(186, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(186, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(187, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(187, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(187, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(187, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(187, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(187, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(187, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(187, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(188, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(188, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(188, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(188, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(188, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(188, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(188, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(188, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(189, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(189, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(189, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(190, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(190, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(190, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(190, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(190, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(191, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(191, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(191, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(191, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(191, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(191, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(192, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(192, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(192, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(192, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(192, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(192, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(193, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(193, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(193, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(193, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(193, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(193, 21);
INSERT INTO user_likes_songs (songid, userid) VALUES(194, 22);
INSERT INTO user_likes_songs (songid, userid) VALUES(194, 23);
INSERT INTO user_likes_songs (songid, userid) VALUES(194, 24);
INSERT INTO user_likes_songs (songid, userid) VALUES(194, 25);
INSERT INTO user_likes_songs (songid, userid) VALUES(194, 26);
INSERT INTO user_likes_songs (songid, userid) VALUES(194, 27);
INSERT INTO user_likes_songs (songid, userid) VALUES(195, 28);
INSERT INTO user_likes_songs (songid, userid) VALUES(195, 29);
INSERT INTO user_likes_songs (songid, userid) VALUES(195, 30);
INSERT INTO user_likes_songs (songid, userid) VALUES(195, 1);
INSERT INTO user_likes_songs (songid, userid) VALUES(195, 2);
INSERT INTO user_likes_songs (songid, userid) VALUES(195, 3);
INSERT INTO user_likes_songs (songid, userid) VALUES(195, 4);
INSERT INTO user_likes_songs (songid, userid) VALUES(195, 5);
INSERT INTO user_likes_songs (songid, userid) VALUES(196, 6);
INSERT INTO user_likes_songs (songid, userid) VALUES(196, 7);
INSERT INTO user_likes_songs (songid, userid) VALUES(196, 8);
INSERT INTO user_likes_songs (songid, userid) VALUES(196, 9);
INSERT INTO user_likes_songs (songid, userid) VALUES(196, 10);
INSERT INTO user_likes_songs (songid, userid) VALUES(196, 11);
INSERT INTO user_likes_songs (songid, userid) VALUES(197, 12);
INSERT INTO user_likes_songs (songid, userid) VALUES(197, 13);
INSERT INTO user_likes_songs (songid, userid) VALUES(197, 14);
INSERT INTO user_likes_songs (songid, userid) VALUES(197, 15);
INSERT INTO user_likes_songs (songid, userid) VALUES(197, 16);
INSERT INTO user_likes_songs (songid, userid) VALUES(197, 17);
INSERT INTO user_likes_songs (songid, userid) VALUES(197, 18);
INSERT INTO user_likes_songs (songid, userid) VALUES(197, 19);
INSERT INTO user_likes_songs (songid, userid) VALUES(198, 20);
INSERT INTO user_likes_songs (songid, userid) VALUES(198, 21);
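-- The rows above complete the user_likes_songs seed data (one row per songid/userid pair).
-- A minimal sanity-check sketch (an addition, not part of the original seed): after loading,
-- the query below lists the ten most-liked songs using only the columns seeded here.
SELECT songid, COUNT(userid) AS like_count
FROM user_likes_songs
GROUP BY songid
ORDER BY like_count DESC, songid
LIMIT 10;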
INSERT INTO song_has_geners (songid, genere) VALUES(1, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(1, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(1, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(2, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(2, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(2, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(3, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(3, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(3, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(4, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(4, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(4, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(5, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(5, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(5, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(6, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(6, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(6, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(7, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(8, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(9, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(10, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(11, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(12, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(13, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(14, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(15, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(16, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(17, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(18, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(19, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(20, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(21, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(22, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(23, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(24, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(25, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(26, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(18, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(19, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(20, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(21, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(22, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(23, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(24, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(25, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(26, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(18, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(19, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(20, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(21, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(22, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(23, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(24, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(26, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(27, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(28, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(29, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(30, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(31, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(28, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(29, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(30, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(31, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(28, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(29, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(30, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(31, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(28, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(29, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(30, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(31, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(32, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(33, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(34, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(36, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(37, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(38, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(39, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(40, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(41, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(33, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(34, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(36, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(37, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(38, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(39, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(40, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(41, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(38, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(42, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(43, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(44, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(45, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(46, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(47, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(48, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(49, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(50, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(51, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(42, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(43, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(44, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(45, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(46, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(47, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(48, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(49, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(50, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(51, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(42, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(43, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(44, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(45, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(46, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(47, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(48, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(49, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(50, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(51, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(52, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(52, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(52, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(53, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(54, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(55, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(56, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(57, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(58, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(59, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(60, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(61, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(62, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(63, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(64, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(65, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(66, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(67, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(53, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(54, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(55, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(56, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(57, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(58, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(59, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(60, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(61, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(62, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(63, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(64, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(65, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(66, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(67, 'Math-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(53, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(54, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(55, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(56, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(57, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(58, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(59, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(60, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(61, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(62, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(63, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(64, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(65, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(66, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(67, 'Emo'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(68, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(69, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(70, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(71, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(72, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(73, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(74, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(75, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(76, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(77, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(68, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(69, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(70, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(71, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(72, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(73, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(74, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(75, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(76, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(77, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(78, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(79, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(80, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(81, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(82, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(83, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(84, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(85, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(86, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(78, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(79, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(80, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(81, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(82, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(83, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(84, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(85, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(86, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(78, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(79, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(80, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(81, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(82, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(83, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(84, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(85, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(86, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(87, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(88, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(89, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(90, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(91, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(92, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(93, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(94, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(95, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(96, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(97, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(98, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(99, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(100, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(101, 'Cold-Wave'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(87, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(88, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(89, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(90, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(91, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(92, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(93, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(94, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(95, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(96, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(97, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(98, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(99, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(100, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(101, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(87, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(88, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(89, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(90, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(91, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(92, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(93, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(94, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(95, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(96, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(97, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(98, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(99, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(100, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(101, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(102, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(103, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(104, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(105, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(106, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(107, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(108, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(109, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(110, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(111, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(102, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(103, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(104, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(105, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(106, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(107, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(108, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(109, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(110, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(111, 'Indie'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(112, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(113, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(114, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(115, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(116, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(117, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(118, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(119, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(120, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(121, 'Dream Pop'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(112, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(113, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(114, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(115, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(116, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(117, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(118, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(119, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(120, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(121, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(122, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(123, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(124, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(125, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(126, 'Shoegaze'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(122, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(123, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(124, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(125, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(126, 'Noise-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(127, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(128, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(129, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(130, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(131, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(132, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(133, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(134, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(135, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(136, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(137, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(138, 'Art-Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(127, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(128, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(129, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(130, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(131, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(132, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(133, 'Rock'::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(134, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(135, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(136, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(137, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(138, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(139, 'Dream Pop'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(140, 'Dream Pop'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(141, 'Dream Pop'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(142, 'Dream Pop'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(139, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(140, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(141, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(142, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(139, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(140, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(141, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(142, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(143, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(144, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(145, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(146, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(147, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(148, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(149, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(150, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(151, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(143, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(144, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(145, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(146, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(147, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(148, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(149, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(150, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(151, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(152, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(153, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(154, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(155, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(156, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(157, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(158, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(159, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(160, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(161, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(152, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(153, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(154, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(155, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(156, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(157, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(158, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(159, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(160, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(161, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(162, 'Art-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(163, 'Art-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(164, 'Art-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(165, 'Art-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(166, 'Art-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(167, 'Art-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(168, 'Art-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(169, 'Art-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(170, 'Art-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(171, 'Art-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(162, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(163, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(164, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(165, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(166, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(167, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(168, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(169, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(170, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(171, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(172, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(173, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(174, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(175, 'Shoegaze'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(172, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(173, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(174, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(175, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(176, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(177, 'Emo'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(178, 'Emo'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(179, 'Emo'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(180, 'Emo'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(177, 'Math-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(178, 'Math-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(179, 'Math-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(180, 'Math-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(181, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(182, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(183, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(184, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(185, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(186, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(187, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(188, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(189, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(190, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(191, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(192, 'Indie'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(193, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(194, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(195, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(196, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(197, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(198, 'Alternative-Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(193, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(194, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(195, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(196, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(197, 'Rock'::e_genere::e_genere);
INSERT INTO song_has_geners (songid, genere) VALUES(198, 'Rock'::e_genere::e_genere);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(1, 21);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(2, 22);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(3, 23);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(4, 24);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(5, 25);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(6, 26);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(7, 27);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(8, 28);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(9, 29);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(10, 30);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(11, 31);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(12, 32);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(13, 33);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(14, 34);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(15, 35);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(16, 36);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(17, 37);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(18, 38);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(19, 39);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(20, 40);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(21, 41);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(22, 42);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(23, 43);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(24, 44);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(25, 45);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(26, 46);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(27, 47);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(28, 48);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(29, 49);
INSERT INTO playlists_for_users (userid, playlistid) VALUES(30, 50);
[end of repo DaroTL/baza-danych-spotify (SQL)]
<file_sep># TestsSpringBatch
This project is described in [this Medium article](https://medium.com/@giuliana-bezerra/testes-de-integracao-com-spring-batch-2e019787d081).
<file_sep>CREATE TABLE `customer` (
`name` varchar(255) NOT NULL,
`age` integer,
`city` varchar(255) DEFAULT '',
`state` varchar(255) DEFAULT '',
`address` varchar(255) DEFAULT '',
`cell_phone` varchar(255) DEFAULT '',
`email` varchar(255) DEFAULT '',
`work_phone` varchar(255) DEFAULT '',
PRIMARY KEY (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
CREATE TABLE `account` (
`id` varchar(255) NOT NULL,
`customer` varchar(255) DEFAULT '',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;<file_sep>package com.example.transactions;
import java.util.Collections;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.batch.item.validator.ValidationException;
import org.springframework.batch.item.validator.Validator;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.stereotype.Component;
@Component
public class CustomerValidator implements Validator {
private final NamedParameterJdbcTemplate jdbcTemplate;
private static final String FIND_USUARIO = "SELECT COUNT(*) FROM customer WHERE name = :name";
public CustomerValidator(@Qualifier("appDatasource") DataSource dataSource) {
this.jdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
}
@Override
public void validate(Object obj) throws ValidationException {
if (obj instanceof Customer) {
Customer customer = (Customer) obj;
Map<String, String> parameterMap = Collections.singletonMap("name", customer.getName());
Long count = jdbcTemplate.queryForObject(FIND_USUARIO, parameterMap, Long.class);
if (count > 0) {
throw new ValidationException(String.format("Customer %s already exists!", customer.getName()));
}
}
}
}
<file_sep>package com.example.transactions;
import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.NotNull;
public class Customer {
@NotNull
private String name;
private Integer age;
private String state;
private String city;
private String address;
private String cellPhone;
private String email;
private String workPhone;
private Account account;
private List<Account> accounts = new ArrayList<>();
@Override
public String toString() {
String customer = "Customer{name='" + name + ", age='" + age + ", state=" + state + ", city=" + city
+ ", address=" + address + ", cellPhone=" + cellPhone + ", email=" + email + ", workPhone=" + workPhone
+ "'";
String accounts = ", accounts=[";
for (Account account : this.accounts)
accounts += account + ", ";
accounts = accounts.substring(0, accounts.length() - 2) + "]}";
return customer + accounts;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getAge() {
return age;
}
public void setAge(Integer age) {
this.age = age;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public String getCellPhone() {
return cellPhone;
}
public void setCellPhone(String cellPhone) {
this.cellPhone = cellPhone;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getWorkPhone() {
return workPhone;
}
public void setWorkPhone(String workPhone) {
this.workPhone = workPhone;
}
public Account getAccount() {
return account;
}
public void setAccount(Account account) {
this.account = account;
}
public List<Account> getAccounts() {
return accounts;
}
public void setAccounts(List<Account> accounts) {
this.accounts = accounts;
}
}
<file_sep>package com.example.transactions;
import javax.validation.constraints.NotNull;
public class Account {
@NotNull
private String id;
private String customer;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getCustomer() {
return customer;
}
public void setCustomer(String customer) {
this.customer = customer;
}
}
<file_sep>package com.example.transactions.steps;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.test.JobLauncherTestUtils;
import org.springframework.batch.test.context.SpringBatchTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.data.jdbc.AutoConfigureDataJdbc;
import org.springframework.context.annotation.PropertySource;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import com.example.transactions.BatchConfig;
import com.example.transactions.CustomerValidator;
import com.example.transactions.DatasourceConfig;
//This enables all the Spring goodness in JUnit 5.
@ExtendWith(SpringExtension.class)
//Puts the classes that create the beans required by the tests into the Spring execution context.
@ContextConfiguration(classes = { BatchConfig.class, DatasourceConfig.class, CustomerValidator.class })
//Points the test at the properties file.
@PropertySource("classpath:application.properties")
//Auto-configures the databases described in application.properties.
@AutoConfigureDataJdbc
//Uses the test profile so that the properties are not externalized.
@ActiveProfiles("test")
//Provides utilities for testing Spring Batch jobs. The specific one
//we care about in this example is the JobLauncherTestUtils.
@SpringBatchTest
public class StepImportCustomerIT {
@Autowired
private JobLauncherTestUtils jobLauncherTestUtils;
@Autowired
private DataSource dataSource;
private JdbcOperations jdbcTemplate;
@BeforeEach
public void setUp() {
this.jdbcTemplate = new JdbcTemplate(this.dataSource);
}
/**
* Runs the step and verifies the expected result, for example in the database.
*/
@Test
public void test() {
JobParameters jobParameters = new JobParametersBuilder()
.addString("customersFile", "classpath:customersFile.csv").toJobParameters();
JobExecution jobExecution = this.jobLauncherTestUtils.launchStep("stepImportCustomers", jobParameters);
assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());
List<Map<String, String>> customers = this.jdbcTemplate.query("select * from customer order by name",
(rs, rowNum) -> {
Map<String, String> item = new HashMap<>();
item.put("name", rs.getString("name"));
return item;
});
Map<String, String> customer1 = customers.get(0);
Map<String, String> customer2 = customers.get(1);
Map<String, String> customer3 = customers.get(2);
Map<String, String> customer4 = customers.get(3);
assertEquals("Cliente Teste 1", customer1.get("name"));
assertEquals("Cliente Teste 2", customer2.get("name"));
assertEquals("Cliente Teste 3", customer3.get("name"));
assertEquals("Cliente Teste 4", customer4.get("name"));
}
}
<file_sep>insert into customer (name, age) VALUES ('joao', 20);
<file_sep>package com.example.transactions.steps;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.test.JobLauncherTestUtils;
import org.springframework.batch.test.context.SpringBatchTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.data.jdbc.AutoConfigureDataJdbc;
import org.springframework.context.annotation.PropertySource;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import com.example.transactions.BatchConfig;
import com.example.transactions.CustomerValidator;
import com.example.transactions.DatasourceConfig;
//This enables all the Spring goodness in JUnit 5.
@ExtendWith(SpringExtension.class)
//Puts the classes that create the beans required by the tests into the Spring execution context.
@ContextConfiguration(classes = { BatchConfig.class, DatasourceConfig.class, CustomerValidator.class })
//Points the test at the properties file.
@PropertySource("classpath:application.properties")
//Auto-configures the databases described in application.properties.
@AutoConfigureDataJdbc
//Uses the test profile so that the properties are not externalized.
@ActiveProfiles("test")
//Provides utilities for testing Spring Batch jobs. The specific one
//we care about in this example is the JobLauncherTestUtils.
@SpringBatchTest
public class TransactionsIT {
@Autowired
private JobLauncherTestUtils jobLauncherTestUtils;
/**
* Testing the whole job is useful to verify that the final result is as
* expected, for example by checking the Spring Batch metadata to see how many
* items were read, written, ...
*/
@Test
public void test() throws Exception {
JobParameters jobParameters = new JobParametersBuilder()
.addString("customersFile", "file:src/test/resources/customersFile.csv").toJobParameters();
JobExecution jobExecution = this.jobLauncherTestUtils.launchJob(jobParameters);
assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());
StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next();
assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus());
assertEquals(4, stepExecution.getReadCount());
assertEquals(4, stepExecution.getWriteCount());
}
}
<file_sep>package com.example.transactions;
import javax.sql.DataSource;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.cloud.task.configuration.DefaultTaskConfigurer;
import org.springframework.cloud.task.configuration.TaskConfigurer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
@Configuration
public class DatasourceConfig {
@Bean
@Primary
@ConfigurationProperties(prefix = "spring.datasource")
public DataSource springDataSource() {
return DataSourceBuilder.create().build();
}
@Bean(name = "appDatasource")
@ConfigurationProperties(prefix = "app.datasource")
public DataSource sourceDataSource() {
return DataSourceBuilder.create().build();
}
@Bean
public TaskConfigurer taskConfigurer() {
return new DefaultTaskConfigurer(springDataSource());
}
}
[end of repo giuliana-bezerra/TestsSpringBatch (Markdown, SQL, Java)]
<repo_name>kklgf/universityBD<file_sep>/README.md
# University Database
## Project for the Databases laboratory course.
##### Team:
- Cyra Agata
- ~~<NAME>~~ (on dean's leave)
- <NAME>
##### Project:
- Topic: a database for a university.
- Server: SQLite is used -- the database is embedded in the application, which accesses it directly (no separate server is needed)
- Technologies: .NET and Entity Framework (Code First approach)
- Link: https://github.com/kklgf/universityBD
## Current database schema

## User guide

## Features
- Database schema implementation and creation [Patryk]
- Intelligent adding of new entities from the terminal [Patryk]
- Intelligent searching of entities from the terminal [Patryk]
- Viewing the grades of a particular student: function Student.StudentGrades(); [Agata]
- Viewing the courses of a particular employee: function Employee.EmployeesCourses(); [Agata]
- Viewing the ECTS points of a particular student: function Student.StudentsECTS(); [Agata]
- Viewing the "attendance list" (list of students at the particular section): function Section.AttendanceList(); [Agata]
- Viewing free places on a particular section: function Section.FreePlaces(); [Agata]
- Random data generation for whole database [Patryk]
## Data correctness assurance
- blocking a student from having two classes at the same time: HasClassesAtTheTime(studentID, section) is called while enrolling for classes -- an imitation of a trigger, since triggers are not supported by Entity Framework (see the sketch below)
- blocking course overload (more students than the section capacity) while adding a new enrollment
- ensuring that a referenced row exists (in another table) while adding an attribute -- you have to choose from existing rows
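
A minimal sketch of how the first two guards fit together when a new enrollment is created (the method and property names are taken from the code breakdown below; the actual `NewEnrollment()` also handles the console interaction):

```csharp
// Sketch only: the enrollment guards described above, not the actual Enrollment.cs code.
public static Enrollment TryEnroll(UniversityContext db, Student student, Section section)
{
    // Guard 1: the section must still have at least one free place.
    int enrolled = Section.CountStudentsOnSection(section, db);
    if (enrolled >= section.Capacity)
    {
        Console.WriteLine("This section is already full.");
        return null;
    }

    // Guard 2: the student must not have any other classes at that time.
    if (Student.HasClassesAtTheTime(student.StudentID, section))
    {
        Console.WriteLine("This student already has classes at that time.");
        return null;
    }

    var enrollment = new Enrollment { SectionID = section.SectionID, StudentID = student.StudentID };
    db.Enrollments.Add(enrollment);
    db.SaveChanges();
    return enrollment;
}
```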
## Dependencies
- Faker.Net: `dotnet add package Faker.Net --version 1.3.77`
- NBuilder: `dotnet add package NBuilder --version 6.1.0`
## Entire code breakdown
### File: Program.cs
#### Class: Program
> class responsible for the interaction with the user\
> reads the answers and dispatches to the appropriate functions depending on what the user decides to do
##### static void Main(string[] args)
> displaying main menu right after the start of an application
##### static void Search(UniversityContext database)
> choice of a table and redirection into the matching function in the chosen class [example: Student.Search()]
##### static void SeeTable(UniversityContext database)
> choice of a table and redirection
##### static object Add()
> choice of a table and redirection
##### static void SpecificViews()
> choice of a view (out of 5 available) and redirection into the appropriate class with the function implementation
##### static void Seed(UniversityContext context)
> data generation using Faker
##### static void WrongAction()
> informs the user that an incorrect value was chosen while deciding about actions
---------------------------------------------------
### File: Course.cs
#### Class: Course
> object class being mapped into the database table
[Key]\
public int CourseID { get; set; }\
[ForeignKey("Department")]\
public int DepartmentID { get; set; }\
public Department Department { get; set; }\
public String Name { get; set; }\
public int ECTS { get; set; }\
##### public static Course NewCourse()
> constructor: to construct a new course, an existing Department row is required, along with the other course properties (a sketch appears at the end of this section)
##### public static void SeeAll()
> being called from program main function prepares the view of the whole courses table in the database
##### public static void print(IQueryable\<Course> query)
> being called from SeeAll() or Search() function displays the result of the query
##### public static void Search()
> responsible for searching in the courses table --- requires specification of a value by which user decides to search in the database
##### public static Course SearchToAdd()
> used for searching while adding a new row into a different table
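
A minimal sketch of the pattern described above (assumed from this breakdown, not the actual Course.cs code):

```csharp
// Sketch only: assumed from the description above, not the actual Course.cs code.
public static Course NewCourse()
{
    Console.WriteLine("Department (choose from existing):");
    int departmentId = Department.SearchToAdd().DepartmentID; // an existing Department row is required

    Console.Write("Name: ");
    String name = Console.ReadLine();
    Console.Write("ECTS: ");
    int ects = int.Parse(Console.ReadLine());

    return new Course { DepartmentID = departmentId, Name = name, ECTS = ects };
}
```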
---------------------------------------------------
### File: Department.cs
#### Class: Department
> object class being mapped into the database table
public int DepartmentID { get; set; }\
public String Name { get; set; }\
##### public static Department NewDepartment()
> constructor: requires specification of the new department name
##### public static void SeeAll()
> being called from program main function prepares the view of the whole departments table in the database
##### public static void print(IQueryable\<Department> query)
> being called from SeeAll() or Search() function displays the result of the query
##### public static void Search()
> responsible for searching in the department table --- requires specification of department name or ID value
##### public static Department SearchToAdd()
> used for searching while adding a new row into a different table
---------------------------------------------------
### File: Employee.cs
#### Class: Employee
> object class being mapped into the database table
public int EmployeeID { get; set; }\
public String Name { get; set; }\
public String Surname { get; set; }\
public String Address { get; set; }\
public String City { get; set; }\
public String Country { get; set; }\
public String Phone { get; set; }\
public String Email { get; set; }\
public int Salary { get; set; }\
\[ForeignKey("Department")]\
public int DepartmentID { get; set; }\
public Department Department { get; set; }\
##### public static void SeeAll()
> being called from program main function prepares the view of the whole employees table in the database
##### public static void print(IQueryable\<Employee> query)
> displays the result of the query from SeeAll() or Search()
##### public static Employee NewEmployee()
> constructor: to construct a new Employee, an existing Department row is required (chosen from the already existing ones), along with the other class properties
##### public static void Search()
> responsible for searching in the employee table --- requires specification of a known value
##### public static Employee SearchToAdd()
> used for searching while adding a new row into a different table
##### public static void EmployeesCourses()
> prepares the view of the courses taught by the chosen employee\
> called directly from the program's main function, it first asks for the employee specification\
> when the chosen employee is found, a database query is prepared using the sections table, which connects employees with courses:\
> from all the sections where the chosen employee teaches, the courseID value is selected, and with this ID the course name is displayed (selected from the courses table by the known courseID)
---------------------------------------------------
### File: Enrollment.cs
#### Class: Enrollment
> object class being mapped into the database table\
> an enrollment is a connection between a student and a section (both must exist to create a new enrollment)\
> the class does not have a key of its own; each object is identified by the combination of SectionID and StudentID as foreign keys
\[ForeignKey("Section")]\
public int SectionID { get; set; }\
public Section Section { get; set; }\
\[ForeignKey("Student")]\
public int StudentID { get; set; }\
public Student Student { get; set; }\
##### public static Enrollment NewEnrollment()
> creating a new enrollment: the class properties are specified by choosing from already existing rows\
> 1. choice of a section: a successful enrollment requires at least one free place on the section --- the section capacity is compared with Section.CountStudentsOnSection(section)\
> 2. choice of a student: the student can't have any other classes at the time of the new section (checked with Student.HasClassesAtTheTime(studentID, section))
##### public static void SeeAll()
> prepares the view of the whole enrollments table in the database
##### public static void print(IQueryable\<Enrollment> query)
> displays the result of the query
##### public static void Search()
> used for searching in the enrollment table -- requires specification of a known value
##### public static Enrollment SearchToAdd()
> used for searching while adding a new row into a different table
---------------------------------------------------
### File: Grade.cs
#### Class: Grade
> object class being mapped into the database table\
> to create a new grade, existing rows in the Students and Courses tables are required
\[ForeignKey("Course")]\
public int CourseID { get; set; }\
public Course Course { get; set; }\
\[ForeignKey("Student")]\
public int StudentID { get; set; }\
public Student Student { get; set; }\
public int Year { get; set; }\
public int Semester { get; set; }\
public int Score { get; set; }\
##### public static Grade NewGrade()
> to create a new grade, existing rows in the Students and Courses tables are required, along with the specification of the other class properties
##### public static void SeeAll()
> prepares the view of the whole grades table in the database
##### public static void print(IQueryable\<Grade> query)
> displays the result of the query
##### public static void Search()
> used for searching in the grades table -- requires specification of a known value
##### public static Grade SearchToAdd()
---------------------------------------------------
### File: Section.cs
#### Class: Section
> object class being mapped into the database table\
> a section is a class given by a particular professor\
> constructing a section creates a connection between an employee (professor) and a course, so both of these must already exist before a new section can be created
public int SectionID { get; set; }\
\[ForeignKey("Course")]\
public int CourseID { get; set; }\
public Course Course { get; set; }\
\[ForeignKey("Employees")]\
public int EmployeeID { get; set; }\
public Employee Employee { get; set; }\
public int Day { get; set; }\
public String StartTime { get; set; }\
public int Length { get; set; }\
public int Capacity { get; set; }\
##### public static Section NewSection()
> specification of the class properties and checking that the chosen employee and course exist\
> moreover, the chosen professor must be employed in the department that organizes the course, which means that employee.DepartmentID must be the same as course.DepartmentID: otherwise
the creation of such a section is not possible
##### public static void SeeAll()
> prepares the view of the whole sections table in the database
##### public static void print(IQueryable\<Section> query)
> displays the result of the query
##### public static void Search()
> used for searching in the sections table --- requires specification of a known value
##### public static Section SearchToAdd()
> used for searching while adding a row into a different table
##### public static int CountStudentsOnSection(Section section, UniversityContext context)
> counts the students already enrolled for a particular section (used to prevent section overload and to give information about free places on a section)
##### public static void AttendanceList()
> prepares a list of students attending a particular section: uses the students table and the enrollments table as a connection:\
> first the section is specified, then using the SectionID, all matching enrollments are found. Enrollments table contains information about studentID, which is then used for selecting students' names
and surnames from the students table
##### public static void FreePlaces()
> displays the number of free places available for a section\
> used while enrolling a student (freePlaces > 0 is a required condition for a successful enrollment)
---------------------------------------------------
### File: Student.cs
#### Class: Student
> object class being mapped into the database table
public int StudentID { get; set; }\
public String Name { get; set; }\
public String Surname { get; set; }\
public String Address { get; set; }\
public String City { get; set; }\
public String Country { get; set; }\
public String Phone { get; set; }\
public String Email { get; set; }\
public int GraduationYear { get; set; }
##### public static Student NewStudent()
> constructor: creating a new student requires the specification of all the above class properties (StudentID is generated automatically, no need to specify it)
##### public static void Search()
> function used for searching in the database: requires specification of a known value
##### public static void print(IQueryable\<Student> query)
> displays the result of the query
##### public static void SeeAll()
> prepares the view of the whole table
##### public static Student SearchToAdd()
> function called to search for students while adding a new row into a different table
##### public static bool HasClassesAtTheTime(int studentID, Section section)
> function used while enrolling for classes (adding a new row into the enrollment table)
> tells whether the student can enroll for the given section or already has other classes at that time; in the latter case the enrollment is not available
##### public static void StudentsGrades()
> responsible for creating a view with the grades of a student
##### public static void StudentsECTS()
> responsible for calculating the ECTS points of a particular student\
> first the user needs to specify the student (by inserting their ID) and the semester they are interested in\
> ECTS points per course are stored in the courses table\
> for the chosen student, all grades from the matching semester are selected\
> if a score in the Grades table is higher than 2 (which means the student managed to pass the course), the CourseID reference in the Grades table is used to select the ECTS value from the courses table
and add it to the result\
> at the end, the result is displayed (a sketch of this calculation follows below)
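
A minimal sketch of that aggregation, using the entities listed in this breakdown (the real StudentsECTS() also handles the console input and output):

```csharp
// Sketch only: the ECTS aggregation described above, not the actual Student.cs code.
public static int CountEcts(UniversityContext db, int studentId, int semester)
{
    int result = 0;
    var passedGrades = db.Grades.Where(g => g.StudentID == studentId
                                         && g.Semester == semester
                                         && g.Score > 2);          // a score > 2 means the course was passed
    foreach (var grade in passedGrades)
    {
        var course = db.Courses.Where(c => c.CourseID == grade.CourseID).FirstOrDefault();
        if (course != null)
            result += course.ECTS;                                 // ECTS points are stored per course
    }
    return result;
}
```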
---------------------------------------------------
### File: UniversityContext.cs
#### Class: UniversityContext : DbContext
> class inheriting from DbContext, responsible for creating the connection with the database and mapping the classes to entities (a sketch follows the list of sets below)
public DbSet\<Course> Courses { get; set; }\
public DbSet\<Department> Departments { get; set; }\
public DbSet\<Employee> Employees { get; set; }\
public DbSet\<Enrollment> Enrollments { get; set; }\
public DbSet\<Grade> Grades { get; set; }\
public DbSet\<Section> Sections { get; set; }\
public DbSet\<Student> Students { get; set; }\
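
For illustration, a minimal sketch of what such a context can look like with the Code First approach and an embedded SQLite file (the file name and the composite-key configuration are assumptions based on this breakdown, not the actual UniversityContext.cs):

```csharp
// Sketch only: a typical EF Core DbContext for this schema.
using Microsoft.EntityFrameworkCore;

namespace universityBD
{
    class UniversityContext : DbContext
    {
        public DbSet<Course> Courses { get; set; }
        public DbSet<Department> Departments { get; set; }
        public DbSet<Employee> Employees { get; set; }
        public DbSet<Enrollment> Enrollments { get; set; }
        public DbSet<Grade> Grades { get; set; }
        public DbSet<Section> Sections { get; set; }
        public DbSet<Student> Students { get; set; }

        protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
        {
            // "university.db" is an assumed file name for the embedded SQLite database.
            optionsBuilder.UseSqlite("Data Source=university.db");
        }

        protected override void OnModelCreating(ModelBuilder modelBuilder)
        {
            // Enrollment has no key of its own; it is identified by SectionID + StudentID (see above).
            modelBuilder.Entity<Enrollment>().HasKey(e => new { e.SectionID, e.StudentID });
        }
    }
}
```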
---------------------------------------------------
### File: DepartmentNames.cs
#### Class: DepartmentNames
> file used for data generation to make department names sound less awkward than the ones generated automatically
##### public static List\<String> GetListOfNames()
---------------------------------------------------
### File: WeekDays.cs
#### Class: WeekDays
> class used for parsing numbers stored in the database into the string values like 'Monday' to be displayed for the user
##### public static String Parse(int number)
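
A sketch of one possible mapping (the exact day numbering used in the database is an assumption):

```csharp
// Sketch only: one possible WeekDays.Parse implementation; the day numbering is an assumption.
public static String Parse(int number)
{
    switch (number)
    {
        case 1: return "Monday";
        case 2: return "Tuesday";
        case 3: return "Wednesday";
        case 4: return "Thursday";
        case 5: return "Friday";
        case 6: return "Saturday";
        case 7: return "Sunday";
        default: return "Day " + number;
    }
}
```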
<file_sep>/universityBD/Section.cs
using Microsoft.EntityFrameworkCore.Storage;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
namespace universityBD
{
class Section
{
public int SectionID { get; set; }
[ForeignKey("Course")]
public int CourseID { get; set; }
public Course Course { get; set; }
[ForeignKey("Employees")]
public int EmployeeID { get; set; }
public Employee Employee { get; set; }
public int Day { get; set; }
public String StartTime { get; set; }
public int Length { get; set; }
public int Capacity { get; set; }
public static Section NewSection()
{
UniversityContext database = new UniversityContext();
Console.WriteLine("\nYou need to specify those values.");
Console.WriteLine("Course (chose from existing):");
int CourseID = Course.SearchToAdd().CourseID;
var course = database.Courses.Where(c => c.CourseID == CourseID).FirstOrDefault();
Console.WriteLine("ProfessorID (chose from existing):");
var potentialProfessors = database.Employees.Where(e => e.DepartmentID == course.DepartmentID);
Employee.print(potentialProfessors);
Console.WriteLine("\nEnter chosen professor ID: ");
int EmployeeID = int.Parse(Console.ReadLine());
bool professorFound = false;
while (!professorFound)
{
var profesor = database.Employees.Where(e => e.EmployeeID == EmployeeID).FirstOrDefault();
if (profesor.DepartmentID != course.DepartmentID)
{
Console.WriteLine("THIS PROFESSOR DOES NOT TEACH IN THE DEPARTMENT YOU CHOSE!");
Console.WriteLine("Please choose another professor from the ones below:\n");
Employee.print(potentialProfessors);
EmployeeID = int.Parse(Console.ReadLine());
}
else professorFound = true;
}
Console.Write("Day [number]: ");
int Day = int.Parse(Console.ReadLine());
Console.Write("StartTime: ");
String StartTime = Console.ReadLine();
Console.Write("Length: ");
int Length = int.Parse(Console.ReadLine());
Console.Write("Capacity: ");
int Capacity = int.Parse(Console.ReadLine());
Section section = new Section
{
CourseID = CourseID,
EmployeeID = EmployeeID,
Day = Day,
StartTime = StartTime,
Length = Length,
Capacity = Capacity
};
return section;
}
public static void SeeAll()
{
Console.WriteLine("Showing all the SECTIONS in the database:");
UniversityContext database = new UniversityContext();
var query = database.Sections;
print(query);
}
public static void print(IQueryable<Section> query)
{
UniversityContext database = new UniversityContext();
Console.WriteLine("\nID".PadRight(5) + "| " + "Course Name".PadRight(50) + "| " + "Profesor".PadRight(30) + "| " + "Day".PadRight(10)
+ "| " + "Start Time".PadRight(11) + "| " + "Length".PadRight(7) + "| " + "Capacity".PadRight(10) + "| " + "Free Places".PadRight(5));
Console.WriteLine("-----------------------------------------------------------------------------------" +
"------------------------------------------------------------------");
foreach (var item in query)
{
int freePlaces = item.Capacity - CountStudentsOnSection(item, database);
var course = (Course)database.Courses.Where(e => e.CourseID == item.CourseID).FirstOrDefault();
var employee = (Employee)database.Employees.Where(e => e.EmployeeID == item.EmployeeID).FirstOrDefault();
Console.WriteLine(item.SectionID.ToString().PadRight(4) + "| " + course.Name.PadRight(50) + "| " + employee.Name.PadRight(14) + " " +
employee.Surname.PadRight(15) + "| " + WeekDays.Parse(item.Day).PadRight(10) + "| " + item.StartTime.PadRight(11) + "| "
+ item.Length.ToString().PadRight(7) + "| " + item.Capacity.ToString().PadRight(10) + "| " + freePlaces.ToString().PadRight(5));
}
}
public static void Search()
{
System.Linq.IQueryable<universityBD.Section> query = null;
UniversityContext database = new UniversityContext();
bool run = true;
while (run)
{
run = false;
Console.WriteLine("\nBy which value you want to search?");
Console.WriteLine("1. ID");
Console.WriteLine("2. Course");
Console.WriteLine("3. Profesor");
Console.WriteLine("4. Day");
Console.WriteLine("5. StartTime");
Console.WriteLine("6. Length");
Console.WriteLine("7. Capacity");
Console.WriteLine("O. Cancel");
Console.WriteLine("###############################");
Console.Write("Your choice: ");
int action = int.Parse(Console.ReadLine());
switch (action)
{
case 1:
Console.Write("ID: ");
int id = int.Parse(Console.ReadLine());
query = database.Sections.Where(s => s.SectionID == id);
break;
case 2:
Console.WriteLine("Course (chose from existing):");
int CourseID = Course.SearchToAdd().CourseID;
query = database.Sections.Where(s => s.CourseID == CourseID);
break;
case 3:
Console.WriteLine("Profesor (chose from existing):");
int EmployeeID = Employee.SearchToAdd().EmployeeID;
query = database.Sections.Where(s => s.EmployeeID == EmployeeID);
break;
case 4:
Console.Write("Day: ");
int Day = int.Parse(Console.ReadLine());
query = database.Sections.Where(s => s.Day == Day);
break;
case 5:
Console.Write("StartTime: ");
String StartTime = Console.ReadLine();
query = database.Sections.Where(s => s.StartTime.Contains(StartTime));
break;
case 6:
Console.Write("Length: ");
int Length = int.Parse(Console.ReadLine());
query = database.Sections.Where(s => s.Length == Length);
break;
case 7:
Console.Write("Capacity: ");
int Capacity = int.Parse(Console.ReadLine());
query = database.Sections.Where(s => s.Capacity == Capacity);
break;
case 0:
return;
default:
Console.WriteLine("\n###############################");
Console.WriteLine("ERROR: CHOSEN INCORRECT VALUE");
Console.WriteLine("###############################\n");
run = true;
break;
}
}
print(query);
}
public static Section SearchToAdd()
{
UniversityContext database = new UniversityContext();
Section result = null;
bool run = true;
while (run)
{
Search();
Console.WriteLine("Now chose Section by inserting it's ID. Write '0' to abort.");
Console.Write("Your choice: ");
int id = int.Parse(Console.ReadLine());
switch (id)
{
case 0:
result = null;
run = false;
break;
default:
var query = database.Sections.Where(s => s.SectionID == id).FirstOrDefault();
if (query != null)
{
run = false;
result = query;
}
else
{
Console.WriteLine("There is not Section with ID = " + id);
Console.WriteLine("Try again");
}
break;
}
}
return result;
}
public static int CountStudentsOnSection(Section section, UniversityContext context)
{
int result = 0;
var foundEnrollments = from enrollments in context.Enrollments
where enrollments.SectionID == section.SectionID
select enrollments;
foreach (var enrollment in foundEnrollments) { result++; }
return result;
}
public static void AttendanceList()
{
UniversityContext database = new UniversityContext();
Console.WriteLine("First find the Section you're interested in: ");
Search();
Console.WriteLine("Now choose the Section by inserting it's ID. Write '0' to abort.");
Console.Write("Your choice: ");
int id = int.Parse(Console.ReadLine());
Console.WriteLine("\nAttendance list on this section:\n");
switch (id)
{
case 0:
break;
default:
var section = (Section)database.Sections.Where(e => e.SectionID == id).FirstOrDefault();
var foundEnrollments = from enrollments in database.Enrollments
where enrollments.SectionID == section.SectionID
select enrollments;
foreach (var enrollment in foundEnrollments)
{
var student = (Student)database.Students.Where(e => e.StudentID == enrollment.StudentID).FirstOrDefault();
Console.WriteLine(student.Name + " " + student.Surname);
}
break;
}
}
public static void FreePlaces()
{
UniversityContext database = new UniversityContext();
Console.WriteLine("First find the Section you're interested in: ");
Search();
Console.WriteLine("Now choose the Section by inserting it's ID. Write '0' to abort.");
int id = int.Parse(Console.ReadLine());
switch(id)
{
case 0:
break;
default:
var section = (Section)database.Sections.Where(e => e.SectionID == id).FirstOrDefault();
int freePlaces = section.Capacity - CountStudentsOnSection(section, database);
Console.WriteLine("There are " + freePlaces + " free places on this section.");
break;
}
}
}
}
<file_sep>/universityBD/Employee.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
namespace universityBD
{
class Employee
{
public int EmployeeID { get; set; }
public String Name { get; set; }
public String Surname { get; set; }
public String Address { get; set; }
public String City { get; set; }
public String Country { get; set; }
public String Phone { get; set; }
public String Email { get; set; }
public int Salary { get; set; }
[ForeignKey("Department")]
public int DepartmentID { get; set; }
public Department Department { get; set; }
public static void SeeAll()
{
Console.WriteLine("Showing all the EMPLOYEES in the database:");
UniversityContext database = new UniversityContext();
var query = database.Employees;
print(query);
}
public static void print(IQueryable<Employee> query)
{
UniversityContext database = new UniversityContext();
Console.WriteLine("\nID".PadRight(5) + "| " + "Name".PadRight(13) + " " + "Surname".PadRight(15) +
"| " + "Address".PadRight(27) + "| " + "City".PadRight(20) + "| " + "Country".PadRight(35) +
"| " + "Phone".PadRight(22) + "| " + "Email".PadRight(39) + "| " + "Salary".PadRight(8) + "| " + "Department Name".PadRight(15));
Console.WriteLine("---------------------------------------------------------------------------------------------------------------" +
"----------------------------------------------------------------------------------------------------------------------------");
foreach (var item in query)
{
var department = (Department)database.Departments.Where(e => e.DepartmentID == item.DepartmentID).FirstOrDefault();
Console.WriteLine(item.EmployeeID.ToString().PadRight(4) + "| " + item.Name.PadRight(13) + " " + item.Surname.PadRight(15) + "| "
+ item.Address.PadRight(27) + "| " + item.City.PadRight(20) + "| " + item.Country.PadRight(35) + "| " + item.Phone.ToString().PadRight(22)
+ "| " + item.Email.PadRight(39) + "| " + item.Salary.ToString().PadRight(8) + "| " + department.Name.PadRight(15));
}
}
public static Employee NewEmployee()
{
Console.WriteLine("\nAdding a new EMPLOYEE\nYou need to specify those values.");
Console.Write("Name: ");
String Name = Console.ReadLine();
Console.Write("Surname: ");
String Surname = Console.ReadLine();
Console.Write("Address: ");
String Address = Console.ReadLine();
Console.Write("City: ");
String City = Console.ReadLine();
Console.Write("Country: ");
String Country = Console.ReadLine();
Console.Write("Phone: ");
String Phone = Console.ReadLine();
Console.Write("Email: ");
String Email = Console.ReadLine();
Console.Write("Salary: ");
int Salary = int.Parse(Console.ReadLine());
Console.WriteLine("Department (chose from existing):");
int DepartmentID = Department.SearchToAdd().DepartmentID;
Employee employee = new Employee
{
Name = Name,
Surname = Surname,
Address = Address,
City = City,
Country = Country,
Phone = Phone,
Email = Email,
Salary = Salary,
DepartmentID = DepartmentID
};
return employee;
}
public static void Search()
{
System.Linq.IQueryable<universityBD.Employee> query = null;
UniversityContext database = new UniversityContext();
bool run = true;
while (run)
{
run = false;
Console.WriteLine("\nBy which value you want to SEARCH?");
Console.WriteLine("1. ID");
Console.WriteLine("2. Name");
Console.WriteLine("3. Surname");
Console.WriteLine("4. Adress");
Console.WriteLine("5. City");
Console.WriteLine("6. Country");
Console.WriteLine("7. Phone");
Console.WriteLine("8. Email");
Console.WriteLine("9. Salary");
Console.WriteLine("10. Department");
Console.WriteLine("O. Cancel");
Console.WriteLine("###############################");
Console.Write("Your choice: ");
int action = int.Parse(Console.ReadLine());
Console.WriteLine("###############################");
switch (action)
{
case 1:
Console.Write("Employee's ID: ");
int id = int.Parse(Console.ReadLine());
query = database.Employees.Where(e => e.EmployeeID == id);
break;
case 2:
Console.Write("Employee's Name: ");
String Name = Console.ReadLine();
query = database.Employees.Where(e => e.Name.Contains(Name));
break;
case 3:
Console.Write("Employee's Surname:");
String Surname = Console.ReadLine();
query = database.Employees.Where(e => e.Surname.Contains(Surname));
break;
case 4:
Console.Write("Employee's Address:");
String Address = Console.ReadLine();
query = database.Employees.Where(e => e.Address.Contains(Address));
break;
case 5:
Console.Write("Employee's City:");
String City = Console.ReadLine();
query = database.Employees.Where(e => e.City.Contains(City));
break;
case 6:
Console.Write("Employee's Country:");
String Country = Console.ReadLine();
query = database.Employees.Where(e => e.Country.Contains(Country));
break;
case 7:
Console.Write("Employee's Phone:");
String Phone = Console.ReadLine();
query = database.Employees.Where(e => e.Phone.Contains(Phone));
break;
case 8:
Console.Write("Employee's Email:");
String Email = Console.ReadLine();
query = database.Employees.Where(e => e.Email.Contains(Email));
break;
case 9:
Console.Write("Employee's Salary:");
int Salary = int.Parse(Console.ReadLine());
query = database.Employees.Where(e => e.Salary == Salary);
break;
case 10:
Console.Write("Employee's Department (chose from existing):");
int DepartmentID = Department.SearchToAdd().DepartmentID;
query = database.Employees.Where(e => e.DepartmentID == DepartmentID);
break;
case 0:
return;
default:
Console.WriteLine("\n###############################");
Console.WriteLine("ERROR: CHOSEN INCORRECT VALUE");
Console.WriteLine("###############################");
run = true;
break;
}
}
print(query);
}
public static Employee SearchToAdd()
{
UniversityContext database = new UniversityContext();
Employee result = null;
bool run = true;
while (run)
{
Search();
Console.WriteLine("Now chose Employee by inserting it's ID. Write '0' to abort.");
Console.Write("Your choice: ");
int id = int.Parse(Console.ReadLine());
switch (id)
{
case 0:
result = null;
run = false;
break;
default:
var query = database.Employees.Where(e => e.EmployeeID == id).FirstOrDefault();
if (query != null)
{
run = false;
result = query;
}
else
{
Console.WriteLine("There is no Employee with ID = " + id);
Console.WriteLine("Try again");
}
break;
}
}
return result;
}
public static void EmployeesCourses()
{
UniversityContext database = new UniversityContext();
Console.WriteLine("First find the employee whose courses you'd like to see");
Search();
Console.WriteLine("Now choose the Employee by entering its ID. Write '0' to abort.");
Console.Write("Your choice: ");
int id = int.Parse(Console.ReadLine());
var employee = database.Employees.Where(e => e.EmployeeID == id).FirstOrDefault();
switch(id)
{
case 0:
break;
default:
if (employee == null) { Console.WriteLine("There is no Employee with ID = " + id); break; }
Console.WriteLine("");
var foundSection = from sections in database.Sections
where sections.EmployeeID == employee.EmployeeID
select sections;
foreach (var section in foundSection)
{
var course = (Course)database.Courses.Where(e => e.CourseID == section.CourseID).FirstOrDefault();
Console.WriteLine(employee.Name + " " + employee.Surname + ": " + course.Name);
}
break;
}
}
}
}
<file_sep>/universityBD/Department.cs
using FizzWare.NBuilder;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace universityBD
{
class Department
{
public int DepartmentID { get; set; }
public String Name { get; set; }
public static Department NewDepartment()
{
Console.WriteLine("\nYou need to specify this value.");
Console.Write("Name: ");
String Name = Console.ReadLine();
Department department = new Department
{
Name = Name
};
return department;
}
public static void SeeAll()
{
Console.WriteLine("Showing all the DEPARTMENTS in the database:");
UniversityContext database = new UniversityContext();
var query = database.Departments;
print(query);
}
public static void print(IQueryable<Department> query)
{
Console.WriteLine("\nID".PadRight(5) + "| " + "Name".PadRight(30));
Console.WriteLine("----------------------------------------");
foreach (var item in query)
{
Console.WriteLine(item.DepartmentID.ToString().PadRight(4) + "| " + item.Name.PadRight(30));
}
}
public static void Search()
{
System.Linq.IQueryable<universityBD.Department> query = null;
UniversityContext database = new UniversityContext();
bool run = true;
while (run)
{
run = false;
Console.WriteLine("\nBy which value do you want to search?");
Console.WriteLine("1. ID");
Console.WriteLine("2. Name");
Console.WriteLine("0. Cancel");
Console.WriteLine("\n###############################");
Console.Write("Your choice: ");
int action = int.Parse(Console.ReadLine());
Console.WriteLine("###############################");
switch (action)
{
case 1:
Console.Write("Department ID: ");
int id = int.Parse(Console.ReadLine());
query = database.Departments.Where(d => d.DepartmentID == id);
break;
case 2:
Console.Write("Department Name: ");
String Name = Console.ReadLine();
query = database.Departments.Where(d => d.Name.Contains(Name));
break;
case 0:
return;
default:
Console.WriteLine("\n###############################");
Console.WriteLine("ERROR: INCORRECT VALUE CHOSEN");
Console.WriteLine("###############################");
run = true;
break;
}
}
print(query);
}
public static Department SearchToAdd()
{
UniversityContext database = new UniversityContext();
Department result = null;
bool run = true;
while (run)
{
Search();
Console.WriteLine("Now choose a Department by entering its ID. Write '0' to abort.");
Console.Write("Your choice: ");
int id = int.Parse(Console.ReadLine());
switch (id)
{
case 0:
result = null;
run = false;
break;
default:
var query = database.Departments.Where(d => d.DepartmentID == id).FirstOrDefault();
if (query != null)
{
run = false;
result = query;
}
else
{
Console.WriteLine("There is no Department with ID = " + id);
Console.WriteLine("Try again");
}
break;
}
}
return result;
}
}
}
<file_sep>/universityBD/Student.cs
using Microsoft.EntityFrameworkCore.Query;
using Microsoft.EntityFrameworkCore.Scaffolding.Metadata;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Text;
namespace universityBD
{
class Student
{
public int StudentID { get; set; }
public String Name { get; set; }
public String Surname { get; set; }
public String Address { get; set; }
public String City { get; set; }
public String Country { get; set; }
public String Phone { get; set; }
public String Email { get; set; }
public int GraduationYear { get; set; }
public static Student NewStudent()
{
Console.WriteLine("\n###############################");
Console.WriteLine("You need to specify those values.");
Console.Write("Name: ");
String Name = Console.ReadLine();
Console.Write("Surname: ");
String Surname = Console.ReadLine();
Console.Write("Address: ");
String Address = Console.ReadLine();
Console.Write("City: ");
String City = Console.ReadLine();
Console.Write("Country: ");
String Country = Console.ReadLine();
Console.Write("Phone: ");
String Phone = Console.ReadLine();
Console.Write("Email: ");
String Email = Console.ReadLine();
Console.Write("GraduationYear: ");
int GraduationYear = int.Parse(Console.ReadLine());
Student student = new Student
{
Name = Name,
Surname = Surname,
Address = Address,
City = City,
Country = Country,
Phone = Phone,
Email = Email,
GraduationYear = GraduationYear
};
return student;
}
public static void Search()
{
System.Linq.IQueryable<universityBD.Student> query = null;
UniversityContext database = new UniversityContext();
bool run = true;
while (run)
{
run = false;
Console.WriteLine("\n###############################");
Console.WriteLine("By which value do you want to search?");
Console.WriteLine("1. ID");
Console.WriteLine("2. Name");
Console.WriteLine("3. Surname");
Console.WriteLine("4. Address");
Console.WriteLine("5. City");
Console.WriteLine("6. Country");
Console.WriteLine("7. Phone");
Console.WriteLine("8. Email");
Console.WriteLine("9. Graduation year");
Console.WriteLine("0. Cancel");
Console.WriteLine("###############################");
Console.Write("Your choice: ");
int action = int.Parse(Console.ReadLine());
Console.WriteLine("###############################");
switch (action)
{
case 1:
Console.Write("ID: ");
int id = int.Parse(Console.ReadLine());
query = database.Students.Where(s => s.StudentID == id);
break;
case 2:
Console.Write("Name: ");
String Name = Console.ReadLine();
query = database.Students.Where(s => s.Name.Contains(Name));
break;
case 3:
Console.Write("Surname: ");
String Surname = Console.ReadLine();
query = database.Students.Where(s => s.Surname.Contains(Surname));
break;
case 4:
Console.Write("Address: ");
String Address = Console.ReadLine();
query = database.Students.Where(s => s.Address.Contains(Address));
break;
case 5:
Console.Write("City: ");
String City = Console.ReadLine();
query = database.Students.Where(s => s.City.Contains(City));
break;
case 6:
Console.Write("Country: ");
String Country = Console.ReadLine();
query = database.Students.Where(s => s.Country.Contains(Country));
break;
case 7:
Console.Write("Phone: ");
String Phone = Console.ReadLine();
query = database.Students.Where(s => s.Phone.Contains(Phone));
break;
case 8:
Console.Write("Email: ");
String Email = Console.ReadLine();
query = database.Students.Where(s => s.Email.Contains(Email));
break;
case 9:
Console.Write("Graduation year: ");
int GraduationYear = int.Parse(Console.ReadLine());
query = database.Students.Where(s => s.GraduationYear == GraduationYear);
break;
case 0:
return;
default:
Console.WriteLine("\n###############################");
Console.WriteLine("ERROR: INCORRECT VALUE CHOSEN");
Console.WriteLine("###############################");
run = true;
break;
}
}
Console.WriteLine("###############################");
print(query);
}
public static void print(IQueryable<Student> query)
{
Console.WriteLine("\nID".PadRight(5) + "| " + "Name".PadRight(15) + "| " + "Surname".PadRight(15) + "| " + "Address".PadRight(30) +
"| " + "City".PadRight(20) + "| " + "Country".PadRight(45) + "| " + "Phone".PadRight(25) + "| " + "Email".PadRight(40) +
"| " + "Graduation Year".PadRight(20));
Console.WriteLine("------------------------------------------------------------------------------------------------------------------------" +
"-----------------------------------------------------------------------------------------------------------------");
foreach (var item in query)
{
Console.WriteLine(item.StudentID.ToString().PadRight(4) + "| " + item.Name.PadRight(15) + "| " + item.Surname.PadRight(15)
+ "| " + item.Address.PadRight(30) + "| " + item.City.PadRight(20) + "| " + item.Country.PadRight(45) + "| " + item.Phone.PadRight(25)
+ "| " + item.Email.PadRight(40) + "| " + item.GraduationYear.ToString().PadRight(20));
}
}
public static void SeeAll()
{
Console.WriteLine("Showing all the STUDENTS in the database:");
UniversityContext database = new UniversityContext();
var query = database.Students;
print(query);
}
public static Student SearchToAdd()
{
UniversityContext database = new UniversityContext();
Student result = null;
bool run = true;
while (run)
{
Search();
Console.Write("Now choose a Student by entering its ID / Write '0' to abort: ");
int id = int.Parse(Console.ReadLine());
switch (id)
{
case 0:
result = null;
run = false;
break;
default:
var query = database.Students.Where(s => s.StudentID == id).FirstOrDefault();
if (query != null)
{
run = false;
result = query;
}
else
{
Console.WriteLine("There is no Student with ID = " + id);
Console.WriteLine("Try again");
}
break;
}
}
return result;
}
public static bool HasClassesAtTheTime(int studentID, Section section)
{
UniversityContext database = new UniversityContext();
var foundSections = from sections in database.Sections
join enrollments in database.Enrollments
on sections.SectionID equals enrollments.SectionID
where enrollments.StudentID == studentID
select sections;
foreach(var foundSection in foundSections)
{
if(foundSection.Day == section.Day && foundSection.StartTime == section.StartTime)
{ return true; }
}
return false;
}
public static void StudentsGrades()
{
UniversityContext database = new UniversityContext();
Console.WriteLine("\n###############################");
Console.WriteLine("\nFirst find the student whose grades you'd like to see");
Search();
Console.WriteLine("\n###############################");
Console.Write("Choose the selected Student by entering its ID / Write '0' to abort: ");
int id = int.Parse(Console.ReadLine());
switch(id)
{
case 0:
break;
default:
var student = database.Students.Where(e => e.StudentID == id).FirstOrDefault();
if (student == null) { Console.WriteLine("There is no Student with ID = " + id); break; }
var query = from courses in database.Courses
join grades in database.Grades
on courses.CourseID equals grades.CourseID
join students in database.Students
on grades.StudentID equals students.StudentID
where students.StudentID == id
select grades;
Console.WriteLine("\nName".PadRight(16) + "| " + "Surname".PadRight(15) + "| " + "Course Name".PadRight(50) +
"| " + "SCORE".PadRight(10));
Console.WriteLine("-----------------------------------------------------------------------------------------------");
foreach (var item in query)
{
var foundCourse = from courses in database.Courses
where courses.CourseID == item.CourseID
select courses;
foreach (var course in foundCourse)
{
Console.WriteLine(student.Name.PadRight(15) + "| " + student.Surname.PadRight(15)
+ "| " + course.Name.PadRight(50) + "| " + item.Score.ToString().PadRight(10));
}
}
break;
}
}
public static void StudentsECTS()
{
UniversityContext database = new UniversityContext();
int result = 0;
Console.WriteLine("First find the student whose ECTS points you'd like to see");
Search();
Console.Write("Choose the selected Student by entering its ID / Write '0' to abort: ");
int id = int.Parse(Console.ReadLine());
switch (id)
{
case 0:
break;
default:
Console.Write("Choose the semester you are interested in: ");
int semester = int.Parse(Console.ReadLine());
var query = from courses in database.Courses
join grades in database.Grades
on courses.CourseID equals grades.CourseID
join students in database.Students
on grades.StudentID equals students.StudentID
where students.StudentID == id
select grades;
foreach (var item in query)
{
if (item.Score > 2)
{
var passedCourse = (Course)database.Courses.Where(e => e.CourseID == item.CourseID).FirstOrDefault();
result += passedCourse.ECTS;
}
}
Console.WriteLine("\nCollected ECTS points: " + result);
break;
}
}
}
}
<file_sep>/universityBD/WeekDays.cs
using System;
using System.Collections.Generic;
using System.Text;
namespace universityBD
{
public class WeekDays
{
public static String Parse(int number)
{
switch(number)
{
case 1:
return "Monday";
case 2:
return "Tuesday";
case 3:
return "Wednesday";
case 4:
return "Thursday";
case 5:
return "Friday";
case 6:
return "Saturday";
case 7:
return "Sunday";
default:
return "ERROR";
}
}
}
}
<file_sep>/universityBD/Grade.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
namespace universityBD
{
class Grade
{
[ForeignKey("Course")]
public int CourseID { get; set; }
public Course Course { get; set; }
[ForeignKey("Student")]
public int StudentID { get; set; }
public Student Student { get; set; }
public int Year { get; set; }
public int Semester { get; set; }
public int Score { get; set; }
public static Grade NewGrade()
{
Console.WriteLine("\nYou need to specify those values.");
Console.WriteLine("CourseID (choose from existing):");
int CourseID = Course.SearchToAdd().CourseID;
Console.WriteLine("StudentID (choose from existing):");
int StudentID = Student.SearchToAdd().StudentID;
Console.WriteLine("Year:");
int Year = int.Parse(Console.ReadLine());
Console.WriteLine("Semester:");
int Semester = int.Parse(Console.ReadLine());
Console.WriteLine("Score:");
int Score = int.Parse(Console.ReadLine());
Grade grade = new Grade
{
CourseID = CourseID,
StudentID = StudentID,
Year = Year,
Semester = Semester,
Score = Score
};
return grade;
}
public static void SeeAll()
{
Console.WriteLine("Showing all the GRADES in the database:");
UniversityContext database = new UniversityContext();
var query = database.Grades;
print(query);
}
public static void print(IQueryable<Grade> query)
{
UniversityContext database = new UniversityContext();
Console.WriteLine("");
Console.WriteLine("Course".PadRight(50) + "| " + "Student".PadRight(30) + "| " + "Year".PadRight(6) + "| " + "Semester".PadRight(10) + "| " + "Score".PadRight(7));
Console.WriteLine("---------------------------------------------------------------------" +
"---------------------------------------------");
foreach (var item in query)
{
var student = (Student)database.Students.Where(e => e.StudentID == item.StudentID).FirstOrDefault();
var course = (Course)database.Courses.Where(e => e.CourseID == item.CourseID).FirstOrDefault();
Console.WriteLine(course.Name.PadRight(50) + "| " + student.Name.PadRight(14) + " " + student.Surname.PadRight(15) + "| "
+ item.Year.ToString().PadRight(6) + "| " + item.Semester.ToString().PadRight(10) + "| " + item.Score.ToString().PadRight(7));
}
}
public static void Search()
{
System.Linq.IQueryable<universityBD.Grade> query = null;
UniversityContext database = new UniversityContext();
bool run = true;
while (run)
{
run = false;
Console.WriteLine("\nBy which value do you want to search?");
Console.WriteLine("1. Course");
Console.WriteLine("2. Student");
Console.WriteLine("3. Year");
Console.WriteLine("4. Semester");
Console.WriteLine("5. Score");
Console.WriteLine("0. Cancel");
int action = int.Parse(Console.ReadLine());
switch (action)
{
case 1:
Console.WriteLine("Course (choose from existing):");
int CourseID = Course.SearchToAdd().CourseID;
query = database.Grades.Where(w => w.CourseID == CourseID);
break;
case 2:
Console.WriteLine("Student (choose from existing):");
int StudentID = Student.SearchToAdd().StudentID;
query = database.Grades.Where(w => w.StudentID == StudentID);
break;
case 3:
Console.WriteLine("Year:");
int Year = int.Parse(Console.ReadLine());
query = database.Grades.Where(w => w.Year == Year);
break;
case 4:
Console.WriteLine("Semester:");
int Semester = int.Parse(Console.ReadLine());
query = database.Grades.Where(w => w.Semester == Semester);
break;
case 5:
Console.WriteLine("Score:");
int Score = int.Parse(Console.ReadLine());
query = database.Grades.Where(w => w.Score == Score);
break;
case 0:
return;
default:
Console.WriteLine("ERROR: INCORRECT VALUE CHOSEN");
run = true;
break;
}
}
print(query);
}
public static Grade SearchToAdd()
{
UniversityContext database = new UniversityContext();
Grade result = null;
bool run = true;
while (run)
{
Search();
Console.WriteLine("Now choose a Grade by entering the Course's and Student's IDs. Write '0' to abort.");
int idc = int.Parse(Console.ReadLine());
int ids = int.Parse(Console.ReadLine());
if (idc == 0 || ids == 0)
{
result = null;
break;
}
else
{
var query = database.Grades.Where(e => e.CourseID == idc && e.StudentID == ids).FirstOrDefault();
if (query != null)
{
run = false;
result = query;
}
else
{
Console.WriteLine("There is no Grade with the specified IDs");
Console.WriteLine("Try again");
}
}
}
return result;
}
}
}
<file_sep>/universityBD/DepartmentNames.cs
using System;
using System.Collections.Generic;
using System.Text;
namespace universityBD
{
class DepartmentNames
{
public static List<String> GetListOfNames()
{
return Names;
}
private readonly static List<String> Names = new List<string> {
"Department of Chemical Engineering and Applied Chemistry",
"Department of Civil and Mineral Engineering",
"<NAME> Sr. Department of Electrical and Computer Engineering",
"Department of Materials Science and Engineering",
"Department of Mechanical and Industrial Engineering",
"Department of Anthropology",
"Department of Art History",
"<NAME>lap Department of Astronomy and Astrophysics",
"Department of Cell and Systems Biology",
"Department of Chemistry",
"Department of Classics",
"Department of Computer Science",
"Department of East Asian Studies",
"Department of Ecology and Evolutionary Biology",
"Department of Economics",
"Department of English",
"Department of French",
"Department of Geography and Planning",
"Department of Earth Sciences",
"Department of Germanic Languages and Literatures",
"Department of History",
"Department of Italian Studies",
"Department of Linguistics",
"Department of Mathematics",
"Department of Near and Middle Eastern Civilizations",
"Department of Philosophy",
"Department of Physics",
"Department of Political Science",
"Department of Psychology",
"Department for the Study of Religion",
"Department of Slavic Languages and Literatures",
"Department of Sociology",
"Department of Spanish and Portuguese",
"Department of Statistical Sciences",
"Department of Anesthesiology and Pain Medicine",
"Department of Biochemistry",
"Department of Family and Community Medicine",
"Department of Immunology",
"Department of Laboratory Medicine and Pathobiology",
"Department of Medical Biophysics",
"Department of Medical Imaging",
"Department of Medicine",
"Department of Molecular Genetics",
"Department of Nutritional Sciences",
"Department of Obstetrics and Gynaecology",
"Department of Occupational Science and Occupational Therapy",
"Department of Ophthalmology and Vision Sciences",
"Department of Otolaryngology - Head and Neck Surgery",
"Department of Paediatrics",
"Department of Pharmacology and Toxicology",
"Department of Physical Therapy",
"Department of Physiology",
"Department of Psychiatry",
"Department of Radiation Oncology",
"Department of Speech-Language Pathology",
"Department of Surgery",
"Department of Curriculum, Teaching and Learning",
"Department of Applied Psychology and Human Development",
"Department of Social Justice Education",
"Department of Leadership, Higher and Adult Education",
"Department of Anthropology",
"Department of Biology",
"Department of Chemical and Physical Sciences",
"Department of Economics",
"Department of English and Drama",
"Department of Geography",
"Department of Historical Studies",
"Department of Language Studies",
"Department of Management",
"Department of Mathematical and Computational Sciences",
"Department of Philosophy",
"Department of Political Science",
"Department of Psychology",
"Department of Sociology",
"Department of Visual Studies",
"Department of Management",
"Department of English",
"Department of Philosophy",
"Department of Biological Sciences",
"Department of Computer and Mathematical Sciences",
"Department of Psychology",
"Department of Physical and Environmental Sciences",
"Department of Anthropology",
"Department of Human Geography",
"Department of Political Science",
"Department of Sociology",
"Department of Arts, Culture and Media",
"Department of Historical and Cultural Studies"
}; // Data taken from https://www.vpacademic.utoronto.ca/academic-units/academic-unit-list-departments-edus/
}
}
<file_sep>/universityBD/Program.cs
using FizzWare.NBuilder;
using System;
using System.Collections.Generic;
using System.Data.Entity.Infrastructure;
using System.Linq;
namespace universityBD
{
class Program
{
static void Main(string[] args)
{
double version = 0.1;
Console.WriteLine("###############################");
Console.WriteLine("UniversityDB version " + version);
Console.WriteLine("###############################");
UniversityContext database = new UniversityContext();
bool run = true;
bool seedUsed = false;
while (run)
{
Console.WriteLine("\n###############################");
Console.WriteLine("What do you want to do?");
Console.WriteLine("1. Search in database");
Console.WriteLine("2. Add to database");
Console.WriteLine("3. See the whole table");
Console.WriteLine("4. See a specific view");
if (!seedUsed) {
Console.WriteLine("9. Generate data");
}
Console.WriteLine("0. Close");
Console.WriteLine("###############################");
Console.Write("Your choice: ");
int action = int.Parse(Console.ReadLine());
Console.WriteLine("###############################");
switch (action)
{
case 1:
Search(database);
break;
case 2:
object toBeAdded = Add();
if (toBeAdded != null)
{
database.Add(toBeAdded);
database.SaveChanges();
}
break;
case 3:
SeeTable(database);
break;
case 4:
SpecificViews();
break;
case 9:
if (!seedUsed)
{
Seed(database);
}
seedUsed = true;
break;
case 0:
run = false;
break;
default:
WrongAction();
break;
}
}
}
static void Search(UniversityContext database)
{
Console.WriteLine("\n###############################");
Console.WriteLine("In which table do you want to search?");
Console.WriteLine("1. Courses");
Console.WriteLine("2. Departments");
Console.WriteLine("3. Employees");
Console.WriteLine("4. Enrollments");
Console.WriteLine("5. Grades");
Console.WriteLine("6. Sections");
Console.WriteLine("7. Students");
Console.WriteLine("0. Cancel");
Console.WriteLine("###############################");
Console.Write("Your choice: ");
int action = int.Parse(Console.ReadLine());
Console.WriteLine("###############################");
switch (action)
{
case 1:
Course.Search();
break;
case 2:
Department.Search();
break;
case 3:
Employee.Search();
break;
case 4:
Enrollment.Search();
break;
case 5:
Grade.Search();
break;
case 6:
Section.Search();
break;
case 7:
Student.Search();
break;
case 0:
break;
default:
WrongAction();
break;
}
}
static void SeeTable(UniversityContext database)
{
Console.WriteLine("\n###############################");
Console.WriteLine("Which table are you interested in?");
Console.WriteLine("1. Courses");
Console.WriteLine("2. Departments");
Console.WriteLine("3. Employees");
Console.WriteLine("4. Enrollments");
Console.WriteLine("5. Grades");
Console.WriteLine("6. Sections");
Console.WriteLine("7. Students");
Console.WriteLine("0. Cancel");
Console.WriteLine("###############################");
Console.Write("Your choice: ");
int action = int.Parse(Console.ReadLine());
Console.WriteLine("###############################");
switch (action)
{
case 1:
Course.SeeAll();
break;
case 2:
Department.SeeAll();
break;
case 3:
Employee.SeeAll();
break;
case 4:
Enrollment.SeeAll();
break;
case 5:
Grade.SeeAll();
break;
case 6:
Section.SeeAll();
break;
case 7:
Student.SeeAll();
break;
case 0:
break;
default:
WrongAction();
break;
}
}
static object Add()
{
Console.WriteLine("\n###############################");
Console.WriteLine("To which table do you want to add a row?");
Console.WriteLine("1. Courses");
Console.WriteLine("2. Departments");
Console.WriteLine("3. Employees");
Console.WriteLine("4. Enrollments");
Console.WriteLine("5. Grades");
Console.WriteLine("6. Sections");
Console.WriteLine("7. Students");
Console.WriteLine("0. Cancel");
Console.WriteLine("###############################");
Console.Write("Your choice: ");
int action = int.Parse(Console.ReadLine());
Console.WriteLine("###############################");
switch (action)
{
case 1:
return Course.NewCourse();
case 2:
return Department.NewDepartment();
case 3:
return Employee.NewEmployee();
case 4:
return Enrollment.NewEnrollment();
case 5:
return Grade.NewGrade();
case 6:
return Section.NewSection();
case 7:
return Student.NewStudent();
case 0:
break;
default:
WrongAction();
break;
}
return null;
}
static void SpecificViews()
{
Console.WriteLine("\n###############################");
Console.WriteLine("What are you interested in?");
Console.WriteLine("1. Grades of a particular student (search for student)");
Console.WriteLine("2. ECTS points of a particular student (search for student)");
Console.WriteLine("3. Courses of a particular employee (search for employee)");
Console.WriteLine("4. Attendance list on a particular section (search for section)");
Console.WriteLine("5. Free places on a particular section (search for section)");
Console.WriteLine("0. Cancel");
Console.WriteLine("###############################");
Console.Write("Your choice: ");
int action = int.Parse(Console.ReadLine());
Console.WriteLine("###############################");
switch (action)
{
case 1:
Student.StudentsGrades();
break;
case 2:
Student.StudentsECTS();
break;
case 3:
Employee.EmployeesCourses();
break;
case 4:
Section.AttendanceList();
break;
case 5:
Section.FreePlaces();
break;
case 0:
break;
default:
WrongAction();
break;
}
}
static void Seed(UniversityContext context)
{
context.Database.EnsureDeleted();
context.Database.EnsureCreated();
int quantity = 33;
// Generate departments
var depNames = DepartmentNames.GetListOfNames();
int depQ = Math.Min(quantity, depNames.Count());
var selectedD = Pick<String>.UniqueRandomList(With.Exactly(depQ).Elements).From(depNames);
var iterD = new Stack<String>(selectedD);
var departments = Builder<Department>.CreateListOfSize(depQ)
.All()
.With(d => d.Name = iterD.Pop())
.Build();
foreach (var department in departments)
{
context.Add(department);
}
context.SaveChanges();
// Generate courses
/*int coursesPerDepartment = 10;
var allCourses = new List<Course>();
foreach (var dep in departments)
{
var courses = Builder<Course>.CreateListOfSize(Faker.RandomNumber.Next((int)(coursesPerDepartment*0.8), (int)(coursesPerDepartment *1.2)))
.All()
.With(d => d.Department = dep)
.With(d => d.Name = Faker.Company.CatchPhrase())
.With(d => d.ECTS = Faker.RandomNumber.Next(1, 8))
.Build();
allCourses.AddRange(courses);
}
foreach (var course in allCourses)
{
context.Add(course);
}
context.SaveChanges();*/
int coursesPerDepartment = 10;
int quantOfCourses = departments.Count() * Faker.RandomNumber.Next((int)(coursesPerDepartment * 0.8), (int)(coursesPerDepartment * 1.2));
var courses = Builder<Course>.CreateListOfSize(quantOfCourses)
.All()
.With(d => d.Department = Pick<Department>.RandomItemFrom(departments))
.With(d => d.Name = Faker.Company.CatchPhrase())
.With(d => d.ECTS = Faker.RandomNumber.Next(1,8))
.Build();
foreach (var course in courses)
{
context.Add(course);
}
context.SaveChanges();
// Generate employees
/*int employeesPerDepartment = 10;
var allEmployees = new List<Employee>();
foreach (var dep in departments)
{
var employees = Builder<Employee>.CreateListOfSize(employeesPerDepartment)
.All()
.With(d => d.Name = Faker.Name.First())
.With(d => d.Surname = Faker.Name.Last())
.With(d => d.Address = Faker.Address.StreetAddress())
.With(d => d.City = Faker.Address.City())
.With(d => d.Country = Faker.Address.Country())
.With(d => d.Phone = Faker.Phone.Number())
.With(d => d.Email = Faker.Internet.Email())
.With(d => d.Salary = Faker.RandomNumber.Next(2000, 6000))
.With(d => d.Department = dep)
.Build();
allEmployees.AddRange(employees);
}
foreach (var employee in allEmployees)
{
context.Add(employee);
}
context.SaveChanges();*/
int employeesPerDepartment = 9;
int quantOfEmployees = departments.Count() * Faker.RandomNumber.Next((int)(employeesPerDepartment * 0.8), (int)(employeesPerDepartment * 1.2));
var employees = Builder<Employee>.CreateListOfSize(quantOfEmployees)
.All()
.With(d => d.Name = Faker.Name.First())
.With(d => d.Surname = Faker.Name.Last())
.With(d => d.Address = Faker.Address.StreetAddress())
.With(d => d.City = Faker.Address.City())
.With(d => d.Country = Faker.Address.Country())
.With(d => d.Phone = Faker.Phone.Number())
.With(d => d.Email = Faker.Internet.Email())
.With(d => d.Salary = Faker.RandomNumber.Next(2000, 6000))
.With(d => d.Department = Pick<Department>.RandomItemFrom(departments))
.Build();
foreach (var employee in employees)
{
context.Add(employee);
}
context.SaveChanges();
// Generate students
int studentsPerDepartment = 100;
int quantOfStudents = departments.Count() * Faker.RandomNumber.Next((int)(studentsPerDepartment * 0.8), (int)(studentsPerDepartment * 1.2));
var students = Builder<Student>.CreateListOfSize(quantOfStudents)
.All()
.With(d => d.Name = Faker.Name.First())
.With(d => d.Surname = Faker.Name.Last())
.With(d => d.Address = Faker.Address.StreetAddress())
.With(d => d.City = Faker.Address.City())
.With(d => d.Country = Faker.Address.Country())
.With(d => d.Phone = Faker.Phone.Number())
.With(d => d.Email = Faker.Internet.Email())
.With(d => d.GraduationYear = Faker.RandomNumber.Next(2010, 2025))
.Build();
foreach (var student in students)
{
context.Add(student);
}
context.SaveChanges();
// Generate sections
/* var allSections = new List<Section>();
foreach (var cou in courses)
{
int sectionsPerCourse = Faker.RandomNumber.Next(3, 10);
int employeesPerCourse = Faker.RandomNumber.Next(1, 4);
var selectedE = Pick<Employee>.UniqueRandomList(With.Exactly(employeesPerCourse).Elements).From(employees);
var sections = Builder<Section>.CreateListOfSize(sectionsPerCourse)
.All()
.With(d => d.Course = cou)
.With(d => d.Employee = selectedE[Faker.RandomNumber.Next(0, selectedE.Count())])
.With(d => d.Day = Faker.RandomNumber.Next(1, 5))
.With(d => d.StartTime = Faker.RandomNumber.Next(8, 19).ToString()
+ ":" + (Faker.RandomNumber.Next(0, 3) * 15).ToString())
.With(d => d.Length = Faker.RandomNumber.Next(1, 4) * 45)
.With(d => d.Capacity = Faker.RandomNumber.Next(1, 4) * 10)
.Build();
allSections.AddRange(sections);
}
foreach (var section in allSections)
{
context.Add(section);
}
context.SaveChanges();*/
int sectionsPerCourse = 7;
int quantOfSections = courses.Count() * Faker.RandomNumber.Next((int)(sectionsPerCourse * 0.6), (int)(sectionsPerCourse * 1.3));
var sections = Builder<Section>.CreateListOfSize(quantOfSections)
.All()
.With(d => d.Course = Pick<Course>.RandomItemFrom(courses))
.With(d => d.Employee = Pick<Employee>.RandomItemFrom(employees))
.With(d => d.Day = Faker.RandomNumber.Next(1, 5))
.With(d => d.StartTime = Faker.RandomNumber.Next(8, 19).ToString("00")
+ ":" + (Faker.RandomNumber.Next(0, 3)*15).ToString("00"))
.With(d => d.Length = Faker.RandomNumber.Next(1, 4) * 45)
.With(d => d.Capacity = Faker.RandomNumber.Next(1, 4) * 10)
.Build();
foreach (var section in sections)
{
context.Add(section);
}
context.SaveChanges();
// Generate students grades
List<Student> oldStuds = students.Where(s => DateTime.Now.Year - s.GraduationYear > -4).ToList();
var allGrades = new List<Grade>();
foreach (var s in oldStuds)
{
var studYearEnded = Math.Min(DateTime.Now.Year - s.GraduationYear + 5, 6);
var coursesEnded = Math.Min(10*studYearEnded, courses.Count());
var selectedC = Pick<Course>.UniqueRandomList(With.Exactly(coursesEnded).Elements).From(courses);
var iter = new Stack<Course>(selectedC);
var grades = Builder<Grade>.CreateListOfSize(Math.Min(Faker.RandomNumber.Next(8 * studYearEnded, 10 * studYearEnded), selectedC.Count()))
.All()
.With(d => d.StudentID = s.StudentID)
.With(d => d.CourseID = iter.Pop().CourseID)
.With(d => d.Year = Faker.RandomNumber.Next(1, studYearEnded))
.With(d => d.Semester = d.Year * 2 + Faker.RandomNumber.Next(0, 1))
.With(d => d.Score = Faker.RandomNumber.Next(2, 5))
.Build();
allGrades.AddRange(grades);
}
foreach (var grade in allGrades)
{
context.Add(grade);
}
context.SaveChanges();
// Generate students enrolments
var allEnrolments = new List<Enrollment>();
foreach (var s in students)
{
var selectedS = Pick<Section>.UniqueRandomList(With.Exactly(15).Elements).From(sections);
var iter = new Stack<Section>(selectedS);
var enrolments = Builder<Enrollment>.CreateListOfSize(Faker.RandomNumber.Next(5, 15))
.All()
.With(d => d.SectionID = iter.Pop().SectionID)
.With(d => d.StudentID = s.StudentID)
.Build();
allEnrolments.AddRange(enrolments);
}
foreach (var enrolment in allEnrolments)
{
/*var section = (Section)context.Sections.Where(e => e.SectionID == enrolment.SectionID).FirstOrDefault();
int freePlaces = section.Capacity - Section.CountStudsOnTmpDB(section, context);
if (freePlaces > 0)
{*/
context.Add(enrolment);
/* context.SaveChanges();
}*/
}
context.SaveChanges();
// Correction
foreach (var section in sections)
{
int freePlaces = section.Capacity - Section.CountStudentsOnSection(section, context);
if (freePlaces < 0)
{
section.Capacity -= (freePlaces + freePlaces % 10);
}
}
context.SaveChanges();
}
static void WrongAction()
{
Console.WriteLine("\n###############################");
Console.WriteLine("ERROR: INCORRECT ACTION CHOSEN");
Console.WriteLine("###############################");
}
}
}
<file_sep>/universityBD/Enrollment.cs
using Microsoft.EntityFrameworkCore.Storage;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
namespace universityBD
{
class Enrollment
{
[ForeignKey("Section")]
public int SectionID { get; set; }
public Section Section { get; set; }
[ForeignKey("Student")]
public int StudentID { get; set; }
public Student Student { get; set; }
public static Enrollment NewEnrollment()
{
UniversityContext database = new UniversityContext();
Console.WriteLine("\nAdding a new ENROLLMENT\nYou need to specify those values.");
Console.WriteLine("Course (choose from existing):");
int CourseID = Course.SearchToAdd().CourseID;
Console.WriteLine("SectionID (choose from existing):");
var query = from sections in database.Sections where sections.CourseID == CourseID select sections;
Section.print(query);
bool SectionAvailable = false;
bool run = true;
int SectionID = 0;
while(run)
{
Console.Write("Put section ID here: ");
SectionID = int.Parse(Console.ReadLine());
query = from sections in database.Sections
where sections.SectionID == SectionID
select sections;
foreach(var item in query)
{ SectionAvailable = (item.Capacity > Section.CountStudentsOnSection(item, database)); }
if (SectionAvailable)
{
Console.WriteLine("Congratulations! This section is available!");
run = false;
}
else
{
Console.WriteLine("This section does not have enough free places for you. Try another one!");
Console.WriteLine("Press 1 to continue, 0 to quit");
int tryAgain = int.Parse(Console.ReadLine());
if (tryAgain == 0) { run = false; }
}
}
if(SectionAvailable)
{
Console.WriteLine("StudentID (choose from existing):");
int StudentID = Student.SearchToAdd().StudentID;
bool hasOtherClasses = false;
foreach(var item in query)
{ hasOtherClasses = Student.HasClassesAtTheTime(StudentID, item);}
if (hasOtherClasses) { Console.WriteLine("The student already has another section at that time!"); }
else
{
Enrollment enrollment = new Enrollment
{
SectionID = SectionID,
StudentID = StudentID
};
Console.WriteLine("This enrollment has been successful!");
return enrollment;
}
}
return null;
}
public static void SeeAll()
{
Console.WriteLine("Showing all the ENROLLMENTS in the database:");
UniversityContext database = new UniversityContext();
var query = database.Enrollments;
print(query);
}
public static void print(IQueryable<Enrollment> query)
{
UniversityContext database = new UniversityContext();
Console.WriteLine("");
Console.WriteLine("Course Name".PadRight(50) + "| " + "Professor".PadRight(30) + "| " + "Student".PadRight(30));
Console.WriteLine("------------------------------------------------------------------------------------------------");
foreach (var item in query)
{
var student = (Student)database.Students.Where(e => e.StudentID == item.StudentID).FirstOrDefault();
var section = (Section)database.Sections.Where(e => e.SectionID == item.SectionID).FirstOrDefault();
var course = (Course)database.Courses.Where(e => e.CourseID == section.CourseID).FirstOrDefault();
var employee = (Employee)database.Employees.Where(e => e.EmployeeID == section.EmployeeID).FirstOrDefault();
Console.WriteLine(course.Name.PadRight(50) + "| " + employee.Name.PadRight(14)
+ " " + employee.Surname.PadRight(15) + "| " + student.Name.PadRight(14) + " " + student.Surname.PadRight(15));
}
}
public static void Search()
{
System.Linq.IQueryable<universityBD.Enrollment> query = null;
UniversityContext database = new UniversityContext();
bool run = true;
while (run)
{
run = false;
Console.WriteLine("\nBy which value do you want to search?");
Console.WriteLine("1. Section");
Console.WriteLine("2. Student");
Console.WriteLine("0. Cancel");
int action = int.Parse(Console.ReadLine());
switch (action)
{
case 1:
Console.WriteLine("Section (choose from existing):");
int SectionID = Section.SearchToAdd().SectionID;
query = database.Enrollments.Where(e => e.SectionID == SectionID);
break;
case 2:
Console.WriteLine("Student (choose from existing):");
int StudentID = Student.SearchToAdd().StudentID;
query = database.Enrollments.Where(e => e.StudentID == StudentID);
break;
case 0:
return;
default:
Console.WriteLine("ERROR: INCORRECT VALUE CHOSEN");
run = true;
break;
}
}
print(query);
}
public static Enrollment SearchToAdd()
{
UniversityContext database = new UniversityContext();
Enrollment result = null;
bool run = true;
while (run)
{
Search();
Console.WriteLine("Now choose an Enrollment by entering the Section's and Student's IDs. Write '0' to abort.");
int idse = int.Parse(Console.ReadLine());
int ids = int.Parse(Console.ReadLine());
if (idse == 0 || ids == 0)
{
result = null;
break;
}
else
{
var query = database.Enrollments.Where(e => e.SectionID == idse && e.StudentID == ids).FirstOrDefault();
if (query != null)
{
run = false;
result = query;
}
else
{
Console.WriteLine("There is no Enrollment with the specified IDs");
Console.WriteLine("Try again");
}
}
}
return result;
}
}
}
<file_sep>/universityBD/Migrations/20200520132325_initial.cs
using Microsoft.EntityFrameworkCore.Migrations;
namespace universityBD.Migrations
{
public partial class initial : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "Departments",
columns: table => new
{
DepartmentID = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
Name = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Departments", x => x.DepartmentID);
});
migrationBuilder.CreateTable(
name: "Students",
columns: table => new
{
StudentID = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
Name = table.Column<string>(nullable: true),
Surname = table.Column<string>(nullable: true),
Address = table.Column<string>(nullable: true),
City = table.Column<string>(nullable: true),
Country = table.Column<string>(nullable: true),
Phone = table.Column<string>(nullable: true),
Email = table.Column<string>(nullable: true),
GraduationYear = table.Column<int>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Students", x => x.StudentID);
});
migrationBuilder.CreateTable(
name: "Courses",
columns: table => new
{
CourseID = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
DepartmentID = table.Column<int>(nullable: false),
Name = table.Column<string>(nullable: true),
ECTS = table.Column<int>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Courses", x => x.CourseID);
table.ForeignKey(
name: "FK_Courses_Departments_DepartmentID",
column: x => x.DepartmentID,
principalTable: "Departments",
principalColumn: "DepartmentID",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Employees",
columns: table => new
{
EmployeeID = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
Name = table.Column<string>(nullable: true),
Surname = table.Column<string>(nullable: true),
Address = table.Column<string>(nullable: true),
City = table.Column<string>(nullable: true),
Country = table.Column<string>(nullable: true),
Phone = table.Column<string>(nullable: true),
Email = table.Column<string>(nullable: true),
Salary = table.Column<int>(nullable: false),
DepartmentID = table.Column<int>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Employees", x => x.EmployeeID);
table.ForeignKey(
name: "FK_Employees_Departments_DepartmentID",
column: x => x.DepartmentID,
principalTable: "Departments",
principalColumn: "DepartmentID",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Grades",
columns: table => new
{
CourseID = table.Column<int>(nullable: false),
StudentID = table.Column<int>(nullable: false),
Year = table.Column<int>(nullable: false),
Semester = table.Column<int>(nullable: false),
Score = table.Column<int>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Grades", x => new { x.CourseID, x.StudentID });
table.ForeignKey(
name: "FK_Grades_Courses_CourseID",
column: x => x.CourseID,
principalTable: "Courses",
principalColumn: "CourseID",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Grades_Students_StudentID",
column: x => x.StudentID,
principalTable: "Students",
principalColumn: "StudentID",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Sections",
columns: table => new
{
SectionID = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
CourseID = table.Column<int>(nullable: false),
ProfesorID = table.Column<int>(nullable: false),
EmployeeID = table.Column<int>(nullable: true),
Day = table.Column<int>(nullable: false),
StartTime = table.Column<string>(nullable: true),
Length = table.Column<int>(nullable: false),
Capacity = table.Column<int>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Sections", x => x.SectionID);
table.ForeignKey(
name: "FK_Sections_Courses_CourseID",
column: x => x.CourseID,
principalTable: "Courses",
principalColumn: "CourseID",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Sections_Employees_EmployeeID",
column: x => x.EmployeeID,
principalTable: "Employees",
principalColumn: "EmployeeID",
onDelete: ReferentialAction.Restrict);
});
migrationBuilder.CreateTable(
name: "Enrollments",
columns: table => new
{
SectionID = table.Column<int>(nullable: false),
StudentID = table.Column<int>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Enrollments", x => new { x.SectionID, x.StudentID });
table.ForeignKey(
name: "FK_Enrollments_Sections_SectionID",
column: x => x.SectionID,
principalTable: "Sections",
principalColumn: "SectionID",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Enrollments_Students_StudentID",
column: x => x.StudentID,
principalTable: "Students",
principalColumn: "StudentID",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "IX_Courses_DepartmentID",
table: "Courses",
column: "DepartmentID");
migrationBuilder.CreateIndex(
name: "IX_Employees_DepartmentID",
table: "Employees",
column: "DepartmentID");
migrationBuilder.CreateIndex(
name: "IX_Enrollments_StudentID",
table: "Enrollments",
column: "StudentID");
migrationBuilder.CreateIndex(
name: "IX_Grades_StudentID",
table: "Grades",
column: "StudentID");
migrationBuilder.CreateIndex(
name: "IX_Sections_CourseID",
table: "Sections",
column: "CourseID");
migrationBuilder.CreateIndex(
name: "IX_Sections_EmployeeID",
table: "Sections",
column: "EmployeeID");
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "Enrollments");
migrationBuilder.DropTable(
name: "Grades");
migrationBuilder.DropTable(
name: "Sections");
migrationBuilder.DropTable(
name: "Students");
migrationBuilder.DropTable(
name: "Courses");
migrationBuilder.DropTable(
name: "Employees");
migrationBuilder.DropTable(
name: "Departments");
}
}
}
<file_sep>/universityBD/UniversityContext.cs
using Microsoft.EntityFrameworkCore;
using System;
using System.Collections.Generic;
using System.Text;
namespace universityBD
{
class UniversityContext : DbContext
{
public DbSet<Course> Courses { get; set; }
public DbSet<Department> Departments { get; set; }
public DbSet<Employee> Employees { get; set; }
public DbSet<Enrollment> Enrollments { get; set; }
public DbSet<Grade> Grades { get; set; }
public DbSet<Section> Sections { get; set; }
public DbSet<Student> Students { get; set; }
protected override void OnConfiguring(DbContextOptionsBuilder options)
=> options.UseSqlite("DataSource = University.db");
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.Entity<Enrollment>().HasKey(e => new
{
e.SectionID,
e.StudentID
});
modelBuilder.Entity<Grade>().HasKey(g => new
{
g.CourseID,
g.StudentID
});
}
public static UniversityContext Create()
{
return new UniversityContext();
}
}
}
<file_sep>/universityBD/Course.cs
using FizzWare.NBuilder;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
namespace universityBD
{
class Course
{
[Key]
public int CourseID { get; set; }
[ForeignKey("Department")]
public int DepartmentID { get; set; }
public Department Department { get; set; }
public String Name { get; set; }
public int ECTS { get; set; }
public static Course NewCourse()
{
Console.WriteLine("\nYou need to specify those values.");
Console.WriteLine("Department (choose from existing):");
Department result = Department.SearchToAdd();
if(result != null)
{
int DepartmentID = result.DepartmentID;
Console.Write("Enter the new course's NAME: ");
String Name = Console.ReadLine();
Console.Write("ECTS: ");
int ECTS = int.Parse(Console.ReadLine());
Course course = new Course
{
DepartmentID = DepartmentID,
Name = Name,
ECTS = ECTS
};
return course;
}
return null;
}
public static void SeeAll()
{
Console.WriteLine("Showing all the COURSES in the database:");
UniversityContext database = new UniversityContext();
var query = database.Courses;
print(query);
}
public static void print(IQueryable<Course> query)
{
UniversityContext database = new UniversityContext();
Console.WriteLine("\nID".PadRight(5) + "| " + "Name".PadRight(50) + "| " + "ECTS".PadRight(5) + "| " + "Department Name".PadRight(30));
Console.WriteLine("------------------------------------------------------------------------------------------------------");
foreach (var item in query)
{
var department = (Department)database.Departments.Where(e => e.DepartmentID == item.DepartmentID).FirstOrDefault();
Console.WriteLine(item.CourseID.ToString().PadRight(4) + "| " + item.Name.PadRight(50) + "| " + item.ECTS.ToString().PadRight(5) +
"| " + department.Name.PadRight(30));
}
}
public static void Search()
{
System.Linq.IQueryable<universityBD.Course> query = null;
UniversityContext database = new UniversityContext();
bool run = true;
while (run)
{
run = false;
Console.WriteLine("\nBy which value do you want to search?");
Console.WriteLine("1. ID");
Console.WriteLine("2. Name");
Console.WriteLine("3. ECTS");
Console.WriteLine("4. Department");
Console.WriteLine("0. Cancel");
Console.WriteLine("\n###############################");
Console.Write("Your choice: ");
int action = int.Parse(Console.ReadLine());
Console.WriteLine("###############################");
query = database.Courses;
switch (action)
{
case 1:
Console.Write("ID: ");
int id = int.Parse(Console.ReadLine());
query = database.Courses.Where(c => c.CourseID == id);
break;
case 2:
Console.Write("Name: ");
String Name = Console.ReadLine();
query = database.Courses.Where(c => c.Name.Contains(Name));
break;
case 3:
Console.Write("ECTS: ");
int ECTS = int.Parse(Console.ReadLine());
query = database.Courses.Where(c => c.ECTS == ECTS);
break;
case 4:
Console.WriteLine("Department (choose from existing):");
int DepartmentID = Department.SearchToAdd().DepartmentID;
query = database.Courses.Where(c => c.DepartmentID == DepartmentID);
break;
case 0:
return;
default:
Console.WriteLine("\n###############################");
Console.WriteLine("ERROR: INCORRECT VALUE CHOSEN");
Console.WriteLine("###############################");
run = true;
break;
}
}
print(query);
}
public static Course SearchToAdd()
{
UniversityContext database = new UniversityContext();
Course result = null;
bool run = true;
while (run)
{
Search();
Console.WriteLine("Now choose a Course by entering its ID. Write '0' to abort.");
Console.Write("Your choice: ");
int id = int.Parse(Console.ReadLine());
switch (id)
{
case 0:
result = null;
run = false;
break;
default:
var query = database.Courses.Where(c => c.CourseID == id).FirstOrDefault();
if (query != null)
{
run = false;
result = query;
}
else
{
Console.WriteLine("There is no Course with ID = " + id);
Console.WriteLine("Try again");
}
break;
}
}
return result;
}
}
}
<file_sep>import { Component, ViewChild } from '@angular/core';
import { ModalPopoutComponent } from './components/modal-popout/modal-popout.component';
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.css']
})
export class AppComponent {
// Popup logic is no longer required after moving to the eventspage view;
// keeping it here in case I have to revert to it.
// @ViewChild('modal') modalPopout: ModalPopoutComponent | null = null;
// events: any[] = [];
// testDaySelected(evt: any) {
// console.log(evt,"is this thing on?");
// this.events = evt;
// this.modalPopout?.triggerModal(evt);
// }
}
<file_sep># YearToDate
## Project Description
YearToDate is a user-friendly scheduling and productivity tracking application. Individual users and groups can quickly and easily coordinate personal or
professional events in a visually appealing calendar interface. YearToDate encourages collaboration and efficiency. Ad-hoc group formation can be suggested based on
project type and duration, and participants are provided with metrics in the form of burndown and sprint-velocity analyses.
## Technologies Used
* Angular 10, Bootstrap, Angular Material,
* TypeScript, HTML, CSS, Spring Boot,
* Spring MVC, Hibernate, JUnit4, Mockito,
* Maven, PostgreSQL, AWS RDS
## Getting Started
* Git clone the project
* Set database environment variables in application.yaml
* Install the Maven and Angular dependencies
* Run the backend, then the frontend (see the example setup commands below)
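A minimal sketch of that workflow, assuming a standard Maven-wrapper and Angular CLI layout; the repository URL, environment variable names, and commands below are illustrative assumptions rather than values taken from this project:
```bash
# Clone the project (placeholder URL -- substitute the real repository)
git clone https://github.com/YOUR-ACCOUNT/YearToDate.git
cd YearToDate
# Backend: export the database variables referenced in application.yaml
# (these variable names are assumed), then start Spring Boot
export DB_URL=jdbc:postgresql://localhost:5432/yeartodate
export DB_USERNAME=postgres
export DB_PASSWORD=changeme
./mvnw spring-boot:run
# Frontend (in a second terminal): install Angular dependencies and serve the app
npm install
ng serve --open
```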
## Contributors
* Me :)
* <NAME>(https://github.com/manbodhclaude)
* <NAME>(https://github.com/ASimensis)
* <NAME>(https://github.com/lalkapeter123)
<file_sep>import { Component, OnInit, Output, ViewChild, EventEmitter } from '@angular/core';
import { ModalPopoutComponent } from 'src/app/components/modal-popout/modal-popout.component';
import { Group } from 'src/app/models/Group';
//import { userGroup } from 'src/app/models/UserGroup';
import { EventServiceService } from 'src/app/services/event-service.service';
import { GroupServiceService } from 'src/app/services/group-service.service';
import { UpdateServiceService } from 'src/app/services/update-service.service';
import { UserServiceService } from 'src/app/services/user-service.service';
@Component({
selector: 'app-eventspage',
templateUrl: './eventspage.component.html',
styleUrls: ['./eventspage.component.css']
})
export class EventspageComponent implements OnInit {
@ViewChild('modal') modalPopout: ModalPopoutComponent | null = null;
eventClicked: boolean = false;
events: any[] = [];
constructor(private userService: UserServiceService,
private updateService: UpdateServiceService,
private groupService: GroupServiceService,
private eventService: EventServiceService ) { }
ngOnInit(): void {
}
sessionData: any = JSON.parse(sessionStorage.getItem('user') || '{}');
daySelected(evt: any) {
//console.log(evt,"is this thing on?");
this.events = evt;
this.modalPopout?.triggerModal(evt);
this.eventClicked = false;
}
eventSelected(evt: any) {
//console.log(evt);
this.updateService.receiveData(evt);
this.events = evt;
this.modalPopout?.triggerModal(evt);
this.eventClicked = true;
}
logout(): void {
this.userService.logout();
//console.log("is this thing on");
}
}
<file_sep>package org.ex.models;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name = "t_group")
@AllArgsConstructor
@NoArgsConstructor
@Data
@EqualsAndHashCode
public class Group {
@Id
@GeneratedValue
private int id;
private String group_name;
private String description;
private int created_by;
}
<file_sep>export interface calendarTask{
id?: string;
user_id?: string;
task_name?: string;
description?: string;
start_date: Date;
end_date?: Date;
status?: string;
}<file_sep>import { Injectable } from '@angular/core';
import { Observable } from 'rxjs';
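// Simple in-memory relay between components: receiveData() stashes an arbitrary object,
// and sendData() hands that same object back to the caller (it returns the raw value, not a stream).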
@Injectable({
providedIn: 'root'
})
export class UpdateServiceService {
miscObject: any;
constructor() { }
receiveData(data: any): void {
this.miscObject = data;
}
sendData(): Observable<any> {
return this.miscObject;
}
}
<file_sep>package org.ex.services;
import org.ex.models.User;
import org.ex.models.dto.LoginRequest;
import org.ex.models.dto.SessionUser;
import org.ex.models.dto.UpdateUser;
import org.ex.repositories.UserDao;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.stream.Collectors;
@Component("UserServiceImpl")
public class UserServiceImpl implements UserService{
private UserDao userDao;
@Autowired
public UserServiceImpl(UserDao userDao) {
this.userDao = userDao;
}
@Override
public User getUserById(int id) {
return this.userDao.getUserById(id);
}
@Override
public List<SessionUser> getUsersByGroup(int groupId) {
List<User> unsafeUsers = this.userDao.getUsersByGroup(groupId);
List<SessionUser> safeUsers = unsafeUsers.stream().map(uu -> new SessionUser(uu)).collect(Collectors.toList());
return safeUsers;
}
@Override
public boolean registerNewUser(User user) {
User queryUser = this.userDao.getUserByUserName(user.getUser_name());
if(queryUser == null) {
final String firstName = user.getFirst_name();
final String lastName = user.getLast_name();
final String userName = user.getUser_name();
final String type = user.getUser_type().toString();
final String email = user.getEmail();
final String password = user.getUser_password();
userDao.insertUser(firstName, lastName, userName, type, email, password);
return true;
}
return false;
}
@Override
public User getUserByUserName(String username) {
return this.userDao.getUserByUserName(username);
}
@Override
public boolean updateUser(UpdateUser user) {
int id = user.getId();
String firstName = user.getFirst_name();
String lastName = user.getLast_name();
String userName = user.getUser_name();
String email = user.getEmail();
String password = user.getUser_password();
try {
if(password != null) {
this.userDao.updateUserWithPass(id, firstName, lastName, userName, email, password);
} else {
this.userDao.updateUserNoPass(id, firstName, lastName, userName, email);
}
return true;
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
@Override
public SessionUser validateUser(User user, LoginRequest loginRequest) {
if(user != null){
if(user.getUser_password().equals(loginRequest.getPassword())) {
SessionUser sessionUser = new SessionUser();
sessionUser.setId(user.getId());
sessionUser.setUser_name(user.getUser_name());
sessionUser.setFirst_name(user.getFirst_name());
sessionUser.setLast_name(user.getLast_name());
sessionUser.setUser_type(user.getUser_type());
sessionUser.setEmail(user.getEmail());
return sessionUser;
}
}
return null;
}
}
<file_sep>package org.ex.models;
public enum TaskStatus {
COMPLETED,
INCOMPLETED,
FAILED
}
<file_sep>package serviceTests;
import org.ex.models.Group;
import org.ex.models.dto.UserGroup;
import org.ex.repositories.GroupDao;
import org.ex.services.GroupServiceImpl;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import java.util.ArrayList;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
@ContextConfiguration(classes = {GroupServiceImpl.class})
@ExtendWith(SpringExtension.class)
class GroupServiceImplTest {
@MockBean
private GroupDao groupDao;
@Autowired
private GroupServiceImpl groupServiceImpl;
@Test
void testConstructor() {
assertTrue((new GroupServiceImpl(mock(GroupDao.class))).getAllGroups().isEmpty());
}
@Test
void testGetAllGroups() {
        ArrayList<Group> groupList = new ArrayList<Group>();
        when(this.groupDao.getAllGroups()).thenReturn(groupList); // stub the DAO to return our (empty) list
        List<Group> actualAllGroups = this.groupServiceImpl.getAllGroups();
        assertSame(groupList, actualAllGroups); // the service should return the DAO result unchanged
        verify(this.groupDao).getAllGroups(); // and should have delegated to the DAO
}
@Test
void testCreateGroup() {
Group group = new Group();
group.setId(1);
group.setDescription("The characteristics of someone or something");
group.setGroup_name("Group name");
group.setCreated_by(1);
        assertTrue(this.groupServiceImpl.createGroup(group)); // creation should be reported as successful
        verify(this.groupDao).insertGroup(group.getGroup_name(), group.getDescription(), group.getId()); // and the group's fields should be handed to the DAO
}
@Test
void testAddUserToGroup() {
        assertTrue(this.groupServiceImpl.addUserToGroup(new UserGroup(1, 1, 1))); // id, user id and group id all set to 1
        verify(this.groupDao).userIntoGroup(1, 1); // the user and group ids should be forwarded to the DAO
}
@Test
void testAddUserToGroup3() {
        UserGroup userGroup = mock(UserGroup.class); // mock the DTO so its getters can be stubbed
        when(userGroup.getUser_id()).thenReturn(1);
        when(userGroup.getGroup_id()).thenReturn(1);
        assertTrue(this.groupServiceImpl.addUserToGroup(userGroup)); // the service should report success
        verify(this.groupDao).userIntoGroup(1, 1); // and forward the stubbed ids (1, 1) to the DAO
        verify(userGroup).getGroup_id(); // both getters should have been read
        verify(userGroup).getUser_id();
}
@Test
void testGetAllGroupsByUser() {
ArrayList<Group> groupList = new ArrayList<Group>();
        when(this.groupDao.getGroupsByUser(anyInt())).thenReturn(groupList); // stub the DAO lookup
        List<Group> actualAllGroupsByUser = this.groupServiceImpl.getAllGroupsByUser(123);
        assertSame(groupList, actualAllGroupsByUser); // the service should return the DAO result unchanged
        verify(this.groupDao).getGroupsByUser(anyInt()); // and should have queried the DAO
}
}
<file_sep>export interface userGroup{
id?: number;
user_id: number;
group_id: number;
}<file_sep>import { Component, OnInit } from '@angular/core';
import { MatBottomSheet } from '@angular/material/bottom-sheet';
import { calendarTask } from 'src/app/models/calendarTask';
import { taskStatus } from 'src/app/models/taskStatus';
import { EventServiceService } from 'src/app/services/event-service.service';
import { UpdateServiceService } from 'src/app/services/update-service.service';
@Component({
selector: 'app-update-date-time',
templateUrl: './update-date-time.component.html',
styleUrls: ['./update-date-time.component.css']
})
export class UpdateDateTimeComponent implements OnInit {
eventObject: any = this.updateService.sendData();
sessionID = JSON.parse(sessionStorage.getItem('user') || '{}');
status: string = '';
start: any = this.eventObject[0].start_date;
end: any = this.eventObject[0].end_date;
title?: string = `${this.eventObject[0].title}`;
description?: string = `${this.eventObject[0].description}`;
statusValues: taskStatus[] = [
{value: 'INCOMPLETED', viewValue: 'OPEN'},
{value: 'COMPLETED', viewValue: 'CLOSED'},
{value: 'FAILED', viewValue: 'FAILED'}
]
constructor(private eventService: EventServiceService,
private updateService: UpdateServiceService,
private bottomSheet: MatBottomSheet) { }
ngOnInit(): void {
//this.getEvent();
console.log(this.eventObject);
}
// getEvent(): void {
// this.eventObject = this.updateService.sendData();
// }
updateTask(): void{
let currentTask: calendarTask = {
id: `${this.eventObject[0].id}`,
user_id: `${this.sessionID.id}`,
task_name: `${this.title}`,
description: `${this.description}`,
start_date: new Date(`${this.start}`),
end_date: new Date(`${this.end}`),
status: `${this.status}`
}
console.log(currentTask);
this.eventService.updateEvent(currentTask).subscribe((response) => console.log(JSON.stringify(response)));
this.bottomSheet.dismiss();
window.location.reload();
}
}
<file_sep>import { Injectable } from '@angular/core';
import { CalendarEvent } from 'calendar-utils';
import { Observable, of } from 'rxjs';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { Group } from '../models/Group';
import { User } from '../models/User';
import { userGroup } from '../models/UserGroup';
@Injectable({
providedIn: 'root'
})
export class GroupServiceService {
baseurl: string = `http://localhost:8001/group`;
httpOptions = {
headers: new HttpHeaders({
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
}),
};
constructor(private http: HttpClient) { }
getAllGroups(): Observable<Group[]>{
return this.http.get<Group[]>(`${this.baseurl}/all`, this.httpOptions);
}
getGroupByUserID(userID: number): Observable<Group[]>{
return this.http.get<Group[]>(`${this.baseurl}/user/${userID}`, this.httpOptions);
}
createNewGroup(group: Group): Observable<any>{
return this.http.post(`${this.baseurl}/new`, group, this.httpOptions);
}
  addUserToGroup(userGroup: userGroup): Observable<any>{
    return this.http.post(`${this.baseurl}/user/add`, userGroup, this.httpOptions);
}
}
<file_sep>USER STORIES
* Login
  --> Manager view
  --> Staff view
* Register new user
* Change existing user password
* Join a group (anyone)
* Send group invitations (Manager)
* Create new groups (Manager)
CALENDAR STORIES
* Add new event for a given number of days
* Update existing event
* View all events in personal calendar
  --> Manager view of group member calendars (?)
* View individual event details
* Drill down on different calendar views (month, week, day)<file_sep>package org.ex.services;
import org.ex.models.Group;
import org.ex.models.Task;
import org.ex.models.User;
import org.ex.repositories.GroupDao;
import org.ex.repositories.TaskDao;
import org.ex.repositories.UserDao;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.sql.Timestamp;
import java.util.List;
@Component("TaskServiceImpl")
public class TaskServiceImpl implements TaskService{
private UserDao userDao;
private TaskDao taskDao;
private GroupDao groupDao;
@Autowired
public TaskServiceImpl(UserDao userDao, TaskDao taskDao, GroupDao groupDao) {
this.userDao = userDao;
this.taskDao = taskDao;
this.groupDao = groupDao;
}
@Override
public List<Task> getAllTasks() {
return this.taskDao.getAllTasks();
}
@Override
public Task getTaskById(int id) {
return this.taskDao.getTaskById(id);
}
@Override
public List<Task> getTasksByUserId(int id) {
return this.taskDao.getTasksByUserId(id);
}
@Override
public List<Task> getTasksByGroupId(int id) {
return this.taskDao.getTasksByGroupId(id);
}
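    // Persists the task as a personal task when user_id refers to an existing user,
    // or as a group task when group_id refers to an existing group; otherwise nothing is saved.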
@Override
public boolean createTask(Task task) {
if(task.getUser_id() != null) {
int userId = task.getUser_id();
if(this.getUserById(userId) != null) {
return persistUserTask(task);
}
} else if(task.getGroup_id() != null) {
int groupId = task.getGroup_id();
if(this.getGroupById(groupId) != null) {
return persistGroupTask(task);
}
}
return false;
}
@Override
public boolean updateTask(Task task) {
if(task.getGroup_id() != null || task.getUser_id() != null) {
try {
int id = task.getId();
String taskName = task.getTask_name();
String description = task.getDescription();
Timestamp startDate = task.getStart_date();
Timestamp endDate = task.getEnd_date();
String status = task.getStatus().toString();
int minWorked = task.getMinutes_worked();
this.taskDao.updateTask(id, taskName, description, startDate, endDate, status, minWorked);
return true;
} catch (Exception e) {
e.printStackTrace();
}
}
return false;
}
@Override
public boolean deleteTask(int id) {
try {
this.taskDao.deleteTask(id);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
private boolean persistUserTask(Task task) {
try{
int userId = task.getUser_id();
String taskName = task.getTask_name();
String description = task.getDescription();
Timestamp startDate = task.getStart_date();
Timestamp endDate = task.getEnd_date();
this.taskDao.insertNewUserTask(userId, taskName, description, startDate, endDate);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
private boolean persistGroupTask(Task task) {
try {
int groupId = task.getGroup_id();
String taskName = task.getTask_name();
String description = task.getDescription();
Timestamp startDate = task.getStart_date();
Timestamp endDate = task.getEnd_date();
this.taskDao.insertNewGroupTask(groupId, taskName, description, startDate, endDate);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
private Group getGroupById(int id) {
return this.groupDao.getGroupId(id);
}
private User getUserById(int id) {
return this.userDao.getUserById(id);
}
}
<file_sep>package org.ex.models;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import javax.persistence.*;
@Entity
@Table(name = "t_user")
@AllArgsConstructor
@NoArgsConstructor
@Data
@EqualsAndHashCode
public class User {
@Id
@GeneratedValue
private int id;
private String first_name;
private String last_name;
private String user_name;
@Enumerated(EnumType.STRING)
private UserType user_type;
private String email;
private String user_password;
}
<file_sep>export interface Group{
id?: number;
group_name: string;
description: string;
created_by: number;
}<file_sep>import { Component, OnInit } from '@angular/core';
import { MatBottomSheet } from '@angular/material/bottom-sheet';
import { UpdateServiceService } from 'src/app/services/update-service.service';
import { SetDateTimeComponent } from '../set-date-time/set-date-time.component';
import { UpdateDateTimeComponent } from '../update-date-time/update-date-time.component';
@Component({
selector: 'app-add-event-sheet',
templateUrl: './add-event-sheet.component.html',
styleUrls: ['./add-event-sheet.component.css']
})
export class AddEventSheetComponent implements OnInit {
eventObject = this.updateService.sendData();
constructor(private bottomSheet: MatBottomSheet, private updateService: UpdateServiceService) {}
openAddSheet(): void {
this.bottomSheet.open(SetDateTimeComponent);
}
openUpdateSheet(): void {
this.bottomSheet.open(UpdateDateTimeComponent);
}
ngOnInit(): void {
}
}
<file_sep>import { Component, EventEmitter, OnInit, Output } from '@angular/core';
import { CalendarView } from 'angular-calendar';
import { eachDayOfInterval, startOfDay } from 'date-fns';
import { CalendarEvent } from 'calendar-utils';
import { ModalPopoutComponent } from '../modal-popout/modal-popout.component';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
import { EventServiceService } from '../../services/event-service.service';
@Component({
selector: 'app-calendar-views',
templateUrl: './calendar-views.component.html',
styleUrls: ['./calendar-views.component.css']
})
export class CalendarViewsComponent implements OnInit {
view: CalendarView = CalendarView.Month;
viewDate: Date = new Date();
CalendarView = CalendarView;
sessionID = JSON.parse(sessionStorage.getItem('user') || '{}');
@Output() dateClicked: EventEmitter<{}> = new EventEmitter<{}>();
@Output() singleEventClicked: EventEmitter<{}> = new EventEmitter<{}>();
events: CalendarEvent[] = [];
constructor(private eventService: EventServiceService) { }
ngOnInit(): void {
this.getEvents(this.sessionID.id);
}
setView(view: CalendarView) {
this.view = view;
}
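  // Loads the logged-in user's tasks and maps the backend fields
  // (start_date, end_date, task_name) onto the start/end/title shape
  // that angular-calendar's CalendarEvent expects.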
async getEvents(userid: number): Promise<void> {
this.events = (await this.eventService.getEventsByUserID(userid).toPromise());
this.events = this.events.map((item: any) => ({...item, start: new Date(item.start_date), end: new Date(item.end_date), title: item.task_name}));
}
dayClicked({date, events}: {date: Date; events: CalendarEvent[] }): void {
console.log(events);
this.dateClicked.emit(events);
}
eventClicked(event: CalendarEvent): void {
var eventArray: CalendarEvent[] = [];
eventArray.push(event);
this.singleEventClicked.emit(eventArray);
}
}
<file_sep>import { Injectable } from '@angular/core';
import { CalendarEvent } from 'calendar-utils';
import { Observable, of } from 'rxjs';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { calendarTask } from '../models/calendarTask';
@Injectable({
providedIn: 'root'
})
export class EventServiceService {
baseurl: string = 'http://localhost:8001/task'
httpOptions = {
headers: new HttpHeaders({
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
}),
};
constructor(private http: HttpClient) { }
getAllEvents(): Observable<CalendarEvent[] >{
return this.http.get<CalendarEvent[]>(`${this.baseurl}/all`, this.httpOptions);
}
getEventsByUserID(userID: number): Observable<CalendarEvent[]>{
return this.http.get<CalendarEvent[]>(`${this.baseurl}/user/${userID}`);
}
getEventsByTaskID(taskID: number): Observable<CalendarEvent[]>{
return this.http.get<CalendarEvent[]>(`${this.baseurl}/${taskID}`);
}
getEventsByGroupID(groupID: number): Observable<CalendarEvent[]>{
return this.http.get<CalendarEvent[]>(`${this.baseurl}/group/${groupID}`);
}
createNewEvent(task: calendarTask): Observable<any>{
console.log(task);
return this.http.post<calendarTask>(`${this.baseurl}/new`, task, this.httpOptions);
}
updateEvent(task: calendarTask): Observable<any>{
return this.http.put<calendarTask>(`${this.baseurl}/update`, task, this.httpOptions);
}
deleteEvent(taskID: number): Observable<any>{
return this.http.delete<any>(`${this.baseurl}/delete/${taskID}`);
}
}
<file_sep>import { Component, Input, OnInit } from '@angular/core';
import { FormControl, Validators } from '@angular/forms';
import { UserServiceService } from 'src/app/services/user-service.service';
import { Login } from '../../models/Login';
import { Account } from '../../models/Account';
import { User } from '../../models/User';
@Component({
selector: 'app-login-register-form',
templateUrl: './login-register-form.component.html',
styleUrls: ['./login-register-form.component.css']
})
export class LoginRegisterFormComponent implements OnInit {
email = new FormControl('', [Validators.required, Validators.email]);
user_password = new FormControl('', [Validators.required]);
first_name = new FormControl('');
last_name = new FormControl('');
user_name = new FormControl('');
user_type = new FormControl('');
accounts: Account[] = [
{value: 'REGULAR', viewValue: 'Individual'},
{value: 'MANAGER', viewValue: 'Coordinator'}
];
@Input() userid!: number | string;
user: User = {
first_name: '',
last_name: '',
user_password: '',
email: '',
user_name: '',
user_type: ''
};
userList: any[] = [];
constructor(private userService: UserServiceService) { }
ngOnInit(): void {
}
register(): void{
const newUser: User = {
email: this.email.value,
user_password: this.user_password.value,
first_name: this.first_name.value,
last_name: this.last_name.value,
user_name: this.user_name.value,
user_type: this.user_type.value
}
console.log(newUser);
this.userService.createUser(newUser).subscribe((response: any) => console.log(response));
}
login(): void{
const newLogin: Login = {
user_name: this.user_name.value,
      password: this.user_password.value
}
console.log(newLogin);
this.userService.login(newLogin);
}
// getAllUsers(): void{
// this.userService.getAllUsers().subscribe((response) => {
// console.log(response);
// response.forEach((user) => this.userList.push(JSON.stringify(user)));
// });
// }
// getUserByID(): void {
// this.userid = Number((<HTMLInputElement>document.querySelector("#userID")).value);
// this.userService.getUserByID(this.userid).subscribe((response) => console.log(response));
// }
}
<file_sep>export interface User{
id?: number;
first_name?: string;
last_name?: string;
user_name: string;
user_type?: any;
email: string;
user_password: string;
}<file_sep>package org.ex.controllers;
import org.ex.models.dto.LoginRequest;
import org.ex.models.dto.SessionUser;
import org.ex.models.User;
import org.ex.models.dto.UpdateUser;
import org.ex.services.UserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import java.util.List;
@RestController
@RequestMapping(path = "user")
@CrossOrigin(origins = "${angular.url}")
public class UserController {
private UserService userService;
@Autowired
public UserController(UserService userService) {
this.userService = userService;
}
@GetMapping(path = "/group/{id}")
public ResponseEntity<List<SessionUser>> getAllUsersByGroup(@PathVariable int id) {
List<SessionUser> users = this.userService.getUsersByGroup(id);
if(users != null) {
if(users.size() > 0) {
return ResponseEntity.ok().body(users);
}
}
return ResponseEntity.notFound().build();
}
@PostMapping(path = "/register")
@Transactional
public ResponseEntity createUser(@RequestBody User user) {
System.out.println(user.getUser_type());
boolean result = this.userService.registerNewUser(user);
if(result) {
return ResponseEntity.status(201).build();
} else {
return ResponseEntity.status(409).build();
}
}
@PostMapping(path = "/login")
public ResponseEntity<SessionUser> login(@RequestBody LoginRequest loginRequest) {
User user = this.userService.getUserByUserName(loginRequest.getUser_name());
SessionUser resultUser = this.userService.validateUser(user, loginRequest);
if(resultUser != null) {
return ResponseEntity.accepted().body(resultUser);
}
return ResponseEntity.status(403).build();
}
@PutMapping(path = "/update")
@Transactional
public ResponseEntity updateUser(@RequestBody UpdateUser user) {
boolean result = this.userService.updateUser(user);
User u = this.userService.getUserById(user.getId());
if(result && u != null) {
return ResponseEntity.ok().build();
}
return ResponseEntity.badRequest().build();
}
}
<file_sep>import { CalendarEvent } from "angular-calendar";
import { startOfDay } from "date-fns";
export const EVENTS: CalendarEvent[] = [
{
start: new Date("10/17/2021 17:00:"),
end: new Date("10/17/2021 17:30"),
title: 'first test date'
},
{
start: startOfDay(new Date()),
title: 'An event with no end date'
},
// {
// start: startOfDay(new Date()),
// title: 'Second event'
// }
]<file_sep>package org.ex.controllers;
import org.ex.models.Group;
import org.ex.models.dto.UserGroup;
import org.ex.services.GroupService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import java.util.List;
@RestController
@RequestMapping(path = "group")
@CrossOrigin(origins = "${angular.url}")
public class GroupController {
private GroupService groupService;
@Autowired
public GroupController(GroupService groupService) {
this.groupService = groupService;
}
@GetMapping(path = "/all")
public ResponseEntity<List<Group>> getAllGroups() {
List<Group> groups = this.groupService.getAllGroups();
if(groups != null) {
if(groups.size() > 0) {
return ResponseEntity.ok().body(groups);
}
}
return ResponseEntity.notFound().build();
}
@PostMapping(path = "/new")
@Transactional
public ResponseEntity createGroup(@RequestBody Group group) {
System.out.println(group);
boolean status = this.groupService.createGroup(group);
if(status) {
return ResponseEntity.ok().build();
}
return ResponseEntity.badRequest().build();
}
@PostMapping(path = "/user/add")
@Transactional
public ResponseEntity addUserToGroup(@RequestBody UserGroup userGroup) {
boolean status = this.groupService.addUserToGroup(userGroup);
if(status) {
return ResponseEntity.ok().build();
}
return ResponseEntity.badRequest().build();
}
@GetMapping(path = "/user/{id}")
public ResponseEntity<List<Group>> getUsersGroups(@PathVariable int id) {
List<Group> groups = this.groupService.getAllGroupsByUser(id);
if(groups != null) {
if(groups.size() > 0) {
return ResponseEntity.ok().body(groups);
}
}
return ResponseEntity.notFound().build();
}
@PutMapping(path = "/update")
@Transactional
public ResponseEntity updateGroup(@RequestBody Group group) {
boolean status = this.groupService.updateGroup(group);
if(status) {
return ResponseEntity.ok().build();
}
return ResponseEntity.badRequest().build();
}
@DeleteMapping(path = "/delete/{id}")
@Transactional()
public ResponseEntity deleteGroup(@PathVariable int id) {
boolean status = this.groupService.deleteGroup(id);
if(status) {
return ResponseEntity.ok().build();
}
return ResponseEntity.status(409).build();
}
}
<file_sep>import { ComponentFixture, TestBed } from '@angular/core/testing';
import { CalendarViewsComponent } from './calendar-views.component';
describe('CalendarViewsComponent', () => {
let component: CalendarViewsComponent;
let fixture: ComponentFixture<CalendarViewsComponent>;
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [ CalendarViewsComponent ]
})
.compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(CalendarViewsComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});
<file_sep># Project2
Revature Project 2: a calendar web app with an Angular front end and a Spring Boot REST API.<file_sep>package org.ex.models.dto;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
@AllArgsConstructor
@Data
@EqualsAndHashCode
public class LoginRequest {
private String user_name;
private String password;
}
<file_sep>import { Component, EventEmitter, Input, OnDestroy, OnInit, Output, ViewChild } from '@angular/core';
import { MatBottomSheet } from '@angular/material/bottom-sheet';
import { ModalDismissReasons, NgbModal } from '@ng-bootstrap/ng-bootstrap';
import { EventServiceService } from 'src/app/services/event-service.service';
import { EventspageComponent } from '../../pages/eventspage/eventspage.component';
import { UpdateDateTimeComponent } from '../update-date-time/update-date-time.component';
@Component({
selector: 'app-modal-popout',
templateUrl: './modal-popout.component.html',
styleUrls: ['./modal-popout.component.css']
})
export class ModalPopoutComponent implements OnInit {
closeModal: string = '';
@Input() events: any[] = [];
@Input() eventClicked: boolean = false;
@ViewChild('modalData') modalData: any;
constructor(private modalService: NgbModal,
private bottomsheet: MatBottomSheet,
private eventService: EventServiceService) { }
ngOnInit(): void {
}
triggerModal(content: any) {
this.modalService.open(this.modalData, {ariaLabelledBy: 'modal-basic-title'}).result.then((res) => {
this.closeModal = `Closed with: ${res}`;
}, (res) => {
this.closeModal = `Dismissed ${this.getDismissReason(res)}`;
});
}
private getDismissReason(reason: any): string {
if(reason === ModalDismissReasons.ESC) {
return 'by pressing ESC';
} else if(reason === ModalDismissReasons.BACKDROP_CLICK) {
return 'by clicking on a backdrop';
} else {
return `with ${reason}`;
}
}
updateSheet(): void {
this.bottomsheet.open(UpdateDateTimeComponent);
}
deleteEvent(): void {
this.eventService.deleteEvent(this.events[0].id).subscribe((response) => console.log(response));
window.location.reload();
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { calendarTask } from 'src/app/models/calendarTask';
import { NgxMatDatetimePicker } from '@angular-material-components/datetime-picker';
import { EventServiceService } from 'src/app/services/event-service.service';
import { MatBottomSheet } from '@angular/material/bottom-sheet';
@Component({
selector: 'app-set-date-time',
templateUrl: './set-date-time.component.html',
styleUrls: ['./set-date-time.component.css']
})
export class SetDateTimeComponent implements OnInit {
start: any;
end: any;
title: string = '';
description: string= '';
sessionID = JSON.parse(sessionStorage.getItem('user') || '{}');
constructor(private eventService: EventServiceService, private bottomSheet: MatBottomSheet) { }
ngOnInit(): void {
}
createTask(): void{
const newTask: calendarTask = {
user_id: `${this.sessionID.id}`,
task_name: `${this.title}`,
description: `${this.description}`,
start_date: new Date(`${this.start}`),
end_date: new Date(`${this.end}`),
status: 'COMPLETED'
}
console.log(newTask);
this.eventService.createNewEvent(newTask).subscribe((response) => console.log(JSON.stringify(response)));
this.bottomSheet.dismiss();
window.location.reload();
}
sessionLog(): void{
console.log('literally taking up space');
}
}
<file_sep>import { ComponentFixture, TestBed } from '@angular/core/testing';
import { UpdateDateTimeComponent } from './update-date-time.component';
describe('UpdateDateTimeComponent', () => {
let component: UpdateDateTimeComponent;
let fixture: ComponentFixture<UpdateDateTimeComponent>;
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [ UpdateDateTimeComponent ]
})
.compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(UpdateDateTimeComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});
<file_sep>package org.ex.models.dto;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import javax.persistence.Id;
import javax.persistence.Table;
@Table(name = "t_user")
@AllArgsConstructor
@NoArgsConstructor
@Data
@EqualsAndHashCode
public class UpdateUser {
@Id
private int id;
private String first_name;
private String last_name;
private String user_name;
private String email;
private String user_password;
}
<file_sep>import { Injectable } from '@angular/core';
import { User } from 'src/app/models/User';
import { Observable, of } from 'rxjs';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { Router } from '@angular/router';
import { Login } from '../models/Login';
@Injectable({
providedIn: 'root'
})
export class UserServiceService {
sessionLogged: any = {
id: '',
email: '',
first_name: '',
last_name: '',
user_name: '',
user_type: ''
}
httpOptions = {
headers: new HttpHeaders({
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
}),
};
baseurl: string = "http://localhost:8001/user";
constructor(private http: HttpClient, private router: Router) { }
/** used for initial testing
// getAllUsers(): Observable<User[]> {
// return this.http.get<User[]>(`${this.baseurl}/users`);
// }
getUserByID(id: number): Observable<User> {
return this.http.get<User>(`${this.baseurl}/users/${id}`);
}
*/
getUserByGroupID(groupID: number): Observable<User>{
return this.http.get<User>(`${this.baseurl}/group/${groupID}`);
}
createUser(user: User): Observable<any>{
console.log(user);
return this.http.post<User>(`${this.baseurl}/register`, user, this.httpOptions);
}
updateUser(user: User): Observable<any>{
return this.http.put(`${this.baseurl}/update`, user, this.httpOptions);
}
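  // Posts the credentials, caches the returned session user in sessionStorage,
  // then routes to the calendar on success (or back to the login page otherwise).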
login(login: Login): void {
this.http.post<Login>(`${this.baseurl}/login`, login, this.httpOptions)
.toPromise()
.then((response: any) => {
this.sessionLogged = response;
sessionStorage.setItem('user', JSON.stringify(this.sessionLogged));
}).then(() =>{
if(sessionStorage.getItem("user") !== null){
this.router.navigate(['/calendar']);
} else {
//stand-in for error view or alert message
this.router.navigate(['']);
}
});
}
logout() {
this.router.navigate(['']);
sessionStorage.removeItem('user');
}
}
|
b78301bc704410fad37aa61b79441f50f254c73c
|
[
"Markdown",
"Java",
"TypeScript"
] | 32 |
TypeScript
|
SamJacobsen/Project2
|
a7cb3f03518fd755038e2e8310315210300864e7
|
202fea20c61130566686de5c89c3fc2331c0d753
|
refs/heads/master
|
<file_sep>import React, { Component } from 'react'
import Fade from 'react-reveal/Fade';
import Slide from 'react-reveal/Slide';
class Discount extends Component {
render() {
return (
<div className="center_wrapper">
<div className="discount_wrapper">
<div className="discount_porcentage">
<span>23%</span>
<span>OFF</span>
</div>
</div>
</div>
)
}
}
export default Discount;
<file_sep>import React, { Component } from 'react'
class TimeUntil extends Component {
state = {
}
render() {
return (
<div className="countdown_wrapper">
<div className="countdown_top">
Event starts in
</div>
</div>
)
}
}
export default TimeUntil;
<file_sep>import React from 'react';
import Slider from 'react-slick';
const Carrousel = () => {
const settings = {
dots: false,
infinite: true,
autoplay: true,
speed: 500
}
return (
<div
className="carrousel-wrapper"
style={{
height: `${window.innerWidth}px`,
overflow: 'hidden'
}}
>
</div>
);
};
export default Carrousel;<file_sep>import React, { Component } from 'react'
class TimeUntil extends Component {
state = {
}
render() {
return (
<div className="countdown_wrapper">
<div className="countdown_top">
Event starts in
</div>
<div className="countdown_bottom">
<div className="countdown_item">
<div className="countdown_time">
23
</div>
<div className="countdown_tag">
Days
</div>
</div>
<div className="countdown_item">
<div className="countdown_time">
2
</div>
<div className="countdown_tag">
Hs
</div>
</div>
<div className="countdown_item">
<div className="countdown_time">
20
</div>
<div className="countdown_tag">
Min
</div>
</div>
<div className="countdown_item">
<div className="countdown_time">
40
</div>
<div className="countdown_tag">
Sec
</div>
</div>
</div>
</div>
)
}
}
export default TimeUntil;
<file_sep>import React from 'react';
import icon_calendar from '../../resources/images/icons/calendar.png';
import icon_location from '../../resources/images/icons/location.png';
const VenueNfo = () => {
return (
<div className="bck_black">
<div className="center_wrapper">
<div className="vn_wrapper">
<div className="vn_item">
<div className="vn_outer">
<div className="vn_inner">
<div className="vn_icon_square bck_red"></div>
</div>
</div>
</div>
</div>
</div>
</div>
);
};
export default VenueNfo;<file_sep>import React from 'react';
import Fade from 'react-reveal/Fade';
const Description = () => {
return (
<Fade>
<div className="center_wrapper">
<h2>HIGHLIGHTS</h2>
<div className="highlight_description">
Lorem ipsum, dolor sit amet consectetur adipisicing elit. Sit vel quaerat hic ea! Alias nam sint consequuntur, asperiores natus eum enim blanditiis dignissimos odit similique distinctio doloremque sapiente at consectetur. Lorem ipsum dolor sit amet consectetur adipisicing elit. A velit magnam perspiciatis illo? Dicta aut voluptatibus tenetur aliquam eius officiis dolorem, natus, modi, nobis neque perspiciatis voluptates voluptatem! Quo, enim.
</div>
</div>
</Fade>
);
};
export default Description;
|
4fa40669da4e044c9dabd64f2e39f5aa7d067590
|
[
"JavaScript"
] | 6 |
JavaScript
|
Lithium87/TheVenue
|
6f2a32f07b0bb48ae0459823c0a2f3965cbfb0bc
|
2a5cdc17a2ad9959ea6a2d76aeecfa1e0c9844bc
|
refs/heads/master
|
<repo_name>alphonsehabyara/slack-emoji-api<file_sep>/db/seeds.rb
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
200.times do
SlackEmoji.create(
people: Faker::SlackEmoji.people,
nature: Faker::SlackEmoji.nature,
food_and_drink: Faker::SlackEmoji.food_and_drink,
celebration: Faker::SlackEmoji.celebration,
activity: Faker::SlackEmoji.activity,
travel_and_places: Faker::SlackEmoji.travel_and_places,
objects_and_symbols: Faker::SlackEmoji.objects_and_symbols,
custom: Faker::SlackEmoji.custom,
emoji: Faker::SlackEmoji.emoji
)
end<file_sep>/app/assets/javascripts/emojis_ctrl.js
(function() {
"use strict";
angular.module("app").controller("emojisCtrl", function($scope, $http) {
$scope.setup = function() {
$http.get("/api/v2/emojis.json").then(function(response) {
$scope.emojis = response.data;
});
}
$scope.toggleBio = function(emoji) {
emoji.bioVisible = !emoji.bioVisible;
}
$scope.addEmoji = function(people, nature, food_and_drink, celebration, activity, travel_and_places, objects_and_symbols, custom, emoji) {
      // build the payload in a separate variable so it does not shadow the emoji argument
      var newEmoji = {
        people: people,
        nature: nature,
        food_and_drink: food_and_drink,
        celebration: celebration,
        activity: activity,
        travel_and_places: travel_and_places,
        objects_and_symbols: objects_and_symbols,
        custom: custom,
        emoji: emoji
      };
      $http.post("/api/v2/emojis.json", newEmoji).then(function(response) {
$scope.emojis.push(response.data);
});
}
$scope.updateEmoji = function(emoji) {
$http.patch("/api/v2/emojis/" + emoji.id + ".json", emoji).then(function(response) {
var index = $scope.emojis.indexOf(emoji);
$scope.emojis[index] = response.data;
});
}
$scope.deleteEmoji = function(emoji, index) {
$http.delete("/api/v2/emojis/" + emoji.id + ".json").then(function(response){
$scope.emojis.splice(index, 1);
});
}
$scope.toggleOrder = function(attribute) {
$scope.orderAttribute = attribute;
}
window.$scope = $scope;
});
})();<file_sep>/app/helpers/api/v2/emojis_helper.rb
module Api::V2::EmojisHelper
end
<file_sep>/app/controllers/api/v2/emojis_controller.rb
class Api::V2::EmojisController < ApplicationController
def index
@emojis = SlackEmoji.all
end
def create
@emoji = SlackEmoji.create(
id: params[:id],
people: params[:people],
nature: params[:nature],
food_and_drink: params[:food_and_drink],
celebration: params[:celebration],
activity: params[:activity],
travel_and_places: params[:travel_and_places],
objects_and_symbols: params[:objects_and_symbols],
custom: params[:custom],
emoji: params[:emoji]
)
render :show
end
def show
@emoji = SlackEmoji.find_by(id: params[:id])
end
def update
@emoji = SlackEmoji.find_by(id: params[:id])
@emoji.update({
id: params[:id],
people: params[:people],
nature: params[:nature],
food_and_drink: params[:food_and_drink],
celebration: params[:celebration],
activity: params[:activity],
travel_and_places: params[:travel_and_places],
objects_and_symbols: params[:objects_and_symbols],
custom: params[:custom],
emoji: params[:emoji]
})
render :show
end
def destroy
@emoji = SlackEmoji.find_by(id: params[:id])
@emoji.destroy
render json: {message: 'Emoji no longer exist....'}
end
end
<file_sep>/app/views/api/v2/emojis/index.json.jbuilder
json.array! @emojis do |emoji|
json.id emoji.id
json.people emoji.people
json.nature emoji.nature
json.food_and_drink emoji.food_and_drink
json.celebration emoji.celebration
json.activity emoji.activity
json.travel_and_places emoji.travel_and_places
json.objects_and_symbols emoji.objects_and_symbols
json.custom emoji.custom
json.emoji emoji.emoji
end <file_sep>/config/routes.rb
Rails.application.routes.draw do
get "/emojis", to: 'emojis#index'
# post "/emojis", to: 'emojis#create'
# get "/emojis/:id", to: 'emojis#show'
# patch "/emojis/:id", to: 'emojis#update'
# delete "/emojis/:id", to: 'emojis#destroy'
namespace :api do
namespace :v2 do
get "/emojis", to: 'emojis#index'
post "/emojis", to: 'emojis#create'
get "/emojis/:id", to: 'emojis#show'
patch "/emojis/:id", to: 'emojis#update'
delete "/emojis/:id", to: 'emojis#destroy'
end
end
end
<file_sep>/app/models/slack_emoji.rb
class SlackEmoji < ApplicationRecord
end
<file_sep>/app/views/api/v2/emojis/show.json.jbuilder
json.id @emoji.id
json.people @emoji.people
json.nature @emoji.nature
json.food_and_drink @emoji.food_and_drink
json.celebration @emoji.celebration
json.activity @emoji.activity
json.travel_and_places @emoji.travel_and_places
json.objects_and_symbols @emoji.objects_and_symbols
json.custom @emoji.custom
json.emoji @emoji.emoji
<file_sep>/db/migrate/20170214100609_create_slack_emojis.rb
class CreateSlackEmojis < ActiveRecord::Migration[5.0]
def change
create_table :slack_emojis do |t|
t.string :people
t.string :nature
t.string :food_and_drink
t.string :celebration
t.string :activity
t.string :travel_and_places
t.string :objects_and_symbols
t.string :custom
t.string :emoji
t.timestamps
end
end
end
|
5c3d9929dab561768d31bb67c5e5718808dd60e7
|
[
"JavaScript",
"Ruby"
] | 9 |
Ruby
|
alphonsehabyara/slack-emoji-api
|
25a619afecc05c93cbacb3ad5811552b72550603
|
6316605210d782cf9bdc31141d1afcd113b643e9
|
refs/heads/master
|
<repo_name>eliostvs/tapioca-rocketchat<file_sep>/tapioca_rocketchat/auth.py
from __future__ import unicode_literals
import requests
from requests.auth import AuthBase
class FailedLogin(Exception):
pass
class InvalidConfiguration(Exception):
pass
class RocketAuth(AuthBase):
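    """Requests auth hook for the Rocket.Chat REST API: either reuses a
    previously issued token/user_id pair or, given username and password,
    logs in via ``{host}/api/login``; the resulting token and user id are
    then sent as the ``X-Auth-Token`` and ``X-User-Id`` headers on every request."""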
def __init__(self, host, username=None, password=None, token=None, user_id=None):
self.token = token
self.user_id = user_id
self._check_parameters(host, username, password, token, user_id)
if username and password:
self._do_login(host, username, password)
@staticmethod
def _check_parameters(host, username, password, token, user_id):
try:
assert host
assert username and password or token and user_id
except AssertionError:
            raise InvalidConfiguration('host plus either username/password or token/user_id is required')
def _do_login(self, host, user, password):
        response = requests.post('{}/api/login'.format(host), data=dict(user=user, password=password))
if response.status_code != 200:
raise FailedLogin
self._save_auth_headers(response)
def _save_auth_headers(self, response):
json = response.json()
self.token = json['data']['authToken']
self.user_id = json['data']['userId']
def _set_auth_headers(self, request):
request.headers['X-Auth-Token'] = self.token
request.headers['X-User-Id'] = self.user_id
def __call__(self, request):
self._set_auth_headers(request)
return request
<file_sep>/Makefile
.PHONY: clean-pyc clean-build docs clean test lint
help:
@echo "clean - remove all build, test, coverage and Python artifacts"
@echo "clean-build - remove build artifacts"
@echo "clean-pyc - remove Python file artifacts"
@echo "clean-test - remove test and coverage artifacts"
@echo "test - run tests quickly with the default Python"
@echo "docs - generate Sphinx HTML documentation, including API docs"
@echo "release - package and upload a release"
@echo "dist - package"
clean: clean-build clean-pyc clean-test
clean-build:
rm -fr build/
rm -fr dist/
rm -fr *.egg-info
clean-pyc:
find . -name '*.pyc' -exec rm -f {} +
find . -name '*.pyo' -exec rm -f {} +
find . -name '*~' -exec rm -f {} +
find . -name '__pycache__' -exec rm -fr {} +
clean-test:
rm -fr .tox/
rm -f .coverage
rm -fr htmlcov/
test: clean
python setup.py test
docs:
rm -f docs/tapioca-rocketchat.rst
rm -f docs/modules.rst
sphinx-apidoc -o docs/ tapioca-rocketchat
$(MAKE) -C docs clean
$(MAKE) -C docs html
open docs/_build/html/index.html
release.test: dist
python setup.py register -r pypitest
python setup.py sdist upload -r pypitest # --identity="<NAME>" --sign
python setup.py bdist_wheel upload -r pypitest #--identity="<NAME>" --sign
release: dist
python setup.py register -r pypi
python setup.py sdist upload -r pypi #--identity="<NAME>" --sign
python setup.py bdist_wheel upload -r pypi #--identity="<NAME>" --sign
dist: clean
python setup.py sdist
python setup.py bdist_wheel
ls -l dist
<file_sep>/README.md
# Tapioca Rocket.Chat
## Installation
```
pip install tapioca-rocketchat
```
## Documentation
``` python
from tapioca_rocketchat import RocketChat
api = RocketChat(host='your-host', token='your-token', user_id='your-user-id')
# or api = RocketChat(host='your-host', username='your-username', password='your-password')
api.version().get()
api.rooms().get()
api.messages(room='room-id').get()
api.join(room='room-id').post()
api.send(room='room-id').post(data={'msg': 'your-message'})
api.leave(room='room-id').post()
api.logoff().post()
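# channel and user creation endpoints are mapped as well (see
# tapioca_rocketchat/resources.py); a minimal sketch mirroring the call
# pattern used in the test suite:
api.channel().post(data={'name': 'your-channel-name'})
# api.user().post(...) works the same way, with the payload users.create expects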
```<file_sep>/tapioca_rocketchat/adapter.py
from __future__ import unicode_literals
from tapioca import (
TapiocaAdapter, generate_wrapper_from_adapter, JSONAdapterMixin)
from .resources import RESOURCE_MAPPING
from .auth import RocketAuth
class RocketChatClientAdapter(JSONAdapterMixin, TapiocaAdapter):
resource_mapping = RESOURCE_MAPPING
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(RocketChatClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
params['auth'] = RocketAuth(api_params.get('host'),
api_params.get('username'),
api_params.get('password'),
api_params.get('token'),
api_params.get('user_id'))
return params
def get_api_root(self, api_params):
return '{}/api'.format(api_params.get('host', 'http://localhost'))
RocketChat = generate_wrapper_from_adapter(RocketChatClientAdapter)
<file_sep>/tapioca_rocketchat/__init__.py
from __future__ import unicode_literals
__author__ = '<NAME>'
__email__ = '<EMAIL>'
__version__ = '0.1'
from .adapter import RocketChat
__all__ = ['RocketChat']
<file_sep>/tapioca_rocketchat/resources.py
from __future__ import unicode_literals
RESOURCE_MAPPING = {
'version': {
'resource': '/version',
'docs': 'https://rocket.chat/docs/developer-guides/rest-api/#obtaining-the-running-rocket-chat-version-via-rest-api',
'methods': ['GET']
},
'logon': {
'resource': '/login',
'docs': 'https://rocket.chat/docs/developer-guides/rest-api/#logon',
'methods': ['POST']
},
'logoff': {
'resource': '/logout',
'docs': 'https://rocket.chat/docs/developer-guides/rest-api/#logoff',
'methods': ['POST']
},
'rooms': {
'resource': '/publicRooms',
'docs': 'https://rocket.chat/docs/developer-guides/rest-api/#get-list-of-public-rooms',
'methods': ['GET']
},
'join': {
'resource': '/rooms/{room}/join',
'docs': 'https://rocket.chat/docs/developer-guides/rest-api/#join-a-room',
        'methods': ['POST']
},
'leave': {
'resource': '/rooms/{room}/leave',
'docs': 'https://rocket.chat/docs/developer-guides/rest-api/#leave-a-room',
'methods': ['POST']
},
'messages': {
'resource': '/rooms/{room}/messages',
'docs': 'https://rocket.chat/docs/developer-guides/rest-api/#get-all-messages-in-a-room',
'methods': ['GET']
},
'send': {
'resource': '/rooms/{room}/send',
'docs': 'https://rocket.chat/docs/developer-guides/rest-api/#sending-a-message',
'methods': ['POST']
},
'channel': {
'resource': '/v1/channels.create',
'docs': 'https://rocket.chat/docs/developer-guides/rest-api/#create-a-channel',
'methods': ['POST']
},
'user': {
'resource': '/v1/users.create',
'docs': 'https://rocket.chat/docs/developer-guides/rest-api/#create-a-user',
'methods': ['POST']
},
}
<file_sep>/tests/test_tapioca_rocketchat.py
from __future__ import unicode_literals
import pytest
import requests_mock
from tapioca_rocketchat import RocketChat
from tapioca_rocketchat.auth import InvalidConfiguration, FailedLogin
AUTH_RESOURCE = 'http://localhost.com/api/login'
AUTH_RESPONSE = {'status': 'success', 'data': {'authToken': 'authToken', 'userId': 'userId'}}
AUTH_HEADERS = {'X-Auth-Token': 'authToken', 'X-User-Id': 'userId'}
@pytest.fixture
def client():
return RocketChat(host='http://localhost.com', username='user', password='<PASSWORD>')
@pytest.fixture
def client_token():
    return RocketChat(host='http://localhost.com', token='authToken', user_id='userId')
def test_should_hit_version_resource(client):
with requests_mock.Mocker() as m:
m.post(AUTH_RESOURCE, json=AUTH_RESPONSE)
m.get('http://localhost.com/api/version', request_headers=AUTH_HEADERS)
response = client.version().get()
assert response._response.status_code == 200
def test_should_login_with_correct_parameters(client):
with requests_mock.Mocker() as m:
m.post(AUTH_RESOURCE, json=AUTH_RESPONSE)
response = client.logon().post(data={'user': 'user', 'password': '<PASSWORD>'})
assert response._response.status_code == 200
def test_should_join_a_room(client):
with requests_mock.Mocker() as m:
m.post(AUTH_RESOURCE, json=AUTH_RESPONSE)
m.post('http://localhost.com/api/rooms/room/join', request_headers=AUTH_HEADERS)
response = client.join(room='room').post()
assert response._response.status_code == 200
def test_should_raised_exception_when_logging_failed(client):
with pytest.raises(FailedLogin):
with requests_mock.Mocker() as m:
m.post(AUTH_RESOURCE, json={}, status_code=401)
m.post('http://localhost.com/api/rooms/room/join', request_headers=AUTH_HEADERS)
response = client.join(room='room').post()
assert response._response.status_code == 200
def test_should_leave_a_room(client):
with requests_mock.Mocker() as m:
m.post(AUTH_RESOURCE, json=AUTH_RESPONSE)
m.post('http://localhost.com/api/rooms/room/leave', request_headers=AUTH_HEADERS)
response = client.leave(room='room').post()
assert response._response.status_code == 200
def test_should_get_all_messages_of_a_room(client):
with requests_mock.Mocker() as m:
m.post(AUTH_RESOURCE, json=AUTH_RESPONSE)
m.get('http://localhost.com/api/rooms/room/messages', request_headers=AUTH_HEADERS)
response = client.messages(room='room').get()
assert response._response.status_code == 200
def test_should_send_a_message_to_a_room(client):
with requests_mock.Mocker() as m:
m.post(AUTH_RESOURCE, json=AUTH_RESPONSE)
m.post('http://localhost.com/api/rooms/room/send', status_code=201, request_headers=AUTH_HEADERS)
response = client.send(room='room').post(data={'msg': 'msg'})
assert response._response.status_code == 201
def test_should_create_a_channel(client):
with requests_mock.Mocker() as m:
m.post(AUTH_RESOURCE, json=AUTH_RESPONSE)
m.post('http://localhost.com/api/v1/channels.create', status_code=201, request_headers=AUTH_HEADERS)
response = client.channel().post(data={'name': 'channelname'})
assert response._response.status_code == 201
def test_should_get_version_with_token(client_token):
with requests_mock.Mocker() as m:
m.get('http://localhost.com/api/version', status_code=200, request_headers=AUTH_HEADERS)
response = client_token.version().get()
assert response._response.status_code == 200
def test_should_raise_exception_when_missing_parameters():
with pytest.raises(InvalidConfiguration):
client = RocketChat()
client.version().get()
<file_sep>/test-requirements.txt
-r requirements.txt
pytest-cov==2.4.0
pytest-flake8==0.8.1
pytest==3.0.4
requests-mock==1.1.0
wheel
<file_sep>/requirements.txt
tapioca-wrapper==1.2.3
|
163ba1efc067df52043abe5497e362781c740ba3
|
[
"Markdown",
"Python",
"Makefile",
"Text"
] | 9 |
Python
|
eliostvs/tapioca-rocketchat
|
e66ec7c6a07129c20430599e103a7cf3d9e6c281
|
4246229c50fbc64dd2661a12141a04af19979387
|
refs/heads/master
|
<file_sep>library(tidyverse)
# to do ----
# 2. calculate D'
# read in data ----
pilot_path = paste(getwd(), "/pilot2_data_raw/", sep = "")
list_of_datanames = list.files(pilot_path)
dat_p2 = lapply(paste(pilot_path, list_of_datanames, sep = ""), read.csv)
dat_p2 = do.call(plyr::rbind.fill, dat_p2)
# data preprocessing ----
data_quest = data.frame(
participant = c("1_2", "3_4", "5_6", "7_8", "9_10", "11_12", "13_14",
"15_16"),
#1 3 5 7 9 11 13 15
p1_resp = c(6, 2, 5, 3, 4, 4, 5, 3),
#2 4 6 8 10 12 14 16
p2_resp = c(1, 2, 3, 2, 4, 5, 2, 4))
dat_clean = dat_p2 %>%
group_by(participant) %>%
# get rid of practice trials
slice(-c(1:2)) %>%
# we need to recode to 1 and 2 because the fill function won't work for characters.
# It only works for numeric variables
mutate(
RespTuringLeft =
case_when(RespTuringLeft == "c" ~ 1,
RespTuringLeft == "h" ~ 2),
RespTuringRight =
case_when(RespTuringRight == "c" ~ 1,
RespTuringRight == "h" ~ 2)) %>%
fill(RespTuringLeft, .direction = "up") %>%
fill(RespTuringRight, .direction = "up") %>%
fill(keyTuringLeft.corr, .direction = "up") %>%
fill(keyTuringRight.corr, .direction = "up") %>%
mutate(
RespTuringLeft = recode(
RespTuringLeft, `1` = "comp", `2` = "human")) %>%
mutate(
RespTuringRight = recode(
RespTuringRight, `1` = "comp", `2` = "human")) %>%
# remove some vars
select(-c(1:4,7:12, 15,16,18:82, 93:116, 130:138)) %>%
## Delete every 11th row
filter(
is.na(Number)==F,
is.na(key_respLeft.rt) == F,
is.na(key_respRight.rt) == F
) %>%
left_join(data_quest, by = "participant") %>%
mutate(
# measure synchrony
sync_value = abs((key_respLeft.rt-key_respRight.rt))/
(key_respLeft.rt+key_respRight.rt)*100,
# create lead/lags for each participant
choice = case_when(RespTuringRight == "comp" &
RespTuringLeft == "comp" ~
"comp_congruent",
RespTuringRight == "comp" &
RespTuringLeft == "human" ~
"incongruent",
RespTuringRight == "human" &
RespTuringLeft == "comp" ~
"incongruent",
RespTuringRight == "human" &
RespTuringLeft == "human" ~
"human_congruent"),
closeness_diff = abs(p1_resp - p2_resp),
closeness_avg = mean(c(p1_resp, p2_resp))) %>%
mutate(sync_z_value = scale(sync_value)) %>%
group_by(participant, Block.thisN) %>%
mutate(good_trials_n = n()) %>%
filter(good_trials_n > 8) %>%
ungroup(participant, Block.thisN)%>%
mutate(key_respLeft.rt = scale(key_respLeft.rt),
key_respRight.rt = scale(key_respRight.rt))
# 1. sync as a function of perceived (for each participant)
# first we do lead lag for each participant
dat_clean$part1_lag_rt = data.table::shift(dat_clean$key_respLeft.rt, n = 1L, type = "lag")
dat_clean$part2_lag_rt = data.table::shift(dat_clean$key_respRight.rt, n = 1L, type = "lag")
# get rid of NAs
dat_clean2 = dat_clean %>%
group_by(participant, Block.thisN) %>%
filter(!trials_loop.thisN==0) %>%
ungroup(Block.thisN) %>%
select(participant, key_respLeft.rt, key_respRight.rt, Partner,
part1_lag_rt, part2_lag_rt, RespTuringLeft, RespTuringRight) %>%
mutate(sync_cross_p1 = abs((key_respLeft.rt-part2_lag_rt))/
(key_respLeft.rt+part2_lag_rt)*100,
sync_cross_p2 = abs((key_respRight.rt-part1_lag_rt))/
(key_respRight.rt+part1_lag_rt)*100) %>%
pivot_longer(cols = c(sync_cross_p1, sync_cross_p2),
values_to = "sync_cross", names_to = "p_position") %>%
mutate(new_partner = ifelse(p_position == "sync_cross_p1",
RespTuringLeft,
RespTuringRight)
)
dat_clean2 %>%
ggplot(aes(sync_cross, fill = Partner))+
geom_histogram()
dat_clean2 %>%
ggplot(aes(y = sync_cross, x = Partner,
color = new_partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()
dat_clean2 %>%
ggplot(aes(y = sync_cross, x = new_partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()
# Z value ----
dat_clean %>%
group_by(participant, Block.thisN) %>%
filter(!trials_loop.thisN==0) %>%
ungroup(Block.thisN) %>%
select(participant, key_respLeft.rt, key_respRight.rt, Partner,
part1_lag_rt, part2_lag_rt, RespTuringLeft, RespTuringRight) %>%
mutate(sync_cross_p1 = abs((key_respLeft.rt-part2_lag_rt))/
(key_respLeft.rt+part2_lag_rt)*100,
sync_cross_p2 = abs((key_respRight.rt-part1_lag_rt))/
(key_respRight.rt+part1_lag_rt)*100) %>%
pivot_longer(cols = c(sync_cross_p1, sync_cross_p2),
values_to = "sync_cross", names_to = "p_position") %>%
mutate(new_partner = ifelse(p_position == "sync_cross_p1",
RespTuringLeft,
RespTuringRight),
)
dat_clean2 %>%
ggplot(aes(sync_cross_p1, fill = Partner))+
geom_histogram()
dat_clean2 %>%
ggplot(aes(y = sync_cross, x = Partner,
color = new_partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()
# Z value diff method<file_sep>---
title: "ana-exp"
output: html_document
---
```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = TRUE)
library(tidyverse)
```
# read in
```{r read-in data, echo = F}
pilot_path = paste(getwd(), "/raw data/", sep = "")
list_of_datanames = list.files(pilot_path)
dat_exp = lapply(paste(pilot_path, list_of_datanames, sep = ""), read.csv)
dat_exp = do.call(plyr::rbind.fill, dat_exp)
```
# preprocessing
```{r now do some data preprocessing, echo = F}
dat_clean = dat_exp %>%
fill(
RespInclusionLeft, .direction = "up") %>%
fill(
RespInclusionRight, .direction = "up") %>%
group_by(
participant) %>%
# get rid of practice trials
slice(
-c(1:2)) %>%
# we need to recode to 1 and 2 because the fill function won't work for characters.
# It only works for numeric variables
mutate(
RespTuringLeft =
case_when(RespTuringLeft == "c" ~ 1,
RespTuringLeft == "h" ~ 2),
RespTuringRight =
case_when(RespTuringRight == "c" ~ 1,
RespTuringRight == "h" ~ 2)) %>%
fill(
RespTuringLeft, .direction = "up") %>%
fill(
RespTuringRight, .direction = "up") %>%
fill(
keyTuringLeft.corr, .direction = "up") %>%
fill(
keyTuringRight.corr, .direction = "up") %>%
fill(
RespInclusionLeft, .direction = "up") %>%
fill(
RespInclusionRight, .direction = "up") %>%
mutate(
RespTuringLeft = recode(
RespTuringLeft, `1` = "comp", `2` = "human"),
RespTuringRight = recode(
RespTuringRight, `1` = "comp", `2` = "human")) %>%
mutate(
CorrTuringLeft = case_when(RespTuringLeft == "comp" &
CorrectTuringReply == "c" ~ 1,
RespTuringLeft == "comp" &
CorrectTuringReply == "h" ~ 0,
RespTuringLeft == "human" &
CorrectTuringReply == "h" ~ 1,
RespTuringLeft == "human" &
CorrectTuringReply == "c" ~ 0),
CorrTuringRight = case_when(RespTuringRight == "comp" &
CorrectTuringReply == "c" ~ 1,
RespTuringRight == "comp" &
CorrectTuringReply == "h" ~ 0,
RespTuringRight == "human" &
CorrectTuringReply == "h" ~ 1,
RespTuringRight == "human" &
CorrectTuringReply == "c" ~ 0)) %>%
mutate(
CorrTuringDyad = case_when(CorrTuringLeft == 1 &
CorrTuringRight == 1 ~ 1,
CorrTuringLeft == 0 &
CorrTuringRight == 1 ~ 0,
CorrTuringLeft == 1 &
CorrTuringRight == 0 ~ 0,
CorrTuringLeft == 0 &
CorrTuringRight == 0 ~ 0)
) %>%
# remove some vars
#select(-c(1:4,7:12, 15,16,18:82, 93:116, 130:138)) %>%
## Delete every 11th row
filter(
is.na(Number)==F,
is.na(key_respLeft.rt) == F,
is.na(key_respRight.rt) == F
) %>%
mutate(
# create lead/lags for each participant
choice = case_when(RespTuringRight == "comp" &
RespTuringLeft == "comp" ~
"comp_congruent",
RespTuringRight == "comp" &
RespTuringLeft == "human" ~
"incongruent",
RespTuringRight == "human" &
RespTuringLeft == "comp" ~
"incongruent",
RespTuringRight == "human" &
RespTuringLeft == "human" ~
"human_congruent"),
CongruencyChoice = case_when(RespTuringRight == "comp" &
RespTuringLeft == "comp" ~
"congruent",
RespTuringRight == "comp" &
RespTuringLeft == "human" ~
"incongruent",
RespTuringRight == "human" &
RespTuringLeft == "comp" ~
"incongruent",
RespTuringRight == "human" &
RespTuringLeft == "human" ~
"congruent"),
closeness_diff = abs(RespInclusionLeft - RespInclusionRight),
closeness_avg = mean(c(RespInclusionLeft, RespInclusionRight)
)) %>%
#mutate(sync_z_value = scale(sync_value)) %>%
group_by(participant, Block.thisN) %>%
mutate(good_trials_n = n()) %>%
filter(good_trials_n > 8) %>%
ungroup(participant, Block.thisN) %>%
filter(key_respLeft.rt > 3,
key_respRight.rt > 3) %>%
  mutate(
    # measure synchrony: absolute RT difference normalized by the summed RTs, in percent
    sync_value = abs((key_respLeft.rt-key_respRight.rt))/
      (key_respLeft.rt+key_respRight.rt)*100,
    # z-scored synchrony, used by the sync_z_value histogram further down
    sync_z_value = scale(sync_value))
```
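The Turing judgements prepared above also support a signal-detection summary (the "calculate D'" to-do noted in the pilot script). The chunk below is a hedged sketch rather than part of the original analysis: it assumes `CorrectTuringReply` codes the true partner (`"h"` = human, `"c"` = computer), treats a "human" response as the signal response, and applies a log-linear correction so hit and false-alarm rates of 0 or 1 stay finite.
```{r dprime-sketch}
dprime_turing = dat_clean %>%
  pivot_longer(c(RespTuringLeft, RespTuringRight),
               names_to = "side", values_to = "turing_resp") %>%
  group_by(participant, side) %>%
  summarise(
    n_human  = sum(CorrectTuringReply == "h"),
    n_comp   = sum(CorrectTuringReply == "c"),
    hits     = sum(turing_resp == "human" & CorrectTuringReply == "h"),
    fas      = sum(turing_resp == "human" & CorrectTuringReply == "c"),
    # log-linear correction before the z-transform
    hit_rate = (hits + 0.5) / (n_human + 1),
    fa_rate  = (fas + 0.5) / (n_comp + 1),
    d_prime  = qnorm(hit_rate) - qnorm(fa_rate),
    .groups = "drop")
dprime_turing
```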
```{r}
dat_clean %>%
group_by(participant, Partner, Block.thisIndex) %>%
summarize(choice = first(choice)) %>%
ggplot(aes(x = Partner, fill = choice))+
geom_histogram(stat="count", position = position_dodge(.9),
color = "black",
lwd = 1, alpha = .8)+theme_bw()+
geom_text(stat='count', aes(label=round(..count..)),
position = position_dodge(.9), vjust=2) +
labs(title = "number of choices for all participants")
dat_clean %>%
group_by(participant, Partner, Block.thisIndex) %>%
summarize(choice = first(choice)) %>%
ggplot(aes(x = Partner, fill = choice))+
geom_histogram(stat="count", position = position_dodge(.9),
color = "black",
lwd = 1, alpha = .8)+theme_bw()+
geom_text(stat='count', aes(label=round(..count..)),
position = position_dodge(.9), vjust=2) +
facet_wrap(~participant)+
labs(title = "number of choices for for each dyad")
dat_clean %>%
mutate(new_block = ifelse(Block.thisN<10, "first", "second")) %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each agent by participant choice")+
facet_wrap(~new_block)
dat_clean %>%
ggplot(aes(y = sync_value, x = Block.thisN, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
geom_smooth()+
theme_bw()+
labs("Sync valye for each agent by participant choice")
dat_clean %>%
ggplot(aes(key_respLeft.rt))+
geom_histogram()
dat_clean %>%
ggplot(aes(key_respRight.rt))+
geom_histogram()
dat_clean %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each agent by participant choice")+
facet_wrap(~participant)
dat_clean %>%
ggplot(aes(y = sync_value, x = Partner, color = CongruencyChoice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+facet_wrap(~participant, ncol = 3)+
labs("Sync valye for each agent by Congruency and partner")
my_plots = list() # initialize the list before filling it in the loop
for(i in unique(dat_clean$participant)){
  my_plots[[i]] = dat_clean %>%
filter(participant == i) %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each agent by Congruency and partner")}
dat_clean %>%
ggplot(aes(y = sync_z_value, fill = CongruencyChoice))+
geom_histogram()+
theme_bw()+facet_wrap(~Partner)+
labs("z Sync valye for each agent by Congruency and partner")
dat_clean %>%
ggplot(aes(y = sync_value, x = CongruencyChoice, color = CongruencyChoice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each agent by Congruency")
dat_clean %>%
ggplot(aes(y = sync_value, x = choice, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye by participant choice")
dat_clean %>%
ggplot(aes(y = CorrTuringDyad, x = Partner, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Average Correctness in Turing Test by Partner")
dat_clean %>%
ggplot(aes(y = sync_value, x = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each actual partner")
dat_clean %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
geom_point(shape = 1, alpha = .8, position = position_jitterdodge(.1))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.8),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.8))+
theme_bw()+
labs("Sync valye for each agent by participant choice with individual data points")
dat_clean %>%
ggplot(aes(sync_value, closeness_diff, color = Partner))+
geom_point()+
geom_smooth(method = "lm", se =F)+theme_bw()
dat_clean %>%
ggplot(aes(sync_value, closeness_avg, color = Partner))+
geom_point()+
geom_smooth(method = "lm", se =F)+theme_bw()
```
<file_sep>---
title: "A Script for analyzing the second Pilot's data"
author: "<NAME>"
date: "4/11/2021"
output: html_document
---
```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = TRUE)
library(tidyverse)
```
```{r read-in data, echo = F}
pilot_path = paste(getwd(), "/pilot data/", sep = "")
list_of_datanames = list.files(pilot_path)
dat_p2 = lapply(paste(pilot_path, list_of_datanames, sep = ""), read.csv)
dat_p2 = do.call(plyr::rbind.fill, dat_p2)
```
```{r now do some data preprocessing, echo = F}
dat_clean = dat_p2 %>%
group_by(participant) %>%
# get rid of practice trials
slice(-c(1:8)) %>%
  # recode the responses to 1 and 2 so that rows without a Turing response
  # become NA, which fill() can then carry upward
mutate(
key_resp_2.keys =
case_when(key_resp_2.keys == "c" ~ 1,
key_resp_2.keys == "h" ~ 2)) %>%
fill(key_resp_2.keys, .direction = "up") %>%
fill(key_resp_2.corr, .direction = "up") %>%
mutate(
choice = recode(
key_resp_2.keys, `1` = "comp", `2` = "human")) %>%
# remove some vars
select(-c(9:12,18:88, 99:118, 126:137)) %>%
## Delete every 11th row
filter(
is.na(Number)==F,
(RTLeft!=13),
(RTRight!=13)
) %>%
mutate(
# measure synchrony
sync_value = abs((RTLeft-RTRight))/
(RTLeft+RTRight)*100) %>%
mutate(sync_z_value = scale(sync_value)) %>%
ungroup(participant)
```
## Here are some plots to visualize the results. First, how many instances of each choice do we have?
```{r, fig.height=8, fig.width=8}
dat_clean %>%
group_by(participant, Partner) %>%
summarize(choice = first(choice)) %>%
ggplot(aes(x = Partner, fill = choice))+
geom_histogram(stat="count", position = position_dodge(.9),
color = "black",
lwd = 1, alpha = .8)+theme_bw()+
geom_text(stat='count', aes(label=round(..count..)),
position = position_dodge(.9), vjust=2) +
labs(title = "number of choices for all participants")
dat_clean %>%
group_by(participant, Partner) %>%
summarize(choice = first(choice)) %>%
ggplot(aes(x = Partner, fill = choice))+
geom_histogram(stat="count", position = position_dodge(.9),
color = "black",
lwd = 1, alpha = .8)+theme_bw()+
geom_text(stat='count', aes(label=round(..count..)),
position = position_dodge(.9), vjust=2) +
facet_wrap(~participant)+
labs(title = "number of choices for for each dyad")
```
## Next, we have the actual sync values. Sync values are calculated as percentages. The formula is |RT_participant1 - RT_participant2| / (RT_participant1 + RT_participant2) * 100.
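As a quick worked illustration (the two response times below are made up purely for this example):

```{r sync-worked-example}
# hypothetical RTs of 5.0 s and 5.5 s give a sync value of about 4.76%
rt_p1 = 5.0
rt_p2 = 5.5
abs(rt_p1 - rt_p2) / (rt_p1 + rt_p2) * 100
```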
```{r}
dat_clean %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each agent by participant choice")
dat_clean %>%
ggplot(aes(y = sync_value, x = choice, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye by participant choice")
dat_clean %>%
ggplot(aes(y = sync_value, x = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each actual partner")
dat_clean %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
geom_point(shape = 1, alpha = .8, position = position_jitterdodge(.1))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.8),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.8))+
theme_bw()+
labs("Sync valye for each agent by participant choice with individual data points")
```
## Now let's look at the same data but exclude sync values beyond 6 SD.
```{r data without the points beyond 6 SD, echo = F}
## Now do some preprocessing
dat_clean2 = dat_clean %>%
filter(sync_z_value < 6)
```
```{r}
dat_clean2 %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs(title = "Sync value for each agent by participant choice within 6 SD")
dat_clean2 %>%
ggplot(aes(y = sync_value, x = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs(title = "Sync valye for each actual partner within 6 SD")
dat_clean2 %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
geom_point(shape = 1, alpha = .8, position = position_jitterdodge(.1))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.8),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.8))+
theme_bw()+
labs(title = "Sync valye for each agent by participant choice with individual data points within 6 SD")
```
## Percentage of "cheating"
```{r}
library(knitr)
kable(dat_clean %>%
group_by(participant) %>%
        summarize(n_cheat = 160-length(RTLeft),
                  cheat_percent = n_cheat/160*100))
kable(dat_clean %>%
group_by(participant, Block.thisN) %>%
summarize(n_cheat = 10-length(RTLeft),
cheat_percent = n_cheat/10*100))
```
```{r lets try removing blocks that have more than 2 errors, echo = F}
dat_clean %>%
group_by(participant, Block.thisN) %>%
mutate(good_trials_n = n()) %>%
filter(good_trials_n > 8) %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs(title = "Exclude bad blocks (n trials >= 8)")
```
```{r Check first half of exp vs. second half}
dat_clean %>%
group_by(participant) %>%
group_by(participant, Block.thisN) %>%
mutate(good_trials_n = n()) %>%
filter(good_trials_n > 8) %>%
ungroup(Block.thisN) %>%
mutate(first_sec = ifelse(Block.thisN <= 7, "First", "Second")) %>%
ggplot(aes(y = sync_value, x = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+facet_wrap(~first_sec)+
labs(title = "first vs second blocks Partner")
dat_clean %>%
group_by(participant) %>%
group_by(participant, Block.thisN) %>%
mutate(good_trials_n = n()) %>%
filter(good_trials_n > 8) %>%
ungroup(Block.thisN) %>%
mutate(first_sec = ifelse(Block.thisN <= 7, "First", "Second")) %>%
ggplot(aes(y = sync_value, x = choice, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs(title = "sync value dependent on choice")
dat_clean %>%
group_by(participant) %>%
group_by(participant, Block.thisN) %>%
mutate(good_trials_n = n()) %>%
filter(good_trials_n > 8) %>%
ungroup(Block.thisN) %>%
mutate(first_sec = ifelse(Block.thisN <= 7, "First", "Second")) %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+facet_wrap(~first_sec)+
labs(title = "first vs second blocks Partner + Choice")
dat_clean %>%
group_by(participant) %>%
group_by(participant, Block.thisN) %>%
mutate(good_trials_n = n()) %>%
filter(good_trials_n > 8) %>%
ggplot(aes(y = sync_value, x = Block.thisN, color = Partner))+
geom_smooth(method = "lm")+
theme_bw()
```
```{r}
dat_clean = dat_clean %>%
group_by(participant) %>%
mutate(
avg_resp = (RTLeft+RTRight)/2
) %>%
group_by(participant, Partner) %>%
mutate(
sync_val_computer = abs((avg_resp-Number))/
(avg_resp + Number)*100
) %>%
mutate(
reSync_value = ifelse(Partner == "Computer", sync_val_computer,
ifelse(Partner == "Human", sync_value, NA)
)
)
dat_clean %>% group_by(Partner) %>%
summarize(reSync_value = mean(reSync_value),
sync_value = mean(sync_value),
sync_val_computer = mean(sync_val_computer))
dat_clean %>% group_by(participant, Block.thisN) %>%
mutate(good_trials_n = n()) %>%
filter(good_trials_n > 8) %>%
ungroup(Block.thisN) %>%
ggplot(aes(y = reSync_value, Partner, color = choice)) +
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+theme_bw()
#facet_wrap(~participant, scales = "free_y")
```
<file_sep>library(tidyverse)
library(broom)
library(lme4)
library(lmerTest)
library(roll)
# do some cleaning ----
pilot_path = paste(getwd(), "/pilot data/", sep = "")
list_of_datanames = list.files(pilot_path)
dat = lapply(paste(pilot_path, list_of_datanames, sep = ""), read.csv)
dat = do.call(plyr::rbind.fill, dat)
dat_clean = dat %>%
group_by(participant) %>%
mutate(resp_choice =
case_when(key_resp_2.keys == "c" ~ 1,
key_resp_2.keys == "h" ~ 2)) %>%
fill(resp_choice, .direction = "up") %>%
fill(key_resp_2.corr, .direction = "up") %>%
mutate(resp_choice = recode(resp_choice, `1` = "comp", `2` = "human")) %>%
slice(-c(1:8)) %>%
select(-c(1:3,5, 9:14, 17,18, 21:92, 96:118, 121:123, 125:137,139:144)) %>%
filter(is.na(Number) == F) %>% ## Delete every 11th row
mutate(sync_value = abs((RTLeft-RTRight))/(RTLeft+RTRight)*100) %>% # measure synchrony
ungroup(participant) %>%
mutate(diff_performance_RTRight = Number - RTRight,
diff_performance_RTLeft = Number - RTLeft)%>%
pivot_longer(cols = c(diff_performance_RTRight,
diff_performance_RTLeft),
names_to = "RTPlayer", values_to = "RTdiff")
dat_hist = data.frame(dat_clean %>% group_by(participant,
                                 Partner, resp_choice) %>%
             summarize(n = n()/20))
ggplot(dat_hist, aes(n, fill = resp_choice))+
geom_histogram()+
theme_bw()+facet_wrap(~Partner)
ggplot(dat_clean, aes( y = RTdiff, Partner, color = resp_choice))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
theme_bw()
ggplot(dat_clean, aes( y = RTdiff, Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
theme_bw()
ggplot(dat_clean, aes( y = RTdiff, sync_value, color = Partner))+
geom_point()+
geom_smooth(method = "lm")
# plot sync value from Mu et al.
ggplot(dat_clean, aes(y = sync_value, x = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
facet_wrap(~resp_choice)+
theme_bw()+
ggtitle("Sync values from Mu et al. panel 1 = p1, panel 2 = p2")
ggplot(dat_clean, aes(y = sync_value,x = Partner, color = resp_choice))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
theme_bw()+ggtitle("color is response, x axis is actual partner")
ggplot(dat_clean, aes(y = sync_value, x = as.factor(TuringResponsePlayer), color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
theme_bw()
summary(lmer(sync_value ~ Partner*resp_choice + (1|participant), dat_clean))
# measure correctness of choice ----
dat_corr = dat_clean %>%
group_by(participant, TuringResponsePlayer, Partner)%>%
summarize(percent = sum(key_resp_2.corr == 1)/length(key_resp_2.corr)) %>% # measure synchrony
mutate(TuringResponsePlayer = as.factor(TuringResponsePlayer))
ggplot(dat_corr, aes(y = percent, x = Partner, color = Partner))+
geom_jitter(position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
theme_bw()+
ggtitle("% of correctness. x axis and color are the same variable")
ggplot(dat_corr, aes(y = percent, x = TuringResponsePlayer, color = TuringResponsePlayer))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
theme_bw()+
ggtitle("% of correctness. x shows participant location")
ggplot(dat_corr, aes(y = percent, x = Partner, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
theme_bw()+facet_wrap(~participant)+
ggtitle("correctness of each dyad as a function of partner type")
# logistic regression ----
dat_logistic = dat_clean %>%
group_by(participant, Partner, TuringResponsePlayer, resp_choice) %>%
summarize(RTRight=mean(RTRight), RTLeft=mean(RTLeft),
sync_value=mean(sync_value)) %>%
mutate(Partner2 = case_when(Partner == "Computer" ~ 1,
Partner == "Human" ~ 0),
resp_choice2 = case_when(resp_choice == "comp" ~ 1,
resp_choice == "human" ~ 0)) %>%
pivot_longer(cols = c(RTRight, RTLeft),
names_to = "RTPlayer")
#plot RT and correctness as a function of player
ggplot(dat_logistic, aes(y = value, x = RTPlayer))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
theme_bw()
# go back to logistic
summary(glmer(Partner2 ~ sync_value*resp_choice*TuringResponsePlayer + (1|participant),
family = "binomial", data = dat_logistic))
summary(glm(Partner2 ~ sync_value*resp_choice*TuringResponsePlayer,
family = "binomial", data = dat_logistic))
summary(glmer(Partner2 ~ sync_value*resp_choice*value + (1|participant),
family = "binomial", data = dat_logistic))
ggplot(dat_logistic, aes(y = Partner2, x = sync_value, color = resp_choice))+
geom_smooth(method="glm", formula= (y ~ log(x)),
method.args = list(family = "binomial"), se = F)+
theme_bw()+facet_wrap(~TuringResponsePlayer)+
ggtitle("predicting partner type based on sync valye and resp choice. 1 = computer, 0 = hum")
ggplot(dat_logistic, aes(y = Partner2, x = value, color = resp_choice))+
geom_smooth(method="glm", formula= (y ~ log(x)),
method.args = list(family = "binomial"), se = F)+
theme_bw()+
ggtitle("value is rsponse time.1 = computer, 0 = hum")
ggplot(dat_logistic, aes(y = resp_choice2, x = sync_value, color = Partner))+
geom_smooth(method="glm", formula= (y ~ log(x)),
method.args = list(family = "binomial"), se = F)+
theme_bw()+ggtitle("y is choice. 1 = computer, 0 = hum")
ggplot(dat_logistic, aes(y = resp_choice2, x = value, color = Partner))+
geom_smooth(method="glm", formula= (y ~ log(x)),
method.args = list(family = "binomial"), se = F)+
theme_bw()+ggtitle("y is choice. x is RT. 1 = computer, 0 = hum")
summary(glmer(resp_choice2 ~ sync_value*Partner*value + (1|participant),
family = "binomial", data = dat_logistic))
# compute correlation for each dyad ----
dat_corr = dat_clean %>%
group_by(participant, Block.thisN, Partner, resp_choice) %>%
do(corr = cor(.$RTRight,.$RTLeft)) %>%
tidy(corr)
ggplot(dat_corr, aes(y = x, x = Partner, color = resp_choice))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
theme_bw()+
ggtitle("compute overall correlation for each block for each participant")
#compute rolling correlation ----
library(corrr)
library(tidyr)
library(broom)
# Correlation table
rolling_corr_dat = dat_clean %>%
group_by(participant, Partner, Block.thisN, resp_choice) %>%
summarize(rc = roll_cor(RTLeft, RTRight, width = 3)) %>% drop_na()
ggplot(rolling_corr_dat, aes(y = rc, x = Partner, color = resp_choice))+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "point", size = 5, position = position_dodge(.9))+
theme_bw()+
ggtitle("rolling correlation. Width = 3")
ggplot(rolling_corr_dat, aes(y = Block.thisN, x = rc, color = Partner))+
geom_point()+geom_smooth(method = "lm", se = F)+
facet_wrap(~resp_choice)
# compute cross correlation ----
cross_corr = dat_clean %>%
group_by(participant, Partner, resp_choice) %>%
do(model = ccf(.$RTRight, .$RTLeft)) %>%
tidy(model)
ggplot(cross_corr, aes(y = acf, x = lag, color = Partner))+
geom_point()+
facet_grid(~resp_choice)
ggplot(cross_corr, aes(y = acf, x = lag, color = Partner))+
geom_point()+
geom_smooth(method = "loess", se = F)+
facet_grid(~resp_choice)+
ylim(-.5,1)+
ggtitle("cross-correlation")
library(forecast) # ggCcf() comes from the forecast package
ggCcf(y = cross_corr$acf, x = cross_corr$lag)+
  facet_wrap(~cross_corr$Partner)
ggplot(cross_corr, aes(x = lag, y = acf)) +
geom_bar(stat = "identity")+
ylab("Cross correlation")+
scale_y_continuous(limits = c(-1, 1))+
theme_bw()+
facet_wrap(~cross_corr$Partner)
cross_corr %>%
ggplot(aes(lag, acf)) +
#geom_rect(xmin = -10, xmax = 0, ymin = -Inf, ymax = Inf, fill = 'grey90', alpha = 0.1) +
geom_hline(yintercept = c(-0.1, 0, 0.1), linetype = 2) +
geom_vline(xintercept = 0, linetype = 2) +
geom_hline(yintercept = 0.1, linetype = 2) +
geom_point(aes(group = resp_choice, color = Partner), alpha = 2 / 3) +
facet_grid(~resp_choice) +
theme_bw()
<file_sep>library(tidyverse)
#library(lme4)
pilot_path = paste(getwd(), "/pilot2_data_raw/", sep = "")
list_of_datanames = list.files(pilot_path)
dat_p2 = lapply(paste(pilot_path, list_of_datanames, sep = ""), read.csv)
dat_p2 = do.call(plyr::rbind.fill, dat_p2)
data_quest = data.frame(participant = unique(dat_p2$participant), # dat_clean is not defined yet at this point
p1_resp = c(6, 2, 5, 3, 4,4),
p2_resp = c(1, 2, 3, 2,4,5))
dat_clean = dat_p2 %>%
group_by(participant) %>%
# get rid of practice trials
slice(-c(1:2)) %>%
  # recode the responses to 1 and 2 so that rows without a Turing response
  # become NA, which fill() can then carry upward
mutate(RespTuringLeft =
case_when(RespTuringLeft == "c" ~ 1,
RespTuringLeft == "h" ~ 2),
RespTuringRight =
case_when(RespTuringRight == "c" ~ 1,
RespTuringRight == "h" ~ 2)) %>%
fill(RespTuringLeft, .direction = "up") %>%
fill(RespTuringRight, .direction = "up") %>%
fill(keyTuringLeft.corr, .direction = "up") %>%
fill(keyTuringRight.corr, .direction = "up") %>%
mutate(RespTuringLeft = recode(RespTuringLeft, `1` = "comp", `2` = "human")) %>%
mutate(RespTuringRight = recode(RespTuringRight, `1` = "comp", `2` = "human")) %>%
# remove some vars
select(-c(1:4,7:12, 15:17, 19:70)) %>%
## Delete every 11th row
filter(is.na(Number) == F) %>%
# measure synchrony
left_join(data_quest, by = "participant") %>%
# filter(key_respLeft.rt != 13,
# key_respRight.rt != 13) %>%
# filter(!participant %in% c("11_12", "9_10")) %>%
mutate(sync_value = abs((key_respLeft.rt-key_respRight.rt))/
(key_respLeft.rt+key_respRight.rt)*100,
choice = case_when(RespTuringRight == "comp" &
RespTuringLeft == "comp" ~ "comp_congruent",
RespTuringRight == "comp" &
RespTuringLeft == "human" ~ "incongruent",
RespTuringRight == "human" &
RespTuringLeft == "comp" ~ "incongruent",
RespTuringRight == "human" &
RespTuringLeft == "human" ~ "human_congruent"),
closeness_diff = abs(p1_resp - p2_resp),
closeness_avg = mean(c(p1_resp, p2_resp))) %>%
ungroup(participant) %>%
  mutate(sync_z_value = scale(sync_value)) %>%
  filter(sync_z_value < 6)
ggplot(dat_clean, aes(sync_z_value, fill = participant))+
geom_histogram()
# plot
# ggplot(dat_clean, aes(y = sync_value, x = Partner, color = choice))+
# geom_point(position = position_jitterdodge(.1),shape = 1, alpha = .8)+
# stat_summary(fun.data = mean_se, geom = "errorbar",
# width = .1, position = position_dodge(.9))+
# stat_summary(fun.data = mean_se, geom = "point", size = 5,
# position = position_dodge(.9))+
# theme_bw()
ggplot(dat_clean, aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()
ggplot(dat_clean, aes(y = sync_value, x = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()
ggplot(dat_clean, aes(y = sync_value, x = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()
ggplot(dat_clean, aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+facet_grid(~participant)
data.frame(dat_clean %>% group_by(participant,
Partner, choice) %>%
summarize(n = n()))
# correlation ----
ggplot(dat_clean, aes(x = sync_value, y = closeness_diff, color = choice))+
geom_point()+
geom_smooth(method = "lm", se = F)+
facet_wrap(~Partner)
ggplot(dat_clean, aes(x = sync_value, y = closeness_avg, color = choice))+
geom_point()+
geom_smooth(method = "lm", se = F)+
facet_wrap(~Partner)
# measure correctness ----
dat_corr = dat_clean %>%
group_by(participant, Partner)%>%
  summarize(percent_left = sum(keyTuringLeft.corr == 1)/length(keyTuringLeft.corr),
            percent_right = sum(keyTuringRight.corr == 1)/length(keyTuringRight.corr)) %>%
pivot_longer(cols = c(percent_left, percent_right),
names_to = "player", values_to = "percent")
ggplot(dat_corr, aes(y = percent, x = Partner, fill = player))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.9))+
stat_summary(fun.data = mean_se, geom = "bar", size = 5,
position = position_dodge(.9))+
theme_bw()
ggplot(dat_corr, aes(y = percent, x = Partner, fill = player))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3))+
stat_summary(fun.data = mean_se, geom = "bar", size = 5,
position = position_dodge(.3))+
theme_bw()+
facet_wrap(~participant)
ggplot(dat_corr, aes(y = percent, x = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+facet_grid(~participant)
<file_sep># Load libraries ----
library(tidyverse)
library (afex)
library(emmeans)
# Read in ----
#setwd("C:/Users/kkompatsiari/Dropbox/IIT/phd/Experiments/Hyperscanning/HyperscanningRproject")
pilot_path = paste(getwd(), "/raw data/", sep = "")
list_of_datanames = list.files(pilot_path)
dat_exp = lapply(paste(pilot_path, list_of_datanames, sep = ""), read.csv)
dat_exp = do.call(plyr::rbind.fill, dat_exp)
# Manual entry of subjective ratings of participants
dat_exp$RespInclusionRight[which(dat_exp$participant=="41_42Exp")] = 3
dat_exp$RespInclusionRight[which(dat_exp$participant=="43_44Exp")] = 2
dat_exp$RespInclusionRight[which(dat_exp$participant=="69_70Exp")] = 2
# preprocessing ----
dat_preprocessed = dat_exp %>%
fill(RespInclusionLeft, .direction = "up") %>%
fill(RespInclusionRight, .direction = "up") %>%
group_by(
participant) %>%
# get rid of practice trials
slice(
-c(1:2)) %>%
  # recode the responses to 1 and 2 so that rows without a Turing response
  # become NA, which fill() can then carry upward
mutate(
RespTuringLeft =
case_when(RespTuringLeft == "c" ~ 1,
RespTuringLeft == "h" ~ 2),
RespTuringRight =
case_when(RespTuringRight == "c" ~ 1,
RespTuringRight == "h" ~ 2),
) %>%
fill(
RespTuringLeft, .direction = "up") %>%
fill(
RespTuringRight, .direction = "up") %>%
fill(
keyTuringLeft.corr, .direction = "up") %>%
fill(
keyTuringRight.corr, .direction = "up") %>%
fill(
RespInclusionLeft, .direction = "up") %>%
fill(
RespInclusionRight, .direction = "up") %>%
mutate(
RespTuringLeft = recode(
RespTuringLeft, `1` = "comp", `2` = "human"),
RespTuringRight = recode(
RespTuringRight, `1` = "comp", `2` = "human")) %>%
mutate(
CorrTuringLeft = case_when(RespTuringLeft == "comp" &
CorrectTuringReply == "c" ~ 1,
RespTuringLeft == "comp" &
CorrectTuringReply == "h" ~ 0,
RespTuringLeft == "human" &
CorrectTuringReply == "h" ~ 1,
RespTuringLeft == "human" &
CorrectTuringReply == "c" ~ 0),
CorrTuringRight = case_when(RespTuringRight == "comp" &
CorrectTuringReply == "c" ~ 1,
RespTuringRight == "comp" &
CorrectTuringReply == "h" ~ 0,
RespTuringRight == "human" &
CorrectTuringReply == "h" ~ 1,
RespTuringRight == "human" &
CorrectTuringReply == "c" ~ 0)) %>%
mutate(
CorrTuringDyad = case_when(CorrTuringLeft == 1 &
CorrTuringRight == 1 ~ 1,
CorrTuringLeft == 0 &
CorrTuringRight == 1 ~ 0,
CorrTuringLeft == 1 &
CorrTuringRight == 0 ~ 0,
CorrTuringLeft == 0 &
CorrTuringRight == 0 ~ 0),
#create a full trial number
trial_num = row_number()
) %>%
## Delete every 11th row
filter(
is.na(Number) == F,
is.na(key_respLeft.rt) == F,
is.na(key_respRight.rt) == F
) %>%
mutate(
    # code each dyad's Turing responses as choice categories for the trial
choice = case_when(RespTuringRight == "comp" &
RespTuringLeft == "comp" ~
"comp_congruent",
RespTuringRight == "comp" &
RespTuringLeft == "human" ~
"incongruent",
RespTuringRight == "human" &
RespTuringLeft == "comp" ~
"incongruent",
RespTuringRight == "human" &
RespTuringLeft == "human" ~
"human_congruent"),
CongruencyChoice = case_when(RespTuringRight == "comp" &
RespTuringLeft == "comp" ~
"congruent",
RespTuringRight == "comp" &
RespTuringLeft == "human" ~
"incongruent",
RespTuringRight == "human" &
RespTuringLeft == "comp" ~
"incongruent",
RespTuringRight == "human" &
RespTuringLeft == "human" ~
"congruent"),
closeness_diff = abs(RespInclusionLeft - RespInclusionRight),
closeness_avg = mean(c(RespInclusionLeft, RespInclusionRight)
)) %>%
group_by(
participant, Block.thisN) %>%
mutate(good_trials_n = n()) %>%
filter(good_trials_n > 8) %>%
ungroup(participant, Block.thisN) %>%
filter(key_respLeft.rt > 4,
key_respRight.rt > 4) %>%
mutate(
# measure synchrony
sync_value = abs((key_respLeft.rt - key_respRight.rt))/
(key_respLeft.rt+key_respRight.rt)*100
) %>%
mutate(
# calculate the difference between Rtleft and Computer
sync_value_Comp_Left = abs((key_respLeft.rt-Comp))/
(key_respLeft.rt+Comp)*100,
# calculate the difference between Rtright and Computer
sync_value_Comp_Right = abs((Comp-key_respRight.rt))/
(Comp+key_respRight.rt)*100) %>%
mutate(
# calculate an average value between this difference
sync_value_Comp = (sync_value_Comp_Left+sync_value_Comp_Right)/2) %>%
mutate(
# create a variable that has the value of the diff with computer when the partner is computer and the diff with the other human when the partner is human
sync_mixed_Comp_Human = case_when(Partner == "Computer" ~
sync_value_Comp,
Partner == "Human" ~
sync_value)) %>%
mutate(which_block = ifelse(Block.thisN < 10, "First_half", "Second_half")) %>%
mutate(bad_participants = ifelse(participant %in% c(
#left handed participants
#"51_52Exp", "55_56Exp", "63_64Exp",
# low trial counts
"39_40Exp", "41_42Exp", "73_74Exp",
# RT above 4 seconds
"33_34ExpCorr", "21_22Exp"), "bad", "good")) %>%
filter(
!participant %in% c("41_42Exp", "33_34ExpCorr", "73_74Exp"
))
# get rid of bad participants ----
# dat_clean is used by the plots and stats below but was never defined in this
# script, so create it here; sync_z_value mirrors the z-scoring used in the
# pilot scripts. The stricter exclusions are kept commented out for reference.
dat_clean = dat_preprocessed %>%
  ungroup() %>%
  mutate(sync_z_value = as.numeric(scale(sync_value)))
# dat_clean = dat_preprocessed %>%
#   #left handed participants
#   filter(
#     !participant %in% c("51_52Exp", "55_56Exp", "63_64Exp",
#                         # low trial counts
#                         "39_40Exp", "41_42Exp", "73_74Exp",
#                         # RT above 4 seconds
#                         "33_34ExpCorr", "21_22Exp"))
# check number of blocks per choice
data.frame(dat_preprocessed %>% group_by(participant, Block.thisN, choice) %>%
summarize(tt = n()))
# plots ----
dat_correl = dat_clean %>%
group_by(participant,Partner, choice) %>%
summarise(ccf_values = ccf(as.numeric(ts(key_respLeft.rt)),as.numeric(ts(key_respRight.rt)), lag.max=00)$acf) #in acf object the correlation values are saved
# for correlation between InclusionDiff and Partner
dat_correl_HumanDiffQuest = dat_clean %>%
filter(Partner=="Human") %>%
group_by(participant) %>%
summarise(avg_sync = mean(sync_value),
InclusionQuest = mean(closeness_diff))
dat_correl_ComputerDiffQuest = dat_clean %>%
filter(Partner=="Computer") %>%
group_by(participant) %>%
summarise(avg_sync = mean(sync_value),
InclusionQuest = mean(closeness_diff))
resHumanDiff <- cor.test(dat_correl_HumanDiffQuest$avg_sync, dat_correl_HumanDiffQuest$InclusionQuest,
method = "pearson")
resHumanComputer <- cor.test(dat_correl_ComputerDiffQuest$avg_sync, dat_correl_ComputerDiffQuest$InclusionQuest,
method = "pearson")
# for correlation between Inclusion Average and Partner
dat_correl_HumanAvgQuest = dat_clean %>%
filter(Partner=="Human") %>%
group_by(participant) %>%
summarise(avg_sync = mean(sync_value),
InclusionQuest = mean(closeness_avg))
dat_correl_ComputerAvgQuest = dat_clean %>%
filter(Partner=="Computer") %>%
group_by(participant) %>%
summarise(avg_sync = mean(sync_value),
InclusionQuest = mean(closeness_avg))
resHumanAvg <- cor.test(dat_correl_HumanAvgQuest$avg_sync, dat_correl_HumanAvgQuest$InclusionQuest,
method = "pearson")
resHumanComputerAvg <- cor.test(dat_correl_ComputerAvgQuest$avg_sync, dat_correl_ComputerAvgQuest$InclusionQuest,
method = "pearson")
# correlation for choice
dat_correl_HumanChoiceQuest = dat_clean %>%
filter(choice=="human_congruent") %>%
group_by(participant) %>%
summarise(avg_sync = mean(sync_value),
InclusionQuest = mean(closeness_avg))
dat_correl_ComputerChoiceQuest = dat_clean %>%
filter(choice=="comp_congruent") %>%
group_by(participant) %>%
summarise(avg_sync = mean(sync_value),
InclusionQuest = mean(closeness_avg))
# for correlation between
dat_correl_IncognruentChoiceQuest = dat_clean %>%
filter(choice=="incongruent") %>%
group_by(participant) %>%
summarise(avg_sync = mean(sync_value),
InclusionQuest = mean(closeness_avg))
resHumanChoiceAvg <- cor.test(dat_correl_HumanChoiceQuest$avg_sync, dat_correl_HumanChoiceQuest$InclusionQuest,
method = "pearson")
resCompChoiceAvg <- cor.test(dat_correl_ComputerChoiceQuest$avg_sync, dat_correl_ComputerChoiceQuest$InclusionQuest,
method = "pearson")
resCompIncongruentAvg <- cor.test(dat_correl_IncognruentChoiceQuest$avg_sync, dat_correl_IncognruentChoiceQuest$InclusionQuest,
method = "pearson")
dat_correl = dat_preprocessed %>%
  group_by(participant, Partner, choice) %>% # choice is needed for the summary and plots below
summarise(ccf_values = ccf(as.numeric(ts(key_respLeft.rt)),as.numeric(ts(key_respRight.rt)), lag.max=10)$acf) #in acf object the correlation values are saved
dat_sum_correl = dat_correl %>%
group_by(Partner, choice) %>%
summarise(mean_ccf_values = mean(ccf_values))
dat_std = dat_preprocessed %>%
  group_by(participant, Partner, choice) %>%
summarise(data_std_values_left = var(key_respLeft.rt),
data_std_values_right = var(key_respRight.rt))%>%
pivot_longer(cols = c(data_std_values_left, data_std_values_right), names_to = "cazzo")
dat_preprocessed %>%
ggplot(aes(Partner, sync_value, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
facet_wrap(~bad_participants, scales = "free_y")
dat_std %>%
ggplot(aes(y = data_std_values_right, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("STD value by partner")
dat_correl %>%
ggplot(aes(y = ccf_values, x = Partner, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
               fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Corss correlation valye by partner")
dat_correl %>%
ggplot(aes(y = ccf_values, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
scale_y_continuous(breaks = seq(.4,1.0,.05))+
labs("Corss correlation valye by partner and particpnt's choice")
dat_std %>%
ggplot(aes(y = value, x = Partner, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Corss correlation valye by partner")
dat_clean %>%
group_by(participant, Partner, Block.thisN) %>%
filter(participant %in% c("1_2Exp","3_4", "5_6Exp",
"7_8Exp", "9_10Exp", "11_12Exp",
"13_14Exp", "15_16Exp", "17_18Exp",
"19_20Exp","21_22Exp","23_24Exp",
"25_26Exp","27_28Exp","29_30Exp",
"31_32Exp","33_34ExpCorr","35_36Exp",
"37_38Exp","39_40Exp" )) %>%
summarize(choice = first(choice)) %>%
ggplot(aes(x = Partner,fill=choice))+
geom_histogram(stat="count", position = position_dodge(0.9),
color = "black",
lwd = 1, alpha = .8)+theme_bw()+
geom_text(stat='count', aes(label=round(..count..)),
position = position_dodge(0.9), vjust=1) +
theme(text = element_text(size = 10))+
labs(title = "number of Blocks per partner per first half of couples") +
facet_wrap(~participant, ncol=4)
dat_preprocessed %>%
group_by(participant, Partner) %>%
ggplot(aes(x = Partner,fill=Partner))+
geom_histogram(stat="count", position = position_dodge(0.9),
color = "black",
lwd = 1, alpha = .8)+theme_bw()+
geom_text(stat='count', aes(label=round(..count..)),
position = position_dodge(0.9), vjust=1) +
theme(text = element_text(size = 10))+
labs(title = "number of Trials per partner per Participant") +
facet_wrap(~participant)
dat_clean %>%
group_by(participant, Partner, Block.thisN) %>%
filter(participant %in% c("41_42Exp","43_44Exp", "45_46Exp",
"47_48Exp", "49_50Exp", "51_52Exp",
"53_54Exp", "55_56Exp", "57_58Exp",
"59_60Exp","61_62Exp","63_64Exp",
"65_66Exp","67_68Exp","69_70Exp",
"71_72Exp","73_74Exp","75_76Exp",
"77_78Exp","79_80Exp","81_82Exp",
"83_84Exp" )) %>%
summarize(choice = first(choice)) %>%
ggplot(aes(x = Partner, fill=choice))+
geom_histogram(stat="count", position = position_dodge(0.9),
color = "black",
lwd = 1, alpha = .8)+theme_bw()+
geom_text(stat='count', aes(label=round(..count..)),
position = position_dodge(0.9), vjust=1) +
theme(text = element_text(size = 10))+
labs(title = "number of Blocks per partner per second half of couples") +
facet_wrap(~participant, ncol=4)
dat_preprocessed %>%
group_by(participant, Partner, Block.thisN) %>%
summarize(choice = first(choice)) %>%
ggplot(aes(x = Partner, fill= choice))+
geom_histogram(stat='count', position = position_dodge(.9),
color = "black",
lwd = 1, alpha = .8)+theme_bw()+
geom_text(stat='count', aes(label=round(..count..)),
position = position_dodge(.9), vjust=2) +
labs(title = "number of choices per Partner for all participants")
dat_preprocessed %>%
group_by(participant, Partner, Block.thisN) %>%
summarize(choice = first(choice)) %>%
ggplot(aes(x = choice, fill= choice))+
geom_histogram(stat='count', position = position_dodge(.9),
color = "black",
lwd = 1, alpha = .8)+theme_bw()+
geom_text(stat='count', aes(label=round(..count..)),
position = position_dodge(.9), vjust=2) +
labs(title = "number of choices for all participants")
dat_preprocessed %>%
mutate(new_block = ifelse(Block.thisN<10, "first", "second")) %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each agent by participant choice")+
facet_wrap(~new_block)
dat_clean %>%
mutate(new_block = ifelse(Block.thisN<10, "first", "second")) %>%
ggplot(aes(y = sync_value, x = Partner, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each agent by participant choice")+
facet_wrap(~new_block)
dat_clean %>%
ggplot(aes(y = sync_value, x = Block.thisN, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
geom_smooth()+
theme_bw()+
labs("Sync valye for each agent by participant choice")
dat_clean %>%
ggplot(aes(key_respLeft.rt))+
geom_histogram()
dat_clean %>%
ggplot(aes(key_respRight.rt))+
geom_histogram()
dat_preprocessed %>%
ggplot(aes(y = sync_value, x = Partner, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each agent by participant choice")
#facet_wrap(~participant)
dat_clean %>%
ggplot(aes(y = sync_value, x = Partner, color = CongruencyChoice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
  theme_bw()+
  #facet_wrap(~participant)+
  labs(title = "Sync value for each agent by Congruency and partner")
my_plots = list() # initialize the list before filling it in the loop
for(i in unique(dat_clean$participant)){
  my_plots[[i]] = dat_clean %>%
filter(participant == i) %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each agent by Congruency and partner")}
dat_clean %>%
ggplot(aes(y = sync_z_value, fill = CongruencyChoice))+
geom_histogram()+
theme_bw()+facet_wrap(~Partner)+
labs("z Sync valye for each agent by Congruency and partner")
dat_preprocessed %>%
ggplot(aes(y = sync_value, x = CongruencyChoice, color = CongruencyChoice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each agent by Congruency")
dat_preprocessed %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye by participant choice")
dat_clean %>%
ggplot(aes(y = CorrTuringDyad, x = Partner, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Average Correctness in Turing Test by Partner")
# try a different way for correctness based on the block (same results)
dat_preprocessed %>%
group_by(participant, Partner, Block.thisN) %>%
summarize(CorrTuringDyad=first(CorrTuringDyad)) %>%
ggplot(aes(y = CorrTuringDyad, x = Partner, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Average Correctness in Turing Test by Partner")
# trying to plot instances of each choice by partner
dat_preprocessed %>%
group_by(participant, Partner, Block.thisN, choice) %>%
summarize(choice=first(choice)) %>%
ggplot(aes(y = choice, x = Partner, color = Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Average Correctness in Turing Test by Partner")
dat_preprocessed %>%
ggplot(aes(y = sync_value, x = Partner, color=Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for each actual partner")
dat_preprocessed %>%
ggplot(aes(y = sync_value, x = Partner, color=choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye per choice")+
facet_wrap(~participant)
dat_clean %>%
ggplot(aes(y = sync_value, x = CorrTuringDyad))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for predicting correct the partner")
dat_clean %>%
ggplot(aes(y = sync_value, x = Partner, color=as.factor(CorrTuringDyad )))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for predicting correct the partner")
dat_clean %>%
ggplot(aes(y = sync_mixed_Comp_Human, x = Partner, color=Partner))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync mixed comp human value for each actual partner")
dat_clean %>%
ggplot(aes(y = sync_mixed_Comp_Human, x = Partner, color=choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync mixed comp human value for each actual partner and Choice")
dat_clean %>%
#filter(choice %in% c("comp_congruent", "human_congruent")) %>%
# group_by(participant, Partner, choice) %>%
# mutate(correct_split = ifelse(CorrTuringDyad > .35, "Hi", "Lo")) %>%
ggplot(aes(y = sync_value, x = as.factor(CorrTuringDyad) , color = Partner ))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for predicting correct the partner")
dat_clean %>%
#filter(choice %in% c("comp_congruent", "human_congruent")) %>%
# group_by(participant, Partner, choice) %>%
# mutate(correct_split = ifelse(CorrTuringDyad > .35, "Hi", "Lo")) %>%
ggplot(aes(y = sync_value, x = Partner, color=Partner ))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
facet_wrap(~CorrTuringDyad)+
labs("Sync valye for correctness of dyad and the partner")
dat_clean %>%
filter(choice %in% c("comp_congruent", "human_congruent")) %>%
# group_by(participant, Partner, choice) %>%
# mutate(correct_split = ifelse(CorrTuringDyad > .35, "Hi", "Lo")) %>%
ggplot(aes(y = sync_value, x = as.factor(CorrTuringDyad), color=as.factor(CorrTuringDyad) ))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for correctness of dyad and the partner")
dat_clean %>%
  filter(choice %in% c("comp_congruent", "human_congruent")) %>%
#(participant, Partner, choice) %>%
# mutate(correct_split = ifelse(CorrTuringDyad > .35, "Hi", "Lo")) %>%
ggplot(aes(y = sync_z_value, x = Partner, color=choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.3),
fun.args = list(mult = 1))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.3))+
theme_bw()+
labs("Sync valye for correctness of dyad and the partner")
dat_clean %>%
ggplot(aes(y = sync_value, x = Partner, color = choice))+
geom_point(shape = 1, alpha = .8, position = position_jitterdodge(.1))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.8),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.8))+
theme_bw()+
labs("Sync valye for each agent by participant choice with individual data points")
dat_clean %>%
ggplot(aes(sync_value, closeness_diff, color = Partner))+
geom_point()+
geom_smooth(method = "lm", se =F)+theme_bw()
dat_clean %>%
ggplot(aes(sync_value, closeness_diff, color = choice))+
geom_point()+
geom_smooth(method = "lm", se =F)+theme_bw()
dat_clean %>%
ggplot(aes(sync_value, closeness_diff, color = CongruencyChoice))+
geom_point()+
geom_smooth(method = "lm", se =F)+theme_bw()
dat_clean %>%
ggplot(aes(sync_value, closeness_avg, color = Partner))+
geom_point()+
geom_smooth(method = "lm", se =F)+theme_bw()
dat_clean %>%
ggplot(aes(sync_value, closeness_avg, color = choice))+
geom_point()+
geom_smooth(method = "lm", se =F)+theme_bw()
# stats ----
dat_clean$choice=as.factor(dat_clean$choice)
dat_clean$Partner=as.factor(dat_clean$Partner)
res_aov1 <- aov(sync_value ~ Partner,
data = dat_clean
)
res_aov2 <- aov(sync_value ~ Partner+choice,
data = dat_clean
)
summary(res_aov2)
library(multcomp)
# Tukey HSD test:
post_test <- glht(res_aov2,
linfct = mcp(choice= "Tukey")
)
summary(post_test)
# afex library tests ---
library(afex)
dat_clean$participant<-as.factor(dat_clean$participant) # convert participant to a factor
dat_clean$Partner<-factor(dat_clean$Partner) # convert Partner to a factor
dat_clean$choice<-factor(dat_clean$choice) # convert choice to a factor
Within.aov.1 <- aov_car(sync_value ~ Partner*choice + Error(participant/(Partner*choice)), data=dat_clean)
library(afex)
dat_stats = dat_clean %>%
group_by(participant, Partner, which_block ) %>%
summarize(ave_sync = mean(sync_mixed_Comp_Human))
dat_stats_2 = dat_preprocessed %>%
group_by(participant, Partner, choice) %>%
summarize(ave_sync = mean(sync_value, na.rm = T))
library(lme4)
library(lmerTest)
dat_stats = dat_clean %>%
# filter(dat_clean$choice %in% c("comp_congruent", "human_congruent")) %>%
group_by(participant, choice) %>%
summarize(ave_sync = mean(sync_value))
lm = aov_car(ave_sync ~ choice + Error(participant/choice), dat_stats)
summary(lm)
post_hoc = emmeans(lm, ~ choice)
post_hoc
dat_stats = dat_clean %>%
# filter(dat_clean$choice %in% c("comp_congruent", "human_congruent")) %>%
group_by(participant, Partner, CongruencyChoice) %>%
summarize(ave_sync = mean(sync_value))
lm = aov_car(ave_sync ~ Partner*CongruencyChoice + Error(participant/(Partner*CongruencyChoice)), dat_stats)
summary(lm)
library(rstatix)
# friedman test for dyad correctness, effect size and pairwise comparison
dat_stats = dat_clean %>%
group_by(participant, Partner) %>%
summarize(ave_Correctness = mean(CorrTuringDyad))
dat_stats = dat_stats %>%
convert_as_factor(participant, Partner)
dat_stats$ave_Correctness = as.numeric(dat_stats$ave_Correctness)
dat_stats= ungroup(dat_stats)
res.fried <- dat_stats %>% friedman_test(ave_Correctness ~ Partner | participant)
res.fried
dat_stats %>% friedman_effsize(ave_Correctness ~ Partner | participant)
pwc <- dat_stats %>%
wilcox_test(ave_Correctness ~ Partner, paired = TRUE, p.adjust.method = "bonferroni")
pwc
# Partner and CorrectedDyad
dat_stats = dat_clean %>%
group_by(participant, Partner, CorrTuringDyad) %>%
summarize(ave_sync = mean(sync_value))
lm2 = aov_car(ave_sync ~ Partner*CorrTuringDyad + Error(participant/(Partner*CorrTuringDyad)), dat_stats)
summary(lm2)
post_hoc = emmeans(lm2, ~ Partner*CorrTuringDyad)
post_hoc
library(emmeans)
post_hoc_lm1= emmeans(lm1, ~ Partner*which_block)
post_hoc_lm2 = emmeans(lm2, ~ Partner)
# mixed model
library(lme4)
library(lmerTest)
lm1 = lmer(sync_value~Partner*choice +
(1|participant),
dat_preprocessed)
lm2 = lmer(sync_value~Partner*choice +
(1|participant)+
(1|trial_num),
dat_preprocessed)
anova(lm1, lm2)
lm3 = lmer(sync_value~Partner*choice +
(1|participant)+
(1|trial_num) +
(1|Block.thisN),
dat_preprocessed)
anova(lm1, lm2, lm3)
anova(lm3)
lm4 = lmer(sync_value~Partner*choice +
(1|participant)+
(1+trial_num|participant) +
(1|Block.thisN),
dat_preprocessed)
anova(lm1, lm2, lm3, lm4)
anova(lm4)
lm5 = lmer(sync_value~Partner*choice +
(1|participant)+
(1+trial_num|participant) +
(1+Block.thisN|participant),
dat_preprocessed)
anova(lm1, lm2, lm3, lm4, lm5)
anova(lm4)
lm5 = lmer(sync_value~Partner*choice +
(1|participant)+
(1+trial_num|participant/Partner) +
(1+Block.thisN|participant/Partner),
dat_preprocessed)
anova(lm1, lm2, lm3, lm4, lm5)
anova(lm5)
dat_preprocessed %>%
ggplot(aes(Partner, sync_value))+
stat_summary(fun.data = mean_se, geom = "point", size = 3)+
#geom_jitter()+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1)
dat_preprocessed %>%
ggplot(aes(choice, sync_value))+
stat_summary(fun.data = mean_se, geom = "point", size = 3)+
#geom_jitter()+
stat_summary(fun.data = mean_se, geom = "errorbar", width = .1)
#
(dat_preprocessed %>% group_by(Partner) %>%
summarize(mean = mean(sync_value)))
(dat_preprocessed %>% group_by(choice) %>%
summarize(mean = mean(sync_value)))
lm_rating = lmer(sync_value~Partner*choice+closeness_diff +
(1|participant)+
(1+trial_num|participant/Partner) +
(1+Block.thisN|participant/Partner),
dat_preprocessed)
anova(lm_rating)
dat_preprocessed %>%
mutate(close_split_avg = ifelse(closeness_avg >= mean(closeness_avg), "close", "far"),
close_split_diff = ifelse(closeness_diff >= mean(closeness_diff), "far", "close")) %>%
ggplot(aes(Partner, sync_value, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.8),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.8))+
theme_bw()+
facet_wrap(~ close_split_diff)
dat_preprocessed %>%
ggplot(aes(Partner, sync_value, color = choice))+
stat_summary(fun.data = mean_se, geom = "errorbar",
width = .1, position = position_dodge(.8),
fun.args = list(mult = 1.96))+
stat_summary(fun.data = mean_se, geom = "point", size = 5,
position = position_dodge(.8))+
theme_bw()+
facet_wrap(~ which_block)
|
b8f81f2981048962452cbb56b9fe99dbd87b60b9
|
[
"R",
"RMarkdown"
] | 6 |
R
|
AzizAbubshait/HyperscanningRproject
|
4efcf6b6b19db2949aba8e27f4cfae8e2ef2a9e2
|
528445ac3cefa5c68497ae4af91d2acf7b2b0caf
|
refs/heads/master
|
<repo_name>Alejandro19921222/3-4<file_sep>/app/controllers/todolists_controller.rb
class TodolistsController < ApplicationController
def new
@list = List.new
end
def create
list = List.new(list_params)
list.save
redirect_to "/top"
end
private
def list_params
params.require(:list).permit(:title, :body)
end
end
|
c2825636165c547df2c254451e4162232c8dd54f
|
[
"Ruby"
] | 1 |
Ruby
|
Alejandro19921222/3-4
|
c366549fcaf0f69bf1335590fa9be809364adfcd
|
ae23785e87b3a500b1d577b4a0a337ee753316aa
|
refs/heads/master
|
<file_sep>
### Data Source
The original data is stored at the following dataset site, together with additional description.
https://archive.ics.uci.edu/ml/datasets/Human+Activity+Recognition+Using+Smartphones
### Raw data description
Raw data comes from the Human Activity Recognition database, built from the recordings of 30 subjects performing activities of daily living (ADL) while carrying a waist-mounted smartphone with embedded inertial sensors.
The dataset includes the following files:
- 'features_info.txt': Shows information about the variables used on the feature vector.
- 'features.txt': List of all features.
- 'activity_labels.txt': Links the class labels with their activity name.
- 'train/X_train.txt': Training set.
- 'train/y_train.txt': Training labels.
- 'train/subject_train.txt': Each row identifies the subject who performed the activity for each window sample. Its range is from 1 to 30.
- 'test/X_test.txt': Test set.
- 'test/y_test.txt': Test labels.
- 'test/subject_test.txt': Each row identifies the subject who performed the activity for each window sample. Its range is from 1 to 30.
Data on the features measured for the **test** set can be found in "X_test.txt" and the activity labels are in "y_test.txt". Each value in "y" corresponds to one of the 6 activities included in the study. Information on the test subjects can be found in "subject_test.txt".
The **training** dataset follows a similar format.
The **561 feature** descriptions are found in "features_info.txt" and
the **6 activity** labels are found in "activity_labels.txt".
### Process of data tidying
The training and the test sets are merged to create one data set. Both are loaded separately and, before merging them with rbind, the "y" column containing the activity (named activity_id) and the subject column (named subject_id) are added to each data frame. A new column, **type**, is also created, holding _"train"_ or _"test"_, so that the original source of every row is preserved.
The activity names are then filled in by replacing each numerical activity code with its label.
Finally, the column names are set to descriptive names of their content. The result is stored as a data.frame in the single variable all_data, with 10299 rows x 563 columns.
The symbols "()-," are removed or replaced by "_".
| Variable | Description
-----------|-------------
| subject | Subject ID
| tbodyacc_Mean_x | Mean time for acceleration of body for X direction
| tbodyacc_Mean_y | Mean time for acceleration of body for Y direction
| tbodyacc_Mean_z | Mean time for acceleration of body for Z direction
| tbodyacc_Std_x | Standard deviation of time for acceleration of body for X direction
| tbodyacc_Std_y | Standard deviation of time for acceleration of body for Y direction
| tbodyacc_Std_z | Standard deviation of time for acceleration of body for Z direction
| tgravityacc_Mean_x | Mean time of acceleration of gravity for X direction
| tgravityacc_Mean_y | Mean time of acceleration of gravity for Y direction
| tgravityacc_Mean_z | Mean time of acceleration of gravity for Z direction
| tgravityacc_Std_x | Standard deviation of time of acceleration of gravity for X direction
| tgravityacc_Std_y | Standard deviation of time of acceleration of gravity for Y direction
| tgravityacc_Std_z | Standard deviation of time of acceleration of gravity for Z direction
| tbodyaccjerk_Mean_x | Mean time of body acceleration jerk for X direction
| tbodyaccjerk_Mean_y | Mean time of body acceleration jerk for Y direction
| tbodyaccjerk_Mean_z | Mean time of body acceleration jerk for Z direction
| tbodyaccjerk_Std_x | Standard deviation of time of body acceleration jerk for X direction
| tbodyaccjerk_Std_y | Standard deviation of time of body acceleration jerk for Y direction
| tbodyaccjerk_Std_z | Standard deviation of time of body acceleration jerk for Z direction
| tbodygyro_Mean_x | Mean body gyroscope measurement for X direction
| tbodygyro_Mean_y | Mean body gyroscope measurement for Y direction
| tbodygyro_Mean_z | Mean body gyroscope measurement for Z direction
| tbodygyro_Std_x | Standard deviation of body gyroscope measurement for X direction
| tbodygyro_Std_y | Standard deviation of body gyroscope measurement for Y direction
| tbodygyro_Std_z | Standard deviation of body gyroscope measurement for Z direction
| tbodygyrojerk_Mean_x | Mean jerk signal of body for X direction
| tbodygyrojerk_Mean_y | Mean jerk signal of body for Y direction
| tbodygyrojerk_Mean_z | Mean jerk signal of body for Z direction
| tbodygyrojerk_Std_x | Standard deviation of jerk signal of body for X direction
| tbodygyrojerk_Std_y | Standard deviation of jerk signal of body for Y direction
| tbodygyrojerk_Std_z | Standard deviation of jerk signal of body for Z direction
| tbodyaccmag_Mean | Mean magnitude of body Acc
| tbodyaccmag_Std | Standard deviation of magnitude of body Acc
| tgravityaccmag_Mean | Mean gravity acceleration magnitude
| tgravityaccmag_Std | Standard deviation of gravity acceleration magnitude
| tbodyaccjerkmag_Mean | Mean magnitude of body acceleration jerk
| tbodyaccjerkmag_Std | Standard deviation of magnitude of body acceleration jerk
| tbodygyromag_Mean | Mean magnitude of body gyroscope measurement
| tbodygyromag_Std | Standard deviation of magnitude of body gyroscope measurement
| tbodygyrojerkmag_Mean | Mean magnitude of body gyroscope jerk measurement
| tbodygyrojerkmag_Std | Standard deviation of magnitude of body gyroscope jerk measurement
| fbodyacc_Mean_x | Mean frequency of body acceleration for X direction
| fbodyacc_Mean_y | Mean frequency of body acceleration for Y direction
| fbodyacc_Mean_z | Mean frequency of body acceleration for Z direction
| fbodyacc_Std_x | Standard deviation of frequency of body acceleration for X direction
| fbodyacc_Std_y | Standard deviation of frequency of body acceleration for Y direction
| fbodyacc_Std_z | Standard deviation of frequency of body acceleration for Z direction
| fbodyaccjerk_Mean_x | Mean frequency of body acceleration jerk for X direction
| fbodyaccjerk_Mean_y | Mean frequency of body acceleration jerk for Y direction
| fbodyaccjerk_Mean_z | Mean frequency of body acceleration jerk for Z direction
| fbodyaccjerk_Std_x | Standard deviation of frequency of body acceleration jerk for X direction
| fbodyaccjerk_Std_y | Standard deviation of frequency of body acceleration jerk for Y direction
| fbodyaccjerk_Std_z | Standard deviation of frequency of body acceleration jerk for Z direction
| fbodygyro_Mean_x | Mean frequency of body gyroscope measurement for X direction
| fbodygyro_Mean_y | Mean frequency of body gyroscope measurement for Y direction
| fbodygyro_Mean_z | Mean frequency of body gyroscope measurement for Z direction
| fbodygyro_Std_x | Standard deviation of frequency of body gyroscope measurement for X direction
| fbodygyro_Std_y | Standard deviation of frequency of body gyroscope measurement for Y direction
| fbodygyro_Std_z | Standard deviation of frequency of body gyroscope measurement for Z direction
| fbodyaccmag_Mean | Mean frequency of body acceleration magnitude
| fbodyaccmag_Std | Standard deviation of frequency of body acceleration magnitude
| fbodybodyaccjerkmag_Mean | Mean frequency of body acceleration jerk magnitude
| fbodybodyaccjerkmag_Std | Standard deviation of frequency of body acceleration jerk magnitude
| fbodybodygyromag_Mean | Mean frequency of magnitude of body gyroscope measurement
| fbodybodygyromag_Std | Standard deviation of frequency of magnitude of body gyroscope measurement
| fbodybodygyrojerkmag_Mean | Mean frequency of magnitude of body gyroscope jerk measurement
| fbodybodygyrojerkmag_Std | Standard deviation of frequency of magnitude of body gyroscope jerk measurement
| activity | The activity performed
| type | can be _train_ or _test_
### Extraction of particular data
Columns containing **mean or std** data can be subset by **step 2** of the run_analysis.R script, giving a set of 86 columns.
| Column | Column |
| ---------------------------- | ------------------------- |
| Activity_id | Subject_id |
| Time_BodyAcc_Mean_X | tBodyAcc_Mean_X |
| Time_BodyAcc_Mean_Y | tBodyAcc_Mean_Y |
| Time_BodyAcc_Mean_Z | tBodyAcc_Mean_Z |
| Time_BodyAcc_Std_X | tBodyAcc_Std_X |
| Time_BodyAcc_Std_Y | tBodyAcc_Std_Y |
| Time_BodyAcc_Std_Z | tBodyAcc_Std_Z |
| Time_GravityAcc_Mean_X | tGravityAcc_Mean_X |
| Time_GravityAcc_Mean_Y | tGravityAcc_Mean_Y |
| Time_GravityAcc_Mean_Z | tGravityAcc_Mean_Z |
| Time_GravityAcc_Std_X | tGravityAcc_Std_X |
| Time_GravityAcc_Std_Y | tGravityAcc_Std_Y |
| Time_GravityAcc_Std_Z | tGravityAcc_Std_Z |
| Time_BodyAccJerk_Mean_X | tBodyAccJerk_Mean_X |
| Time_BodyAccJerk_Mean_Y | tBodyAccJerk_Mean_Y |
| Time_BodyAccJerk_Mean_Z | tBodyAccJerk_Mean_Z |
| Time_BodyAccJerk_Std_X | tBodyAccJerk_Std_X |
| Time_BodyAccJerk_Std_Y | tBodyAccJerk_Std_Y |
| Time_BodyAccJerk_Std_Z | tBodyAccJerk_Std_Z |
| Time_BodyGyro_Mean_X | tBodyGyro_Mean_X |
| Time_BodyGyro_Mean_Y | tBodyGyro_Mean_Y |
| Time_BodyGyro_Mean_Z | tBodyGyro_Mean_Z |
| Time_BodyGyro_Std_X | tBodyGyro_Std_X |
| Time_BodyGyro_Std_Y | tBodyGyro_Std_Y |
| Time_BodyGyro_Std_Z | tBodyGyro_Std_Z |
| Time_BodyGyroJerk_Mean_X | tBodyGyroJerk_Mean_X |
| Time_BodyGyroJerk_Mean_Y | tBodyGyroJerk_Mean_Y |
| Time_BodyGyroJerk_Mean_Z | tBodyGyroJerk_Mean_Z |
| Time_BodyGyroJerk_Std_X | tBodyGyroJerk_Std_X |
| Time_BodyGyroJerk_Std_Y | tBodyGyroJerk_Std_Y |
| Time_BodyGyroJerk_Std_Z | tBodyGyroJerk_Std_Z |
| Time_BodyAccMag_Mean | tBodyAccMag_Mean |
| Time_BodyAccMag_Std | tBodyAccMag_Std |
| Time_GravityAccMag_Mean | tGravityAccMag_Mean |
| Time_GravityAccMag_Std | tGravityAccMag_Std |
| Time_BodyAccJerkMag_Mean | tBodyAccJerkMag_Mean |
| Time_BodyAccJerkMag_Std | tBodyAccJerkMag_Std |
| Time_BodyGyroMag_Mean | tBodyGyroMag_Mean |
| Time_BodyGyroMag_Std | tBodyGyroMag_Std |
| Time_BodyGyroJerkMag_Mean | tBodyGyroJerkMag_Mean |
| Time_BodyGyroJerkMag_Std | tBodyGyroJerkMag_Std |
| FFT_BodyAcc_Mean_X | fBodyAcc_Mean_X |
| FFT_BodyAcc_Mean_Y | fBodyAcc_Mean_Y |
| FFT_BodyAcc_Mean_Z | fBodyAcc_Mean_Z |
| FFT_BodyAcc_Std_X | fBodyAcc_Std_X |
| FFT_BodyAcc_Std_Y | fBodyAcc_Std_Y |
| FFT_BodyAcc_Std_Z | fBodyAcc_Std_Z |
| FFT_BodyAccJerk_Mean_X | fBodyAccJerk_Mean_X |
| FFT_BodyAccJerk_Mean_Y | fBodyAccJerk_Mean_Y |
| FFT_BodyAccJerk_Mean_Z | fBodyAccJerk_Mean_Z |
| FFT_BodyAccJerk_Std_X | fBodyAccJerk_Std_X |
| FFT_BodyAccJerk_Std_Y | fBodyAccJerk_Std_Y |
| FFT_BodyAccJerk_Std_Z | fBodyAccJerk_Std_Z |
| FFT_BodyGyro_Mean_X | fBodyGyro_Mean_X |
| FFT_BodyGyro_Mean_Y | fBodyGyro_Mean_Y |
| FFT_BodyGyro_Mean_Z | fBodyGyro_Mean_Z |
| FFT_BodyGyro_Std_X | fBodyGyro_Std_X |
| FFT_BodyGyro_Std_Y | fBodyGyro_Std_Y |
| FFT_BodyGyro_Std_Z | fBodyGyro_Std_Z |
| FFT_BodyAccMag_Mean | fBodyAccMag_Mean |
| FFT_BodyAccMag_Std | fBodyAccMag_Std |
| FFT_BodyBodyAccJerkMag_Mean | fBodyBodyAccJerkMag_Mean |
| FFT_BodyBodyAccJerkMag_Std | fBodyBodyAccJerkMag_Std |
| FFT_BodyBodyGyroMag_Mean | fBodyBodyGyroMag_Mean |
| FFT_BodyBodyGyroMag_Std | fBodyBodyGyroMag_Std |
| FFT_BodyBodyGyroJerkMag_Mean | fBodyBodyGyroJerkMag_Mean |
| FFT_BodyBodyGyroJerkMag_Std | fBodyBodyGyroJerkMag_Std |
| ---------------------------- | --------------------------- |<file_sep>'
1 Merges the training and the test sets to create one data set.
2 Extracts only the measurements on the mean and standard deviation for each measurement.
3 Uses descriptive activity names to name the activities in the data set
4 Appropriately labels the data set with descriptive variable names.
5 From the data set in step 4, creates a second, independent tidy data set
with the average of each variable for each activity and each subject.
Good luck!'
###################################################
#
# Download datafile.zip
#
##################################################
# check if a data folder exists; if not then create one
if (!file.exists("data")) {dir.create("data")}
destfile <- "./data/Human_Activity_Recognition_Smartphones.zip"
if (!file.exists(destfile)) {
# file URL & destination file
fileUrl <- "https://d396qusza40orc.cloudfront.net/getdata%2Fprojectfiles%2FUCI%20HAR%20Dataset.zip"
# download the file & note the time
download.file(fileUrl, destfile)
dateDownloaded <- date()
}
print(
'Manually extraction from Human_Activity_Recognition_Smartphones.zip
is necessary to obtain into data folder the new folder system:
UCI HAR Dataset')
############## ###########################
# LOAD DATA STRUCTURE #
############## ###########################
## Read the test data
testdata <- read.table("./data/UCI HAR Dataset/test/X_test.txt")
testdata$type<-"test" #add a column to identify which set the data came from
## Read the training data
traindata <- read.table("./data/UCI HAR Dataset/train/X_train.txt")
traindata$type<-"train" #add a column to identify which set the data came from
## Read all activities
activity_labels <- read.table("./data/UCI HAR Dataset/activity_labels.txt",
col.names=c("activity_id","activity_name"))
## Read the features names
features <- read.table("./data/UCI HAR Dataset/features.txt")
feature_names <- features[,2]
## Read the test subjects
test_subject_id <- read.table("./data/UCI HAR Dataset/test/subject_test.txt")
colnames(test_subject_id) <- "subject_id" #name
## Read the activity id's of the test data and label the the dataframe's columns
test_activity_id <- read.table("./data/UCI HAR Dataset/test/y_test.txt")
colnames(test_activity_id) <- "activity_id" #name
## Read the ids of the train subjects and label the the dataframe's columns
train_subject_id <- read.table("./data/UCI HAR Dataset/train/subject_train.txt")
colnames(train_subject_id) <- "subject_id"
## Read the activity id's of the training data and label the dataframe's columns
train_activity_id <- read.table("./data/UCI HAR Dataset/train/y_train.txt")
colnames(train_activity_id) <- "activity_id"
#assign the feature names loaded previously as column names
colnames(testdata) <- feature_names
colnames(testdata)[562] <-"type" # the last one is the type test or train
colnames(traindata) <- feature_names
colnames(traindata)[562] <-"type" # the last one is the type test or train
##Combine the subject id's, and the activity id's for both test and train
##and the test data into one dataframe
test_data <- cbind(test_subject_id , test_activity_id , testdata)
train_data <- cbind(train_subject_id , train_activity_id , traindata)
rm(traindata,testdata,train_subject_id ,
train_activity_id,test_subject_id , test_activity_id ) #clear this data in memory
############## ###########################
#1 Merges the training and the test sets to create one data set# #
############## ###########################
all_data <- rbind(train_data,test_data) #merges two tables giving 10299 obs.
rm(train_data,test_data) #clear this data in memory
subjects<-unique(all_data$subject_id) #Ok gives an array 1:30 subjects
activities<-unique(all_data$activity_id) #Ok gives an array 1:6 activities
############### ###########################
# 2 Extracts only the measurements on the mean and standard deviation
#for each measurement.
############## ###########################
features[,2] = gsub('mean', 'Mean', features[,2]) #put all with the Upper case
features[,2] = gsub('std', 'Std', features[,2]) #put all with the Upper case
mean.sd <- grep("Mean|Std", features[,2]) # 86 features, ignore.case = FALSE
all_data.mean.sd <-all_data[, mean.sd]
############## ###########################
#3 Uses descriptive activity names to name the activities in the data set# #
############## ###########################
#change the activity numbers by its label.
all_data$activity_id <- activity_labels[all_data$activity_id , 2]
############## ###########################
#4 Appropriately labels the data set with descriptive variable names# #
############## ###########################
#previously (see above) the column names have been assigned and they are descriptive.
#the names are cleaned by removing "()" and changing '-' and ',' to '_'
new_names<- gsub('[()]', '', names(all_data))
new_names<- gsub('[-,]', '_', new_names)
names(all_data)<-new_names #assign new names
############## ###########################
#5 From the data set in step 4, creates a second, independent tidy data set #
# with the average of each variable for each activity and each subject # #
############## ###########################
all_data$subject_id <- as.factor(all_data$subject_id) #make as factor variable.
all_data <- all_data[,1:563] #remove type var
tidy_data <- aggregate(all_data, by=list(activity=all_data$activity_id, subject=all_data$subject_id), mean)
## Create a file with the new tidy dataset
tidy_data[,3]<-NULL #remove redundant activity_id and subject_id columns
tidy_data[,3]<-NULL #remove redundant activity_id and subject_id columns
write.table(tidy_data,"./tidy_data.txt",row.name=FALSE) #give a table 180x563
'__________________________ END _____________________________________'
#additional commands used to generate documentation files.
write.table(new_names,"features_names.txt",row.name=FALSE,col.name=FALSE)
<file_sep>
## Getting and Cleaning Data: Course Project
This repo contains the files required by the course project.
### File List
- **run_analysis.R** The R script for processing data and generating the tidy data
- **Codebook.md** Description of variable names and analysis steps
- **README.md** The README file for this repo.
### Data Source
Data was downloaded from:
https://d396qusza40orc.cloudfront.net/getdata%2Fprojectfiles%2FUCI%20HAR%20Dataset.zip
### Process of run_analysis.R
1. Merges the training and the test sets to create one data set.
2. Extracts only the measurements on the mean and standard deviation for each measurement.
3. Uses descriptive activity names to name the activities in the data set
4. Appropriately labels the data set with descriptive variable names.
5. From the data set in step 4, creates a second, independent tidy data set
with the average of each variable for each activity and each subject.
### Output
The script generates the file **tidy_data.txt**, containing the tidy data of step 5: the average of each variable for each activity and each subject.
### Tidy data description
The tidy data is generated by following the steps outlined in **_Codebook.md_**.
|
d92aa228d6886b8ca936632a95603893f17f6366
|
[
"Markdown",
"R"
] | 3 |
Markdown
|
huertasmf/3_getting_and_cleaning
|
ba1f32fd8c48217ad206e3a27cf4dedf0214f4d9
|
7bfddecef9122b27f3780ee3874cc5ba0464e8d1
|
refs/heads/master
|
<repo_name>jipjan/HueApp<file_sep>/app/src/main/java/stijnjj/hueapp/HueGroup.java
package stijnjj.hueapp;
import java.util.ArrayList;
/**
* Created by Jaap-Jan on 7-12-2017.
*/
public class HueGroup extends ExpandableGroup {
public HueGroup(GroupWithId group, ArrayList<LightWithId> lights) {
super(group, lights);
}
@Override
public void updateLights() {
}
}
<file_sep>/app/src/main/java/stijnjj/hueapp/GroupWithId.kt
package stijnjj.hueapp
import stijnjj.hueapp.Json.GroupClasses.SingleGroup
class GroupWithId(var id: Int, var group: SingleGroup)<file_sep>/app/src/main/java/stijnjj/hueapp/LightWithId.kt
package stijnjj.hueapp
import stijnjj.hueapp.Json.LightClasses.Light
class LightWithId(val id: Int, val light: Light)<file_sep>/app/src/main/java/stijnjj/hueapp/Activities/SettingsActivity.kt
package stijnjj.hueapp.Activities
import android.content.Intent
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.view.View
import android.widget.Button
import android.widget.EditText
import stijnjj.hueapp.R
import stijnjj.hueapp.Settings
class SettingsActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_settings)
val s = Settings.createInstance(this)
val username = findViewById<EditText>(R.id.txtUsername) as EditText
username.setText(s.username)
username.setOnFocusChangeListener { _, hasFocus ->
if (!hasFocus)
s.username = username.text.toString()
}
val port = findViewById<EditText>(R.id.txtPort) as EditText
port.setText("" + s.port)
port.setOnFocusChangeListener { _, hasFocus ->
if (!hasFocus)
s.port = port.text.toString().toInt()
}
val location = findViewById<EditText>(R.id.txtAddress) as EditText
location.setText(s.location)
location.setOnFocusChangeListener { _, hasFocus ->
if (!hasFocus)
s.location = location.text.toString()
}
val connect = findViewById<Button>(R.id.btnConnect) as Button
connect.setOnClickListener {
val mActivity = Intent(this@SettingsActivity, MainActivity::class.java)
startActivity(mActivity)
}
}
}
<file_sep>/app/src/main/java/stijnjj/hueapp/Settings.java
package stijnjj.hueapp;
import android.content.Context;
import android.content.SharedPreferences;
/**
* Created by Jaap-Jan on 21-11-2017.
*/
public class Settings {
private static final String PreferencesName = "ApiSettings";
SharedPreferences _preferences;
SharedPreferences.Editor _editor;
private static Settings _instance;
private String _username, _location;
private int _port;
private Settings(Context c) {
_preferences = c.getSharedPreferences(PreferencesName, Context.MODE_PRIVATE);
_editor = _preferences.edit();
_username = _preferences.getString("username", "newdeveloper");
_location = _preferences.getString("location", "localhost");
_port = _preferences.getInt("port", 8000);
}
public static Settings createInstance(Context c) {
return _instance = new Settings(c);
}
public static Settings getInstance() {
return _instance;
}
public String getUsername() {
return _username;
}
public String getLocation() {
return _location;
}
public int getPort() {
return _port;
}
public void setUsername(String username) {
_username = username;
_editor.putString("username", username);
_editor.commit();
}
public void setLocation(String location) {
_location = location;
_editor.putString("location", location);
_editor.commit();
}
public void setPort(int port) {
_port = port;
_editor.putInt("port", port);
_editor.commit();
}
}
<file_sep>/app/src/main/java/stijnjj/hueapp/Activities/MainActivity.kt
package stijnjj.hueapp.Activities
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.widget.ExpandableListView
import stijnjj.hueapp.*
import stijnjj.hueapp.Json.GroupClasses.Action
import stijnjj.hueapp.Json.GroupClasses.AdvancedAction
import stijnjj.hueapp.Json.GroupClasses.Group
import stijnjj.hueapp.Json.GroupClasses.SingleGroup
import stijnjj.hueapp.Json.LightClasses.Light
class MainActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
/*
var api = HueApi(this)
api.getLights({})
*/
var groups: ArrayList<Group>
val lightGroups = ArrayList<ExpandableGroup>()
val api = HueApi(this)
api.getGroups {
groups = it
for (i in 1..groups.size) { // Hue group ids start at 1; an "All lights" group (id 0) is appended separately in waitTillListIsFilled
api.getGroupInfo(i) {
val group = GroupWithId(i, it)
val lights = ArrayList<LightWithId>()
for (s in group.group.lights) {
api.getInfoLight(s.toInt()) {
val light = LightWithId(s.toInt(), it)
println(it.name)
lights.add(light)
}
}
waitTillListIsFilled(groups.size, lightGroups, HueGroup(group, lights), api)
}
}
}
}
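// Collects one HueGroup per async group callback; once all groups have arrived, appends a synthetic "All lights" group (id 0) and attaches the adapter to the ExpandableListView.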
fun waitTillListIsFilled(numElements: Int, list: ArrayList<ExpandableGroup>, newElement: ExpandableGroup, api: HueApi){
list.add(newElement)
if (list.size == numElements){
// start adding all lights
var singlegrp = SingleGroup()
singlegrp.name = "All lights"
singlegrp.action = AdvancedAction()
singlegrp.action.hue = 254
singlegrp.action.bri = 254
singlegrp.action.sat = 254
singlegrp.action.on = true
val grp = GroupWithId(0, singlegrp)
val newlights = ArrayList<LightWithId>()
api.getLights {
for (i in 0..it.size-1)
newlights.add(LightWithId(i+1, it[i]))
list.add(HueGroup(grp, newlights))
// End of system to add all lights
val listView = findViewById<ExpandableListView>(R.id.expandedList)
listView.setAdapter(CustomListAdapter(this, list, resources, api))
}
}
}
}<file_sep>/app/src/main/java/stijnjj/hueapp/HueApi.kt
package stijnjj.hueapp
import android.content.Context
import android.util.Log
import com.android.volley.Request
import com.android.volley.RequestQueue
import com.android.volley.Response
import com.android.volley.toolbox.JsonObjectRequest
import com.android.volley.toolbox.Volley
import com.google.gson.GsonBuilder
import org.json.JSONObject
import stijnjj.hueapp.Json.GroupClasses.Group
import stijnjj.hueapp.Json.GroupClasses.SingleGroup
import stijnjj.hueapp.Json.LightClasses.Light
import stijnjj.hueapp.Json.LightClasses.LightSettings
import kotlin.jvm.javaClass
/**
* Created by Jaap-Jan on 21-11-2017.
*/
class HueApi(c: Context) {
internal var _queue: RequestQueue
internal var _settings = Settings.getInstance()
init {
_queue = Volley.newRequestQueue(c)
}
fun getLights(onDone: (lights: ArrayList<Light>) -> Unit) {
getListCall("lights", onDone)
}
fun getGroups(onDone: (lights: ArrayList<Group>) -> Unit) {
getListCall("groups", onDone)
}
fun getGroupInfo(id: Int, onDone: (group: SingleGroup) -> Unit) {
getSingleCall("groups/" + id, onDone)
}
fun getInfoLight(id: Int, onDone: (light: Light) -> Unit) {
getSingleCall("lights/" + id, onDone)
}
fun setLightState(id: Int, settings: LightSettings) {
putCall("lights/$id/state", settings)
}
fun setGroupState(id: Int, settings: LightSettings) {
putCall("groups/$id/action", settings)
}
private inline fun <reified TResponse> getListCall(subDir: String, crossinline onDone: (lights: ArrayList<TResponse>) -> Unit) {
var url = makeUrl(subDir)
var request = JsonObjectRequest(Request.Method.GET, url, null, Response.Listener<JSONObject> {
response ->
val lights = ArrayList<TResponse>()
val itt = response.keys()
val gson = GsonBuilder().create()
while (itt.hasNext()) {
var str = response.getJSONObject(itt.next())
lights.add(gson.fromJson(str.toString(), TResponse::class.java))
}
onDone(lights)
}, Response.ErrorListener {
response ->
Log.d("API", "Failed")
})
_queue.add(request)
}
private inline fun <reified TResponse> getSingleCall(subDir: String, crossinline onDone: (item: TResponse) -> Unit) {
var url = makeUrl(subDir)
var request = JsonObjectRequest(Request.Method.GET, url, null, Response.Listener<JSONObject> {
response ->
val gson = GsonBuilder().create()
onDone(gson.fromJson(response.toString(), TResponse::class.java))
}, Response.ErrorListener {
response ->
Log.d("API", "Failed")
})
_queue.add(request)
}
private inline fun <reified TPut> putCall(subDir: String, settings: TPut) {
var url = makeUrl(subDir)
val gson = GsonBuilder().create()
val json = gson.toJson(settings)
var request = JsonObjectRequest(Request.Method.PUT, url, JSONObject(json) , Response.Listener<JSONObject> {
response ->
Log.d("API", "Success")
}, Response.ErrorListener {
response ->
Log.d("API", "Failed")
})
_queue.add(request)
}
private fun makeUrl(subDir: String): String =
"http://" + _settings.location + ":" + _settings.port + "/api/" + _settings.username + "/" + subDir
}
<file_sep>/app/src/main/java/stijnjj/hueapp/CustomListAdapter.kt
package stijnjj.hueapp
import android.content.Context
import android.content.res.Resources
import android.graphics.Color
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.*
import com.onegravity.colorpicker.ColorPickerDialog
import com.onegravity.colorpicker.ColorPickerListener
import com.onegravity.colorpicker.SetColorPickerListenerEvent
import stijnjj.hueapp.Json.GroupClasses.Group
import java.util.*
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
import kotlin.collections.ArrayList
import stijnjj.hueapp.Json.LightClasses.LightSettings
import kotlin.collections.HashMap
import android.widget.ExpandableListView
class CustomListAdapter(val context: Context, val groups: ArrayList<ExpandableGroup>, val resources: Resources, val api: HueApi) : BaseExpandableListAdapter() {
init {
//println(groups[getGroup(0)]!!.size)
}
override fun getChildrenCount(p0: Int) = groups[p0].lights.size
override fun getGroup(p0: Int): GroupWithId = groups[p0].getGroup()
override fun getChild(p0: Int, p1: Int): LightWithId = groups[p0].lights[p1]
fun getChildren(p0: Int): ArrayList<LightWithId> = groups[p0].lights
override fun getGroupId(p0: Int): Long = p0.toLong()
override fun isChildSelectable(p0: Int, p1: Int): Boolean = false
override fun hasStableIds() = false
override fun getChildView(groupPos: Int, childPos: Int, isLastChild: Boolean, convertView: View?, parent: ViewGroup?): View? {
val newView: View
val layoutInflater = context.getSystemService(Context.LAYOUT_INFLATER_SERVICE) as LayoutInflater
newView = layoutInflater.inflate(R.layout.list_detail, parent, false)
val light = getChild(groupPos, childPos)
println(((light.light.state.hue.toFloat() / 65535) * 360))
println(light.light.state.hue)
var lightColor = Color.HSVToColor(floatArrayOf(((light.light.state.hue.toFloat() / 65535) * 360), (light.light.state.sat.toFloat() / 254), (light.light.state.bri.toFloat() / 254))) // convert bri to Float before dividing, otherwise integer division truncates to 0 or 1
val switch = newView.findViewById<Switch>(R.id.switchOnLight)
switch.isChecked = light.light.state.on
switch.setOnCheckedChangeListener({ _, isChecked ->
light.light.state.on = isChecked
api.setLightState(light.id, LightSettings(light.light.state.on, null, null, null))
})
val text = newView.findViewById<TextView>(R.id.txtLightName)
text.text = light.light.name
val colorButton = newView.findViewById<ImageView>(R.id.lightColorBtn)
colorButton.setColorFilter(lightColor)
colorButton.setOnClickListener {
val dialog = ColorPickerDialog(context, lightColor, false)
var dialogId = dialog.show()
SetColorPickerListenerEvent.setListener(dialogId,
object : ColorPickerListener {
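// Throttle: a scheduled task flips shouldBeUpdated every 500 ms, so light state is sent to the bridge at most twice per second while the color wheel is dragged.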
var shouldBeUpdated = false
var lightSettings: LightSettings
val scheduledTask = Executors.newScheduledThreadPool(1)
val task = object : TimerTask(){
override fun run() {
shouldBeUpdated = true
println("update = true")
}
}
init{
scheduledTask.scheduleAtFixedRate(task, 0, 500, TimeUnit.MILLISECONDS)
lightSettings = LightSettings()
lightSettings.isOn = light.light.state.on
}
override fun onColorChanged(color: Int) {
val hsv = floatArrayOf(0f,0f,0f)
Color.colorToHSV(color, hsv)
lightSettings.hue = (hsv[0] / 360 * 65535).toInt()
lightSettings.saturation = (hsv[1] * 254).toInt()
lightSettings.brightness = (hsv[2] * 254).toInt()
lightColor = color
if (shouldBeUpdated) {
api.setLightState(light.id, lightSettings)
light.light.state.hue = (hsv[0] / 360 * 65535).toInt()
light.light.state.sat = (hsv[1] * 254).toInt()
light.light.state.bri = (hsv[2] * 254).toInt()
colorButton.setColorFilter(color)
// println("update = false")
shouldBeUpdated = false
}
}
override fun onDialogClosing() {
scheduledTask.shutdown()
shouldBeUpdated = true
dialogId = -1
// notifyDataSetChanged()
}
}
)
}
return newView
}
override fun getChildId(p0: Int, p1: Int): Long = p1.toLong()
override fun getGroupView(groupPosition: Int, isExpanded: Boolean, p2: View?, parent: ViewGroup?): View? {
var newView = p2
val group = getGroup(groupPosition)
val action = group.group.action
val title = group.group.name
if (p2 == null) {
val layoutInflater = context.getSystemService(Context.LAYOUT_INFLATER_SERVICE) as LayoutInflater
newView = layoutInflater.inflate(R.layout.list_header, parent, false)
}
if (newView != null) {
val titleTextView = newView.findViewById<TextView>(R.id.txtGroupName)
titleTextView.text = title
@Suppress("DEPRECATION")
titleTextView.setTextColor(resources.getColor(R.color.colorText))
val switch = newView.findViewById<Switch>(R.id.switchOnGroup)
switch.isChecked = action.on
switch.setOnCheckedChangeListener({ _, isChecked ->
action.on = isChecked
api.setGroupState(group.id, LightSettings(action.on, null, null, null))
for (l in getChildren(groupPosition)){
l.light.state.on = isChecked
notifyDataSetChanged()
}
})
val collapseButton = newView.findViewById<ImageView>(R.id.collapseButton)
if (isExpanded){
collapseButton.setImageResource(R.drawable.list_expanded)
}
else collapseButton.setImageResource(R.drawable.list_collapsed)
collapseButton.setOnClickListener {
if (isExpanded)
(parent as ExpandableListView).collapseGroup(groupPosition)
else
(parent as ExpandableListView).expandGroup(groupPosition, true)
}
val colorBtn = newView.findViewById<ImageView>(R.id.groupColorBtn)
var lightsColor = Color.HSVToColor(floatArrayOf(((action.hue.toFloat() / 65535) * 360), (action.sat.toFloat() / 254), (action.bri.toFloat() / 254))) // same fix as in getChildView: avoid integer division on bri
colorBtn.setColorFilter(lightsColor)
colorBtn.setOnClickListener {
val dialog = ColorPickerDialog(context, lightsColor, false)
var dialogId = dialog.show()
SetColorPickerListenerEvent.setListener(dialogId,
object : ColorPickerListener {
var shouldBeUpdated = false
var lightSettings: LightSettings
val scheduledTask = Executors.newScheduledThreadPool(1)
val task = object : TimerTask(){
override fun run() {
shouldBeUpdated = true
println("update = true")
}
}
init{
scheduledTask.scheduleAtFixedRate(task, 0, 500, TimeUnit.MILLISECONDS)
lightSettings = LightSettings()
lightSettings.isOn = action.on
}
override fun onColorChanged(color: Int) {
val hsv = floatArrayOf(0f,0f,0f)
Color.colorToHSV(color, hsv)
lightSettings.hue = (hsv[0] / 360 * 65535).toInt()
lightSettings.saturation = (hsv[1] * 254).toInt()
lightSettings.brightness = (hsv[2] * 254).toInt()
lightsColor = color
if (shouldBeUpdated) {
api.setGroupState(group.id, lightSettings)
action.hue = (hsv[0] / 360 * 65535).toInt()
action.sat = (hsv[1] * 254).toInt()
action.bri = (hsv[2] * 254).toInt()
for (l in getChildren(groupPosition)){
l.light.state.hue = (hsv[0] / 360 * 65535).toInt()
l.light.state.sat = (hsv[1] * 254).toInt()
l.light.state.bri = (hsv[2] * 254).toInt()
}
notifyDataSetChanged()
colorBtn.setColorFilter(color)
// println("update = false")
shouldBeUpdated = false
}
}
override fun onDialogClosing() {
scheduledTask.shutdown()
shouldBeUpdated = true
dialogId = -1
// notifyDataSetChanged()
}
}
)
}
}
return newView
}
override fun getGroupCount() = groups.size
}
|
ed058e3a2664baa47ba1e6b46db28e73cff53490
|
[
"Java",
"Kotlin"
] | 8 |
Java
|
jipjan/HueApp
|
d32d004cee5ac7321ed6b95f28590386755bbdb2
|
2799db891b367a389b00042772a08519f153eeb2
|
refs/heads/master
|
<file_sep>import logging
import logging.handlers
# TODO: find all formats
FORMAT = 'bibtex'
URL_ROOT = 'http://dl.acm.org/'
URL_CITATION = URL_ROOT + 'citation.cfm?id={id}&preflayout=flat'
URL_BIBTEX = URL_ROOT + 'exportformats.cfm?id={id}&expformat=' + FORMAT
INVALID_CHARS = list('/?|:*\"<>\\')
BIB_FILE = ''
BIB_ENCODING = 'utf8'
CONFERENCE_LOG = '2017-05-31.log'
log = logging.getLogger('my logger')
handler = logging.handlers.RotatingFileHandler(CONFERENCE_LOG)
fmt = '%(asctime)s - %(message)s'
formatter = logging.Formatter(fmt)
handler.setFormatter(formatter)
log.addHandler(handler)
handler = logging.StreamHandler()
fmt = '%(asctime)s - %(filename)s:%(lineno)s - %(name)s - %(message)s'
formatter = logging.Formatter(fmt)
handler.setFormatter(formatter)
log.addHandler(handler)
log.setLevel(logging.DEBUG)
SLEEP_TIME_RANGE = (0, 5)
SLEEP_ALPHA = 2
SLEEP_BETA = 5
<file_sep># !/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import re
import random
import time
import requests
from bs4 import BeautifulSoup
from selenium import webdriver
import bibtexparser
import cfg
if platform.system() == 'Windows':
PHANTOMJS_PATH = './phantomjs.exe'
else:
PHANTOMJS_PATH = './phantomjs'
browser = webdriver.PhantomJS(PHANTOMJS_PATH)
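# Decorator: sleep for a Beta(SLEEP_ALPHA, SLEEP_BETA)-distributed time scaled into SLEEP_TIME_RANGE before each request, to throttle hits on the ACM server.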
def random_sleep(func):
def inner(*args, **kwargs):
t = random.betavariate(cfg.SLEEP_ALPHA, cfg.SLEEP_BETA) \
* (cfg.SLEEP_TIME_RANGE[1] - cfg.SLEEP_TIME_RANGE[0]) \
+ cfg.SLEEP_TIME_RANGE[0]
time.sleep(t)
return func(*args, **kwargs)
return inner
@random_sleep
def get(url, execute_js=True, proxy=''):
if not execute_js:
# without this camouflage, response will return 403 Forbidden
headers = {
'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1;'
'en-US; rv:1.9.1.6) Gecko/20091201 '
'Firefox/3.5.6'
}
if proxy:
try:
r = requests.get(url, headers=headers, proxies={'http': proxy})
except requests.exceptions.ProxyError as pe:
# The local network does not support using proxies
r = requests.get(url, headers=headers)
else:
r = requests.get(url, headers=headers)
return r.text
else:
browser.get(url)
return browser.page_source
# Confererence page parser
def extract_paper_ids(html_content):
soup = BeautifulSoup(html_content, "html.parser")
links = soup.findAll('a')
links = filter(lambda link: 'citation.cfm' in str(link), links)
ids = map(lambda link: re.findall(r'(?<=id=)\d+(?=.*)', str(link)), links)
ids = map(lambda e: e[0], filter(lambda e: e, ids))
return set(ids)
# Conference page parser
def extract_conference_title(html_content):
soup = BeautifulSoup(html_content, "html.parser")
return soup.find('strong').text
def export_citation(paper_id):
html_content = get(cfg.URL_BIBTEX.format(id=paper_id), execute_js=False)
soup = BeautifulSoup(html_content, 'html.parser')
if soup.pre:
bibtex = soup.pre.get_text()
bib = bibtexparser.loads(bibtex)
bib.entries[0]['abstract'] = get_abstract(paper_id)
return bibtexparser.dumps(bib)
return ''
def get_abstract(paper_id):
abstract = ''
html_content = get(cfg.URL_CITATION.format(id=paper_id), execute_js=False)
soup = BeautifulSoup(html_content, 'html.parser')
if soup.find('div', {'class': 'flatbody'}):
if soup.find('div', {'class': 'flatbody'}).find('div'):
abstract = soup.find('div', {'class': 'flatbody'}).find('div').text
if '<p>' in abstract:
abstract = abstract.replace('<p>', '').replace('</p>', '\n')
return abstract
<file_sep>acm-citation-crawler
====================
### Introduction
This program downloads all the bibtex citations from an ACM conference url.
### How to use
Follow these steps:
- Find an ACM conference proceedings page, for example [RecSys '14](http://dl.acm.org/citation.cfm?id=2645710&preflayout=flat#abstract).
Note that the page should be switched to **flat view** and contain the table of contents (links to each paper).
- Hit `Ctrl-S` to save the conference page into the directory where the program is located.
- Copy the **whole** file name of the saved web page, including the extension `.html`, and paste into `pages.txt`, one file name per line.
The program will automatically parse each web page indicated in `pages.txt` and extract bibtex citations to corresponding files.
- (Maybe optional) Find some free proxies and create `proxy.txt` file in the following format, one proxy per line:
```
<protocol>://<ip>:<port>
```
For example, the content of `proxy.txt` file can be
```
http://1.2.3.4:80
https://5.6.7.8:90
```
- Open a command prompt and run `python crawler.py`. If it complains that some package is not installed, try `pip install -r requirements.txt`.
If the program fails to parse citations in the conference proceedings page, check whether there are multiple elements belonging to the `text12` class; delete all the others and keep only the one that contains the citations.
The program will take a while to finish collecting all the bibtex citations, because the ACM library limits the connection speed from the same IP.
The program may also occasionally fail to crawl some citations, and it will not output information about the failed ones.
So if it fails, just try again until it succeeds. :)
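For quick one-off experiments, the crawler functions can also be called directly from a Python shell instead of going through the `pages.txt` workflow. A minimal sketch (assuming the repo files are importable from the current directory and the PhantomJS binary is present, since `crawler.py` starts it at import time; the paper id below is a hypothetical placeholder):
```python
# Minimal sketch: export a single BibTex citation without the pages.txt workflow.
# Assumes crawler.py and cfg.py sit in the current directory; the id is a placeholder.
import crawler

paper_id = '1234567'  # hypothetical ACM DL paper id, replace with a real one
bibtex = crawler.export_citation(paper_id)  # returns '' when no citation is found
if bibtex:
    with open(paper_id + '.bib', 'w', encoding='utf-8') as f:
        f.write(bibtex)
```
The same `export_citation` call is what the batch script uses for every id it extracts from a conference page.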
### BibTex format citation
#### JabRef group comments grammar
```tex
@comment{jabref-meta: groupsversion:3;}
@comment{jabref-meta: groupstree:
0 AllEntriesGroup:;
1 ExplicitGroup:1\;0\;b\;;
2 ExplicitGroup:1.1\;0\;a\;;
2 ExplicitGroup:c\;0\;;
}
```
`[depth] ExplicitGroup:[group name]\;0\;[bibtex key1]\;[bibtex key2]\;;`
The parent node of a group is the nearest previous line whose depth is less than the current node's.
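As an illustration of this grammar (not part of the crawler itself), a small Python helper could emit one such explicit-group line from a list of BibTex keys; the group name and keys below are made up:
```python
# Illustrative sketch: build one JabRef explicit-group line from BibTex keys,
# following the grammar above: [depth] ExplicitGroup:[name]\;0\;[key1]\;[key2]\;;
def explicit_group_line(depth, name, keys):
    parts = [str(depth) + ' ExplicitGroup:' + name, '0'] + list(keys)
    return '\\;'.join(parts) + '\\;;'

# Hypothetical group name and keys:
print(explicit_group_line(1, 'RecSys14', ['Smith2014', 'Lee2014']))
# -> 1 ExplicitGroup:RecSys14\;0\;Smith2014\;Lee2014\;;
```
A complete groups block would also need the surrounding `@comment{jabref-meta: groupstree:` wrapper and the `groupsversion` comment shown in the example above.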
Once you have a `.bib` citation file, open it with [JabRef](http://jabref.sourceforge.net/), and browse the citations.
There's a set of [extensions](http://www.lhnr.de/ext/) which can help to download all pdfs automatically.
-- EOF --
<file_sep># !/usr/bin/python
# -*- coding: utf-8 -*-
import re
import shelve
import crawler
import cfg
def export_paper_citations(conference_id):
url = cfg.URL_CITATION.format(id=conference_id)
html_content = crawler.get(url)
ids = crawler.extract_paper_ids(html_content)
conference_name = crawler.extract_conference_title(html_content)
for paper_id in ids:
cfg.log.info(paper_id)
if paper_id not in db:
db[paper_id] = crawler.export_citation(paper_id)
with open(conference_name + '.bib', 'w', encoding='utf-8') as f:
for paper_id in ids:
f.write(db[paper_id] + '\n')
def main():
url = r'http://dl.acm.org/citation.cfm?id=2971648&picked=prox'
conference_id = re.findall(r'(?<=id=)\d+(?=.*)', str(url))
if not conference_id:
# the url is not a valid ACM DL url
return
export_paper_citations(conference_id[0])
if __name__ == '__main__':
with shelve.open('db') as db:
main()
<file_sep>requests
beautifulsoup4
bibtexparser
selenium
|
1ba66f6f1e83b480f0a2ad9c5fe857d619e5b546
|
[
"Markdown",
"Python",
"Text"
] | 5 |
Python
|
tangym/acm-citation-crawler
|
2bdef706d917274caba933e05183ab81b96c2437
|
009c00d3a5f0d1c8ba0ae34b0d9606504a427357
|
refs/heads/master
|
<repo_name>lulucici/Story-for-Typecho<file_sep>/config.php
<?php
//on = enabled
//off (or anything else) = disabled
$GLOBALS['isAutoNav'] = 'off'; //automatically set the margin and width values in the navigation bar (recommended to enable)
$GLOBALS['isIconNav'] = 'off'; //replace the 1,2,3 in the navigation bar with Emoji icons
$GLOBALS['isRSS'] = 'off'; //add an RSS button to the menu bar
$GLOBALS['style_BG'] = ''; //background image: fill in an image URL, leave empty to disable
|
fbe2944858f41955387be1039a37ccfd2456e188
|
[
"PHP"
] | 1 |
PHP
|
lulucici/Story-for-Typecho
|
bbde18cfb61c26d9a2ea8e826bee2ec7a239cd27
|
42f3227b6710018af56da3bbe2497910ddcc2565
|
refs/heads/master
|
<repo_name>osamaahmed17/LiveUp-Frontend<file_sep>/src/views/dashboard.jsx
import React, { Component } from 'react';
import requireAuth from './requireAuth';
import 'materialize-css/dist/css/materialize.min.css'
import 'materialize-css/dist/js/materialize.min.js'
import 'bootstrap/dist/css/bootstrap.css';
import DashboardCard from '../components/dashboardcard'
import { Link } from 'react-router-dom'
import M from "materialize-css";
import { Form, Button } from 'react-bootstrap';
import axios from 'axios';
class Dashboard extends Component {
constructor(props) {
super(props);
this.state = {
username: '',
password: '',
fullname: '',
country: '',
name: this.props.name
}
this.handleChangeUsername = this.handleChangeUsername.bind(this);
this.handleChangePassword = this.handleChangePassword.bind(this);
this.handleChangeFullName = this.handleChangeFullName.bind(this);
this.handleChangeCountry = this.handleChangeCountry.bind(this);
this.onSubmit = this.onSubmit.bind(this);
this.editmethod = this.editmethod.bind(this);
}
handleChangeUsername(e) {
this.setState({ username: e.target.value });
}
handleChangePassword(e) {
this.setState({ password: e.target.value });
}
handleChangeFullName(e) {
this.setState({ fullname: e.target.value });
}
handleChangeCountry(e) {
this.setState({ country: e.target.value });
}
componentDidMount() {
var elems = document.querySelectorAll('.modal');
M.Modal.init(elems, { opacity: 0.5 });
}
editmethod(data)
{
console.log(data)
}
/* Add new user*/
onSubmit(e) {
document.getElementById("add-form").reset();
var self = this;
e.preventDefault();
axios.post('https://liveup.mybluemix.net/users/signup', {
username: this.state.username,
password: <PASSWORD>,
fullname: this.state.fullname,
country: this.state.country
})
.then(function (response) {
const current = self.state.name;
const newname = current.concat(response.data.user);
self.setState({ name: newname });
console.log(self.state.name)
}).catch(function (error) {
console.log(error);
})
}
render() {
return (
<div>
{/* Modal for adding new user */}
<div id="addmodal" className="modal">
<div className="modal-content">
<div className="Add">
<div className="row">
<div className="col-lg-4"></div>
<Form onSubmit={this.onSubmit} id="add-form">
<div className="form-header">
Add User
</div>
<Form.Control type="text" placeholder="Username" className="username" onChange={this.handleChangeUsername} />
<Form.Control type="password" placeholder="<PASSWORD>" className="password" onChange={this.handleChangePassword} />
<Form.Control type="text" placeholder="<NAME>" className="fullname" onChange={this.handleChangeFullName} />
<Form.Control type="text" placeholder="Country" className="country" onChange={this.handleChangeCountry} />
<Button variant="primary" type="submit" className="text-capitalize modal-close">Add</Button>
</Form>
<div className="col-lg-4"></div>
</div>
</div>
</div>
</div>
{/* Dashboard starts */}
<div className="container">
<h2 className="title">Welcome {localStorage.getItem('user')}</h2>
<div className="List" >
<div className="row">
{this.state.name.map((value, index) => {
console.log("i am key: "+index)
return (
<div className="col-lg-4">
<DashboardCard key={index} data={value} />
</div>
)}
)
}
</div>
</div>
<div className="row">
<div className="dash-btn">
<a className="btn-floating btn-large waves-effect waves-light blue modal-trigger app-btn" href="#addmodal"><i className="material-icons">add</i></a>
<Link to="/namelist" className="btn-floating btn-large waves-effect waves-light app-btn"><i className="material-icons">call</i></Link>
</div>
</div>
</div>
</div>
);
}
}
export default requireAuth(Dashboard);<file_sep>/src/components/header.jsx
import React, { Component } from 'react';
import 'materialize-css/dist/css/materialize.min.css'
import 'materialize-css/dist/js/materialize.min.js'
import 'bootstrap/dist/css/bootstrap.css';
import logo from '../images/logo.png';
import { Link } from 'react-router-dom';
import { connect } from 'react-redux';
class Header extends Component {
renderLinks() {
if (this.props.authenticated) {
return (
<ul id="nav-mobile" className="float-right header-links">
<li><Link to="/namelist">Call List</Link></li>
<li><Link to="/signout">Sign Out</Link></li>
</ul>
);
} else {
return (
<ul id="nav-mobile" className="float-right header-links" >
<li><Link to="/signin">Sign In</Link> </li>
<li><Link to="/signup">Sign Up</Link></li>
</ul>
);
}
}
render() {
return (
<nav className="app-header">
<div className="row">
<div className="col-lg-2">
<a href="#" className="brand-logo center">
<img src={logo} width="55" height="85">
</img>
</a>
</div>
<div className="col-lg-7"></div>
<div className="col-lg-3">
{this.renderLinks()}
</div>
</div>
</nav>
);
}
}
function mapStateToProps(state) {
return { authenticated: state.auth.authenticated };
}
export default connect(mapStateToProps)(Header);<file_sep>/src/App.js
import React, { Component } from 'react';
import axios from 'axios';
import './App.css';
import Headers from './components/header'
import HomePage from './components/HomePage'
import NameList from './views/nameList'
import { BrowserRouter, Switch, Route } from 'react-router-dom'
import LoadingComponent from './components/loading/loading'
import Signin from './views/signin/signin'
import Signup from './views/signup/signup'
import Signout from './views/signout/signout'
import Dashboard from './views/dashboard'
import Favicon from 'react-favicon';
import { Provider } from 'react-redux';
import { createStore, applyMiddleware } from 'redux';
import reduxThunk from 'redux-thunk';
import reducers from './reducers'
//const proxyurl = "https://cors-anywhere.herokuapp.com/";
const store = createStore(
reducers,
{
auth: { authenticated: localStorage.getItem('token') }
},
applyMiddleware(reduxThunk)
);
class App extends Component {
constructor(props) {
super(props);
this.state = {
data: null,
data2: 'hello'
};
}
componentDidMount() {
var self = this;
axios.get('https://liveup.mybluemix.net/users')
.then(function (response) {
self.setState({ data: response.data })
})
.catch(function (error) {
console.log(error);
});
}
render() {
if (this.state.data == undefined)
return (<LoadingComponent />);
console.log(this.state.data)
return (
<Provider store={store}>
<div className="App">
<Favicon url="" />
<BrowserRouter>
<Headers />
<Switch>
<Route path="/signin" render={(props) => <Signin name={this.state.data} />} />
<Route path="/signup" component={Signup} />
<Route path="/signout" component={Signout} />
<Route path="/" exact component={HomePage} />
<Route exact path='/namelist' render={(props) => <NameList name={this.state.data} />} />
<Route path="/dashboard" render={(...props) => <Dashboard name={this.state.data} />} />
</Switch>
</BrowserRouter>
</div>
</Provider>
);
}
}
export default App;
<file_sep>/src/views/nameList.jsx
import React, { Component } from 'react';
import requireAuth from './requireAuth';
import 'materialize-css/dist/css/materialize.min.css'
import 'materialize-css/dist/js/materialize.min.js'
import 'bootstrap/dist/css/bootstrap.css';
import Cards from '../components/Cards'
import socketIO from 'socket.io-client';
class NameList extends Component {
constructor(props) {
super(props)
this.state = {
caller: localStorage.getItem('user'),
called: this.props.name,
username: '',
show: 'close'
}
}
render() {
// console.log(this.state.show)
var self = this
var socket = socketIO("https://liveup.mybluemix.net");
socket.on("Data", function (data, err) {
if (data) {
self.setState({show: data.showstate,username:data.username});
console.log(data.showstate)
}
else {
console.log("No Connection")
}
})
// if (this.state.show === false) {
// socket.emit('Data', { caller: '', showstate: false }, function (data, err) {
// console.log(err);
// })
// socket.on("Data", function (data, err) {
// if (data) {
// self.setState({ newcalled: data.caller, show: data.showstate });
// console.log(data)
// }
// else {
// console.log("No Connection")
// }
// })
// }
let userMessage;
if (this.state.show === 'open') {
userMessage = (
<div className="notification"><h5>Incoming Call From <b>{this.state.username}</b></h5></div>
)
}
return (
<div className="nameList">
<div className="container">
<h2 className="title">Welcome {localStorage.getItem('user')}</h2>
{userMessage}
<div className="row">
{this.props.name.map((value, key) => {
return (
<div className="col-lg-4">
<Cards key={value.username} data={value} truecallApi={this.truecallApi} falsecallApi={this.falsecallApi} />
</div>
)
})
}
</div>
</div>
</div>
);
}
}
export default requireAuth(NameList);
|
a59443d7898eca133a6bb8285b8519c04b5800cb
|
[
"JavaScript"
] | 4 |
JavaScript
|
osamaahmed17/LiveUp-Frontend
|
039d98364b7db4c9f3915d206ba6923738269b0f
|
3dae59c00f690c1808e4e1f2cd0d3b3c8bf0795d
|
refs/heads/master
|
<repo_name>blewis11/Repository<file_sep>/src/Lec1/CardIterator.java
package Lec1;
public interface CardIterator
{
boolean hasNext();
Card next();
}<file_sep>/src/Lec2/Deck.java
package Lec2;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Stack;
import Lec2.Card.Rank;
import Lec2.Card.Suit;
/**
* An iterable implementation of cards.
*/
public class Deck implements Iterable<Card>
{
private final Stack<Card> aCards = new Stack<Card>();
public static void main(String[] args)
{
Deck deck = new Deck();
deck.shuffle();
deck.sort();
for( Card card : deck )
{
System.out.println(card);
}
}
public Deck()
{}
/*
public void sort()
{
Collections.sort(aCards, Card.createByRankComparator());
} */
public List<Card> getCards()
{
return Collections.unmodifiableList(aCards);
}
public Deck( Deck pDeck )
{
for( Card card : pDeck.aCards )
{
aCards.add(new Card(card));
}
}
public void shuffle()
{
aCards.clear();
for( Suit suit : Suit.values() )
{
for( Lec2.Card.Rank rank : Lec2.Card.Rank.values())
{
aCards.push(new Card(rank, suit));
}
}
Collections.shuffle(aCards);
}
public boolean isEmpty()
{
return aCards.isEmpty();
}
public Card draw()
{
return aCards.pop();
}
@Override
public Iterator<Card> iterator()
{
return aCards.iterator();
}
public void sort(){
//sorts deck by rank and then suite
Comparator<Card> SuiteComp = new bySuiteComparator();
Comparator<Card> RankComp = new byRankComparator();
Collections.sort(aCards, SuiteComp);
Collections.sort(aCards, RankComp);
}
public void sortBy(Comparator<Card> comp){
Collections.sort(aCards, comp);
}
}
class byRankComparator implements Comparator<Card>{
	@Override
	public int compare(Card o1, Card o2) {
		return o1.getRank().ordinal() - o2.getRank().ordinal();
	}
}
class bySuiteComparator implements Comparator<Card>{
@Override
public int compare(Card o1, Card o2) {
return o1.getSuit().ordinal() - o2.getSuit().ordinal();
}
}
<file_sep>/src/questions/SuitStackManager.java
package questions;
public class SuitStackManager {
}
|
223919b655741a768756846c8ce3ea34f78f6007
|
[
"Java"
] | 3 |
Java
|
blewis11/Repository
|
83676c3899a5cd8b7d22dc6ac337bdc560ea4414
|
5fff2ce0c2c4b6307d7ffd3fdb9049076b98f392
|
refs/heads/master
|
<file_sep>package hr.algebra.catchaspotapp
import android.os.Bundle
import android.util.Log
import android.util.Patterns
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.firestore.ktx.firestore
import com.google.firebase.ktx.Firebase
import hr.algebra.catchaspotapp.framework.setBooleanProperty
import hr.algebra.catchaspotapp.framework.setCurrentUserIDProperty
import hr.algebra.catchaspotapp.framework.startActivityAndClearStack
import hr.algebra.catchaspotapp.model.User
import kotlinx.android.synthetic.main.activity_register.*
class RegisterActivity : AppCompatActivity() {
private lateinit var mAuth: FirebaseAuth
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_register)
mAuth = FirebaseAuth.getInstance()
setupListeners()
}
private fun setupListeners() {
btnRegister.setOnClickListener {
val email = etEmail.text.toString()
val password = etPassword.text.toString()
val name = etName.text.toString()
if(validateFields(email, password, name)){
val user = User(email, password, name, null)
registerUser(user)
}
}
}
private fun registerUser(user: User) {
mAuth.createUserWithEmailAndPassword(user.email, user.password)
.addOnCompleteListener(this) { task ->
if (task.isSuccessful) {
FirebaseAuth.getInstance().currentUser?.let {
Firebase.firestore.collection("users")
.document(it.uid)
.set(user)
.addOnSuccessListener {
Toast.makeText(this, "User registered successfully", Toast.LENGTH_SHORT).show()
setBooleanProperty(USER_LOGGED_IN, true)
setCurrentUserIDProperty(USER_ID, mAuth.currentUser?.uid.toString())
startActivityAndClearStack<SplashScreenActivity>()
}
.addOnFailureListener {
Toast.makeText(this, "Big OOF", Toast.LENGTH_SHORT).show()
}
}
} else {
Log.w("TAG", "createUserWithEmail:failure", task.exception)
etEmail.error = "E-mail already in use!"
etEmail.requestFocus()
}
}
}
private fun validateFields(email: String, password: String, name: String):Boolean {
if (email.isEmpty()){
etEmail.error = "Email required"
etEmail.requestFocus()
return false
}
if (!Patterns.EMAIL_ADDRESS.matcher(email).matches()){
etEmail.error = "Please insert valid email"
etEmail.requestFocus()
return false
}
if (password.isEmpty()){
etPassword.error = "Password required"
etPassword.requestFocus()
return false
}
if (password.length < 6){
etPassword.error = "Password must be at least 6 characters long"
etPassword.requestFocus()
return false
}
if (name.isEmpty()){
etName.error = "Name required"
etName.requestFocus()
return false
}
return true
}
}<file_sep>package hr.algebra.catchaspotapp.framework
import android.view.View
interface OnItemClickListenerCallback {
fun onItemClick(v: View, position: Int)
}<file_sep>
package hr.algebra.catchaspotapp
import android.Manifest
import android.content.Intent
import android.content.pm.PackageManager
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.widget.Toast
import androidx.core.app.ActivityCompat
import hr.algebra.catchaspotapp.framework.*
import kotlinx.android.synthetic.main.activity_splash_screen.*
private const val PERMISSION_ID = 1
class SplashScreenActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_splash_screen)
startAnimations()
if (ActivityCompat.checkSelfPermission(
this,
Manifest.permission.ACCESS_FINE_LOCATION
) != PackageManager.PERMISSION_GRANTED || ActivityCompat.checkSelfPermission(
this,
Manifest.permission.ACCESS_COARSE_LOCATION
) != PackageManager.PERMISSION_GRANTED
) {
ActivityCompat.requestPermissions(
this,
arrayOf(Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION),
PERMISSION_ID)
}
else{
if (getBooleanProperty(USER_LOGGED_IN)){
redirect()
}
else{
redirectToLogin()
}
}
}
private fun startAnimations() {
val backgroundTranslation = ivBackground.animate().translationY(-2400F)
backgroundTranslation.duration = 1000
backgroundTranslation.startDelay = 4000
val lottieViewTranslation = lottieView.animate().translationY(-2400F)
lottieViewTranslation.duration = 1000
lottieViewTranslation.startDelay = 4000
}
override fun onRequestPermissionsResult(
requestCode: Int,
permissions: Array<out String>,
grantResults: IntArray
) {
redirectToLogin()
}
private fun redirectToLogin() {
startActivityAndClearStack<LoginActivity>()
}
private fun redirect() {
clearTable()
if (isOnline()){
Intent(this, CASService::class.java).apply {
CASService.enqueueWork(this@SplashScreenActivity, this)
}
}
else{
Toast.makeText(this, "No connection", Toast.LENGTH_LONG).show()
finish()
}
}
}<file_sep>package hr.algebra.catchaspotapp
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.view.View
import hr.algebra.catchaspotapp.framework.fetchParkingSpots
import hr.algebra.catchaspotapp.model.ParkingSpot
import kotlinx.android.synthetic.main.activity_parkin_spot_pager.*
const val ITEM_POSITION = "hr.algebra.catchaspot.item_position"
class ParkingSpotPagerActivity : AppCompatActivity(){
private lateinit var items: MutableList<ParkingSpot>
private var itemPosition: Int = 0
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_parkin_spot_pager)
init()
supportActionBar?.setDisplayHomeAsUpEnabled(true)
}
private fun init() {
items = fetchParkingSpots()
var parkingSpots = mutableListOf<ParkingSpot>()
items.forEach{
if (!it.isUserRegistered){
parkingSpots.add(it)
}
}
itemPosition = intent.getIntExtra(ITEM_POSITION, 0)
viewPager.adapter = ParkingSpotPagerAdapter(parkingSpots, this)
viewPager.currentItem = itemPosition
}
override fun onSupportNavigateUp(): Boolean {
onBackPressed()
return super.onSupportNavigateUp()
}
}<file_sep>package hr.algebra.catchaspotapp.model
import java.io.Serializable
data class ParkingSpot(
var _id: Long?,
val address: String,
val price: Double,
val latCoordinate: Double,
val longCoordinate: Double,
@Transient val isUserRegistered: Boolean
) : Serializable
<file_sep>package hr.algebra.catchaspotapp
import android.content.Context
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.TextView
import androidx.recyclerview.widget.RecyclerView
import hr.algebra.catchaspotapp.model.Payment
class PaymentPagerAdapter(private val payments: MutableList<Payment>, private val context: Context)
: RecyclerView.Adapter<PaymentPagerAdapter.ViewHolder>() {
class ViewHolder(itemView: View): RecyclerView.ViewHolder(itemView) {
private val tvAddress: TextView = itemView.findViewById(R.id.tvAddress)
private val tvPrice: TextView = itemView.findViewById(R.id.tvPrice)
private val tvDate: TextView = itemView.findViewById(R.id.tvDate)
fun bind(payment: Payment){
tvAddress.text = payment.address
tvPrice.text = "${payment.pricePayed} kn"
tvDate.text = payment.date
}
}
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder {
val itemView = LayoutInflater.from(parent.context)
.inflate(R.layout.payment_pager, parent, false)
return ViewHolder(itemView)
}
override fun onBindViewHolder(holder: ViewHolder, position: Int) {
val payment = payments[position]
holder.bind(payment)
}
override fun getItemCount() = payments.size
}<file_sep>package hr.algebra.catchaspotapp
import android.content.*
import android.database.Cursor
import android.net.Uri
import hr.algebra.catchaspotapp.dao.CASRepository
import hr.algebra.catchaspotapp.factory.getCASRepository
import hr.algebra.catchaspotapp.framework.clearTable
import hr.algebra.catchaspotapp.model.ParkingSpot
import java.lang.IllegalArgumentException
private const val AUTHORITY = "hr.algebra.catchaspotapp.api.provider"
private const val PATH = "parking_spots"
val CAS_PROVIDER_CONTENT_URI: Uri = Uri.parse("content://$AUTHORITY/$PATH")
private const val PARKING_SPOTS = 3
private const val PARKING_SPOT_ID = 20
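// URI match codes: the whole parking_spots collection vs. a single row addressed by id (parking_spots/#).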
private val URI_MATCHER = with(UriMatcher(UriMatcher.NO_MATCH)) {
addURI(AUTHORITY, PATH, PARKING_SPOTS)
addURI(AUTHORITY, "$PATH/#", PARKING_SPOT_ID)
this
}
private const val CONTENT_DIR_TYPE = ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + AUTHORITY + "/" + PATH
private const val CONTENT_ITEM_TYPE = ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + AUTHORITY + "/" + PATH
class CASProvider : ContentProvider(){
private lateinit var repository: CASRepository
override fun delete(uri: Uri, selection: String?, selectionArgs: Array<String>?): Int {
when(URI_MATCHER.match(uri)){
PARKING_SPOTS -> return repository.delete(selection, selectionArgs)
PARKING_SPOT_ID -> {
val id = uri.lastPathSegment
if (id != null){
return repository.delete("${ParkingSpot::_id.name} = ?", arrayOf(id))
}
}
}
throw IllegalArgumentException("Wrong URI")
}
override fun getType(uri: Uri): String? {
when(URI_MATCHER.match(uri)){
PARKING_SPOTS -> return CONTENT_DIR_TYPE
PARKING_SPOT_ID -> return CONTENT_ITEM_TYPE
}
throw IllegalArgumentException("Wrong URI")
}
override fun insert(uri: Uri, values: ContentValues?): Uri? {
val id = repository.insert(values)
return ContentUris.withAppendedId(CAS_PROVIDER_CONTENT_URI, id)
}
override fun onCreate(): Boolean {
repository = getCASRepository(context)
return true
}
override fun query(uri: Uri, projection: Array<String>?, selection: String?,
selectionArgs: Array<String>?, sortOrder: String?): Cursor?
= repository.query(projection, selection, selectionArgs, sortOrder)
override fun update(uri: Uri, values: ContentValues?, selection: String?,
selectionArgs: Array<String>?): Int {
when(URI_MATCHER.match(uri)){
PARKING_SPOTS -> return repository.update(values, selection, selectionArgs)
PARKING_SPOT_ID -> {
val id = uri.lastPathSegment
if (id != null){
return repository.update(values,
"${ParkingSpot::_id.name}=?", arrayOf(id))
}
}
}
throw IllegalArgumentException("Wrong URI")
}
}
<file_sep>package hr.algebra.catchaspotapp
import android.content.Context
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.TextView
import androidx.recyclerview.widget.RecyclerView
import hr.algebra.catchaspotapp.framework.OnItemClickListenerCallback
import hr.algebra.catchaspotapp.framework.startActivity
import hr.algebra.catchaspotapp.model.Payment
class PaymentAdapter(
private val payments: MutableList<Payment>,
private val context: Context,
private val onClickListener: OnItemClickListenerCallback
) : RecyclerView.Adapter<PaymentAdapter.ViewHolder>() {
class ViewHolder(paymentView : View) : RecyclerView.ViewHolder(paymentView) {
private val tvDate: TextView = paymentView.findViewById(R.id.tvDate)
private val tvAmount: TextView = paymentView.findViewById(R.id.tvAmount)
fun bind(payment: Payment){
tvDate.text = payment.date
tvAmount.text = "${payment.pricePayed} kn"
}
}
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder {
val paymentView = LayoutInflater.from(context)
.inflate(R.layout.payment, parent, false)
return ViewHolder(paymentView)
}
override fun onBindViewHolder(holder: ViewHolder, position: Int) {
holder.itemView.setOnClickListener{
onClickListener.onItemClick(it, position)
}
holder.bind(payments[position])
}
override fun getItemCount() = payments.size
}<file_sep>package hr.algebra.catchaspotapp
import android.app.Notification
import android.app.NotificationChannel
import android.app.NotificationManager
import android.app.PendingIntent
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import android.media.MediaPlayer
import android.os.Build
import android.os.Handler
import android.os.Looper
import android.provider.Settings
private const val DURATION = 1000L
private const val NOTIFICATION_ID = 1
private const val CHANNEL_ID = "channel_id"
private val CHANNEL_TITLE : CharSequence = "channel_title"
@Suppress("DEPRECATION")
class AlarmReceiver : BroadcastReceiver() {
override fun onReceive(context: Context, intent: Intent) {
playRingtone(context, DURATION)
sendNotification(context)
}
private fun playRingtone(context: Context, duration: Long) {
val mediaPlayer = MediaPlayer.create(context, Settings.System.DEFAULT_RINGTONE_URI)
mediaPlayer.start()
Handler(Looper.getMainLooper()).postDelayed({mediaPlayer.stop()}, duration)
}
private fun sendNotification(context: Context) {
val intent = Intent(context, ParkedActivity::class.java)
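// Note: on Android 12 (API 31) and above this PendingIntent must also specify FLAG_IMMUTABLE or FLAG_MUTABLE.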
val pendingIntent = PendingIntent.getActivity(
context,
NOTIFICATION_ID,
intent,
PendingIntent.FLAG_UPDATE_CURRENT
)
val notificationManager =
context.getSystemService(Context.NOTIFICATION_SERVICE) as NotificationManager
val builder: Notification.Builder
builder = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O){
val channel = NotificationChannel(
CHANNEL_ID,
CHANNEL_TITLE,
NotificationManager.IMPORTANCE_DEFAULT
)
notificationManager.createNotificationChannel(channel)
Notification.Builder(context, channel.id)
}else{
Notification.Builder(context)
}
builder
.setContentTitle("Parking spot alarm")
.setContentText("You've been parked for one hour!")
.setSmallIcon(R.drawable.placeholder).setAutoCancel(true)
.setContentIntent(pendingIntent)
notificationManager.notify(NOTIFICATION_ID, builder.build())
}
}<file_sep>package hr.algebra.catchaspotapp.api
import hr.algebra.catchaspotapp.model.ParkingSpot
import retrofit2.Call
import retrofit2.http.Body
import retrofit2.http.GET
import retrofit2.http.Headers
import retrofit2.http.POST
const val API_URL = "http://192.168.43.85:3000/"
interface CASApi{
@GET("parking_spots")
fun fetchParkingSpots(): Call<List<CASParkingSpot>>
@POST("parking_spots")
fun createParkingSpot(@Body parkingSpot: ParkingSpot) : Call<ParkingSpot>
}<file_sep>package hr.algebra.catchaspotapp
import android.content.Intent
import android.os.Bundle
import androidx.fragment.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.recyclerview.widget.LinearLayoutManager
import com.google.firebase.firestore.ktx.firestore
import com.google.firebase.ktx.Firebase
import hr.algebra.catchaspotapp.framework.OnItemClickListenerCallback
import hr.algebra.catchaspotapp.framework.getCurrentUserIDProperty
import hr.algebra.catchaspotapp.framework.startActivity
import hr.algebra.catchaspotapp.model.Payment
import kotlinx.android.synthetic.main.fragment_payments.*
import java.time.LocalDate
import java.util.*
import kotlin.collections.ArrayList
class PaymentsFragment : Fragment(), OnItemClickListenerCallback {
private var db = Firebase.firestore
private lateinit var payments: MutableList<Payment>
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View?
{
getPayments()
return inflater.inflate(R.layout.fragment_payments, container, false)
}
private fun getPayments() {
val refPayments = db.collection("payments")
refPayments.whereIn("payerID", listOf(requireContext().getCurrentUserIDProperty(USER_ID)))
.get()
.addOnSuccessListener { qs ->
payments = mutableListOf<Payment>()
qs.forEach {
val payment = Payment(
it.data["payerID"].toString(),
it.data["address"].toString(),
it.data["pricePayed"].toString().toDouble(),
it.data["date"].toString()
)
payments.add(payment)
}
val paymentAdapter = PaymentAdapter(payments, requireContext(), this)
rvPayments.apply {
layoutManager = LinearLayoutManager(activity)
adapter = paymentAdapter
}
}
}
override fun onItemClick(v: View, position: Int) {
val intent = Intent(requireContext(), PaymentPagerActivity::class.java)
intent.putExtra("payments", payments as ArrayList<Payment>)
// Pass the tapped position so the pager opens on the selected payment instead of the first one.
intent.putExtra(ITEM_POSITION, position)
startActivity(intent)
}
}<file_sep>package hr.algebra.catchaspotapp.dao
import android.content.ContentValues
import android.content.Context
import android.database.Cursor
import android.database.sqlite.SQLiteDatabase
import android.database.sqlite.SQLiteOpenHelper
import hr.algebra.catchaspotapp.model.ParkingSpot
private const val DB_NAME = "parking_spots.db"
private const val DB_VERSION = 1
private const val TABLE_NAME = "parking_spots"
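// SQLite has no boolean type, so isUserRegistered is stored as an integer (0/1) and read back with `> 0`.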
private val CREATE_TABLE = "create table $TABLE_NAME( " +
"${ParkingSpot::_id.name} integer primary key autoincrement, " +
"${ParkingSpot::address.name} text not null, " +
"${ParkingSpot::price.name} text not null, " +
"${ParkingSpot::latCoordinate.name} text not null, " +
"${ParkingSpot::longCoordinate.name} text not null, " +
"${ParkingSpot::isUserRegistered.name} integer not null " +
")"
private const val DROP_TABLE = "drop table $TABLE_NAME"
class CASSqlHelper(context: Context?) : SQLiteOpenHelper(context, DB_NAME, null, DB_VERSION),
CASRepository{
override fun onCreate(db: SQLiteDatabase) {
db.execSQL(CREATE_TABLE)
}
override fun onUpgrade(db: SQLiteDatabase, oldVersion: Int, newVersion: Int) {
db.execSQL(DROP_TABLE)
onCreate(db)
}
override fun delete(selection: String?, selectionArgs: Array<String>?)
= writableDatabase.delete(TABLE_NAME, selection, selectionArgs)
override fun insert(values: ContentValues?)
= writableDatabase.insert(TABLE_NAME, null, values)
override fun query(projection: Array<String>?, selection: String?,
selectionArgs: Array<String>?, sortOrder: String?): Cursor?
= readableDatabase.query(TABLE_NAME,
projection, selection, selectionArgs, null, null, sortOrder)
override fun update(values: ContentValues?, selection: String?,
selectionArgs: Array<String>?)
= writableDatabase.update(TABLE_NAME, values, selection, selectionArgs)
fun clearTable(db: SQLiteDatabase){
db.execSQL(DROP_TABLE)
onCreate(db)
}
}<file_sep>package hr.algebra.catchaspotapp
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import hr.algebra.catchaspotapp.framework.fetchParkingSpots
import hr.algebra.catchaspotapp.model.ParkingSpot
import hr.algebra.catchaspotapp.model.Payment
import kotlinx.android.synthetic.main.activity_parkin_spot_pager.*
class PaymentPagerActivity() : AppCompatActivity() {
private lateinit var payments: MutableList<Payment>
private var paymentPosition: Int = 0
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_payment_pager)
init()
supportActionBar?.setDisplayHomeAsUpEnabled(true)
}
private fun init() {
val extras = intent.getSerializableExtra("payments") as ArrayList<Payment>
payments = extras
paymentPosition = intent.getIntExtra(ITEM_POSITION, 0)
viewPager.adapter = PaymentPagerAdapter(payments, this)
viewPager.currentItem = paymentPosition
}
override fun onSupportNavigateUp(): Boolean {
onBackPressed()
return super.onSupportNavigateUp()
}
}<file_sep>package hr.algebra.catchaspotapp
import android.os.Bundle
import androidx.fragment.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import com.google.android.gms.location.FusedLocationProviderClient
import com.google.android.gms.location.LocationServices
import com.google.android.gms.maps.GoogleMap
import com.google.android.gms.maps.OnMapReadyCallback
import com.google.android.gms.maps.SupportMapFragment
import com.google.android.gms.maps.model.BitmapDescriptorFactory
import com.google.android.gms.maps.model.LatLng
import com.google.android.gms.maps.model.Marker
import com.google.android.gms.maps.model.MarkerOptions
import com.google.firebase.firestore.FieldValue
import com.google.firebase.firestore.ktx.firestore
import com.google.firebase.ktx.Firebase
import hr.algebra.catchaspotapp.api.CASSender
import hr.algebra.catchaspotapp.framework.*
import hr.algebra.catchaspotapp.model.ParkingSpot
import hr.algebra.catchaspotapp.model.User
import kotlinx.android.synthetic.main.fragment_register_spot.*
class RegisterSpotFragment : Fragment(), OnMapReadyCallback, GoogleMap.OnMarkerDragListener {
private lateinit var fusedLocationClient: FusedLocationProviderClient
private lateinit var gMap: GoogleMap
private lateinit var movableMarker: MarkerOptions
private var db = Firebase.firestore
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
return inflater.inflate(R.layout.fragment_register_spot, container, false)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
fusedLocationClient = LocationServices.getFusedLocationProviderClient(requireContext())
val mapFragment = childFragmentManager.findFragmentById(R.id.map) as SupportMapFragment?
mapFragment?.getMapAsync(this)
setupListeners()
}
private fun setupListeners() {
btnRegisterSpot.setOnClickListener{
val position = movableMarker.position
val parkingSpot = ParkingSpot(
null,
etAddress.text.toString(),
etCost.text.toString().toDouble(),
position.latitude,
position.longitude,
true
)
val casSender = CASSender(requireContext())
casSender.sendNetworkRequest(parkingSpot)
etAddress.text.clear()
etParkingSpotNum.text.clear()
etCost.text.clear()
requireContext().hideKeyboard(it)
}
}
override fun onMapReady(map: GoogleMap) {
gMap = map
fusedLocationClient.prepareMapAndSetCurrentLocation(gMap, requireContext(), false)
var pos = LatLng(requireContext().getCurrentCoordinate(CURRENT_LATITUDE),
requireContext().getCurrentCoordinate(CURRENT_LONGITUDE))
movableMarker = MarkerOptions().position(pos).icon(
BitmapDescriptorFactory.defaultMarker(BitmapDescriptorFactory.HUE_AZURE)).draggable(true)
gMap.addMarker(movableMarker)
gMap.setOnMarkerDragListener(this)
}
override fun onMarkerDragStart(marker: Marker?) { }
override fun onMarkerDrag(marker: Marker?) { }
override fun onMarkerDragEnd(marker: Marker?) {
if (marker != null) {
movableMarker.position(marker.position)
}
}
}<file_sep>package hr.algebra.catchaspotapp.framework
import android.annotation.SuppressLint
import android.app.Activity
import android.content.BroadcastReceiver
import android.content.ContentValues
import android.content.Context
import android.content.Intent
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.location.Location
import android.net.ConnectivityManager
import android.net.NetworkCapabilities
import android.view.View
import android.view.animation.AnimationUtils
import android.view.inputmethod.InputMethodManager
import androidx.preference.PreferenceManager
import com.google.android.gms.location.FusedLocationProviderClient
import com.google.android.gms.maps.CameraUpdateFactory
import com.google.android.gms.maps.GoogleMap
import com.google.android.gms.maps.model.LatLng
import com.google.firebase.firestore.ktx.firestore
import com.google.firebase.ktx.Firebase
import hr.algebra.catchaspotapp.CAS_PROVIDER_CONTENT_URI
import hr.algebra.catchaspotapp.CURRENT_LATITUDE
import hr.algebra.catchaspotapp.CURRENT_LONGITUDE
import hr.algebra.catchaspotapp.USER_ID
import hr.algebra.catchaspotapp.dao.CASSqlHelper
import hr.algebra.catchaspotapp.model.ParkingSpot
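// SharedPreferences cannot store Double values directly, so coordinates are persisted as their raw long bit patterns.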
fun Context.getCurrentCoordinate(key: String)
= java.lang.Double.longBitsToDouble(
PreferenceManager.getDefaultSharedPreferences(this).getLong(
key,
0
)
)
fun Context.setCurrentCoordinate(key: String, value: Double)
= PreferenceManager.getDefaultSharedPreferences(this)
.edit()
.putLong(key, java.lang.Double.doubleToRawLongBits(value))
.apply()
fun Context.getCurrentUserIDProperty(key: String)
= PreferenceManager.getDefaultSharedPreferences(this).getString(key, "")
fun Context.setCurrentUserIDProperty(key: String, value: String)
= PreferenceManager.getDefaultSharedPreferences(this)
.edit()
.putString(key, value)
.apply()
fun Context.getBooleanProperty(key: String)
= PreferenceManager.getDefaultSharedPreferences(this).getBoolean(key, false)
fun Context.setBooleanProperty(key: String, value: Boolean)
= PreferenceManager.getDefaultSharedPreferences(this)
.edit()
.putBoolean(key, value)
.apply()
// Launches the given Activity in a new task.
inline fun <reified T : Activity> Context.startActivity() = startActivity(Intent(
this,
T::class.java
).apply {
addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
})
// Launches the given Activity and clears the existing back stack.
inline fun <reified T : Activity> Context.startActivityAndClearStack() = startActivity(Intent(
this,
T::class.java
).apply {
addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK)
})
inline fun <reified T : Activity> Context.startActivity(key: String, value: Int) = startActivity(
Intent(
this,
T::class.java
).apply {
putExtra(key, value)
})
fun Context.isOnline() : Boolean{
val connectivityManager = getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager
val activeNetwork = connectivityManager.activeNetwork
if (activeNetwork != null){
val networkCapabilities = connectivityManager.getNetworkCapabilities(activeNetwork)
if (networkCapabilities != null){
return networkCapabilities.hasTransport(NetworkCapabilities.TRANSPORT_CELLULAR)
|| networkCapabilities.hasTransport(NetworkCapabilities.TRANSPORT_WIFI)
}
}
return false
}
inline fun <reified T : BroadcastReceiver> Context.sendBroadcast() = sendBroadcast(
Intent(
this,
T::class.java
)
)
fun Context.fetchParkingSpots() : MutableList<ParkingSpot>{
val parkingSpots = mutableListOf<ParkingSpot>()
val cursor = contentResolver?.query(
CAS_PROVIDER_CONTENT_URI,
null, null, null, null
)
if (cursor != null){
while (cursor.moveToNext()){
parkingSpots.add(
ParkingSpot(
cursor.getLong(cursor.getColumnIndex(ParkingSpot::_id.name)),
cursor.getString(cursor.getColumnIndex(ParkingSpot::address.name)),
cursor.getDouble(cursor.getColumnIndex(ParkingSpot::price.name)),
cursor.getDouble(cursor.getColumnIndex(ParkingSpot::latCoordinate.name)),
cursor.getDouble(cursor.getColumnIndex(ParkingSpot::longCoordinate.name)),
cursor.getInt(cursor.getColumnIndex(ParkingSpot::isUserRegistered.name)) > 0
)
)
}
}
return parkingSpots
}
// Centers the map on the last known location and caches the coordinates in preferences.
@SuppressLint("MissingPermission")
fun FusedLocationProviderClient.prepareMapAndSetCurrentLocation(
gMap: GoogleMap,
context: Context,
showLocation: Boolean
){
gMap.isMyLocationEnabled = showLocation
gMap.uiSettings.isMyLocationButtonEnabled = showLocation
this.lastLocation
.addOnSuccessListener { location: Location? ->
if (location != null){
val sydney = LatLng(location.latitude, location.longitude)
gMap.moveCamera(CameraUpdateFactory.newLatLngZoom(sydney, 16F))
context.setCurrentCoordinate(
CURRENT_LATITUDE,
location.latitude
)
context.setCurrentCoordinate(
CURRENT_LONGITUDE,
location.longitude
)
}
}
}
fun Context.getCustomMarkerIcon(icon: Int): Bitmap {
val height = 70
val width = 50
val b = BitmapFactory.decodeResource(resources, icon)
return Bitmap.createScaledBitmap(b, width, height, false)
}
fun Context.clearTable(){
val helper = CASSqlHelper(this)
helper.clearTable(helper.writableDatabase)
}
fun Context.hideKeyboard(view: View) {
val inputMethodManager = getSystemService(Activity.INPUT_METHOD_SERVICE) as InputMethodManager
inputMethodManager.hideSoftInputFromWindow(view.windowToken, 0)
}
<file_sep>package hr.algebra.catchaspotapp.api
import com.google.gson.annotations.SerializedName
data class CASParkingSpot(
@SerializedName("id") val id : Long,
@SerializedName("address") val address : String,
@SerializedName("price") val price : Double,
@SerializedName("latCoordinate") val latCoordinate : Double,
@SerializedName("longCoordinate") val longCoordinate : Double
)
<file_sep>package hr.algebra.catchaspotapp
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import androidmads.library.qrgenearator.QRGContents
import androidmads.library.qrgenearator.QRGEncoder
import com.google.firebase.firestore.ktx.firestore
import com.google.firebase.ktx.Firebase
import hr.algebra.catchaspotapp.framework.getCurrentUserIDProperty
import hr.algebra.catchaspotapp.framework.startActivityAndClearStack
import hr.algebra.catchaspotapp.model.Payment
import kotlinx.android.synthetic.main.activity_payment.*
import java.text.SimpleDateFormat
import java.util.*
class PaymentActivity : AppCompatActivity() {
private lateinit var time: String
private var basePrice: Double = 0.0
private var finalPrice: Double = 0.0
private var address: String = ""
private var db = Firebase.firestore
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_payment)
var extras = intent.extras
if (extras != null){
time = extras.getString("time").toString()
basePrice = extras.getDouble("price")
address = extras.getString("address")!!
}
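// Pricing: a stay under an hour costs the base price; otherwise every started hour is billed at the base rate.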
val hour = time.split(":")[0].toInt()
finalPrice = if (hour < 1){
basePrice
}
else{
basePrice * (hour + 1)
}
tvPrice.text = "$finalPrice kn"
generateQR()
setupListeners()
}
private fun setupListeners() {
btnDone.setOnClickListener {
val date = Calendar.getInstance().time
val format = SimpleDateFormat.getDateTimeInstance()
val payment = Payment(getCurrentUserIDProperty(USER_ID)!!, address, finalPrice, format.format(date))
db.collection("payments").add(
payment
)
startActivityAndClearStack<HostActivity>()
}
}
private fun generateQR() {
var qrEncoder = QRGEncoder("time:$time, base price:$basePrice, final price:$finalPrice kn", null, QRGContents.Type.TEXT, 400)
val bitmap = qrEncoder.encodeAsBitmap()
ivQRCode.setImageBitmap(bitmap)
}
}<file_sep>package hr.algebra.catchaspotapp.api
import android.content.ContentValues
import android.content.Context
import android.util.Log
import com.google.firebase.firestore.DocumentSnapshot
import com.google.firebase.firestore.ktx.firestore
import com.google.firebase.ktx.Firebase
import hr.algebra.catchaspotapp.CASReceiver
import hr.algebra.catchaspotapp.CAS_PROVIDER_CONTENT_URI
import hr.algebra.catchaspotapp.USER_ID
import hr.algebra.catchaspotapp.framework.fetchParkingSpots
import hr.algebra.catchaspotapp.framework.getCurrentUserIDProperty
import hr.algebra.catchaspotapp.framework.prepareMapAndSetCurrentLocation
import hr.algebra.catchaspotapp.framework.sendBroadcast
import hr.algebra.catchaspotapp.model.ParkingSpot
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
import retrofit2.Call
import retrofit2.Callback
import retrofit2.Response
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
class CASFetcher(private val context: Context) {
private val db = Firebase.firestore
var casApi : CASApi
init{
val retrofit = Retrofit.Builder()
.baseUrl(API_URL)
.addConverterFactory(GsonConverterFactory.create())
.build()
casApi = retrofit.create(CASApi::class.java)
}
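// Inserts the fetched spots into the local content provider, flagging the ones the current user
// registered (their ids live in the user's Firestore document).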
fun populateSpotsOnRefresh(casParkingSpot: List<CASParkingSpot>, documentSnapshot: DocumentSnapshot) {
val work = GlobalScope.launch {
var registeredSpots: ArrayList<Long>? = null
documentSnapshot.data?.values?.forEach {
if (it is ArrayList<*>) {
registeredSpots = it as ArrayList<Long>
}
}
casParkingSpot.forEach {
val values = ContentValues().apply {
put(ParkingSpot::address.name, it.address)
put(ParkingSpot::price.name, it.price)
put(ParkingSpot::latCoordinate.name, it.latCoordinate)
put(ParkingSpot::longCoordinate.name, it.longCoordinate)
Log.d("Tag", it.toString())
if (registeredSpots != null) {
if (registeredSpots!!.contains(it.id)) {
put(ParkingSpot::isUserRegistered.name, true)
} else {
put(ParkingSpot::isUserRegistered.name, false)
}
} else {
put(ParkingSpot::isUserRegistered.name, false)
}
}
context.contentResolver.insert(CAS_PROVIDER_CONTENT_URI, values)
}
}
runBlocking {
work.join()
}
}
fun fetchParkingSpotsOnStart(){
val request = casApi.fetchParkingSpots()
request.enqueue(object: Callback<List<CASParkingSpot>>{
override fun onResponse(
call: Call<List<CASParkingSpot>>,
response: Response<List<CASParkingSpot>>
) {
if (response.body() != null){
context.getCurrentUserIDProperty(USER_ID)?.let { id ->
db.collection("users")
.document(id)
.get()
.addOnSuccessListener {
populateSpotsOnStart(response.body()!!, it)
}
}
}
}
override fun onFailure(call: Call<List<CASParkingSpot>>, t: Throwable) {
Log.d(javaClass.name, t.message, t)
}
})
}
private fun populateSpotsOnStart(casParkingSpot: List<CASParkingSpot>, documentSnapshot: DocumentSnapshot) {
GlobalScope.launch {
var registeredSpots: ArrayList<Long>? = null
documentSnapshot.data?.values?.forEach{
if (it is ArrayList<*>){
registeredSpots = it as ArrayList<Long>
}
}
casParkingSpot.forEach{
val values = ContentValues().apply {
put(ParkingSpot::address.name, it.address)
put(ParkingSpot::price.name, it.price)
put(ParkingSpot::latCoordinate.name, it.latCoordinate)
put(ParkingSpot::longCoordinate.name, it.longCoordinate)
Log.d("Tag", it.toString())
if (registeredSpots != null){
if (registeredSpots!!.contains(it.id)){
put(ParkingSpot::isUserRegistered.name, true)
}
else{
put(ParkingSpot::isUserRegistered.name, false)
}
}else{
put(ParkingSpot::isUserRegistered.name, false)
}
}
context.contentResolver.insert(CAS_PROVIDER_CONTENT_URI, values)
}
context.sendBroadcast<CASReceiver>()
}
}
}<file_sep>package hr.algebra.catchaspotapp
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import hr.algebra.catchaspotapp.framework.setBooleanProperty
import hr.algebra.catchaspotapp.framework.startActivity
class CASReceiver : BroadcastReceiver() {
override fun onReceive(context: Context, intent: Intent) {
context.startActivity<HostActivity>()
}
}<file_sep>package hr.algebra.catchaspotapp.factory
import android.content.Context
import hr.algebra.catchaspotapp.dao.CASSqlHelper
fun getCASRepository(context: Context?) = CASSqlHelper(context)<file_sep>package hr.algebra.catchaspotapp.api
import android.content.Context
import android.util.Log
import android.widget.Toast
import com.google.firebase.firestore.FieldValue
import com.google.firebase.firestore.ktx.firestore
import com.google.firebase.ktx.Firebase
import hr.algebra.catchaspotapp.USER_ID
import hr.algebra.catchaspotapp.framework.getCurrentUserIDProperty
import hr.algebra.catchaspotapp.model.ParkingSpot
import retrofit2.Call
import retrofit2.Callback
import retrofit2.Response
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
class CASSender(private val context: Context) {
private val db = Firebase.firestore
private var casApi : CASApi
init{
val retrofit = Retrofit.Builder()
.baseUrl(API_URL)
.addConverterFactory(GsonConverterFactory.create())
.build()
casApi = retrofit.create(CASApi::class.java)
}
fun sendNetworkRequest(parkingSpot: ParkingSpot){
val call = casApi.createParkingSpot(parkingSpot)
call.enqueue(object: Callback<ParkingSpot>{
override fun onResponse(call: Call<ParkingSpot>, response: Response<ParkingSpot>) {
Toast.makeText(context, "Register successful", Toast.LENGTH_LONG).show()
casApi.fetchParkingSpots().enqueue(object: Callback<List<CASParkingSpot>>{
override fun onResponse(
call: Call<List<CASParkingSpot>>,
response: Response<List<CASParkingSpot>>
) {
if (response.body() != null){
var idArray = arrayListOf<Long>()
response.body()!!.forEach {
idArray.add(it.id)
}
db.collection("users").document(context.getCurrentUserIDProperty(USER_ID)!!)
.update("registered_parking_spots", FieldValue.arrayUnion(idArray.last()))
}
}
override fun onFailure(call: Call<List<CASParkingSpot>>, t: Throwable) {
Log.d(javaClass.name, t.message, t)
}
})
}
override fun onFailure(call: Call<ParkingSpot>, t: Throwable) {
Toast.makeText(context, "Something went wrong", Toast.LENGTH_LONG).show()
}
})
}
}<file_sep>package hr.algebra.catchaspotapp
import android.content.Context
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.ImageView
import android.widget.TextView
import androidx.recyclerview.widget.RecyclerView
import hr.algebra.catchaspotapp.framework.OnItemClickListenerCallback
import hr.algebra.catchaspotapp.framework.startActivity
import hr.algebra.catchaspotapp.model.ParkingSpot
class ParkingSpotAdapter(
private val parkingSpots: MutableList<ParkingSpot>,
private val context: Context,
private val onClickListener: OnItemClickListenerCallback
) : RecyclerView.Adapter<ParkingSpotAdapter.ViewHolder>() {
class ViewHolder(parkingSpotView : View) : RecyclerView.ViewHolder(parkingSpotView) {
private val ivParkingSpot: ImageView = parkingSpotView.findViewById(R.id.ivParkingSpot)
private val tvParkingSpot: TextView = parkingSpotView.findViewById(R.id.tvParkingSpot)
fun bind(parkingSpot: ParkingSpot){
ivParkingSpot.setImageResource(R.drawable.logo)
tvParkingSpot.text = parkingSpot.address
}
}
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder {
val parkingSpotView = LayoutInflater.from(context)
.inflate(R.layout.parking_spot, parent, false)
return ViewHolder(parkingSpotView)
}
override fun onBindViewHolder(holder: ViewHolder, position: Int) {
holder.itemView.setOnClickListener{
onClickListener.onItemClick(it, position)
}
holder.itemView.setOnLongClickListener{
context.startActivity<ParkingSpotPagerActivity>(ITEM_POSITION, position)
true
}
holder.bind(parkingSpots[position])
}
override fun getItemCount() = parkingSpots.size
}<file_sep>package hr.algebra.catchaspotapp
import android.os.Bundle
import android.util.Log
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.swiperefreshlayout.widget.SwipeRefreshLayout
import com.google.android.gms.location.FusedLocationProviderClient
import com.google.android.gms.location.LocationServices
import com.google.android.gms.maps.*
import com.google.android.gms.maps.model.BitmapDescriptorFactory
import com.google.android.gms.maps.model.LatLng
import com.google.android.gms.maps.model.MarkerOptions
import com.google.firebase.firestore.ktx.firestore
import com.google.firebase.ktx.Firebase
import com.google.maps.android.ui.IconGenerator
import hr.algebra.catchaspotapp.api.CASFetcher
import hr.algebra.catchaspotapp.api.CASParkingSpot
import hr.algebra.catchaspotapp.framework.*
import hr.algebra.catchaspotapp.model.ParkingSpot
import kotlinx.android.synthetic.main.fragment_rent_spot.*
import kotlinx.coroutines.*
import retrofit2.Call
import retrofit2.Callback
import retrofit2.Response
const val CURRENT_LATITUDE = "hr.algebra.catchaspotapp_latitude"
const val CURRENT_LONGITUDE = "hr.algebra.catchaspotapp_longitude"
class RentSpotFragment : Fragment(), OnMapReadyCallback, OnItemClickListenerCallback {
private lateinit var swipeRefreshLayout: SwipeRefreshLayout
private lateinit var fusedLocationClient: FusedLocationProviderClient
private lateinit var gMap: GoogleMap
private var parkingSpots = mutableListOf<ParkingSpot>()
private lateinit var eligibleParkingSpots: MutableList<ParkingSpot>
private var db = Firebase.firestore
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
val allParkingSpots = requireContext().fetchParkingSpots()
allParkingSpots.forEach{
if (!it.isUserRegistered){
parkingSpots.add(it)
}
}
return inflater.inflate(R.layout.fragment_rent_spot, container, false)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
swipeRefreshLayout = view.findViewById(R.id.swipeContainer)
swipeRefreshLayout.setOnRefreshListener {
refresh()
}
fusedLocationClient = LocationServices.getFusedLocationProviderClient(requireContext())
val mapFragment = childFragmentManager.findFragmentById(R.id.map) as SupportMapFragment?
mapFragment?.getMapAsync(this)
eligibleParkingSpots = mutableListOf()
parkingSpots.forEach{
if (!it.isUserRegistered)
{
eligibleParkingSpots.add(it)
}
}
val parkingSpotAdapter = ParkingSpotAdapter(eligibleParkingSpots, requireContext(), this)
rvParkingSpots.apply {
layoutManager = LinearLayoutManager(activity)
adapter = parkingSpotAdapter
}
}
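// Pull-to-refresh: clear the cached table, re-fetch spots from the REST API, then redraw the map
// markers once the local cache is repopulated.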
private fun refresh() {
requireContext().clearTable()
val fetcher = CASFetcher(requireContext())
val request = fetcher.casApi.fetchParkingSpots()
requireContext().getCurrentUserIDProperty(USER_ID)?.let { id ->
db.collection("users")
.document(id)
.get()
.addOnSuccessListener {
request.enqueue(object: Callback<List<CASParkingSpot>> {
override fun onResponse(
call: Call<List<CASParkingSpot>>,
response: Response<List<CASParkingSpot>>
) {
if (response.body() != null){
val work = GlobalScope.launch {
fetcher.populateSpotsOnRefresh(response.body()!!, it)
}
runBlocking {
work.join()
parkingSpots = requireContext().fetchParkingSpots()
gMap.clear()
fusedLocationClient.prepareMapAndSetCurrentLocation(gMap, requireContext(), true)
addMarkers()
swipeRefreshLayout.isRefreshing = false
}
}
}
override fun onFailure(call: Call<List<CASParkingSpot>>, t: Throwable) {
Log.d(javaClass.name, t.message, t)
}
})
}
}
}
override fun onMapReady(map: GoogleMap) {
gMap = map
fusedLocationClient.prepareMapAndSetCurrentLocation(gMap, requireContext(), true)
addMarkers()
}
private fun addMarkers(){
var iconFactory = IconGenerator(requireContext())
parkingSpots.forEach {
if (!it.isUserRegistered){
val pos = LatLng(it.latCoordinate, it.longCoordinate)
var marker = MarkerOptions()
.position(pos)
.icon(
BitmapDescriptorFactory
.fromBitmap(iconFactory.makeIcon(it.price.toString() + "kn/h"))
)
.anchor(iconFactory.anchorU, iconFactory.anchorV)
//val customMarkerIcon = requireContext().getCustomMarkerIcon(R.drawable.parking_meter)
gMap.addMarker(marker)
}
}
}
override fun onItemClick(v: View, position: Int) {
val parkingSpot = eligibleParkingSpots[position]
val pos = LatLng(parkingSpot.latCoordinate, parkingSpot.longCoordinate)
val cameraUpdate = CameraUpdateFactory.newLatLng(pos)
gMap.animateCamera(cameraUpdate)
}
}
<file_sep>package hr.algebra.catchaspotapp
import android.content.Context
import android.content.Intent
import android.net.Uri
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.Button
import android.widget.TextView
import androidx.core.content.ContextCompat.startActivity
import androidx.recyclerview.widget.RecyclerView
import com.google.android.gms.maps.model.LatLng
import hr.algebra.catchaspotapp.framework.getCurrentCoordinate
import hr.algebra.catchaspotapp.framework.startActivity
import hr.algebra.catchaspotapp.model.ParkingSpot
class ParkingSpotPagerAdapter(private val parkingSpots: MutableList<ParkingSpot>, private val context: Context)
: RecyclerView.Adapter<ParkingSpotPagerAdapter.ViewHolder>() {
class ViewHolder(itemView: View): RecyclerView.ViewHolder(itemView) {
private val tvAddress: TextView = itemView.findViewById(R.id.tvAddress)
private val tvCost: TextView = itemView.findViewById(R.id.tvCost)
fun bind(parkingSpot: ParkingSpot){
tvAddress.text = parkingSpot.address
tvCost.text = parkingSpot.price.toString() + " kn/h"
}
}
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder {
val itemView = LayoutInflater.from(parent.context)
.inflate(R.layout.parking_spot_pager, parent, false)
return ViewHolder(itemView)
}
override fun onBindViewHolder(holder: ViewHolder, position: Int) {
val parkingSpot = parkingSpots[position]
val btnNavigate = holder.itemView.findViewById<Button>(R.id.btnNavigate)
val btnParked = holder.itemView.findViewById<Button>(R.id.btnParked)
val currentLocation = LatLng(
context.getCurrentCoordinate(CURRENT_LATITUDE),
context.getCurrentCoordinate(CURRENT_LONGITUDE)
)
btnNavigate.setOnClickListener {
// Open Google Maps with directions from the user's current location to the parking spot.
val uriString = "http://maps.google.com/maps?saddr=" +
"${currentLocation.latitude},${currentLocation.longitude}" +
"&daddr=${parkingSpot.latCoordinate},${parkingSpot.longCoordinate}"
val intent = Intent(Intent.ACTION_VIEW, Uri.parse(uriString))
intent.setPackage("com.google.android.apps.maps")
startActivity(context, intent, null)
}
btnParked.setOnClickListener {
val intent = Intent(context, ParkedActivity::class.java)
intent.putExtra("price", parkingSpot.price)
intent.putExtra("address", parkingSpot.address)
startActivity(context, intent, null)
}
holder.bind(parkingSpot)
}
override fun getItemCount() = parkingSpots.size
}<file_sep>package hr.algebra.catchaspotapp
import android.content.Context
import android.content.Intent
import androidx.core.app.JobIntentService
import hr.algebra.catchaspotapp.api.CASFetcher
private const val JOB_ID = 1
class CASService : JobIntentService() {
override fun onHandleWork(intent: Intent) {
CASFetcher(this).fetchParkingSpotsOnStart()
}
companion object{
fun enqueueWork(context: Context, intent: Intent){
enqueueWork(context, CASService::class.java, JOB_ID, intent)
}
}
}<file_sep>package hr.algebra.catchaspotapp
import android.app.AlarmManager
import android.app.PendingIntent
import android.content.Context
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.os.SystemClock
import android.widget.Chronometer
import hr.algebra.catchaspotapp.framework.startActivityAndClearStack
import kotlinx.android.synthetic.main.activity_parked.*
import java.util.*
private const val R_CODE = 3
class ParkedActivity : AppCompatActivity() {
private lateinit var alarmManager: AlarmManager
private lateinit var stopwatch: Chronometer
private var price: Double = 0.0
private var address: String = ""
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_parked)
stopwatch = findViewById(R.id.chronometer)
startTimer()
init()
setupListeners()
}
private fun init() {
alarmManager = getSystemService(Context.ALARM_SERVICE) as AlarmManager
price = intent.extras!!.getDouble("price")
address = intent.extras!!.getString("address")!!
}
private fun setupListeners() {
btnSetAlarm.setOnClickListener {
val calendar = Calendar.getInstance()
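// NOTE: the alarm fires one second from now, apparently for testing; the receiver's notification text assumes a one-hour interval.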
calendar.add(Calendar.SECOND, 1)
val intent = Intent(this, AlarmReceiver::class.java)
val pendingIntent = PendingIntent.getBroadcast(this, R_CODE, intent, PendingIntent.FLAG_UPDATE_CURRENT)
alarmManager.set(AlarmManager.RTC_WAKEUP, calendar.timeInMillis, pendingIntent)
}
btnLeave.setOnClickListener {
stopwatch.stop()
val time = stopwatch.text.toString()
val intent = Intent(this, PaymentActivity::class.java)
intent.putExtra("time", time)
intent.putExtra("price", price)
intent.putExtra("address", address)
startActivity(intent)
}
}
private fun startTimer() {
var hour: String
var min: String
var sec: String
stopwatch.setOnChronometerTickListener {
// Elapsed whole seconds since the chronometer base.
val countUp = (SystemClock.elapsedRealtime() - it.base) / 1000
hour = (countUp / 3600).toString().padStart(2, '0')
// Minutes within the current hour, not total minutes.
min = (countUp % 3600 / 60).toString().padStart(2, '0')
sec = (countUp % 60).toString().padStart(2, '0')
stopwatch.text = "$hour:$min:$sec"
}
stopwatch.start()
}
}<file_sep>package hr.algebra.catchaspotapp
import android.os.Bundle
import android.util.Patterns
import androidx.appcompat.app.AppCompatActivity
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.firestore.ktx.firestore
import com.google.firebase.ktx.Firebase
import hr.algebra.catchaspotapp.framework.setBooleanProperty
import hr.algebra.catchaspotapp.framework.setCurrentUserIDProperty
import hr.algebra.catchaspotapp.framework.startActivity
import hr.algebra.catchaspotapp.framework.startActivityAndClearStack
import kotlinx.android.synthetic.main.activity_login.*
const val USER_LOGGED_IN = "hr.algebra.catchaspotapp_user_logged_in"
const val USER_ID = "hr.algebra.catchaspotapp_user_id"
class LoginActivity : AppCompatActivity() {
private lateinit var mAuth: FirebaseAuth
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_login)
mAuth = FirebaseAuth.getInstance()
setupListeners()
}
private fun setupListeners() {
btnLogin.setOnClickListener {
val email = etEmail.text.toString().trim()
val password = etPassword.text.toString().trim()
if(validateFields(email, password)){
loginUser(email, password)
}
}
btnRegister.setOnClickListener {
startActivity<RegisterActivity>()
}
}
private fun loginUser(email: String, password: String) {
mAuth.signInWithEmailAndPassword(email, password)
.addOnCompleteListener(this) { task ->
if (task.isSuccessful) {
setBooleanProperty(USER_LOGGED_IN, true)
FirebaseAuth.getInstance().currentUser?.let {fu ->
Firebase.firestore.collection("users")
.document(fu.uid)
.get()
.addOnSuccessListener {
setCurrentUserIDProperty(USER_ID, it.id)
startActivityAndClearStack<SplashScreenActivity>()
}
}
} else {
etEmail.error = "User doesn't exist or password mismatch"
etEmail.requestFocus()
}
}
}
private fun validateFields(email: String, password: String):Boolean {
if (email.isEmpty()){
etEmail.error = "Email required"
etEmail.requestFocus()
return false
}
if (!Patterns.EMAIL_ADDRESS.matcher(email).matches()){
etEmail.error = "Please insert valid email"
etEmail.requestFocus()
return false
}
if (password.isEmpty()){
etPassword.error = "Password required"
etPassword.requestFocus()
return false
}
return true
}
}<file_sep>package hr.algebra.catchaspotapp.dao
import android.content.ContentValues
import android.database.Cursor
import android.database.sqlite.SQLiteDatabase
interface CASRepository {
fun delete(selection: String?, selectionArgs: Array<String>?): Int
fun insert(values: ContentValues?): Long
fun query(projection: Array<String>?, selection: String?,
selectionArgs: Array<String>?, sortOrder: String?
): Cursor?
fun update(values: ContentValues?, selection: String?,
selectionArgs: Array<String>?
): Int
}<file_sep>package hr.algebra.catchaspotapp.model
import java.io.Serializable
data class Payment(
val payerID: String,
val address: String,
val pricePayed: Double,
val date: String
) : Serializable
| 7b2fe769374558625075f27d02213938aa352d51 | ["Kotlin"] | 29 | Kotlin | Bis-Git/CatchASpot | b35aca1c97f3f11be77c50236ddc0300338847ec | 6fa9a28f8f9678a6b05b1169112f3454991ebda4 | refs/heads/master |
<repo_name>ankode/muzei<file_sep>/main/src/main/java/com/google/android/apps/muzei/sync/TaskQueueService.kt
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@file:Suppress("DEPRECATION")
package com.google.android.apps.muzei.sync
import android.app.AlarmManager
import android.app.PendingIntent
import android.app.Service
import android.app.job.JobInfo
import android.app.job.JobScheduler
import android.content.ComponentName
import android.content.Context
import android.content.Intent
import android.os.Build
import android.os.IBinder
import android.os.PowerManager
import android.os.SystemClock
import android.preference.PreferenceManager
import android.support.v4.content.WakefulBroadcastReceiver
import androidx.core.content.edit
import kotlinx.coroutines.experimental.asCoroutineDispatcher
import kotlinx.coroutines.experimental.launch
import java.util.concurrent.Executor
import java.util.concurrent.Executors
class TaskQueueService : Service() {
companion object {
private const val TAG = "muzei:TaskQueueService"
internal const val ACTION_DOWNLOAD_CURRENT_ARTWORK = "com.google.android.apps.muzei.action.DOWNLOAD_CURRENT_ARTWORK"
private const val LOAD_ARTWORK_JOB_ID = 1
private const val PREF_ARTWORK_DOWNLOAD_ATTEMPT = "artwork_download_attempt"
private const val DOWNLOAD_ARTWORK_WAKELOCK_TIMEOUT_MILLIS = 30 * 1000L
private fun getArtworkDownloadRetryPendingIntent(context: Context): PendingIntent {
return PendingIntent.getService(context, 0,
getDownloadCurrentArtworkIntent(context),
PendingIntent.FLAG_UPDATE_CURRENT)
}
fun getDownloadCurrentArtworkIntent(context: Context): Intent {
return Intent(context, TaskQueueService::class.java)
.setAction(ACTION_DOWNLOAD_CURRENT_ARTWORK)
}
fun maybeRetryDownloadDueToGainedConnectivity(context: Context): Intent? {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
val jobScheduler = context.getSystemService(Context.JOB_SCHEDULER_SERVICE) as JobScheduler
val pendingJobs = jobScheduler.allPendingJobs
for (pendingJob in pendingJobs) {
if (pendingJob.id == LOAD_ARTWORK_JOB_ID) {
return TaskQueueService.getDownloadCurrentArtworkIntent(context)
}
}
return null
}
return if (PreferenceManager.getDefaultSharedPreferences(context)
.getInt(PREF_ARTWORK_DOWNLOAD_ATTEMPT, 0) > 0)
TaskQueueService.getDownloadCurrentArtworkIntent(context)
else
null
}
}
private val executorService : Executor = Executors.newSingleThreadExecutor()
private val coroutineDispatcher = executorService.asCoroutineDispatcher()
private lateinit var lock: PowerManager.WakeLock
override fun onBind(intent: Intent): IBinder? {
return null
}
override fun onCreate() {
super.onCreate()
// This is normally not started by a WakefulBroadcastReceiver so request a
// new wakelock.
val pwm = getSystemService(Context.POWER_SERVICE) as PowerManager
lock = pwm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, TAG)
lock.acquire(DOWNLOAD_ARTWORK_WAKELOCK_TIMEOUT_MILLIS)
}
override fun onStartCommand(intent: Intent, flags: Int, startId: Int): Int {
if (ACTION_DOWNLOAD_CURRENT_ARTWORK == intent.action) {
// Handle internal download artwork request
launch(coroutineDispatcher) {
val success = downloadArtwork(this@TaskQueueService)
if (success) {
cancelArtworkDownloadRetries()
} else {
scheduleRetryArtworkDownload()
}
WakefulBroadcastReceiver.completeWakefulIntent(intent)
stopSelf(startId)
}
return Service.START_REDELIVER_INTENT
} else {
stopSelf()
return Service.START_NOT_STICKY
}
}
override fun onDestroy() {
super.onDestroy()
if (lock.isHeld) {
lock.release()
}
}
private fun cancelArtworkDownloadRetries() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
val jobScheduler = getSystemService(Context.JOB_SCHEDULER_SERVICE) as JobScheduler
jobScheduler.cancel(LOAD_ARTWORK_JOB_ID)
} else {
val am = getSystemService(Context.ALARM_SERVICE) as AlarmManager
am.cancel(TaskQueueService.getArtworkDownloadRetryPendingIntent(this))
val sp = PreferenceManager.getDefaultSharedPreferences(this)
sp.edit {
putInt(PREF_ARTWORK_DOWNLOAD_ATTEMPT, 0)
}
}
}
private fun scheduleRetryArtworkDownload() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
val jobScheduler = getSystemService(Context.JOB_SCHEDULER_SERVICE) as JobScheduler
jobScheduler.schedule(JobInfo.Builder(LOAD_ARTWORK_JOB_ID,
ComponentName(this, DownloadArtworkJobService::class.java))
.setRequiredNetworkType(if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N)
JobInfo.NETWORK_TYPE_NOT_ROAMING
else
JobInfo.NETWORK_TYPE_ANY)
.build())
} else {
val sp = PreferenceManager.getDefaultSharedPreferences(this)
val reloadAttempt = sp.getInt(PREF_ARTWORK_DOWNLOAD_ATTEMPT, 0)
sp.edit {
putInt(PREF_ARTWORK_DOWNLOAD_ATTEMPT, reloadAttempt + 1)
}
val am = getSystemService(Context.ALARM_SERVICE) as AlarmManager
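// Exponential backoff: retry after 2s, 4s, 8s, ... doubling with each failed attempt.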
val retryTimeMillis = SystemClock.elapsedRealtime() + (1 shl reloadAttempt) * 2000
am.set(AlarmManager.ELAPSED_REALTIME, retryTimeMillis,
TaskQueueService.getArtworkDownloadRetryPendingIntent(this))
}
}
}
<file_sep>/main/src/main/java/com/google/android/apps/muzei/sync/DownloadArtwork.kt
package com.google.android.apps.muzei.sync
import android.content.ContentUris
import android.content.Context
import android.net.Uri
import android.util.Log
import com.google.android.apps.muzei.api.MuzeiContract
import com.google.android.apps.muzei.room.MuzeiDatabase
import net.nurik.roman.muzei.BuildConfig
import okhttp3.Request
import java.io.File
import java.io.FileInputStream
import java.io.FileNotFoundException
import java.io.IOException
import java.io.InputStream
import java.net.URL
private const val TAG = "DownloadArtwork"
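// Downloads the current artwork into Muzei's content provider and publishes the loading state to ArtworkLoadingLiveData.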
internal suspend fun downloadArtwork(context: Context)
= doDownload(context).also { success ->
if (success) {
ArtworkLoadingLiveData.postValue(ArtworkLoadingSuccess)
} else {
ArtworkLoadingLiveData.postValue(ArtworkLoadingFailure)
}
}
private suspend fun doDownload(context: Context): Boolean {
val artwork = MuzeiDatabase.getInstance(context).artworkDao().getCurrentArtwork()
val resolver = context.contentResolver
if (artwork == null) {
Log.w(TAG, "Could not read current artwork")
return false
}
val artworkUri = ContentUris.withAppendedId(MuzeiContract.Artwork.CONTENT_URI,
artwork.id)
if (artwork.imageUri == null) {
// There's nothing else we can do here so declare success
if (BuildConfig.DEBUG) {
Log.d(TAG, "Artwork $artworkUri does not have an image URI, skipping")
}
return true
}
if (BuildConfig.DEBUG) {
Log.d(TAG, "Attempting to download ${artwork.imageUri} to $artworkUri")
}
try {
resolver.openOutputStream(artworkUri)?.use { out ->
openUri(context, artwork.imageUri).use { input ->
// Only publish progress (i.e., say we've started loading the artwork)
// if we actually need to download the artwork
ArtworkLoadingLiveData.postValue(ArtworkLoadingInProgress)
val buffer = ByteArray(1024)
var bytes = input.read(buffer)
while (bytes >= 0) {
out.write(buffer, 0, bytes)
bytes = input.read(buffer)
}
out.flush()
if (BuildConfig.DEBUG) {
Log.d(TAG, "Artwork $artworkUri was successfully written")
}
}
} ?: run {
// We've already downloaded the file
if (BuildConfig.DEBUG) {
Log.d(TAG, "Artwork $artworkUri has already been downloaded")
}
return true
}
} catch (e: IOException) {
Log.e(TAG, "Error downloading artwork", e)
return false
} catch (e: IllegalArgumentException) {
Log.e(TAG, "Error downloading artwork", e)
return false
}
return true
}
@Throws(IOException::class)
private fun openUri(context: Context, uri: Uri?): InputStream {
if (uri == null) {
throw IllegalArgumentException("Uri cannot be empty")
}
val scheme = uri.scheme ?: throw IOException("Uri had no scheme")
var input: InputStream? = null
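// Supported schemes: content / android.resource, file (including android_asset paths), and http(s) via OkHttp.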
if ("content" == scheme || "android.resource" == scheme) {
try {
input = context.contentResolver.openInputStream(uri)
} catch (e: SecurityException) {
throw FileNotFoundException("No access to $uri: $e")
} catch (e: NullPointerException) {
throw FileNotFoundException("Error accessing to $uri: $e")
}
} else if ("file" == scheme) {
val segments = uri.pathSegments
input = if (segments != null && segments.size > 1
&& "android_asset" == segments[0]) {
val assetManager = context.assets
val assetPath = StringBuilder()
for (i in 1 until segments.size) {
if (i > 1) {
assetPath.append("/")
}
assetPath.append(segments[i])
}
assetManager.open(assetPath.toString())
} else {
FileInputStream(File(uri.path))
}
} else if ("http" == scheme || "https" == scheme) {
val client = OkHttpClientFactory.getNewOkHttpsSafeClient()
val request: Request = Request.Builder().url(URL(uri.toString())).build()
val response = client.newCall(request).execute()
val responseCode = response.code()
if (responseCode !in 200..299) {
throw IOException("HTTP error response $responseCode reading $uri")
}
input = response.body()?.byteStream()
}
if (input == null) {
throw FileNotFoundException("Null input stream for URI: $uri")
}
return input
}<file_sep>/wearable/src/main/java/com/google/android/apps/muzei/FullScreenActivity.kt
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.muzei
import android.animation.Animator
import android.animation.AnimatorSet
import android.animation.ObjectAnimator
import android.content.Context
import android.database.ContentObserver
import android.graphics.Bitmap
import android.os.Build
import android.os.Bundle
import android.os.Handler
import android.support.v4.app.FragmentActivity
import android.support.v4.app.LoaderManager
import android.support.v4.content.AsyncTaskLoader
import android.support.v4.content.Loader
import android.util.Log
import android.view.GestureDetector
import android.view.MotionEvent
import android.view.View
import android.widget.TextView
import com.google.android.apps.muzei.api.MuzeiContract
import com.google.android.apps.muzei.room.Artwork
import com.google.android.apps.muzei.room.MuzeiDatabase
import com.google.android.apps.muzei.util.PanView
import com.google.firebase.analytics.FirebaseAnalytics
import kotlinx.coroutines.experimental.runBlocking
import net.nurik.roman.muzei.BuildConfig
import net.nurik.roman.muzei.R
import java.io.FileNotFoundException
class FullScreenActivity : FragmentActivity(), LoaderManager.LoaderCallbacks<Pair<Artwork?, Bitmap?>> {
companion object {
private const val TAG = "FullScreenActivity"
}
private lateinit var panView: PanView
private lateinit var loadingIndicatorView: View
private lateinit var scrimView: View
private lateinit var metadataContainerView: View
private lateinit var titleView: TextView
private lateinit var bylineView: TextView
@Suppress("DEPRECATION")
private lateinit var dismissOverlay: android.support.wearable.view.DismissOverlayView
private lateinit var detector: GestureDetector
private var blurAnimator: Animator? = null
private val handler = Handler()
private var metadataVisible = false
private val showLoadingIndicatorRunnable = Runnable { loadingIndicatorView.visibility = View.VISIBLE }
public override fun onCreate(savedState: Bundle?) {
super.onCreate(savedState)
setContentView(R.layout.full_screen_activity)
FirebaseAnalytics.getInstance(this).setUserProperty("device_type", BuildConfig.DEVICE_TYPE)
panView = findViewById(R.id.pan_view)
supportLoaderManager.initLoader(0, null, this)
scrimView = findViewById(R.id.scrim)
loadingIndicatorView = findViewById(R.id.loading_indicator)
handler.postDelayed(showLoadingIndicatorRunnable, 500)
metadataContainerView = findViewById(R.id.metadata_container)
titleView = findViewById(R.id.title)
bylineView = findViewById(R.id.byline)
dismissOverlay = findViewById(R.id.dismiss_overlay)
// Only show the dismiss overlay on Wear 1.0 devices
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
dismissOverlay.setIntroText(R.string.dismiss_overlay_intro)
dismissOverlay.showIntroIfNecessary()
}
detector = GestureDetector(this, object : GestureDetector.SimpleOnGestureListener() {
override fun onSingleTapConfirmed(e: MotionEvent): Boolean {
if (dismissOverlay.visibility == View.VISIBLE) {
return false
}
if (metadataVisible) {
setMetadataVisible(false)
} else {
setMetadataVisible(true)
}
return true
}
override fun onLongPress(ev: MotionEvent) {
if (dismissOverlay.visibility == View.VISIBLE) {
return
}
// Only show the dismiss overlay on Wear 1.0 devices
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
dismissOverlay.show()
}
}
})
}
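    // Cross-fades the image blur, the scrim and the metadata container together when metadata visibility is toggled.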
private fun setMetadataVisible(metadataVisible: Boolean) {
this.metadataVisible = metadataVisible
blurAnimator?.cancel()
val set = AnimatorSet().apply {
duration = resources.getInteger(android.R.integer.config_shortAnimTime).toLong()
}
set
.play(ObjectAnimator.ofFloat(panView, "blurAmount", if (metadataVisible) 1f else 0f))
.with(ObjectAnimator.ofFloat(scrimView, View.ALPHA, if (metadataVisible) 1f else 0f))
.with(ObjectAnimator.ofFloat(metadataContainerView, View.ALPHA,
if (metadataVisible) 1f else 0f))
blurAnimator = set.also {
it.start()
}
}
override fun dispatchTouchEvent(ev: MotionEvent): Boolean {
return detector.onTouchEvent(ev) || super.dispatchTouchEvent(ev)
}
private class ArtworkLoader internal constructor(context: Context)
: AsyncTaskLoader<Pair<Artwork?, Bitmap?>>(context) {
private var contentObserver: ContentObserver? = null
private var artwork: Artwork? = null
private var image: Bitmap? = null
override fun onStartLoading() {
if (artwork != null && image != null) {
deliverResult(Pair(artwork, image))
}
if (contentObserver == null) {
contentObserver = object : ContentObserver(null) {
override fun onChange(selfChange: Boolean) {
onContentChanged()
}
}.also { contentObserver ->
context.contentResolver.registerContentObserver(
MuzeiContract.Artwork.CONTENT_URI, true, contentObserver)
}
}
forceLoad()
}
override fun loadInBackground() = runBlocking {
loadArtworkAndImage()
}
private suspend fun loadArtworkAndImage(): Pair<Artwork?, Bitmap?> {
try {
artwork = MuzeiDatabase.getInstance(context)
.artworkDao().getCurrentArtwork()
image = MuzeiContract.Artwork.getCurrentArtworkBitmap(context)
} catch (e: FileNotFoundException) {
Log.e(TAG, "Error getting artwork", e)
}
return Pair(artwork, image)
}
override fun onReset() {
super.onReset()
image = null
contentObserver?.let { contentObserver ->
context.contentResolver.unregisterContentObserver(contentObserver)
this.contentObserver = null
}
}
}
override fun onCreateLoader(id: Int, args: Bundle?): Loader<Pair<Artwork?, Bitmap?>> {
return ArtworkLoader(this)
}
override fun onLoadFinished(loader: Loader<Pair<Artwork?, Bitmap?>>, pair: Pair<Artwork?, Bitmap?>) {
val (artwork, image) = pair
if (artwork == null || image == null) {
return
}
handler.removeCallbacks(showLoadingIndicatorRunnable)
loadingIndicatorView.visibility = View.GONE
panView.visibility = View.VISIBLE
panView.setImage(image)
titleView.text = artwork.title
bylineView.text = artwork.byline
}
override fun onLoaderReset(loader: Loader<Pair<Artwork?, Bitmap?>>) {
panView.setImage(null)
}
}
<file_sep>/main/src/main/java/com/google/android/apps/muzei/render/RealRenderController.kt
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.muzei.render
import android.content.Context
import android.database.ContentObserver
import android.net.Uri
import android.os.Handler
import com.google.android.apps.muzei.api.MuzeiContract
class RealRenderController(
context: Context,
renderer: MuzeiBlurRenderer,
callbacks: RenderController.Callbacks
) : RenderController(context, renderer, callbacks) {
private val contentObserver: ContentObserver = object : ContentObserver(Handler()) {
override fun onChange(selfChange: Boolean, uri: Uri) {
reloadCurrentArtwork(false)
}
}
init {
context.contentResolver.registerContentObserver(MuzeiContract.Artwork.CONTENT_URI,
true, contentObserver)
reloadCurrentArtwork(false)
}
override fun destroy() {
super.destroy()
context.contentResolver.unregisterContentObserver(contentObserver)
}
override suspend fun openDownloadedCurrentArtwork(forceReload: Boolean) =
BitmapRegionLoader.newInstance(context.contentResolver,
MuzeiContract.Artwork.CONTENT_URI)
}
<file_sep>/main/src/main/java/com/google/android/apps/muzei/sync/DownloadArtworkJobService.kt
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.muzei.sync
import android.app.job.JobParameters
import android.app.job.JobService
import android.os.Build
import android.support.annotation.RequiresApi
import kotlinx.coroutines.experimental.Job
import kotlinx.coroutines.experimental.launch
/**
* JobService that handles reloading artwork after any initial failure
*/
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
class DownloadArtworkJobService : JobService() {
private var job: Job? = null
override fun onStartJob(params: JobParameters): Boolean {
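        // Run the download in a coroutine; jobFinished(params, !success) asks JobScheduler to reschedule the job when the download failed.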
job = launch {
val success = downloadArtwork(this@DownloadArtworkJobService)
jobFinished(params, !success)
}
return true
}
override fun onStopJob(params: JobParameters): Boolean {
job?.cancel()
return true
}
}
|
1317e271be5c37939b7e1ba56187fff0da3468dc
|
[
"Kotlin"
] | 5 |
Kotlin
|
ankode/muzei
|
d35de1458e395086036fc1e2fbb881f924c2e5a2
|
833821714daca03a963fa3761741429e64f9935a
|
refs/heads/master
|
<file_sep>package org.firstinspires.ftc.teamcode.subsystems;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.Range;
public class DriveTrainTeleOp extends DriveTrain {
public DriveTrainTeleOp(LinearOpMode opMode) {
super(opMode);
}
public void pollGamepads() {
if (opMode.gamepad1.a) {
stopMotion();
} else {
updateHolonomicMotion();
}
}
private void updateHolonomicMotion() {
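        // Standard holonomic mixing: the left stick translates the robot and the right stick rotates it;
        // each wheel power is a signed sum of the three stick inputs, then clipped and scaled below.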
double gamepad1LeftY = -opMode.gamepad1.left_stick_y;
double gamepad1LeftX = opMode.gamepad1.left_stick_x;
double gamepad1RightX = opMode.gamepad1.right_stick_x;
double frontRightPower = gamepad1LeftY - gamepad1LeftX - gamepad1RightX;
double frontLeftPower = -gamepad1LeftY - gamepad1LeftX - gamepad1RightX;
double backRightPower = gamepad1LeftY + gamepad1LeftX - gamepad1RightX;
double backLeftPower = -gamepad1LeftY + gamepad1LeftX - gamepad1RightX;
frontRightPower = Range.clip(frontRightPower, -1, 1);
frontLeftPower = Range.clip(frontLeftPower, -1, 1);
backRightPower = Range.clip(backRightPower, -1, 1);
backLeftPower = Range.clip(backLeftPower, -1, 1);
frontRightPower = scaleInput(frontRightPower);
frontLeftPower = scaleInput(frontLeftPower);
backRightPower = scaleInput(backRightPower);
backLeftPower = scaleInput(backLeftPower);
motorFrontRight.setPower(frontRightPower);
motorFrontLeft.setPower(frontLeftPower);
motorBackRight.setPower(backRightPower);
motorBackLeft.setPower(backLeftPower);
}
/*
* This method scales the joystick input so for low joystick values, the
* scaled value is less than linear. This is to make it easier to drive
* the robot more precisely at slower speeds.
*/
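    // Worked example: an input of 0.5 maps to index (int) (0.5 * 16) = 8 and returns scaleArray[8] = 0.17,
    // while a full deflection of 1.0 maps to index 16 and returns 0.75.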
private double scaleInput(double dVal) {
//double[] scaleArray = { 0.0, 0.05, 0.09, 0.10, 0.12, 0.15, 0.18, 0.24,
// 0.30, 0.36, 0.43, 0.50, 0.60, 0.72, 0.85, 1.00, 1.00 };
double[] scaleArray = { 0.0, 0.02, 0.04, 0.06, 0.08, 0.10, 0.12, 0.14,
0.17, 0.19, 0.22, 0.25, 0.28, 0.30, 0.33, 0.50, 0.75 };
// get the corresponding index for the scaleInput array.
int index = (int) (dVal * 16.0);
// index should be positive.
if (index < 0) {
index = -index;
}
// index cannot exceed size of array minus 1.
if (index > 16) {
index = 16;
}
// get value from the array.
double dScale = 0.0;
if (dVal < 0) {
dScale = -scaleArray[index];
} else {
dScale = scaleArray[index];
}
// return scaled value.
return dScale;
}
}
<file_sep>package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
@Autonomous(name = "DepotAutonomous")
public class DepotAutonomous extends MarvinAutonomous {
protected void claim(double sampleTime) {
if (opModeIsActive()) {
bucketArm.claim(sampleTime);
}
}
}<file_sep>package org.firstinspires.ftc.teamcode.subsystems;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
public abstract class DriveTrain {
protected LinearOpMode opMode;
protected DcMotor motorFrontRight;
protected DcMotor motorFrontLeft;
protected DcMotor motorBackRight;
protected DcMotor motorBackLeft;
protected DriveTrain(LinearOpMode theOpMode) {
opMode = theOpMode;
motorFrontRight = opMode.hardwareMap.dcMotor.get("motorFrontRight");
motorFrontLeft = opMode.hardwareMap.dcMotor.get("motorFrontLeft");
motorBackRight = opMode.hardwareMap.dcMotor.get("motorBackRight");
motorBackLeft = opMode.hardwareMap.dcMotor.get("motorBackLeft");
}
public void stopMotion() {
motorFrontRight.setPower(0);
motorFrontLeft.setPower(0);
motorBackRight.setPower(0);
motorBackLeft.setPower(0);
}
}
<file_sep>package org.firstinspires.ftc.teamcode.subsystems;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
public class LandLatchAutonomous extends LandLatch {
public LandLatchAutonomous(LinearOpMode opMode) {
super(opMode);
motorLandLatch.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
motorLandLatch.setMode(DcMotor.RunMode.RUN_TO_POSITION);
motorLandLatch.setDirection(DcMotor.Direction.REVERSE);
}
public void land() {
//motorLandLatch.setTargetPosition(5000); // Use if latch is 23" off ground. DO NOT GO HIGHER OR GEARS WILL GRIND.
motorLandLatch.setTargetPosition(4675); // Use if latch is 22" off ground. REGULATION.
//motorLandLatch.setTargetPosition(4100); // Use if latch is 21" off ground.
motorLandLatch.setPower(0.75);
waitAndReportPosition();
}
public void retractLatch() {
motorLandLatch.setTargetPosition(0);
motorLandLatch.setPower(0.5);
// Intentionally not calling waitAndReportPosition().
}
private void waitAndReportPosition() {
while (opMode.opModeIsActive() && motorLandLatch.isBusy()) {
opMode.telemetry.addData("LandLatch position:", motorLandLatch.getCurrentPosition());
opMode.telemetry.update();
opMode.idle();
}
}
}
<file_sep>package org.firstinspires.ftc.teamcode.subsystems;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.ElapsedTime;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection;
import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.teamcode.MarvinAutonomous;
import org.firstinspires.ftc.teamcode.subsystems.DriveTrainAutonomous;
public class CameraAutonomous {
private static final String TFOD_MODEL_ASSET = "RoverRuckus.tflite";
private static final String LABEL_GOLD_MINERAL = "Gold Mineral";
private static final String LABEL_SILVER_MINERAL = "Silver Mineral";
private static final String VUFORIA_KEY = "<KEY>";
private VuforiaLocalizer vuforia;
private TFObjectDetector tfod;
private DriveTrainAutonomous driveTrain;
private MarvinAutonomous opMode;
private ElapsedTime elapsedTime = new ElapsedTime();
public CameraAutonomous(MarvinAutonomous theOpMode, DriveTrainAutonomous theDriveTrain) {
opMode = theOpMode;
driveTrain = theDriveTrain;
initVuforia();
initTfod();
}
public double processMinerals() {
double sampleTime = 0.0d;
if (opMode.opModeIsActive()) {
boolean positionedToDrive = false;
driveTrain.twistLeft(700, true, 0.5);
tfod.activate();
elapsedTime.reset();
driveTrain.twistRight(1400, false, 0.06);
while (opMode.opModeIsActive() && elapsedTime.milliseconds() < 13000 && !positionedToDrive) {
// getUpdatedRecognitions() will return null if no new information is available since
// the last time that call was made.
List<Recognition> updatedRecognitions = tfod.getUpdatedRecognitions();
if (updatedRecognitions != null) {
opMode.telemetry.addData("Number of updatedRecognitions:", updatedRecognitions.size());
for (Recognition recognition : updatedRecognitions) {
opMode.telemetry.addData("Recognition label:", recognition.getLabel());
opMode.telemetry.addData("Recognition left position:", (int)recognition.getLeft());
if (recognition.getLabel().equals(LABEL_GOLD_MINERAL)) {
if (recognition.getLeft() > 210 && recognition.getLeft() < 310) {
driveTrain.stopMotion();
positionedToDrive = true;
}
}
}
opMode.telemetry.update();
}
}
sampleTime = elapsedTime.milliseconds();
if (!positionedToDrive && opMode.opModeIsActive()) {
driveTrain.twistLeft(700, true, 0.35);
}
if (opMode.opModeIsActive()) {
driveTrain.moveRight(1300, 0.35);
}
}
tfod.shutdown();
return sampleTime;
}
private void initVuforia() {
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
parameters.vuforiaLicenseKey = VUFORIA_KEY;
parameters.cameraDirection = CameraDirection.BACK;
vuforia = ClassFactory.getInstance().createVuforia(parameters);
}
private void initTfod() {
int tfodMonitorViewId = opMode.hardwareMap.appContext.getResources().getIdentifier(
"tfodMonitorViewId", "id", opMode.hardwareMap.appContext.getPackageName());
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_GOLD_MINERAL, LABEL_SILVER_MINERAL);
}
}
<file_sep>package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.teamcode.subsystems.BucketArmTeleOp;
import org.firstinspires.ftc.teamcode.subsystems.DriveTrainTeleOp;
import org.firstinspires.ftc.teamcode.subsystems.LandLatchTeleOp;
import org.firstinspires.ftc.teamcode.subsystems.TrolleyTeleOp;
@TeleOp(name = "MarvinTeleOp")
public class MarvinTeleOp extends LinearOpMode {
private DriveTrainTeleOp driveTrain;
private LandLatchTeleOp landLatch;
private TrolleyTeleOp trolley;
private BucketArmTeleOp bucketArm;
@Override
public void runOpMode() {
driveTrain = new DriveTrainTeleOp(this);
landLatch = new LandLatchTeleOp(this);
trolley = new TrolleyTeleOp(this);
bucketArm = new BucketArmTeleOp(this);
telemetry.addData("Status", "Initialized");
telemetry.update();
// Wait for the game to start (driver presses PLAY)
waitForStart();
while (opModeIsActive()) {
driveTrain.pollGamepads();
landLatch.pollGamepads();
trolley.pollGamepads();
bucketArm.pollGamepads();
// TODO Add meaningful output to telemetry in all subsystems.
}
}
}
<file_sep>package org.firstinspires.ftc.teamcode.subsystems;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.ElapsedTime;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.teamcode.DepotAutonomous;
import org.firstinspires.ftc.teamcode.subsystems.DriveTrainAutonomous;
public class BucketArmAutonomous extends BucketArm {
private DriveTrainAutonomous driveTrain;
public BucketArmAutonomous(LinearOpMode theOpMode, DriveTrainAutonomous theDriveTrain) {
super(theOpMode);
driveTrain = theDriveTrain;
motorBucket.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
motorBucket.setMode(DcMotor.RunMode.RUN_TO_POSITION);
}
public void claim(double sampleTime) {
opMode.telemetry.addData("Sample Time", sampleTime);
opMode.telemetry.update();
if (sampleTime >= 0.0 && sampleTime <= 3800.0) {
driveTrain.twistLeft(2650, true, 1.0);
dropMarker(2200);
} else if (sampleTime >3800.0 && sampleTime <= 6800.0) {
driveTrain.twistRight(1300, true, 1.0);
dropMarker(1500);
} else {
driveTrain.twistRight(400, true, 1.0);
dropMarker(2400);
}
}
public void park(double sampleTime) {
opMode.telemetry.addData("Sample Time", sampleTime);
opMode.telemetry.update();
if (sampleTime >= 0.0 && sampleTime <= 3800.0) {
driveTrain.twistLeft(2750, true, 1.0);
crossPlane();
} else if (sampleTime >3800.0 && sampleTime <= 6800.0) {
driveTrain.twistLeft(3350, true, 0.5);
crossPlane();
} else {
driveTrain.moveRight(225, 0.5);
driveTrain.twistRight(600, true, 1.0);
crossPlane();
}
}
private void dropMarker(int moveForwardTicks) {
driveTrain.moveForward(moveForwardTicks, 1.0);
motorBucket.setDirection(DcMotor.Direction.FORWARD);
motorBucket.setTargetPosition(300);
motorBucket.setPower(1.0);
while (opMode.opModeIsActive() && motorBucket.isBusy()) {
opMode.idle();
}
driveTrain.moveBackward(800, 1.0);
}
private void crossPlane() {
motorBucket.setDirection(DcMotor.Direction.FORWARD);
motorBucket.setTargetPosition(250);
motorBucket.setPower(0.5);
while (opMode.opModeIsActive() && motorBucket.isBusy()) {
opMode.idle();
}
moveBucketNeutral();
while (opMode.opModeIsActive()) {
opMode.idle();
}
}
}
//moveBucketBottom();
/*ElapsedTime servoMovingTime = new ElapsedTime();
servoMovingTime.reset();
while (opMode.opModeIsActive() && servoMovingTime.milliseconds() < 2000) {
opMode.idle();
}
driveTrain.moveForward(400, 1.0);
driveTrain.moveBackward(400, 1.0);
moveBucketNeutral();
servoMovingTime.reset();
while (opMode.opModeIsActive() && servoMovingTime.milliseconds() < 2000) {
opMode.idle();
}*/<file_sep>package org.firstinspires.ftc.teamcode.subsystems;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
public class LandLatchTeleOp extends LandLatch {
public LandLatchTeleOp(LinearOpMode opMode) {
super(opMode);
}
public void pollGamepads() {
if (opMode.gamepad1.a) {
motorLandLatch.setPower(opMode.gamepad1.left_stick_y);
} else {
motorLandLatch.setPower(0);
}
}
}
|
df3839802b18a3db8c727229330b7400e384454a
|
[
"Java"
] | 8 |
Java
|
ConfidentialRobotics/ftc_app
|
875cc7c0fcf87ec603b2fb5d17771afb6a05cf1e
|
db999ccdcd07285241974409eee772826b0988dc
|
refs/heads/master
|
<file_sep># Autostart a global traefik reverse proxy
This repository contains all configs needed to auto-start a global traefik instance as a reverse-proxy in a docker-compose setup.
The use case is described in my blog post: https://hollo.me/devops/routing-to-multiple-docker-compose-development-setups-with-traefik.html
## Installation
1. Clone this repository to `~/traefik`
2. Create the external docker network "gateway"
```bash
docker network create \
--driver=bridge \
--attachable \
--internal=false \
gateway
```
3. Create the needed SSL-Certificates using [mkcert](https://github.com/FiloSottile/mkcert):
```bash
mkcert "*.example.com"
```
and change the config of certificates in the [certificates.toml](.docker/traefik/dynamic/certificates.toml), so that only your needed certificates are listed.
4. Add `127.0.0.1 subdomain1.example.com subdomain2.example.com ...` to your `/etc/hosts` file, so that the traffic is routed to the traefik instance.
5. `cp ~/traefik/com.user.traefik.autostart.plist ~/Library/LaunchAgents/com.user.traefik.autostart.plist`
6. Load the service with `launchctl load ~/Library/LaunchAgents/com.user.traefik.autostart.plist`
7. Check the log file for errors: `tail -F ~/Library/Logs/traefik.autostart.log`
8. Visit http://127.0.0.1:8080/dashboard/#/ - You should see your traefik dashboard.
If you're not my future self, adapt the paths to the user's home directory in the configs before running anything. 😬
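
## Connecting a project

A project that should be reachable through this proxy only needs to join the external `gateway` network and enable itself for traefik (the static config uses `exposedByDefault = false`). A minimal, illustrative `docker-compose.yml` sketch — service name, image and host rule are placeholders you have to adapt:

```yaml
services:
  app:
    image: nginx:alpine
    networks:
      - gateway
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.app.rule=Host(`subdomain1.example.com`)"
      - "traefik.http.routers.app.entrypoints=websecure"
      - "traefik.http.routers.app.tls=true"

networks:
  gateway:
    external: true
```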
<file_sep>[providers]
[providers.docker]
exposedByDefault = false
network = "gateway"
[providers.file]
directory = '/etc/traefik/dynamic'
watch = true
[api]
insecure = true
dashboard = true
[entryPoints]
[entryPoints.web]
address = ":80"
[entryPoints.web.http.redirections]
[entryPoints.web.http.redirections.entryPoint]
to = "websecure"
scheme = "https"
permanent = true
[entryPoints.websecure]
address = ":443"
[entryPoints.mssql]
address = ":1433"
[entryPoints.postgres]
address = ":5432"
[entryPoints.mongodb]
address = ":27017"
[log]
filePath = "/logs/traefik.log"
level = "ERROR"
[accessLog]
filePath = "/logs/access.log"
bufferingSize = 100
[accessLog.filters]
statusCodes = ["200", "300-302"]
retryAttempts = true
minDuration = "10ms"
<file_sep>[[tls.certificates]]
certFile = "/etc/traefik/ssl/_wildcard.onfos.de.pem"
keyFile = "/etc/traefik/ssl/_wildcard.onfos.de-key.pem"
[[tls.certificates]]
certFile = "/etc/traefik/ssl/_wildcard.hollo.me.pem"
keyFile = "/etc/traefik/ssl/_wildcard.hollo.me-key.pem"
[[tls.certificates]]
certFile = "/etc/traefik/ssl/_wildcard.dresden-code-of-conduct.org.pem"
keyFile = "/etc/traefik/ssl/_wildcard.dresden-code-of-conduct.org-key.pem"
[[tls.certificates]]
certFile = "/etc/traefik/ssl/_wildcard.from-home.de.pem"
keyFile = "/etc/traefik/ssl/_wildcard.from-home.de-key.pem"
<file_sep>#!/bin/bash
DOCKER_APP=/Applications/Docker.app
DOCKER="/usr/local/bin/docker"
DOCKER_COMPOSE="/usr/local/bin/docker-compose"
TRAEFIK_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo ${TRAEFIK_DIR}
# Create global gateway network, if not exists
${DOCKER} network create --driver bridge --attachable --internal=false gateway || true
# Open Docker, only if is not running
if (! ${DOCKER} stats --no-stream ); then
# Start Docker.app
open ${DOCKER_APP}
# Wait until Docker daemon is running and has completed initialisation
while (! ${DOCKER} stats --no-stream ); do
# Docker takes a few seconds to initialize
echo "Waiting for Docker to launch..."
sleep 1
done
fi
cd ${TRAEFIK_DIR}
${DOCKER_COMPOSE} up -d --force-recreate
|
c9de9669c15b674e50db46696a675ed0fe22ed0c
|
[
"Markdown",
"TOML",
"Shell"
] | 4 |
Markdown
|
hollodotme/traefik-proxy-autostart
|
8fe06add82d352ea3bb1ef552b0df93e3a18322f
|
38a65d9d54b66d60579889f102fdcdd9d6bf8281
|
refs/heads/master
|
<repo_name>naqviferman/problem-solving<file_sep>/src/main/java/arrayproblems/MoveAllZeroesToEnd.java
package arrayproblems;
import java.util.Arrays;
public class MoveAllZeroesToEnd {
public static void main(String[] args) {
int[] input = {1, 2, 0, 0, 0, 3, 6};
// int[] input = {1, 9, 8, 4, 0, 0, 2, 7, 0, 6, 0};
// int[] input = {0, 0, 0, 4};
System.out.println(Arrays.toString(input));
moveAllZeroes(input);
System.out.println(Arrays.toString(input));
}
private static void moveAllZeroes(int[] input) {
int i = 0;
int j = 0;
while (i < input.length) {
if (input[i] != 0) {
if (i != j) {
input[j] = input[i];
input[i] = 0;
}
i++;
j++;
} else {
i++;
}
}
}
}
<file_sep>/src/main/java/arrayproblems/twodim/NumberOfIslands.java
package arrayproblems.twodim;
import java.util.Deque;
import java.util.LinkedList;
/*
Given an m x n 2D binary grid grid which represents a map of '1's (land) and '0's (water), return the number of islands.
An island is surrounded by water and is formed by connecting adjacent lands horizontally or vertically.
You may assume all four edges of the grid are all surrounded by water.
*/
public class NumberOfIslands {
public static void main(String[] args) {
char[][] grid = {
{'1', '1', '1', '1', '0'},
{'1', '1', '0', '1', '0'},
{'1', '1', '0', '0', '1'},
{'0', '0', '0', '1', '1'}
};
int islands = numIslands(grid);
System.out.println(islands); // For above grid, answer will be 2
}
public static int numIslands(char[][] grid) {
int count = 0;
int[][] steps = {{-1, 0}, {0, 1}, {1, 0}, {0, -1}};
for (int i = 0; i < grid.length; i++) {
for (int j = 0; j < grid[i].length; j++) {
if (grid[i][j] == '1') {
// bfs(grid, i, j);
dfs(grid, steps, i, j);
count++;
}
}
}
return count;
}
private static void bfs(char[][] grid, int i, int j) {
int[][] steps = {{-1, 0}, {0, 1}, {1, 0}, {0, -1}};
Deque<Integer[]> queue = new LinkedList<>();
queue.offerLast(new Integer[]{i, j});
while (!queue.isEmpty()) {
Integer[] curr = queue.pollFirst();
int row = curr[0];
int col = curr[1];
if (check(grid, row, col)) {
grid[row][col] = '0';
for (int[] step : steps) {
queue.offerLast(new Integer[]{row + step[0], col + step[1]});
}
}
}
}
private static void dfs(char[][] grid, int[][] steps, int i, int j) {
if (check(grid, i, j)) {
grid[i][j] = '0';
for (int[] step : steps) {
dfs(grid, steps, i + step[0], j + step[1]);
}
}
}
private static boolean check(char[][] grid, int row, int col) {
return 0 <= row && row < grid.length && 0 <= col && col < grid[0].length && grid[row][col] == '1';
}
}
<file_sep>/src/main/java/hashingproblems/JewelsAndStones.java
package hashingproblems;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.HashMap;
/*
You're given strings jewels representing the types of stones that are jewels, and stones representing the stones you have.
Each character in stones is a type of stone you have. You want to know how many of the stones you have are also jewels.
Letters are case sensitive, so "a" is considered a different type of stone from "A".
Input: jewels = "aA", stones = "aAAbbbb"
Output: 3
Input: jewels = "z", stones = "ZZ"
Output: 0
*/
public class JewelsAndStones {
public int numJewelsInStones(String jewels, String stones) {
HashMap<Character, Integer> map = new HashMap<>();
for (int i = 0, length = stones.length() ; i < length; i++) {
char ch = stones.charAt(i);
if (map.containsKey(ch)) {
map.put(ch, map.get(ch) + 1);
} else {
map.put(ch, 1);
}
}
int jewelsPresent = 0;
for (int i = 0, length = jewels.length() ; i < length; i++) {
char ch = jewels.charAt(i);
Integer value = map.get(ch);
if (value != null)
jewelsPresent += value;
}
return jewelsPresent;
}
@Test
public void test1() {
Assertions.assertEquals(3,numJewelsInStones("aA", "aAAbbbb"));
}
}
<file_sep>/src/main/java/stringproblems/GoalParser.java
package stringproblems;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/*You own a Goal Parser that can interpret a string command.
The command consists of an alphabet of "G", "()" and/or "(al)" in some order.
The Goal Parser will interpret "G" as the string "G", "()" as the string "o", and "(al)" as the string "al".
The interpreted strings are then concatenated in the original order.
Given the string command, return the Goal Parser's interpretation of command.
Input: command = "G()(al)"
Output: "Goal"
Explanation: The Goal Parser interprets the command as follows:
G -> G
() -> o
(al) -> al
The final concatenated result is "Goal".
Input: command = "G()()()()(al)"
Output: "Gooooal"
Input: command = "(al)G(al)()()G"
Output: "alGalooG"
*/
public class GoalParser {
public String interpret(String command) {
return command.replace("()", "o").replace("(al)", "al");
}
@Test
public void test1() {
Assertions.assertEquals("Goal", interpret("G()(al)"));
}
@Test
public void test2() {
Assertions.assertEquals("alGalooG", interpret("(al)G(al)()()G"));
}
}
<file_sep>/src/main/java/arrayproblems/twodim/BFS.java
package arrayproblems.twodim;
import java.util.ArrayDeque;
import java.util.Deque;
public class BFS {
public static void main(String[] args) {
int[][] mat = {{1, 2, 3},
{4, 5, 6},
{7, 8, 9},
{10, 11, 12}};
bfs(mat);
System.out.println();
bfs1(mat);
}
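    // bfs: marks a cell as visited when it is enqueued, so every cell enters the queue at most once.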
private static void bfs(int[][] mat) {
boolean[][] visited = new boolean[mat.length][mat[0].length];
int[][] steps = {{-1, 0}, {0, 1}, {1, 0}, {0, -1}};
Deque<Integer[]> queue = new ArrayDeque<>();
queue.offerLast(new Integer[]{0, 0});
visited[0][0] = true;
System.out.print(mat[0][0] + ", ");
while (!queue.isEmpty()) {
Integer[] curr = queue.poll();
for (int[] step : steps) {
int i = curr[0] + step[0];
int j = curr[1] + step[1];
if (check(mat, visited, i, j)) {
System.out.print(mat[i][j] + ", ");
visited[i][j] = true;
queue.offerLast(new Integer[]{i, j});
}
}
}
}
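    // bfs1: marks a cell as visited when it is dequeued; neighbours are enqueued unconditionally and
    // out-of-range or already-visited cells are filtered out when they are polled.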
private static void bfs1(int[][] mat) {
boolean[][] visited = new boolean[mat.length][mat[0].length];
int[][] steps = {{-1, 0}, {0, 1}, {1, 0}, {0, -1}};
Deque<Integer[]> queue = new ArrayDeque<>();
queue.offerLast(new Integer[]{0, 0});
while (!queue.isEmpty()) {
Integer[] curr = queue.poll();
int i = curr[0];
int j = curr[1];
if (check(mat, visited, i, j)) {
visited[i][j] = true;
System.out.print(mat[i][j] + ", ");
for (int[] step : steps) {
queue.offerLast(new Integer[]{i + step[0], j + step[1]});
}
}
}
}
private static boolean check(int[][] arr, boolean[][] visited, int i, int j) {
return 0 <= i && i < arr.length && 0 <= j && j < arr[0].length && !visited[i][j];
}
}
<file_sep>/src/main/java/arrayproblems/RemoveDupFromSortedArray.java
package arrayproblems;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/*
Given a sorted array nums,
remove the duplicates in-place such that each element appears only once and returns the new length
Input: nums = [1,1,2]
Output: 2, nums = [1,2]
Input: nums = [0,0,1,1,1,2,2,3,3,4]
Output: 5, nums = [0,1,2,3,4]
It doesn't matter what you leave beyond the returned length.
*/
public class RemoveDupFromSortedArray {
public int removeDuplicates(int[] input) {
int inputLength = input.length;
if (inputLength < 2)
return inputLength;
int i = 0;
for (int j = 1; j < inputLength; j++) {
if (input[i] != input[j]) {
input[++i] = input[j];
}
}
return i + 1;
}
@Test
public void test1() {
Assertions.assertEquals(2, removeDuplicates(new int[]{1, 1, 2}));
}
@Test
public void test2() {
Assertions.assertEquals(5, removeDuplicates(new int[]{0, 0, 1, 1, 1, 2, 2, 3, 3, 4}));
}
@Test
public void test3() {
Assertions.assertEquals(1, removeDuplicates(new int[]{2, 2, 2, 2, 2}));
}
@Test
public void test4() {
Assertions.assertEquals(5, removeDuplicates(new int[]{1, 2, 2, 3, 4, 4, 4, 5, 5}));
}
}
<file_sep>/src/main/java/stringproblems/LengthOfLastWord.java
package stringproblems;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/*
Given a string s consists of some words separated by spaces, return the length of the last word in the string.
If the last word does not exist, return 0
Input: s = "<NAME>"
Output: 5
Input: s = " "
Output: 0
*/
public class LengthOfLastWord {
public int lengthOfLastWord(String s) {
String[] wordArray = s.split(" ");
int arrLength = wordArray.length;
if (arrLength > 0) {
return wordArray[arrLength - 1].length();
}
return 0;
}
@Test
public void test1() {
Assertions.assertEquals(5, lengthOfLastWord("hello world"));
}
@Test
public void test2() {
Assertions.assertEquals(0, lengthOfLastWord(" "));
}
@Test
public void test3() {
Assertions.assertEquals(0, lengthOfLastWord(" "));
}
@Test
public void test4() {
Assertions.assertEquals(1, lengthOfLastWord("a"));
}
@Test
public void test5() {
Assertions.assertEquals(1, lengthOfLastWord("a "));
}
}
<file_sep>/src/main/java/arrayproblems/MissingNumber.java
package arrayproblems;
public class MissingNumber {
/*
Given an array of size N-1 such that it can only contain distinct integers in the range of 1 to N.
Find the missing element.
N = 5
A[] = {1,2,3,5}
Output: 4
*/
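    // Uses the arithmetic-series sum: missing = N * (N + 1) / 2 - sum(array).
    // For the input below (N = 10): 55 - 46 = 9, so 9 is printed.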
public static void main(String[] args) {
int[] input = {1, 2, 3, 4, 5, 6, 7, 8, 10};
int n = input.length + 1;
int missingNumber = missingNumberInArray(input, n);
System.out.println(missingNumber);
}
private static int missingNumberInArray(int[] input, int n) {
int totalSum = (n * (n + 1)) / 2;
int currentSum = 0;
for (int i : input) {
currentSum += i;
}
return totalSum - currentSum;
}
}
|
e52ffd6b359ffd2dd101322d709452da8f89ad28
|
[
"Java"
] | 8 |
Java
|
naqviferman/problem-solving
|
31871899295c7410258a0e8bc39b385e29bcd17d
|
08e3eb032eb986d349db36feb25840be92286cc0
|
refs/heads/master
|
<repo_name>Loreen-netizen/trainsReactApp<file_sep>/src/components/TrainTimes/TrainTimes.js
import TrainIcon from "../TrainIcon/TrainIcon";
import "../SingleStation/SingleStation.css";
import "./TrainTimes.css";
const TrainTimes = ({ trainId, specificTrainTimes }) => {
console.log({ specificTrainTimes });
const timesArray = Object.keys(specificTrainTimes);
console.log(timesArray);
return (
<div className="train-times">
<div className="train-times-card">
<h3>
<TrainIcon />
<span className="trainsList">{trainId}</span>
</h3>
<ul className="trainTimesUl">
<li className="trainListItem">14:00</li>
<li className="trainListItem">14:00</li>
<li className="trainListItem">14:00</li>
<li className="trainListItem">14:00</li>
</ul>
</div>
</div>
);
};
export default TrainTimes;
<file_sep>/src/components/App/App.js
import "./App.css";
import Header from "../Header/Header";
import Stations from "../Stations/Stations";
import SingleStation from "../SingleStation/SingleStation";
import React from "react";
import { HashRouter, Switch, Route } from "react-router-dom";
const App = () => {
return (
<div>
<Header />
<HashRouter>
<Switch>
<Route path="/station/:stationId" children={<SingleStation />} />
<Route path="/" children={<Stations />} />
</Switch>
</HashRouter>
</div>
);
};
// const getData = async () => {
// const stationsResponse = await fetch('/data/stations.json');
// const trainsRespnse = await fetch('/data/trains.json');
// const stationsData = await stationsResponse.json();
// const trainsData = await trainsRespnse.json();
// console.log(stationsData);
// console.log(trainsData);
// }
// return (
// <div>
// <Header />
// <Stations />
// <button onClick = {getData}>logData</button>
// </div>
// );
// }
export default App;
<file_sep>/src/components/SingleStation/SingleStation.js
import "./SingleStation.css";
import { useEffect, useState } from "react";
//import {useParams} from "react-router-dom";
import { HashRouter, Switch, Route, useParams } from "react-router-dom";
import TrainTimes from "../TrainTimes/TrainTimes";
const SingleStation = () => {
const { stationId } = useParams();
const [trains, setTrains] = useState(null);
const [stations, setStations] = useState(null);
console.log({ stationId });
const getData = async () => {
const trainsResponse = await fetch("/data/trains.json");
const trainsData = await trainsResponse.json();
console.log(trainsData);
setTrains(trainsData);
const stationsResponse = await fetch("/data/stations.json");
const stationsData = await stationsResponse.json();
console.log(stationsData);
setStations(stationsData);
};
  useEffect(() => {
    getData();
  }, []);
if (!trains || !stations) {
return (
<div>
<h3>Loading station.....</h3>
<h3> Loading trains.....</h3>
</div>
);
}
return (
<div>
<a href="#/" className="homepagelink">
Back to Homepage
</a>
<h2 className="stationName">station:{stations[stationId]}</h2>
<div className="trains-grid">
{Object.keys(trains).map((trainId) => (
          <TrainTimes
            key={trainId}
            trainId={trainId}
            specificTrainTimes={trains[trainId]} />
))}
</div>
</div>
);
};
export default SingleStation;
<file_sep>/src/components/Header/Header.js
import "./Header.css";
const Header = ()=>{
return(
<header className="header">Train Schedule
<h5>no need to be late, always use trains app!</h5></header>
)
}
export default Header;
|
4147f502d97dab9192dda1fcce5bccae0c43c690
|
[
"JavaScript"
] | 4 |
JavaScript
|
Loreen-netizen/trainsReactApp
|
f0618c70f953d628ad24dcefe1c7e0837bc2fc78
|
67fb24f003d3d96bfb543a5f94cd6fd0a972f136
|
refs/heads/master
|
<repo_name>SteveBirkner/elastic<file_sep>/config.py
consumer_key = "UrK0WCDTwg56qYGtH6GWNIXAn"
consumer_secret = "<KEY>"
access_token = "<KEY>"
access_token_secret = "<KEY>"
|
692624ecb5c877e81f7bbb12b43d86dba03414be
|
[
"Python"
] | 1 |
Python
|
SteveBirkner/elastic
|
28395309ab93b64eb063669748a7885e3a2bbe69
|
adaf5af3ab67186710975089c436b4427be02204
|
refs/heads/main
|
<file_sep>import numpy as np
import pandas as pd
import os
import re
import string
import en_core_web_sm
import spacy
from nltk.corpus import stopwords
from langdetect import detect
import re
import nltk
from nltk.sentiment.vader import SentimentIntensityAnalyzer
def import_kaggle_data(url):
"""
    It downloads a selected dataset from Kaggle and converts it to
    a pandas dataframe
Args:
kaggle url
Input argument - Ask for the file you want to keep
from the unzip kaggle file, moves it to a folder named "data"
and deletes the rest of non-needed downloaded files.
Returns:
kaggle csv file in data folder
"""
#Download Kaggle dataset
os.system(f'kaggle datasets download -d {url}')
#Decompress zip file
os.system('tar -xzvf *.zip')
#List files in folder
os.system('ls')
# Select the file you want to keep
selected_file = input('enter name of the file you want to keep:')
# Move selected file to the data folder
os.system(f'mv {selected_file} ../data')
# Dump the rest of the files except jupyter notebooks
# os.system('find . \! -name "*.ipynb" -delete')
# os.system('rm !(*.ipynb)')
os.system('ls -1 | grep -v "*.ipynb" | xargs rm -f')
# Import csv to dataframe
# df = pd.read_csv(f"../data/{selected_file}")
# return df
def clean_characters(list_, database_name, column):
"""
Removes list of unwanted character from a chosen column in a dataset
Args:
list of characters
database name
database column name
Returns:
nothing
"""
for ch in list_:
database_name[f'{column}'] = database_name[f'{column}'].str.replace(f"{ch}","")
def tokenizer(txt):
"""
    This function reads a string and gives back its tokenization
    Args:
    - 1 Detects the language (en or es)
    - 2 Lemmatises each word, removes stopwords and non-alphabetic tokens, and lower-cases the result
    Returns:
    tokenized string
"""
try:
if detect(txt) == 'en':
nlp = spacy.load("en_core_web_sm")
elif detect(txt) == 'es':
nlp = spacy.load("es_core_news_sm")
else:
return "It's not English or Spanish"
except:
return "this can't be analize"
tokens = nlp(txt)
filtradas = []
for word in tokens:
if not word.is_stop:
lemma = word.lemma_.lower().strip()
if re.search('^[a-zA-Z]+$',lemma):
filtradas.append(lemma)
return " ".join(filtradas)
<file_sep>-- MySQL Workbench Forward Engineering
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION';
-- -----------------------------------------------------
-- Schema political_debates
-- -----------------------------------------------------
-- -----------------------------------------------------
-- Schema political_debates
-- -----------------------------------------------------
CREATE SCHEMA IF NOT EXISTS `political_debates` DEFAULT CHARACTER SET utf8 ;
-- -----------------------------------------------------
-- Schema political_debates
-- -----------------------------------------------------
-- -----------------------------------------------------
-- Schema political_debates
-- -----------------------------------------------------
CREATE SCHEMA IF NOT EXISTS `political_debates` DEFAULT CHARACTER SET utf8 ;
USE `political_debates` ;
-- -----------------------------------------------------
-- Table `political_debates`.`speakers`
-- -----------------------------------------------------
CREATE TABLE IF NOT EXISTS `political_debates`.`speakers` (
`speakerid` INT NOT NULL AUTO_INCREMENT,
`speaker` VARCHAR(45) NOT NULL,
PRIMARY KEY (`speakerid`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `political_debates`.`speeches`
-- -----------------------------------------------------
CREATE TABLE IF NOT EXISTS `political_debates`.`speeches` (
`sentenceid` INT NOT NULL AUTO_INCREMENT,
`minute` TIME NOT NULL,
`sentence` LONGTEXT NOT NULL,
`tokens` LONGTEXT NOT NULL,
`speakers_speakerid` INT NULL DEFAULT NULL,
`debate` VARCHAR(60) NOT NULL,
PRIMARY KEY (`sentenceid`),
INDEX `fk_speeches_speakers_idx` (`speakers_speakerid` ASC) VISIBLE,
CONSTRAINT `fk_speeches_speakers`
FOREIGN KEY (`speakers_speakerid`)
REFERENCES `political_debates`.`speakers` (`speakerid`))
ENGINE = InnoDB;
USE `political_debates` ;
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
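
-- Illustrative query against this schema (the same join the API layer runs):
-- SELECT speakers.speaker, speeches.minute, speeches.sentence
-- FROM speeches
-- INNER JOIN speakers ON speeches.speakers_speakerid = speakers.speakerid;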
<file_sep># Political Debates API

### Objective
Create an API that makes it possible for the user to get information about political debates from a SQL database and to alter its content as well.
Developers can access what each politician said during the debate and the minute when it was said. The API also provides the polarity score of each sentence.
### METHODS
Mysql
Flask
GET and POST request
NLP
### STEPS
**1 - Kaggle data download, cleaning and Mysql upload.ipynb**: In this notebook we work on:
- Downloading kaggle source data. (import_kaggle_data function -> kaggle_data_functions.py)
- Giving the dataframe the appropriate format (clean_characters function -> kaggle_data_functions.py)
- Adding needed columns such as "speakerid", "sentenceid" and "tokens" (the tokenized sentence, tokenizer function -> kaggle_data_functions.py).
- Importing the dataframe into the political_debates MySQL database (political_debate_database_squema.sql)
**2 - Get and Post API requests.ipynb**: API calls:
- GET requests: get endpoints -> main.py and query functions -> sql_tools.py
- POST request: post endpoints -> main.py and query and defensive programming functions -> sql_tools.py
**3 - NLP.ipynb**: sentiment analysis:
- Establishing the connection with the database -> configuration.py
- Get endpoints -> main.py and query sentiment analysis -> functions sql_tools.py
API Documentation ->[Doc.md](https://github.com/maria-luisa-gomez/political_debates_sentiment_analysis/blob/main/Doc.md)
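
For example, once the Flask app is running locally, one of the GET endpoints described in Doc.md can be queried roughly like this (illustrative sketch, default Flask port assumed):

```python
import requests

response = requests.get("http://127.0.0.1:5000/sentences/")
sentences = response.json()
```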
## Libraries
[flask](https://flask.palletsprojects.com/en/2.0.x/)
[NLTK](https://www.nltk.org/api/nltk.sentiment.html)
[sqlalchemy](https://www.sqlalchemy.org)
[sys](https://docs.python.org/3/library/sys.html)
[requests](https://pypi.org/project/requests/2.7.0/)
[pandas](https://pandas.pydata.org/)
[dotenv](https://pypi.org/project/python-dotenv/)
[mysql](https://www.mysql.com)
[json](https://docs.python.org/3/library/json.html)
[os](https://docs.python.org/3/library/os.html)
[operator](https://docs.python.org/3/library/operator.html)
[re](https://docs.python.org/3/library/re.html)
<file_sep>from flask import Flask, request
from flask import json
from flask.json import jsonify, load
from numpy import character
from sqlalchemy.util.langhelpers import method_is_overridden
import src.sql_tools as sql
import markdown.extensions.fenced_code
app = Flask(__name__)
@app.route('/')
def index():
readme_file = open("Doc.md", "r")
md_template = markdown.markdown(readme_file.read(), extensions = ["fenced_code"])
return md_template
# GET
@app.route("/sentences/")
def allsentence():
sentences = sql.all_sentences()
return sentences
@app.route("/sentences/<speaker>")
def speakersentence(speaker):
print(speaker)
sentences = sql.speaker_sentence(speaker)
return sentences
@app.route("/sentiment/")
def sentiment_pol():
sentences = sql.get_polarity()
return sentences
@app.route("/sentiment/<speaker>")
def speaker_sentiment(speaker):
sentences = sql.get_polarity()
return sentences
# POST
@app.route("/newsentence", methods=["POST"])
def newsentence():
data = dict(request.form.to_dict())
try:
resp = sql.insert_register(data)
print(f"respuesta sql: {resp}")
return {"error": 0, "msg": resp}
except Exception as e:
print(f"There has been an error: {e}")
return {"error": 1, "msg": e}
app.run(debug=True)<file_sep># import os
# import dotenv
import sqlalchemy as alch
# dotenv.load_dotenv()
# passw = os.getenv("pass_sql")
passw = "*****"
dbName="political_debates"
connectionData=f"mysql+pymysql://root:{passw}@localhost/{dbName}"
engine = alch.create_engine(connectionData)
<file_sep><html>
<head>
<meta charset="utf-8">
<title>Political Debates Sentimental Analysis Doc</title>
<img src="https://ichef.bbci.co.uk/news/800/cpsprodpb/139CD/production/_114933308_battlegtound_promo.png" alt="joe biden vs donald trump">
</head>
<body lang=EN-GB style='tab-interval:36.0pt;word-wrap:break-word'>
<div class=WordSection1>
<p class=MsoNormal style='mso-margin-top-alt:auto;mso-margin-bottom-alt:auto;
mso-outline-level:1'><b><span style='font-size:24.0pt;font-family:"Apple Symbols";
mso-fareast-font-family:"Times New Roman";color:black;mso-font-kerning:18.0pt;
mso-fareast-language:EN-GB'><o:p> </o:p></span></b></p>
<p class=MsoNormal style='mso-margin-top-alt:auto;mso-margin-bottom-alt:auto;
mso-outline-level:1'><b><span style='font-size:24.0pt;font-family:"Apple Symbols";
mso-fareast-font-family:"Times New Roman";color:black;mso-font-kerning:18.0pt;
mso-fareast-language:EN-GB'>Political Debates API<o:p></o:p></span></b></p>
<table class=MsoTableGrid border=0 cellspacing=0 cellpadding=0
style='border-collapse:collapse;border:none;mso-yfti-tbllook:1184;mso-padding-alt:
0cm 5.4pt 0cm 5.4pt;mso-border-insideh:none;mso-border-insidev:none'>
<tr style='mso-yfti-irow:0;mso-yfti-firstrow:yes;height:32.85pt'>
<td width=831 valign=top style='width:623.2pt;padding:0cm 5.4pt 0cm 5.4pt;
height:32.85pt'>
<p class=MsoNormal style='mso-margin-top-alt:auto;mso-margin-bottom-alt:auto;
mso-outline-level:3'><b><span style='font-size:20.0pt;font-family:"Apple Symbols";
mso-fareast-font-family:"Times New Roman";color:black;mso-fareast-language:
EN-GB'>Overview<o:p></o:p></span></b></p>
</td>
</tr>
<tr style='mso-yfti-irow:1;height:32.85pt'>
<td width=831 valign=top style='width:623.2pt;padding:0cm 5.4pt 0cm 5.4pt;
height:32.85pt'>
<p class=MsoNormal style='mso-margin-top-alt:auto;mso-margin-bottom-alt:auto;
mso-outline-level:3'><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:black'>The PD API allows developers to access political debates
database with information about the <span class=SpellE>politicians</span>
involved in the debate, sentence said and minute. The access is type GET and
POST</span><b><span style='font-size:20.0pt;font-family:"Apple Symbols";
mso-fareast-font-family:"Times New Roman";color:black;mso-fareast-language:
EN-GB'><o:p></o:p></span></b></p>
</td>
</tr>
<tr style='mso-yfti-irow:2;height:32.85pt'>
<td width=831 valign=top style='width:623.2pt;padding:0cm 5.4pt 0cm 5.4pt;
height:32.85pt'>
<p class=MsoNormal style='mso-margin-top-alt:auto;mso-margin-bottom-alt:auto;
mso-outline-level:3'><b><span style='font-size:20.0pt;font-family:"Apple Symbols";
mso-fareast-font-family:"Times New Roman";color:black;mso-fareast-language:
EN-GB'>Request and response formats<o:p></o:p></span></b></p>
</td>
</tr>
<tr style='mso-yfti-irow:3;height:42.5pt'>
<td width=831 valign=top style='width:623.2pt;padding:0cm 5.4pt 0cm 5.4pt;
height:42.5pt'>
<p class=MsoNormal><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:black'>PD API uses HTTP POST requests with JSON arguments and JSON
responses. Request authentication is not needed</span>.<o:p></o:p></span></p>
<p class=MsoNormal><span style='font-family:"Apple Symbols"'><o:p> </o:p></span></p>
</td>
</tr>
<tr style='mso-yfti-irow:4;height:29.95pt'>
<td width=831 valign=top style='width:623.2pt;padding:0cm 5.4pt 0cm 5.4pt;
height:29.95pt'>
<p class=MsoNormal style='mso-margin-top-alt:auto;mso-margin-bottom-alt:auto;
mso-outline-level:3'><b><u><span style='font-size:18.0pt;font-family:"Apple Symbols";
mso-fareast-font-family:"Times New Roman";color:black;mso-fareast-language:
EN-GB'>User Endpoints<o:p></o:p></span></u></b></p>
</td>
</tr>
<tr style='mso-yfti-irow:5;height:20.25pt'>
<td width=831 valign=top style='width:623.2pt;background:#FBE4D5;mso-background-themecolor:
accent2;mso-background-themetint:51;padding:0cm 5.4pt 0cm 5.4pt;height:20.25pt'>
<p class=MsoNormal style='mso-margin-top-alt:auto;mso-margin-bottom-alt:auto;
mso-outline-level:4'><b><span style='font-size:16.0pt;font-family:"Apple Symbols";
mso-fareast-font-family:"Times New Roman";color:#385723;mso-themecolor:accent6;
mso-themeshade:128;mso-style-textfill-fill-color:#385723;mso-style-textfill-fill-themecolor:
accent6;mso-style-textfill-fill-alpha:100.0%;mso-style-textfill-fill-colortransforms:
lumm=50000;mso-fareast-language:EN-GB'>Content-request endpoints</span></b><b><span
style='font-size:16.0pt;font-family:"Apple Symbols";mso-fareast-font-family:
"Times New Roman";color:black;mso-fareast-language:EN-GB'><o:p></o:p></span></b></p>
</td>
</tr>
<tr style='mso-yfti-irow:6;height:29.95pt'>
<td width=831 valign=top style='width:623.2pt;padding:0cm 5.4pt 0cm 5.4pt;
height:29.95pt'>
<p class=MsoNormal><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:#002060'>"/sentences"</span></span><span style='font-family:
"Apple Symbols"'><o:p></o:p></span></p>
</td>
</tr>
<tr style='mso-yfti-irow:7;height:29.95pt'>
<td width=831 valign=top style='width:623.2pt;padding:0cm 5.4pt 0cm 5.4pt;
height:29.95pt'>
<p class=MsoNormal><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:#002060'>"/sentences/</span><speaker style="caret-color: rgb(0, 0, 0); color: rgb(0, 0, 0); font-family: -webkit-standard; font-style: normal; font-variant-caps: normal; font-weight: normal; letter-spacing: normal; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal; widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto; -webkit-text-stroke-width: 0px; text-decoration: none;"><speaker></span><span
style='font-family:"Apple Symbols";color:#002060'>"<span
class=apple-converted-space> </span><o:p></o:p></span></p>
</td>
</tr>
</speaker>
<tr style='mso-yfti-irow:8;height:29.95pt'>
<td width=831 valign=top style='width:623.2pt;padding:0cm 5.4pt 0cm 5.4pt;
height:29.95pt'>
<p class=MsoNormal><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:#002060'>"/sentiment"</span></span><span style='font-family:
"Apple Symbols"'><o:p></o:p></span></p>
</td>
</tr>
<tr style='mso-yfti-irow:9;height:29.95pt'>
<td width=831 valign=top style='width:623.2pt;padding:0cm 5.4pt 0cm 5.4pt;
height:29.95pt'>
<p class=MsoNormal><span class=apple-converted-space><span style='font-size:
13.5pt;font-family:"Apple Symbols";color:#002060'> </span></span><span
style='font-size:13.5pt;font-family:"Apple Symbols";color:#002060'>"/sentiment/</span><speaker style="caret-color: rgb(0, 0, 0); color: rgb(0, 0, 0); font-family: -webkit-standard; font-style: normal; font-variant-caps: normal; font-weight: normal; letter-spacing: normal; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal; widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto; -webkit-text-stroke-width: 0px; text-decoration: none;"><speaker></span><span
style='font-family:"Apple Symbols";color:#002060'>"<span
class=apple-converted-space> <o:p></o:p></span></span></p>
</speaker>
<p class=MsoNormal><span class=apple-converted-space><span style='font-family:
"Apple Symbols";color:#002060'><o:p> </o:p></span></span></p>
<p class=MsoNormal><span style='font-family:"Apple Symbols"'><o:p> </o:p></span></p>
</td>
</tr>
<tr style='mso-yfti-irow:10;height:110.7pt'>
<td width=831 valign=top style='width:623.2pt;padding:0cm 5.4pt 0cm 5.4pt;
height:110.7pt'>
<p class=MsoNormal><b><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:black'>Example of GET request in Python:<o:p></o:p></span></b></p>
<p class=MsoNormal><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:black'><o:p> </o:p></span></p>
<p class=MsoNormal><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:black'>speaker = "<NAME>"<o:p></o:p></span></p>
<p class=MsoNormal><span class=SpellE><span style='font-size:13.5pt;
font-family:"Apple Symbols";color:black'>url</span></span><span
style='font-size:13.5pt;font-family:"Apple Symbols";color:black'> = <span
class=SpellE>f"http</span>://127.0.0.1:5000/sentences/{speaker}"<o:p></o:p></span></p>
<p class=MsoNormal><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:black'>data = <span class=SpellE>requests.get</span>(<span
class=SpellE>url</span>)<o:p></o:p></span></p>
<p class=MsoNormal><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:black'>data <span class=GramE>json(</span>)<o:p></o:p></span></p>
<p class=MsoNormal><span style='font-size:13.5pt;font-family:"Apple Symbols";
color:black'><o:p> </o:p></span></p>
 </td>
 </tr>
 <tr>
  <td>
  <p><b>Content-upload endpoints</b></p>
  </td>
 </tr>
 <tr>
  <td>
  <p>These endpoints accept Python dictionary content in the request body. These are the
  endpoints for data upload:</p>
  </td>
 </tr>
 <tr>
  <td>
  <p>"/newsentence"</p>
  </td>
 </tr>
 <tr>
  <td>
  <p>Dictionary keys should follow this key format, e.g.:</p>
  </td>
 </tr>
 <tr>
  <td>
  <p>register = { "speaker": "<NAME>", "minute": "01:10",
  "sentence": "I use peanut butter to do my fake tan",
  "debate": "us_election_2020_1st_presidential_debate" }</p>
  </td>
 </tr>
 <tr>
  <td>
  <p><b>Example of a POST request in Python:</b></p>
  <pre>
speaker = "<NAME>"
url = f"http://127.0.0.1:5000/sentences"
data = requests.post(url, data=register)
data.json()
  </pre>
  </td>
 </tr>
</table>
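<p><i>A minimal end-to-end sketch of the upload call described above: the speaker value is a
hypothetical placeholder, the server is assumed to be running locally, and the route may be
"/newsentence" or "/sentences" depending on the deployed version.</i></p>
<pre>
import requests

register = {
    "speaker": "Example Speaker",   # hypothetical placeholder
    "minute": "01:10",
    "sentence": "I use peanut butter to do my fake tan",
    "debate": "us_election_2020_1st_presidential_debate",
}

url = "http://127.0.0.1:5000/newsentence"   # or "/sentences", depending on the deployed route
response = requests.post(url, data=register)
print(response.status_code)
print(response.text)   # success message or a duplicate-sentence notice
</pre>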
</div>
</body>
</html>
<file_sep>from config.configuration import engine
import pandas as pd
import string
import en_core_web_sm
import spacy
from nltk.corpus import stopwords
from langdetect import detect
import re
from textblob import TextBlob
import nltk
# from googletrans import Translator
from nltk.sentiment.vader import SentimentIntensityAnalyzer
def all_sentences():
"""
Gets all data from political_debates mysql database
Args:
query from flask end point http://127.0.0.1:5000/sentences/
Returns:
json with query results
"""
query = f"""
SELECT * FROM speeches
INNER JOIN speakers ON speeches.speakers_speakerid = speakers.speakerid
"""
datos = pd.read_sql_query(query,engine)
return datos.to_json(orient="records")
def speaker_sentence(speaker):
"""
Gets all data from political_debates filtered by given speaker name
Args:
query from flask end point http://127.0.0.1:5000/sentences/<speaker>
Returns:
json with query results
"""
query = f"""
SELECT * FROM speeches
INNER JOIN speakers ON speeches.speakers_speakerid = speakers.speakerid
WHERE speaker = '{speaker}'"""
datos = pd.read_sql_query(query,engine)
return datos.to_json(orient="records")
# DEFENSIVE PROGRAMMING / ERROR HANDLING
# This is a sequence of smaller functions that handle specific possible errors
# and that are combined in the bigger function below named "insert_register".
# Function 1: validates that the required keys are present
def validate_keys_allowed(dict_):
    """
    Checks whether the keys needed for posting are present in the dict provided
    by the API user.
    Args:
        Reads a list of required keys and checks that each one is in the
        dictionary provided by the user.
    Returns:
        True or False
    """
    allowed_keys = ["speaker", "sentence", "minute", "debate"]
    for key in allowed_keys:
        if key not in dict_:
            return False
    return True
# Function 2: validates that the number of keys is correct
def validate_dict_len(dict_):
    """
    Checks that the dictionary provided by the API user has the right number of keys.
    Args:
        Counts the number of keys in the dictionary and, if it differs from the
        expected number, reminds the user which keys should be used.
    Returns:
        True or False
    """
    allowed_keys = ["speaker", "sentence", "minute", "debate"]
    if len(dict_) == 4:
        return True
    else:
        print(f"Incorrect keys: These are the keys allowed and needed: {allowed_keys}")
        return False
# Function 3: validates allowed/needed keys and checks whether any key value is missing
def validate_keys(dict_):
    """
    Validates that the keys are allowed and needed, and checks whether any key value is missing.
    This function calls the validate_keys_allowed and validate_dict_len functions.
    Args:
        Runs validate_keys_allowed; if True, runs validate_dict_len;
        if True, checks whether there are keys with missing values.
    Returns:
        True or False
    """
    if validate_keys_allowed(dict_):
        if validate_dict_len(dict_):
            for k, v in dict_.items():
                if len(v) == 0:
                    print(f"The value of {k} is missing")
                    return False
            return True
    return False
# Validates allowed/needed keys, checks for missing key values, checks whether the "speaker"
# value already exists (inserting it into the speakers table if not), and gets the speakerid
# needed for inserting new registers into the speeches table.
def insert_register(dict_):
    """
    This function calls validate_keys_allowed, validate_dict_len and validate_keys to
    validate that the keys are allowed and needed and to check whether any key value is missing.
    If the dictionary has the correct format, it looks up the speaker's id (needed for inserting
    new registers) and checks for sentence duplicates.
    Args:
        validate_keys_allowed runs; if True
            -> runs validate_dict_len:
                -> if True, checks whether there are keys with missing values
                -> if False, reports the problem with the dictionary provided by the API user
            -> checks whether the speaker key value is already in the political_debates speakers table:
                if True -> queries SQL for the speaker id
                if False -> adds the speaker value to the speakers table and gets the new speakerid autogenerated by the SQL table
            -> checks whether the sentence value is already in the speeches table:
                - if True -> returns a message to the user flagging that it is already in the database
                - if False -> inserts the new register into the speakers and speeches SQL tables
    Returns:
        Error messages (invalid dictionary) or a successful-upload message
    """
# makes a copy of the dictionary
dict_copy = dict(dict_)
if validate_keys(dict_copy):
for k, v in dict_copy.items():
if k == "speaker":
speaker_query = list(engine.execute(f"SELECT speaker FROM speakers WHERE speaker = '{v}'"))
if len(speaker_query) > 0:
speakerid = list(engine.execute(f"SELECT speakerid FROM speakers WHERE speaker ='{v}';"))[0][0]
else:
engine.execute(f"INSERT INTO speakers (speaker) VALUES ('{v}');")
speakerid = list(engine.execute(f"SELECT speakerid FROM speakers WHERE speaker ='{v}';"))[0][0]
elif k == "sentence":
sentence_query = list(engine.execute(f"SELECT sentence FROM speeches WHERE sentence = '{v}'"))
print(dict_copy)
if len(sentence_query) > 0:
print(f"'{v}' is already in the database")
return f"'{v}' is already in the database"
else:
pass
#deletes "speaker" key and creates new key value "speakerid" needed to INSERT INTO speeches table
del dict_copy["speaker"]
dict_copy['speakerid'] = speakerid
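        # tokenizer() is assumed to be defined or imported elsewhere in the project; it is not defined in this file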
dict_copy['tokens'] = tokenizer(dict_copy['sentence'])
query_insert = f"""
INSERT INTO speeches (minute, sentence, tokens, debate, speakers_speakerid)
VALUES ("{dict_copy['minute']}","{dict_copy['sentence']}", "{dict_copy['tokens']}","{dict_copy['debate']}", {dict_copy['speakerid']});
"""
print(query_insert)
engine.execute(query_insert)
return f"The register was uploaded successfully {dict_}"
# -----DEFENSIVE PROGRAMMING / ERROR HANDLING----END
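# Illustrative usage of the validation chain above (hypothetical values, not part of the original module):
#   register = {"speaker": "Example Speaker", "minute": "01:10",
#               "sentence": "An example sentence", "debate": "example_debate"}
#   validate_keys_allowed(register)  # True  - all four required keys are present
#   validate_dict_len(register)      # True  - exactly four keys
#   validate_keys(register)          # True  - no empty values
#   insert_register(register)        # inserts the register, or flags a duplicate sentence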
def sentiment(sentence):
"""
This function reads strings (sentence) tokenized and gives back its polarity
Args:
Recieves a string (sentence) and applies compound score, this score
is computed by summing the valence scores of each word in the lexicon,
adjusted according to the rules, and then normalized to be between -1
(most extreme negative) and +1 (most extreme positive).
Returns:
String(sentence) polarity
"""
sia = SentimentIntensityAnalyzer()
polaridad = sia.polarity_scores(sentence)
pol = polaridad["compound"]
return pol
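# Illustrative examples (scores are approximate and depend on the VADER lexicon version):
#   sentiment("I love this debate")   # positive compound score, roughly +0.6
#   sentiment("I hate this debate")   # negative compound score, roughly -0.6
#   sentiment("The debate is today")  # neutral compound score, close to 0.0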
def get_polarity():
"""
    This function gives back all the information within the political_debates database
and calculates the polarity of each sentence said
Args:
query from flask end point http://127.0.0.1:5000/sentiment/
Returns:
json with query results with new object 'polarity'
"""
df = pd.read_sql_query("""
SELECT speaker, speakerid, minute, sentence, tokens, speakers_speakerid, debate FROM speeches
INNER JOIN speakers ON speeches.speakers_speakerid = speakers.speakerid
""", engine)
df["polarity"] = df.tokens.apply(sentiment)
return df.to_json(orient="records")
def get_speaker_polarity(speaker):
"""
Gets all data from political_debates filtered by given speaker name
Args:
query from flask end point http://127.0.0.1:5000/sentiment/<speaker>
Returns:
        json with query results adding a calculated polarity parameter
"""
query = f"""
SELECT speaker, speakerid, minute, sentence, tokens, speakers_speakerid, debate FROM speeches
INNER JOIN speakers ON speeches.speakers_speakerid = speakers.speakerid
WHERE speaker = '{speaker}'"""
df = pd.read_sql_query(query,engine)
df["polarity"] = df.tokens.apply(sentiment)
return df.to_json(orient="records")
|
7e4a97c0415d1508bdb6667ae518aa5a892bfefc
|
[
"Markdown",
"SQL",
"Python"
] | 7 |
Python
|
maria-luisa-gomez/political_debates_sentiment_analysis
|
c5622e9c05a4efe22e18ff1d47e120e1d6fd0cb4
|
21b821a8160d78e4822603fb893374de696cd222
|
refs/heads/master
|
<file_sep>using Android.App;
using Android.Widget;
using Android.OS;
using Android.Views;
using Com.Cooltechworks.Creditcarddesign;
using System;
using Android.Content;
namespace App
{
[Activity(Label = "App", MainLauncher = true, Icon = "@mipmap/icon")]
public class MainActivity : Activity
{
private int CREATE_NEW_CARD = 0;
private LinearLayout cardContainer;
private Button addCardButton;
protected override void OnCreate(Bundle savedInstanceState)
{
base.OnCreate(savedInstanceState);
SetContentView(Resource.Layout.Main);
Init();
SubscribeUi();
}
void Init()
{
addCardButton = FindViewById<Button>(Resource.Id.add_card);
cardContainer = FindViewById<LinearLayout>(Resource.Id.card_container);
populate();
}
private void populate()
{
var sampleCreditCardView = new CreditCardView(this);
var name = "<NAME>";
var cvv = "420";
var expiry = "01/18";
var cardNumber = "4242424242424242";
sampleCreditCardView.CVV = cvv;
sampleCreditCardView.CardHolderName = name;
sampleCreditCardView.SetCardExpiry(expiry);
sampleCreditCardView.CardNumber = cardNumber;
cardContainer.AddView(sampleCreditCardView);
int index = cardContainer.ChildCount - 1;
addCardListener(index, sampleCreditCardView);
}
public class ClickListener : Java.Lang.Object, View.IOnClickListener
{
public Action<View> Click { get; set; }
public void OnClick(View v) => Click?.Invoke(v);
}
private void SubscribeUi()
{
addCardButton.SetOnClickListener(new ClickListener
{
Click = v =>
{
Intent intent = new Intent(this, typeof(CardEditActivity));
StartActivityForResult(intent, CREATE_NEW_CARD);
}
});
}
private void addCardListener(int index, CreditCardView creditCardView)
{
creditCardView.SetOnClickListener(new ClickListener
{
Click = v =>
{
var cv = v as CreditCardView;
String cardNumber = cv.CardNumber;
String expiry = cv.Expiry;
String cardHolderName = cv.CardHolderName;
String cvv = cv.CVV;
Intent intent = new Intent(this, typeof(CardEditActivity));
intent.PutExtra(CreditCardUtils.ExtraCardHolderName, cardHolderName);
intent.PutExtra(CreditCardUtils.ExtraCardNumber, cardNumber);
intent.PutExtra(CreditCardUtils.ExtraCardExpiry, expiry);
intent.PutExtra(CreditCardUtils.ExtraCardShowCardSide, CreditCardUtils.CardSideBack);
intent.PutExtra(CreditCardUtils.ExtraValidateExpiryDate, false);
// start at the CVV activity to edit it as it is not being passed
intent.PutExtra(CreditCardUtils.ExtraEntryStartPage, CreditCardUtils.CardCvvPage);
StartActivityForResult(intent, index);
}
});
}
protected override void OnActivityResult(int requestCode, Result resultCode, Intent data)
{
if (resultCode == Result.Ok)
{
// Debug.printToast("Result Code is OK", getApplicationContext());
String name = data.GetStringExtra(CreditCardUtils.ExtraCardHolderName);
String cardNumber = data.GetStringExtra(CreditCardUtils.ExtraCardNumber);
String expiry = data.GetStringExtra(CreditCardUtils.ExtraCardExpiry);
String cvv = data.GetStringExtra(CreditCardUtils.ExtraCardCvv);
if (requestCode == CREATE_NEW_CARD)
{
CreditCardView creditCardView = new CreditCardView(this)
{
CVV = cvv,
CardHolderName = name,
CardNumber = cardNumber
};
creditCardView.SetCardExpiry(expiry);
cardContainer.AddView(creditCardView);
int index = cardContainer.ChildCount - 1;
addCardListener(index, creditCardView);
}
else
{
CreditCardView creditCardView = cardContainer.GetChildAt(requestCode) as CreditCardView;
creditCardView.SetCardExpiry(expiry);
creditCardView.CardNumber = cardNumber;
creditCardView.CardHolderName = name;
creditCardView.CVV = cvv;
}
}
}
}
}<file_sep>
# Xamarin.Android.CreditCardView [](https://app.bitrise.io/app/89ff40c0afcad133) [](https://www.nuget.org/packages/Xamarin.Android.CreditCardView/)
CreditCard layout and validation to enter credit card data easily with fancy animations.

Port of https://github.com/kibotu/CreditCardView to Xamarin.
## How to use
Have a look at [README.md](https://github.com/kibotu/CreditCardView/blob/master/README.md)
## How to install
### Android
Add [Xamarin.Android.CreditCardView](https://www.nuget.org/packages/Xamarin.Android.CreditCardView)
    PM> Install-Package Xamarin.Android.CreditCardView -Version 1.0.4
## Contributors
[<NAME>](<EMAIL>)
|
f6d75cda9b8e4aaf8ea8601ae25b379182561856
|
[
"Markdown",
"C#"
] | 2 |
C#
|
kibotu/Xamarin.Android.CreditCardView
|
873c55a5a7c589884e115149bf9561a3f1f76748
|
10aa95b6c5c77238278bc1200a36adbca250fa81
|
refs/heads/master
|
<file_sep>package com.alibaba.csp.sentinel.dashboard.rule.zookeeper;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
* @author rodbate
* @since 2019/04/20 14:58
*/
@Getter
@Setter
@ConfigurationProperties(prefix = "zookeeper.config")
public class ZookeeperConfigProperties {
private String connectString;
private int sessionTimeout;
private int connectionTimeout;
}
|
208f6b3effb733f847f94d2234fb8de30a6e259d
|
[
"Java"
] | 1 |
Java
|
yvanme/Sentinel
|
3c63c94188521079df59fdf4fc11c4ae840d7ff3
|
3cac1cb9e6222e8c6421947b67be059bac3031a8
|
refs/heads/master
|
<repo_name>thelittlefireman/SPP_TP3_4<file_sep>/src/Exercice2/Bob.java
package Exercice2;
import java.util.concurrent.Exchanger;
/**
* Created by Thomas on 29/03/2016.
*/
public class Bob extends AliceBobBaseRunnable {
public Bob(Exchanger<String> ex,String mainString){
super(ex,mainString);
name = "bob ";
}
}
<file_sep>/src/Exercice2/mainEx2.java
package Exercice2;
import java.util.concurrent.Exchanger;
/**
* Created by Thomas on 29/03/2016.
*/
public class mainEx2 {
static Exchanger<String> exchanger;
public static void main(String[] args) {
        //Initialise our variables
        //Alice starts with ping
        //Bob starts with pong
exchanger = new Exchanger<String>();
Thread t1 = new Thread(new Alice(exchanger,"ping"));
Thread t2 = new Thread(new Bob(exchanger,"pong"));
        //Run both threads
t1.start();
t2.start();
}
}
<file_sep>/src/jUnitTest/SemaphoreJUnitTest.java
package jUnitTest;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.Collection;
import java.util.Vector;
import main.SemaphoreImplClass;
import org.junit.Before;
import org.junit.Test;
import interfaces.SemaphoreInterface;
public class SemaphoreJUnitTest {
private Class mySemImpl;
@Before
public void obtainSemaphoreImplementation() throws ClassNotFoundException {
mySemImpl = SemaphoreImplClass.class;
}
private SemaphoreInterface createSemaphore()
throws InstantiationException, IllegalAccessException, ClassNotFoundException {
return (SemaphoreInterface) mySemImpl.newInstance();
}
protected void upThenDown(SemaphoreInterface sem, int count) {
for (int i = 1; i <= count; i++) {
for (int k = 0; k < i; k++)
sem.up();
for (int k = 0; k < i; k++)
sem.down();
} // EndFor i
} // EndMethod upThenDown
@Test(timeout = 20000)
public void testUpThenDownShouldNotBlockSingleThread() throws Exception {
SemaphoreInterface sem = createSemaphore();
upThenDown(sem, 10000);
} // EndMethod testUpThenDownShouldNotBlockSingleThread
@Test(timeout = 20000)
public void testUpThenDownShouldNotBlockMultipleThreads() throws Exception {
SemaphoreInterface sem = createSemaphore();
Collection<Thread> allThreads = new Vector<Thread>();
for (int i = 1; i <= 40; i++) {
Thread t = new UpThenDownThread(sem, this);
t.start();
allThreads.add(t);
}
for (Thread t : allThreads)
t.join();
} // EndMethod testUpThenDownShouldNotBlockMultipleThreads
@Test
public void testThatDownDoesBlock() throws Exception {
SemaphoreInterface sem = createSemaphore();
Thread t = new DowningThread(sem, this);
t.start();
Thread.sleep(1000); // 1s = very long in terms of CPU time
assertTrue(t.isAlive());
sem.up();
t.join(1000);
assertFalse(t.isAlive()); // t should now have finished
} // EndMethod testThatDownDoesBlock
private int countAliveThreads(Collection<Thread> allThreads) {
int res = 0;
for (Thread t : allThreads)
if (t.isAlive())
res++;
return res;
} // EndMethod countAliveThreads
@Test(timeout = 20000)
public void testThatUpUnblocksBlockedThreads() throws Exception {
SemaphoreInterface sem = createSemaphore();
Collection<Thread> allThreads = new Vector<Thread>();
for (int i = 0; i < 10; i++) {
Thread t = new DowningThread(sem, this);
t.start();
allThreads.add(t);
}
Thread.sleep(1000); // leaving some time for threads to start and block
assertEquals(10, countAliveThreads(allThreads));
for (int i = 9; i >= 0; i--) {
sem.up(); // should unblock one thread
do {
Thread.sleep(10); // Busy waiting
}
while (countAliveThreads(allThreads) != i);
} // EndFor i
assertEquals(0, countAliveThreads(allThreads));
} // EndMethod testThatUpUnblocksBlockedThreads
@Test(timeout = 20000)
public void testThatUpUnblocksBlockedThreadsWithUpperThreads() throws Exception {
SemaphoreInterface sem = createSemaphore();
Collection<Thread> allThreads = new Vector<Thread>();
for (int i = 0; i < 100; i++) {
Thread t = new DowningThread(sem, this);
t.start();
allThreads.add(t);
}
Thread.sleep(1000); // leaving some time for threads to start and block
for (int i = 0; i < 100; i++) {
Thread t = new UppingThread(sem, this);
t.start();
allThreads.add(t);
}
// all threads should finish, including blocked ones
for (Thread t : allThreads)
t.join();
} // EndMethod testThatUpUnblocksBlockedThreadsWithUpperThreads
@Test(timeout = 20000)
public void testThatReleaseAllWorksWithNoThreadWaiting() throws Exception {
SemaphoreInterface sem = createSemaphore();
        // testing that releaseAll and up do not interfere
for (int i = 0; i < 100; i++) {
assertEquals(0, sem.releaseAll());
sem.up();
} // EndFor
} // EndMethod testThatReleaseAllWorksWithNoThreadWaiting
@Test(timeout = 20000)
public void testThatReleaseAllWorksWithThreadsWaiting() throws Exception {
SemaphoreInterface sem = createSemaphore();
for (int nbWaitingThread = 0; nbWaitingThread < 100; nbWaitingThread++) {
// launching the downing threads
Collection<Thread> allThreads = new Vector<Thread>();
for (int i = 0; i < nbWaitingThread; i++) {
Thread t = new DowningThread(sem, this);
t.start();
allThreads.add(t);
} // EndFor i
// releasing blocked threads. We use a while loop as we don't
// know how long the downing threads will take to initialise.
int totalReleased = 0;
while (totalReleased < nbWaitingThread) {
totalReleased += sem.releaseAll();
Thread.yield();
} // EndWhile
// all threads on the semaphore should now be released
assertEquals(0, sem.releaseAll());
            // all threads should have finished
for (Thread t : allThreads)
t.join();
} // EndFor
} // EndMethod testThatReleaseAllWorksWithThreadsWaiting
@Test(timeout = 20000)
public void testStressWorkloadWithAllConcurrent() throws Exception {
SemaphoreInterface sem = createSemaphore();
Collection<Thread> allThreads = new Vector<Thread>();
// we first create a number of threads blocked on a down operation.
for (int i = 0; i < 200; i++) {
Thread t = new DowningThread(sem, this);
t.start();
allThreads.add(t);
} // EndFor i
// we then create 40 threads doing up and down
for (int i = 1; i <= 40; i++) {
Thread t = new UpThenDownThread(createSemaphore(), this);
t.start();
allThreads.add(t);
}
// releaseAll should unblock 200 threads in total
int totalReleased = 0;
while (totalReleased != 200) {
totalReleased += sem.releaseAll();
Thread.yield();
} // EndWhile
// all threads on the semaphore should now be released
assertEquals(0, sem.releaseAll());
// all threads should have finished
for (Thread t : allThreads)
t.join();
} // EndMethod testStressWorkloadWithAllConcurrent
} // EndClass SemaphoreJUnitTest
class TestingThread extends Thread {
protected SemaphoreInterface mySemaphore;
protected SemaphoreJUnitTest myTestCase;
public TestingThread(SemaphoreInterface aSemaphore, SemaphoreJUnitTest aTestCase) {
mySemaphore = aSemaphore;
myTestCase = aTestCase;
}
} // EndClass TestingThread
class UpThenDownThread extends TestingThread {
public UpThenDownThread(SemaphoreInterface aSemaphore, SemaphoreJUnitTest aTestCase) {
super(aSemaphore, aTestCase);
}
public void run() {
myTestCase.upThenDown(mySemaphore, 2000);
} // EndMethod run
} // EndClass UpThenDownThread
class DowningThread extends TestingThread {
public DowningThread(SemaphoreInterface aSemaphore, SemaphoreJUnitTest aTestCase) {
super(aSemaphore, aTestCase);
}
public void run() {
mySemaphore.down();
} // EndMethod run
} // EndClass DowningThread
class UppingThread extends TestingThread {
public UppingThread(SemaphoreInterface aSemaphore, SemaphoreJUnitTest aTestCase) {
super(aSemaphore, aTestCase);
}
public void run() {
mySemaphore.up();
} // EndMethod run
} // EndClass UppingThread
|
51ed4d0721293933af95ad951e8728b99cdcdf15
|
[
"Java"
] | 3 |
Java
|
thelittlefireman/SPP_TP3_4
|
72783e3d011acc1242033f287742d059a412d0d7
|
bc7877ec1c5be967774578175053fa9f5be5dfe1
|
refs/heads/master
|
<file_sep>function formatCurrency(type, curr, value) {
const formatter = new Intl.NumberFormat(type, {
currency: curr,
style: "currency"
});
return formatter.format(value);
}
function convertCurrency() {
let amount = amountInput.value;
let inp = inputCurrency.value;
let out = outputCurrency.value;
const currencyTable = {"usd": 1, "vnd": 23329.52, "eur": 0.92, "krw": 1224.22, "cny": 7.1}
const currencyFormat = {"usd": "en-IN", "vnd": "vi", "eur": "eu", "krw": "kr", "cny": "zh-cn"}
let result = (amount * (currencyTable[out] / currencyTable[inp])).toFixed(2);
document.getElementById("result").innerHTML = `${formatCurrency(currencyFormat[inp], inp, amount)} = ${formatCurrency(currencyFormat[out], out, result)}`;
}
function swap() {
var inp = document.getElementById("inputCurrency").value;
var out = document.getElementById("outputCurrency").value;
document.getElementById("inputCurrency").value = out;
document.getElementById("outputCurrency").value = inp;
}
let amountInput = document.getElementById("amount");
let inputCurrency = document.getElementById("inputCurrency");
let outputCurrency = document.getElementById("outputCurrency");
let convertButton = document.getElementById('convertButton');
convertButton.addEventListener("click",convertCurrency);
<file_sep>const exchangeRateVNDandUSD = 23208;
const exchangeRateVNDandEUR = 25330.88;
const exchangeRateUSDandEUR = 0.92;
function vndToUsd(amountVnd) {
return ((amountVnd / exchangeRateVNDandUSD).toFixed(2))
}
function vndToEur(amountVnd) {
return (amountVnd / exchangeRateVNDandEUR)
}
function usdToVnd(amountUsd) {
return (amountUsd * exchangeRateVNDandUSD)
}
function usdToEur(amountUsd) {
return (amountUsd * exchangeRateUSDandEUR)
}
function eurToVnd(amountEur) {
return (amountEur * exchangeRateVNDandEUR)
}
function eurToUsd(amountEur) {
return (amountEur * exchangeRateUSDandEUR)
}
function formatUSD(type, value) {
const formatter = new Intl.NumberFormat(type, {
currency: 'USD',
style: "currency"
});
return formatter.format(value);
}
function formatVND(type, value) {
const formatter = new Intl.NumberFormat(type, {
currency: 'VND',
style: "currency"
});
return formatter.format(value);
}
function formatEUR(type, value) {
const formatter = new Intl.NumberFormat(type, {
currency: 'EUR',
style: "currency"
});
return formatter.format(value);
}
let currency = prompt("Choose your input currency VND, USD, or EUR?");
//== compares values with type coercion; === compares both value and type
//NaN != NaN evaluates to true
if (currency === "VND") {
let amountVND = prompt("How much VND?");
let check_number = parseInt(amountVND);
if (Object.is(check_number, NaN)) {
console.log("Please input a number!");
} else {
let currency = prompt("Choose your output currency USD or EUR?");
if (currency === "USD") {
let res_vndToUsd = vndToUsd(check_number);
console.log("From VND to USD:", formatUSD('en-IN',res_vndToUsd));
}
else if (currency === "EUR") {
let res_vndToEur = vndToEur(check_number);
console.log("From VND to EUR:", formatEUR('eu', res_vndToEur));
}
}
} else if (currency === "USD") {
let amountUSD = prompt("How much USD?");
let check_number = parseInt(amountUSD);
if (Object.is(check_number, NaN)) {
console.log("Please input a number!");
} else {
let currency = prompt("Choose your output currency VND or EUR?");
if (currency === "VND") {
let res_usdToVnd = usdToVnd(check_number);
console.log("From USD to VND:", formatVND('vi',res_usdToVnd));
}
else if (currency === "EUR") {
let res_usdToEur = usdToEur(check_number);
console.log("From USD to EUR:", formatEUR('eu', res_usdToEur));
}
}
} else if (currency === "EUR") {
let amountEUR = prompt("How much EUR?");
let check_number = parseInt(amountEUR);
if (Object.is(check_number, NaN)) {
console.log("Please input a number!");
} else {
let currency = prompt("Choose your output currency VND or USD?");
if (currency === "VND") {
let res_eurToVnd = eurToVnd(check_number);
console.log("From EUR to VND:", formatVND('vi',res_eurToVnd));
}
else if (currency === "USD") {
let res_eurToUsd = eurToUsd(check_number);
console.log("From EUR to USD:", formatEUR('eu', res_eurToUsd));
}
}
}
|
a86dd3245b36061cdd8932da72292ffb6a26dee8
|
[
"JavaScript"
] | 2 |
JavaScript
|
fool1280/Currency-Convert
|
d06628918ac5ed61e130d6227bf78a84ffe8bbda
|
e5fe2b189d6216b493e20846971d4d2b28e9ef16
|
refs/heads/master
|
<repo_name>Latitude-Cartagene/luminance-js<file_sep>/README.md
# luminance-js
Detect the luminance of a HEX color
```
luminance("#fff") # 1
luminance("#000") # 0<file_sep>/luminance.js
// http://www.w3.org/TR/2008/REC-WCAG20-20081211/#relativeluminancedef
export const luminance = (color = "ffffff") => {
let RsRGB, GsRGB, BsRGB, R, G, B
if (color.length === 0) {
color = "ffffff"
}
// Validate hex color
color = String(color).replace(/[^0-9a-f]/gi, "")
const valid = new RegExp(/^(?:[0-9a-f]{3}){1,2}$/i).test(color)
if (valid) {
if (color.length < 6) {
color = color[0] + color[0] + color[1] + color[1] + color[2] + color[2]
}
} else {
throw new Error("Invalid HEX color !")
}
// Convert color to RGB
const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(color)
const rgb = {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16)
}
RsRGB = rgb.r / 255
GsRGB = rgb.g / 255
BsRGB = rgb.b / 255
R = RsRGB <= 0.03928 ? RsRGB / 12.92 : Math.pow(((RsRGB + 0.055) / 1.055), 2.4)
G = GsRGB <= 0.03928 ? GsRGB / 12.92 : Math.pow(((GsRGB + 0.055) / 1.055), 2.4)
B = BsRGB <= 0.03928 ? BsRGB / 12.92 : Math.pow(((BsRGB + 0.055) / 1.055), 2.4)
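  // Combine the linearized channels with the WCAG 2.0 weights (0 = darkest black, 1 = lightest white)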
return (0.2126 * R) + (0.7152 * G) + (0.0722 * B)
}
|
a14869fd746c3ee45df01ec13642c3f1c7781a67
|
[
"Markdown",
"JavaScript"
] | 2 |
Markdown
|
Latitude-Cartagene/luminance-js
|
a1563ef49e085afc8953508f1e680d4c1a2b68f1
|
f7aecfd115f3e77bca7cbb7c91c18c589061b5c2
|
refs/heads/master
|
<file_sep><?php
$args = array('category_name' => 'socials');
$catposts = get_posts($args);
foreach($catposts as $post) {
setup_postdata($post);
?>
<li>
<a href="<?php echo get_post_meta($post->ID, 'url', true); ?>" target="_blank">
<i class="fa fa-<?php echo get_post_meta($post->ID, 'name', true); ?>"></i>
</a>
</li>
<?php
}
wp_reset_postdata();
?>
<file_sep><div class="section-header">
<div class="container">
<div class="row">
<div class="col-md-12">
<?php include 'fotorama.php'; ?>
</div>
</div>
</div>
<h2>Почему мы?</h2>
</div>
<div class="container">
<div class="row">
<div class="col-xs-6 col-md-3">
<div id="telemarketer-icon" class="feature-icon">
</div>
<div class="feature-text">
Всегда на связи
</div>
</div>
<div class="col-xs-6 col-md-3">
<div id="time-icon" class="feature-icon">
</div>
<div class="feature-text">
Работаем 24 часа
</div>
</div>
<div class="col-xs-6 col-md-3">
<div id="delivery-icon" class="feature-icon">
</div>
<div class="feature-text">
Быстрая доставка
</div>
</div>
<div class="col-xs-6 col-md-3">
<div id="wallet-icon" class="feature-icon">
</div>
<div class="feature-text">
Удобная оплата
</div>
</div>
</div>
</div>
<file_sep><div class="section-header">
<h2><?php echo get_category_by_slug('about')->name; ?></h2>
<p><?php echo get_category_by_slug('about')->description; ?></p>
</div>
<div class="section-content">
<div class="container">
<div class="row">
<div class="col-md-12 justified wow slideInLeft">
<?php
$args = array('category_name' => 'about');
$catposts = get_posts($args);
foreach($catposts as $post) {
setup_postdata($post);
?>
<h3><?php the_title(); ?></h3>
<p><?php the_content(); ?></p>
<?php
}
wp_reset_postdata();
?>
</div>
</div>
</div>
</div>
<file_sep><?php
/**
* Landing theme functions and definitions.
*/
require_once 'custom-options.php';
/**
 * Theme setup and registration of various WordPress features.
*/
function favorite_flowers_setup() {
    /* Remove unnecessary includes that WP adds automatically */
remove_action('wp_head', 'rsd_link');
remove_action('wp_head', 'wlwmanifest_link');
if ( ! current_user_can( 'manage_options' ) ) {
show_admin_bar( false );
}
    /* Enable attaching featured images to posts */
add_theme_support('post-thumbnails');
    /* Add the header image setting */
$header_info = array(
'default-image' => get_template_directory_uri() . '/img/header-background.jpg'
);
    /* Enable configuring a custom header image */
add_theme_support('custom-header', $header_info);
$header_images = array(
'1' => array(
'url' => get_template_directory_uri() . '/img/bg2.jpg',
'thumbnail_url' => get_template_directory_uri() . '/img/bg2x320.jpg'
),
'2' => array(
'url' => get_template_directory_uri() . '/img/bg3.jpg',
'thumbnail_url' => get_template_directory_uri() . '/img/bg3x320.jpg'
),
'3' => array(
'url' => get_template_directory_uri() . '/img/bg4.jpg',
'thumbnail_url' => get_template_directory_uri() . '/img/bg4x320.jpg'
),
'4' => array(
'url' => get_template_directory_uri() . '/img/bg5.jpg',
'thumbnail_url' => get_template_directory_uri() . '/img/bg5x320.jpg'
),
'5' => array(
'url' => get_template_directory_uri() . '/img/bg6.jpg',
'thumbnail_url' => get_template_directory_uri() . '/img/bg6x320.jpg'
),
'6' => array(
'url' => get_template_directory_uri() . '/img/bg7.jpg',
'thumbnail_url' => get_template_directory_uri() . '/img/bg7x320.jpg'
),
);
register_default_headers($header_images);
    /* Register a custom image size used when displaying the assortment */
add_image_size( 'assort-thumbnail', 750, 750, TRUE );
}
add_action( 'after_setup_theme', 'favorite_flowers_setup' );
/**
 * Widget registration
*/
function favorite_flowers_widgets_init() {
register_sidebar( array(
'name' => __('Логотип', 'landing'),
'id' => 'logo',
'description' => __( 'Вставьте сюда текстовый виджет и перенесите в него код из SVG-файла', 'landing' ),
'before_widget' => '',
'after_widget' => '',
'before_title' => '<span class="hidden">',
'after_title' => '</span>'
));
}
add_action('widgets_init', 'favorite_flowers_widgets_init');
/**
* Скроем ненужные настройки темы
*/
function favorite_flowers_customize_register($wp_customize) {
$wp_customize->remove_section('colors');
$wp_customize->remove_section('static_front_page');
$wp_customize->remove_section('nav_menus');
}
add_action('customize_register', 'favorite_flowers_customize_register');
/**
 * Load styles and scripts.
*/
function favorite_flowers_scripts() {
wp_enqueue_style( 'bootstrap-grid', get_template_directory_uri() . '/libs/bootstrap/bootstrap-grid.min.css');
wp_enqueue_style( 'animate', get_template_directory_uri() . '/libs/animate/animate.min.css');
wp_enqueue_style( 'font-awesome', get_template_directory_uri() . '/libs/font-awesome-4.7.0/css/font-awesome.min.css');
wp_enqueue_style( 'fotorama', get_template_directory_uri() . '/libs/fotorama-4.6.4/fotorama.css');
wp_enqueue_style( 'fonts', get_template_directory_uri() . '/css/fonts.css');
wp_enqueue_style( 'main', get_template_directory_uri() . '/css/main.css');
wp_enqueue_style( 'mymedia', get_template_directory_uri() . '/css/media.css');
wp_enqueue_script('s-circletype-jquery', get_template_directory_uri() . '/libs/CircleType/vendor/jquery-1.9.1.min.js');
wp_enqueue_script('s-circletype-modernizr', get_template_directory_uri() . '/libs/CircleType/vendor/modernizr-2.6.2-respond-1.1.0.min.js');
wp_enqueue_script('s-circletype-plugins', get_template_directory_uri() . '/libs/CircleType/plugins.js');
wp_enqueue_script('s-circletype', get_template_directory_uri() . '/libs/CircleType/circletype.js');
wp_enqueue_script('s-wow', get_template_directory_uri() . '/libs/wow/wow.min.js');
wp_enqueue_script('s-fotorama', get_template_directory_uri() . '/libs/fotorama-4.6.4/fotorama.js');
wp_enqueue_script('s-PageScroll2id', get_template_directory_uri() . '/libs/scroll2id/PageScroll2id.min.js');
wp_enqueue_script('s-jqBootstrapValidation', get_template_directory_uri() . '/libs/jqBootstrapValidation/jqBootstrapValidation.js');
wp_enqueue_script('s-common', get_template_directory_uri() . '/js/common.js');
}
add_action( 'wp_enqueue_scripts', 'favorite_flowers_scripts' );
<file_sep><div class="section-header">
<h2><?php echo get_category_by_slug('flowers')->name; ?></h2>
</div>
<div class="section-content">
<div class="container">
<div class="row">
<?php
$args = array('category_name' => 'flowers', 'numberposts' => -1);
$catposts = get_posts($args);
foreach($catposts as $post) {
setup_postdata($post);
?>
<div class="col-xs-12 col-sm-6 col-md-3">
<div class="bouquet center-block">
<div class="bouquet-image">
<?php echo the_post_thumbnail('',array('class' => '')); ?>
</div>
<div class="bouquet-name">
<?php the_title(); ?>
</div>
<div class="bouquet-description">
<?php the_content(); ?>
</div>
<div class="bouquet-price">
<?php echo get_post_meta($post->ID, 'price', true); ?>
</div>
</div>
</div>
<?php
}
wp_reset_postdata();
?>
</div>
</div>
</div>
<file_sep><?php
add_action('admin_init', function(){
add_settings_field(
'phone_number', // id
'Номер телефона', // title
'phone_number_display', // callback
'general' // where to show this option
);
register_setting(
'general',
'phone_number'
);
function phone_number_display(){
echo '<input type="text" name="phone_number" id="phone_number" value="' . get_option('phone_number') . '">';
}
});<file_sep><div class="section-header">
<h2><?php echo get_category_by_slug('questions')->name; ?></h2>
</div>
<div class="section-content">
<div class="container">
<div class="row">
<div class="col-md-12">
<?php
$args = array('category_name' => 'questions','numberposts' => 1);
$catposts = get_posts($args);
foreach($catposts as $post) {
setup_postdata($post);
?>
<span><?php the_content(); ?></span>
<div class="city">
<?php echo get_post_meta($post->ID, 'city', true); ?>
</div>
<div class="phone">
<?php echo get_post_meta($post->ID, 'phone', true); ?>
</div>
<a href="#" class="accented-btn wow flipInX" id="show-call-form">
<?php echo get_post_meta($post->ID, 'button-text', true); ?>
</a>
<?php
}
wp_reset_postdata();
?>
</div>
</div>
</div>
</div>
<file_sep><div class="section-header">
<h2><?php echo get_category_by_slug('why-us')->name; ?></h2>
</div>
<div class="section-content">
<div class="container">
<div class="row">
<?php
$args = array('category_name' => 'why-us','numberposts' => 3);
$catposts = get_posts($args);
foreach($catposts as $post) {
setup_postdata($post);
?>
<div class="col-sm-4">
<div class="why-us-block">
<div class="why-us-img wow flipInY">
<?php echo the_post_thumbnail('full',array('class' => 'img-circle img-responsive')); ?>
</div>
<h3><?php the_title(); ?></h3>
<span><?php the_content(); ?></span>
</div>
</div>
<?php
}
wp_reset_postdata();
?>
</div>
</div>
</div>
<file_sep><footer class="main-footer">
<div class="container">
<div class="row">
<div class="col-md-4">
<div class="footer-block">
<ul class="v-centered">
<li>© 2017 Favorite Flowers</li>
<li>Доставка цветов по Москве и области</li>
<li><?php echo get_option('phone_number')?></li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="footer-block">
<ul class="socials-list v-centered">
<?php include 'helpers/socials.php'; ?>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="footer-block">
<div class="v-centered">
Все права защищены
</div>
</div>
</div>
</div>
</div>
</footer>
<?php wp_footer();?>
</body>
</html>
<file_sep><div class="fotorama-wrapper">
<div class="fotorama" data-maxheight="400px" data-loop="true" data-autoplay="true" data-width="100%">
<?php
$args = array('category_name' => 'fotorama');
$catposts = get_posts($args);
foreach($catposts as $post) {
setup_postdata($post);
echo the_post_thumbnail('',array('class' => ''));
}
wp_reset_postdata();
?>
</div>
</div>
<file_sep><?php get_header(); ?>
<section class="wow fadeIn">
<?php include 'helpers/icons.php'; ?>
</section>
<!-- About us -->
<section id="about" class="section-shadow wow fadeIn">
<?php include 'helpers/about.php'; ?>
</section>
<!-- Flowers -->
<section id="flowers" class=" wow fadeIn">
<?php include 'helpers/flowers.php'; ?>
</section>
<!-- Delivery and payment -->
<section id="delivery" class="section-shadow wow fadeIn">
<?php include 'helpers/delivery.php'; ?>
</section>
<!-- Why us? -->
<section id="why-us" class=" wow fadeIn">
<?php include 'helpers/why-us.php'; ?>
</section>
<!-- Any questions? -->
<section id="questions" class="section-shadow wow fadeIn">
<?php include 'helpers/questions.php' ?>
</section>
<?php get_footer(); ?>
|
61631b1bbea6fde09f516063b8d7718e4387e3a8
|
[
"PHP"
] | 11 |
PHP
|
Tordan/favorite-flowers
|
6e5603516e9b928a854041036071c6a1228e5231
|
f57b660e1597e339abdaacb87d320a18c460db16
|
refs/heads/master
|
<file_sep>using System.Threading.Tasks;
using Seyit.Data.Infrastructure;
namespace Seyit.Data.Suppliers
{
public interface ISupplierRepository:IGenericRepository<Supplier>
{
Task<SupplierComboDto[]> GetCombosAsync();
}
}<file_sep>using Microsoft.Extensions.DependencyInjection;
namespace Seyit.Core
{
public interface IContainerInstaller
{
void Install(IServiceCollection services);
}
}<file_sep>using System.Threading;
using System.Threading.Tasks;
using MediatR;
using Seyit.Core;
using Seyit.Data;
using Seyit.Data.Airways;
namespace Seyit.Business.Airways.Query
{
public class GetAirwayCombosQueryHandler : IRequestHandler<GetAirwayCombosQuery, AirwayComboDto[]>
{
private readonly IAirwayRepository _airwayRepository;
public GetAirwayCombosQueryHandler(IAirwayRepository airwayRepository)
{
_airwayRepository = airwayRepository;
}
public Task<AirwayComboDto[]> Handle(GetAirwayCombosQuery request, CancellationToken cancellationToken)
{
return _airwayRepository.GetCombosAsync();
}
}
}<file_sep>using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Seyit.Data.Infrastructure;
namespace Seyit.Data.Airways
{
public class AirwayRepository : GenericRepository<Airway>, IAirwayRepository
{
public AirwayRepository(SeyitDbContext dbContext) : base(dbContext)
{
}
public async Task<AirwayComboDto[]> GetCombosAsync()
{
return await Table.Where(x => x.Status).Select(AirwayComboDto.Projection).ToArrayAsync();
}
}
}<file_sep>using FluentValidation;
using Seyit.Business.Airways.Command;
namespace Seyit.Business.Airways.Validator
{
public class CreateAirwayCommandValidator:AbstractValidator<CreateAirwayCommand>
{
public CreateAirwayCommandValidator()
{
RuleFor(x => x.AirWayName).NotEmpty();
}
}
}<file_sep>using System.Reflection;
using FluentValidation;
using MediatR;
using Microsoft.Extensions.DependencyInjection;
using Seyit.Core;
using Seyit.Data.Infrastructure;
namespace Seyit.Business.Infrastructure
{
public class BusinessLayerInstaller:IContainerInstaller
{
public void Install(IServiceCollection services)
{
new DataLayerInstaller().Install(services);
services.AddScoped<IDateTimeProvider, DateTimeProvider>();
services.AddTransient(typeof(IPipelineBehavior<,>), typeof(ValidationBehavior<,>));
services.AddMediatR(Assembly.GetExecutingAssembly());
services.AddValidatorsFromAssembly(Assembly.GetExecutingAssembly());
}
}
}<file_sep>using System.Threading.Tasks;
namespace Seyit.Data.Infrastructure
{
public interface IGenericRepository<T> where T :class
{
ValueTask<T> GetByIdAsync(object id);
void Insert(T obj);
void Update(T obj);
void Delete(object id);
}
}<file_sep>using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace Seyit.Data.SystemAccounts
{
public class SystemAccountConfiguration:IEntityTypeConfiguration<SystemAccount>
{
public void Configure(EntityTypeBuilder<SystemAccount> builder)
{
builder.HasKey(x => x.AccountId);
builder.Property(x => x.AccountCode);
builder.Property(x => x.AccountName);
builder.Property(x => x.AccountPath);
builder.Property(x => x.CurrencyId);
builder.Property(x => x.CreationDate);
builder.Property(x => x.ParentAccountId);
builder.Property(x => x.RegionLetter);
//.HasMaxLength().HasColumnType();
}
}
}<file_sep>using System;
using System.Threading.Tasks;
using MediatR;
using Microsoft.AspNetCore.Mvc;
using Seyit.Business.Airways.Command;
using Seyit.Business.Suppliers.Command;
using Seyit.Business.Suppliers.Query;
using Seyit.Data.Suppliers;
namespace Seyit.Web.Controllers
{
[ApiController]
[Route("[controller]")]
public class SuppliersController : ControllerBase
{
private readonly IMediator _mediator;
public SuppliersController(IMediator mediator)
{
_mediator = mediator;
}
[HttpGet]
public async Task<IActionResult> GetCombos()
{
return Ok(await _mediator.Send(new GetSupplierCombosQuery()));
}
[HttpPost]
public async Task<IActionResult> Create(CreateSupplierCommand command)
{
var id = await _mediator.Send(command);
return Created(String.Empty, id);
}
}
}<file_sep>using System.Reflection;
using Microsoft.Extensions.DependencyInjection;
using Seyit.Core;
namespace Seyit.Data.Infrastructure
{
public class DataLayerInstaller:IContainerInstaller
{
public void Install(IServiceCollection services)
{
services.AddDbContext<SeyitDbContext>(ServiceLifetime.Scoped);
services.AddByNameConvention(Assembly.GetExecutingAssembly(), "Repository",new []{"GenericRepository"});
new MockData().Generate();
}
}
}<file_sep>using System;
namespace Seyit.Data.SystemAccounts
{
public class SystemAccount
{
public long AccountId { get; set; }
public long ParentAccountId { get; set; }
public int CurrencyId { get; set; }
public string AccountPath { get; set; }
public string AccountName { get; set; }
public string AccountCode { get; set; }
public string RegionLetter { get; set; }
public DateTime CreationDate { get; set; }
}
}<file_sep>using System;
using System.Threading;
using System.Threading.Tasks;
using MediatR;
using Seyit.Business.Infrastructure;
using Seyit.Data;
using Seyit.Data.Airways;
using Seyit.Data.Suppliers;
using Seyit.Data.SystemAccounts;
namespace Seyit.Business.Airways.Command
{
public class CreateAirwayCommandHandler:IRequestHandler<CreateAirwayCommand,Guid>
{
private readonly IAirwayRepository _airwayRepository;
private readonly ISystemAccountRepository _systemAccountRepository;
private readonly ISupplierRepository _supplierRepository;
private readonly SeyitDbContext _dbContext;
private readonly IDateTimeProvider _dateTimeProvider;
public CreateAirwayCommandHandler(IAirwayRepository airwayRepository, SeyitDbContext dbContext, ISystemAccountRepository systemAccountRepository, IDateTimeProvider dateTimeProvider, ISupplierRepository supplierRepository)
{
_airwayRepository = airwayRepository;
_dbContext = dbContext;
_systemAccountRepository = systemAccountRepository;
_dateTimeProvider = dateTimeProvider;
_supplierRepository = supplierRepository;
}
public async Task<Guid> Handle(CreateAirwayCommand request, CancellationToken cancellationToken)
{
            //TODO: update queries are generated with AsNoTracking.
var supplier = await _supplierRepository.GetByIdAsync(request.SupplierId).ConfigureAwait(false);
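            // The airway's system accounts and the airway row itself are created within a single database transaction.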
_dbContext.Database.BeginTransaction();
var accountId= await _systemAccountRepository.CreateAirwaySystemAccountsAsync(request.AirWayName,request.RegionLetter,_dateTimeProvider.Now);
var entity=new Airway
{
AirWayName = request.AirWayName,
Status = request.Status,
AccountId = accountId,
CarrierCode = request.CarrierCode,
ServicesPrice = request.ServicesPrice,
CanCreateRezervation = request.CanCreateRezervation,
WebServicesUrl = request.WebServicesUrl,
WebServisPassword = request.WebServisPassword,
DefaultAgencyCommissionsPrice = request.DefaultAgencyCommissionsPrice,
WebServicesParamOne = request.WebServicesParamOne,
WebServicesParamTwo = request.WebServicesParamTwo,
WebServicesUserName = request.WebServicesUserName,
SearchServiceTimeOutSecond = request.SearchServiceTimeOutSecond,
Supplier = supplier
};
_airwayRepository.Insert(entity);
await _dbContext.SaveChangesAsync(cancellationToken).ConfigureAwait(false);
_dbContext.Database.CommitTransaction();
return await Task.FromResult(entity.AirWayId);
}
}
}
<file_sep>using System;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Seyit.Data.Currencies;
using Seyit.Data.Infrastructure;
namespace Seyit.Data.SystemAccounts
{
public class SystemAccountRepository:GenericRepository<SystemAccount>,ISystemAccountRepository
{
private const string AirwayParentAccountCode = "OtherSupplier";
private readonly SeyitDbContext _dbContext;
private readonly ICurrencyRepository _currencyRepository;
public SystemAccountRepository(SeyitDbContext dbContext, ICurrencyRepository currencyRepository) : base(dbContext)
{
_dbContext = dbContext;
_currencyRepository = currencyRepository;
}
private async Task<SystemAccount> GetParentAccountInfoAsync(string accountCode)
{
return await Table.AsNoTracking().Where(x => x.AccountCode == accountCode).FirstOrDefaultAsync();
}
public async Task<long> CreateAirwaySystemAccountsAsync(string airwayName,string regionLetter,DateTime creationDate)
{
var currencies = await _currencyRepository.GetCombosAsync().ConfigureAwait(false);
var parentAccount =await GetParentAccountInfoAsync(AirwayParentAccountCode).ConfigureAwait(false);
var airwaySystemAccount=new SystemAccount
{
ParentAccountId = parentAccount.AccountId,
AccountName = airwayName,
CurrencyId = default,
RegionLetter = regionLetter,
AccountCode = String.Empty,
CreationDate = creationDate
};
Insert(airwaySystemAccount);
await _dbContext.SaveChangesAsync();
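            // Saving first lets the database generate AccountId, which is then used to build AccountPath.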
airwaySystemAccount.AccountPath = $"{parentAccount.AccountPath}/{airwaySystemAccount.AccountId}";
Update(airwaySystemAccount);
foreach (var currency in currencies)
{
var account=new SystemAccount
{
ParentAccountId = airwaySystemAccount.AccountId,
AccountName = $"{airwayName} {currency.CurrencyName}",
CurrencyId = currency.CurrencyId,
RegionLetter = airwaySystemAccount.RegionLetter,
AccountCode = String.Empty,
CreationDate = creationDate
};
Insert(account);
await _dbContext.SaveChangesAsync().ConfigureAwait(false);
account.AccountPath = $"{airwaySystemAccount.AccountPath}/{account.AccountId}";
Update(account);
}
return airwaySystemAccount.AccountId;
}
}
}<file_sep>namespace Seyit.Data.Currencies
{
public class Currency
{
public int CurrencyId { get; set; }
public int OrderProcess { get; set; }
public string CurrencyName { get; set; }
public string CurrencyCode { get; set; }
}
}<file_sep>using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace Seyit.Data.Suppliers
{
public class SupplierConfiguration:IEntityTypeConfiguration<Supplier>
{
public void Configure(EntityTypeBuilder<Supplier> builder)
{
builder.HasKey(x => x.SupplierId);
builder.Property(x => x.SupplierName);
builder.Property(x => x.ProcessOrder);
}
}
}<file_sep>using System;
using System.Linq.Expressions;
namespace Seyit.Core
{
public class SearchModel<T>
{
public int CurrentPage { get; set; }
public int PageSize { get; set; }
public Expression<Func<T,bool>> Where { get; set; }
public Expression<Func<T,object>> OrderBy { get; set; }
public bool IsDescending { get; set; }
}
}<file_sep>using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Seyit.Data.Infrastructure;
namespace Seyit.Data.Currencies
{
public class CurrencyRepository :GenericRepository<Currency>, ICurrencyRepository
{
public CurrencyRepository(SeyitDbContext dbContext) : base(dbContext)
{
}
public async Task<CurrencyComboDto[]> GetCombosAsync()
{
return await Table.AsNoTracking().Select(CurrencyComboDto.Projection).ToArrayAsync();
}
}
}<file_sep>using System.Reflection;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Seyit.Data.Currencies;
using Seyit.Data.Suppliers;
using Seyit.Data.SystemAccounts;
namespace Seyit.Data
{
public class SeyitDbContext:DbContext
{
public DbSet<Airway> Airways { get; set; }
public DbSet<Supplier> Suppliers { get; set; }
public DbSet<Currency> Currencies { get; set; }
public DbSet<SystemAccount> SystemAccounts { get; set; }
protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
{
optionsBuilder.UseSqlServer("Server=localhost\\SQLEXPRESS;Database=flight;Trusted_Connection=True;");
#if DEBUG
optionsBuilder.UseLoggerFactory(MyLoggerFactory);
#endif
base.OnConfiguring(optionsBuilder);
}
public static readonly ILoggerFactory MyLoggerFactory
= LoggerFactory.Create(builder =>
{
builder
.AddFilter((category, level) =>
category == DbLoggerCategory.Database.Command.Name
&& level == LogLevel.Information)
.AddConsole();
});
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.ApplyConfigurationsFromAssembly(Assembly.GetExecutingAssembly());
base.OnModelCreating(modelBuilder);
}
}
}<file_sep>using MediatR;
using Seyit.Data.Suppliers;
namespace Seyit.Business.Suppliers.Query
{
public class GetSupplierCombosQuery:IRequest<SupplierComboDto[]>
{
}
}<file_sep>using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace Seyit.Data.Currencies
{
public class CurrencyConfiguration:IEntityTypeConfiguration<Currency>
{
public void Configure(EntityTypeBuilder<Currency> builder)
{
builder.HasKey(x => x.CurrencyId);
builder.Property(x => x.CurrencyCode);
builder.Property(x => x.CurrencyName);
builder.Property(x => x.OrderProcess);
}
}
}<file_sep>using System;
using System.Threading;
using System.Threading.Tasks;
using MediatR;
using Seyit.Data;
using Seyit.Data.Suppliers;
namespace Seyit.Business.Suppliers.Command
{
public class CreateSupplierCommandHandler:IRequestHandler<CreateSupplierCommand,int>
{
private readonly ISupplierRepository _supplierRepository;
private readonly SeyitDbContext _dbContext;
public CreateSupplierCommandHandler(ISupplierRepository supplierRepository, SeyitDbContext dbContext)
{
_supplierRepository = supplierRepository;
_dbContext = dbContext;
}
public async Task<int> Handle(CreateSupplierCommand request, CancellationToken cancellationToken)
{
var entity=new Supplier
{
SupplierId = request.SupplierId,
SupplierName = request.SupplierName,
ProcessOrder = request.ProcessOrder
};
_supplierRepository.Insert(entity);
await _dbContext.SaveChangesAsync(cancellationToken);
return await Task.FromResult(entity.SupplierId);
}
}
}<file_sep>using System.Linq;
using System.Reflection;
using Microsoft.Extensions.DependencyInjection;
namespace Seyit.Core
{
public static class ServiceCollectionExtensions
{
public static void AddByNameConvention(this IServiceCollection services,Assembly assembly ,string nameEndsWith, string[] exceptNames)
{
foreach (var type in assembly.GetTypes()
.Where(x=>x.IsClass && x.Name.EndsWith(nameEndsWith) && !exceptNames.Contains(x.Name)))
{
var singleInterface = type.GetInterfaces().First(x => x.Name.EndsWith(nameEndsWith));
services.AddScoped(singleInterface, type);
}
}
}
}<file_sep>using System.Threading.Tasks;
using Seyit.Data.Infrastructure;
namespace Seyit.Data.Airways
{
public interface IAirwayRepository : IGenericRepository<Airway>
{
Task<AirwayComboDto[]> GetCombosAsync();
}
}
<file_sep>using System;
using System.Threading.Tasks;
using Seyit.Data.Infrastructure;
namespace Seyit.Data.SystemAccounts
{
public interface ISystemAccountRepository:IGenericRepository<SystemAccount>
{
Task<long> CreateAirwaySystemAccountsAsync(string airwayName,string regionLetter,DateTime creationDate);
}
}<file_sep>using System.Collections.Generic;
namespace Seyit.Core
{
public class PagedResult<TDto> where TDto : class
{
public int PageCount { get; set; }
public int RowCount { get; set; }
public IList<TDto> Result { get; set; }
public PagedResult()
{
Result=new List<TDto>();
}
}
}<file_sep>using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Seyit.Data.Infrastructure;
namespace Seyit.Data.Suppliers
{
public class SupplierRepository : GenericRepository<Supplier>, ISupplierRepository
{
public SupplierRepository(SeyitDbContext dbContext) : base(dbContext)
{
}
public async Task<SupplierComboDto[]> GetCombosAsync()
{
return await Table.Select(SupplierComboDto.Projection).ToArrayAsync();
}
}
}<file_sep>using System;
using System.ComponentModel.DataAnnotations.Schema;
using Seyit.Data.Suppliers;
namespace Seyit.Data
{
public class Airway
{
public Guid AirWayId { get; set; }
public string AirWayName { get; set; }
public string CarrierCode { get; set; }
public string WebServicesUserName { get; set; }
public string WebServisPassword { get; set; }
public string WebServicesParamOne { get; set; }
public string WebServicesParamTwo { get; set; }
public decimal ServicesPrice { get; set; }
public decimal DefaultAgencyCommissionsPrice { get; set; }
public bool Status { get; set; }
public string WebServicesUrl { get; set; }
public bool CanCreateRezervation { get; set; }
public Supplier Supplier { get; set; }
public long AccountId { get; set; }
public int SearchServiceTimeOutSecond { get; set; }
}
}<file_sep>using System.Threading.Tasks;
using Seyit.Data.Infrastructure;
namespace Seyit.Data.Currencies
{
public interface ICurrencyRepository:IGenericRepository<Currency>
{
Task<CurrencyComboDto[]> GetCombosAsync();
}
}<file_sep>using System;
using System.Linq.Expressions;
namespace Seyit.Data.Airways
{
public class AirwayComboDto
{
public string AirWayName { get; set; }
public Guid AirWayId { get; set; }
public static Expression<Func<Airway,AirwayComboDto>> Projection
{
get
{
return x => new AirwayComboDto
{
AirWayName = x.AirWayName,
AirWayId = x.AirWayId
};
}
}
}
}<file_sep>using System.Threading;
using System.Threading.Tasks;
using MediatR;
using Seyit.Data.Suppliers;
namespace Seyit.Business.Suppliers.Query
{
public class GetSupplierCombosQueryHandler : IRequestHandler<GetSupplierCombosQuery, SupplierComboDto[]>
{
private readonly ISupplierRepository _supplierRepository;
public GetSupplierCombosQueryHandler(ISupplierRepository supplierRepository)
{
_supplierRepository = supplierRepository;
}
public async Task<SupplierComboDto[]> Handle(GetSupplierCombosQuery request, CancellationToken cancellationToken)
{
return await _supplierRepository.GetCombosAsync();
}
}
}<file_sep>using System;
using System.Linq.Expressions;
namespace Seyit.Data.Suppliers
{
public class SupplierComboDto
{
public int SupplierId { get; set; }
public string SupplierName { get; set; }
public static Expression<Func<Supplier,SupplierComboDto>> Projection
{
get
{
return x => new SupplierComboDto
{
SupplierName = x.SupplierName,
SupplierId = x.SupplierId
};
}
}
}
}<file_sep>using System.Linq;
using MediatR;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Seyit.Business.Airways.Query;
using Seyit.Business.Infrastructure;
namespace Seyit.Web
{
public class Startup
{
public Startup(IConfiguration configuration)
{
Configuration = configuration;
}
public IConfiguration Configuration { get; }
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
services.AddControllers().ConfigureApiBehaviorOptions(options =>
{
options.ClientErrorMapping[500].Title="kk";});
new BusinessLayerInstaller().Install(services);
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
{
app.UseExceptionHandler(env.IsDevelopment() ? "/error-local-development" : "/error");
app.UseHttpsRedirection();
app.UseRouting();
app.UseAuthorization();
app.UseEndpoints(endpoints => { endpoints.MapControllers(); });
}
}
}<file_sep>using Seyit.Data.Currencies;
using Seyit.Data.Suppliers;
using Seyit.Data.SystemAccounts;
namespace Seyit.Data
{
public class MockData
{
public void Generate()
{
var context = new SeyitDbContext();
context.Database.EnsureDeleted();
context.Database.EnsureCreated();
var supplier = new Supplier {ProcessOrder = 1, SupplierName = "supplier 1"};
context.Add(supplier);
var systemAccount=new SystemAccount
{
AccountCode = "OtherSupplier",
AccountPath = "1",
AccountName = "",
                CurrencyId = 0,
                ParentAccountId = 0,
                RegionLetter = "tr-TR"
};
context.Add(systemAccount);
var currency1=new Currency
{
CurrencyCode = "TL",
CurrencyName = "Türk Lirası",
OrderProcess = 1
};
var currency2=new Currency
{
CurrencyCode = "USD",
CurrencyName = "Dolar",
OrderProcess = 2
};
context.Add(currency1);
context.Add(currency2);
context.SaveChanges();
}
}
}<file_sep>using System;
using MediatR;
namespace Seyit.Business.Airways.Command
{
public class CreateAirwayCommand:IRequest<Guid>
{
public string AirWayName { get; set; }
public string CarrierCode { get; set; }
public string WebServicesUrl { get; set; }
public string WebServicesUserName { get; set; }
public string WebServisPassword { get; set; }
public string WebServicesParamOne { get; set; }
public string WebServicesParamTwo { get; set; }
public decimal ServicesPrice { get; set; }
public decimal DefaultAgencyCommissionsPrice { get; set; }
public bool Status { get; set; }
public bool CanCreateRezervation { get; set; }
public int SearchServiceTimeOutSecond { get; set; }
public string RegionLetter { get; set; }
public int SupplierId { get; set; }
}
}<file_sep>using System;
using System.Linq.Expressions;
namespace Seyit.Data.SystemAccounts
{
public class SystemAccountParentAccountDto
{
public string AccountPath { get; set; }
public long AccountId { get; set; }
public static Expression<Func<SystemAccount,SystemAccountParentAccountDto>> Projection
{
get
{
return x => new SystemAccountParentAccountDto
{
AccountPath = x.AccountPath,
AccountId = x.AccountId
};
}
}
}
}<file_sep>using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace Seyit.Data.Airways
{
public class AirwayConfiguration:IEntityTypeConfiguration<Airway>
{
public void Configure(EntityTypeBuilder<Airway> builder)
{
builder.HasKey(x => x.AirWayId);
builder.Property(x => x.AirWayName);
builder.Property(x => x.Status);
builder.Property(x => x.AccountId);
builder.Property(x => x.CarrierCode);
builder.Property(x => x.ServicesPrice);
builder.Property(x => x.CanCreateRezervation);
builder.Property(x => x.WebServicesUrl);
builder.Property(x => x.WebServisPassword);
builder.Property(x => x.DefaultAgencyCommissionsPrice);
builder.Property(x => x.WebServicesParamOne);
builder.Property(x => x.WebServicesParamTwo);
builder.Property(x => x.WebServicesUserName);
builder.Property(x => x.SearchServiceTimeOutSecond);
builder.HasOne(x => x.Supplier);
}
}
}<file_sep>using System;
using MediatR;
namespace Seyit.Business.Suppliers.Command
{
public class CreateSupplierCommand:IRequest<int>
{
public int SupplierId { get; set; }
public string SupplierName { get; set; }
public int ProcessOrder { get; set; }
}
}<file_sep>using FluentValidation;
using Seyit.Business.Suppliers.Command;
namespace Seyit.Business.Suppliers.Validator
{
public class CreateSupplierCommandValidator:AbstractValidator<CreateSupplierCommand>
{
public CreateSupplierCommandValidator()
{
RuleFor(x => x.SupplierName).NotEmpty();
RuleFor(x => x.ProcessOrder).NotEmpty();
}
}
}<file_sep>using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
namespace Seyit.Data.Infrastructure
{
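    // Generic EF Core repository over a single DbSet<T>; saving changes is left to the caller.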
public class GenericRepository<T> : IGenericRepository<T> where T : class
{
private readonly DbSet<T> _table;
public GenericRepository(SeyitDbContext dbContext)
{
_table = dbContext.Set<T>();
}
protected IQueryable<T> Table => _table;
public ValueTask<T> GetByIdAsync(object id)
{
return _table.FindAsync(id);
}
public void Insert(T obj)
{
_table.Add(obj);
}
public void Update(T entity)
{
_table.Update(entity);
}
public void Delete(object id)
{
var existing = _table.Find(id);
_table.Remove(existing);
}
}
}<file_sep>using MediatR;
using Seyit.Data.Airways;
namespace Seyit.Business.Airways.Query
{
public class GetAirwayCombosQuery:IRequest<AirwayComboDto[]>
{
}
}<file_sep>using System;
using System.Linq.Expressions;
namespace Seyit.Data.Currencies
{
public class CurrencyComboDto
{
public int CurrencyId { get; set; }
public string CurrencyName { get; set; }
public static Expression<Func<Currency,CurrencyComboDto>> Projection
{
get
{
return x => new CurrencyComboDto
{
CurrencyId = x.CurrencyId,
CurrencyName = x.CurrencyName
};
}
}
}
}<file_sep>#definitions
* ComboDto : name given to the select-option model
* ListDto : name given to the tabular (list) view model
* DetailDto : name given to the detail view model
#todos
* Logging
* Caching
* ef core performance tuning
* aspnet configuration
*
|
a68166a9dbdf3a9ca849b005d966f3d1751ccbbb
|
[
"Markdown",
"C#"
] | 42 |
C#
|
seyfside/Seyit
|
d4ddf7053d839413dae1911f3ba7d70eefc06e83
|
171d1faf2581657d2fc83074cca3189a99a1fcb2
|
refs/heads/main
|
<file_sep>const fetch = require('node-fetch');
const { nanoid } = require('nanoid');
const API_ENDPOINT = 'https://rest.moveconnect.com/movewareUAT/v1/jobs';
const { MW_COMPANY_ID, MW_USERNAME, MW_PASSWORD } = process.env;
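// Moveware's REST API authenticates and traces requests via custom headers; credentials come from environment variables.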
const headers = {
'mw-correlation-id': nanoid(),
'mw-company-id': MW_COMPANY_ID,
'mw-username': MW_USERNAME,
  'mw-password': MW_PASSWORD,
'mw-request-id': nanoid(),
'Content-Type': 'application/json',
};
exports.handler = async (event) => {
if (event.httpMethod !== 'POST') {
return { statusCode: 405, body: 'Method Not Allowed' };
}
const {
first_name,
last_name,
email_address,
contact_number,
moving_from,
moving_to,
how_did_you_hear,
} = JSON.parse(event.body);
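  // Map the website enquiry form fields onto Moveware's job payload shape.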
const data = JSON.stringify({
type: { code: 'R', text: 'Removal' },
method: { code: 'Road', text: 'Road' },
removalsType: { code: 'CO', text: 'Country Removal' },
referral: {
source: 'Web-FRN',
comments: how_did_you_hear,
otherInfo: '',
},
brandCode: 'FRN',
branchCode: 'FRN',
moveManager: 'LW1',
locations: {
origin: {
contact: {
firstName: first_name,
lastName: last_name,
mobile: contact_number,
email: email_address,
},
address: {
postcode: moving_from,
state: '',
country: 'Australia',
},
},
destination: {
contact: {
title: '',
firstName: first_name,
lastName: last_name,
mobile: contact_number,
email: email_address,
},
address: {
postcode: moving_to,
state: '',
country: 'Australia',
},
},
},
});
return (
fetch(API_ENDPOINT, {
method: 'POST',
body: data,
headers,
})
      .then((response) => response.json())
      .then((json) => {
        // eslint-disable-next-line no-console
        console.log(json);
        return {
          statusCode: 200,
          body: JSON.stringify(json, null, 2),
        };
      })
.catch((error) => ({ statusCode: 422, body: String(error) }))
);
};
|
2c27d6b909d0f1d3483e1aa22ff230005824d207
|
[
"JavaScript"
] | 1 |
JavaScript
|
phirannodesigns/moveconnect-mvp
|
481cf6d4f6fded5ad6eca2badb0d1a97cbd4bb6c
|
d0e2f2d6be716a3dfd64e962161599e95a430384
|
refs/heads/master
|
<repo_name>kpxt/friend-finder<file_sep>/app/routing/apiRoutes.js
var friendData = require("../data/friends");
module.exports = function(app) {
// Displays all friend options
app.get("/api/friends", function(req, res) {
return res.json(friendData);
});
// Displays a single user, or returns false
app.get("/api/friends/:friend", function(req, res) {
var chosen = req.params.friend;
console.log(chosen);
    for (var i = 0; i < friendData.length; i++) {
      if (chosen === friendData[i].routeName) {
        return res.json(friendData[i]);
}
}
return res.json(false);
});
// Create New friend
app.post("/api/friends", function(req, res) {
var bestMatch;
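    // Compare the submitted survey scores against every stored friend and keep the one with the smallest total absolute difference.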
for (var i = 0; i <= friendData.length - 1; i++) {
if (friendData[i].name === req.body.name) {
continue;
} else {
var userScore = friendData[i].scores;
var difference = 0;
for (var j = 0; j <= userScore.length - 1; j++) {
difference = difference + Math.abs(req.body.scores[j] - userScore[j]);
}
if (!(bestMatch == null)) {
if (bestMatch.diff > difference) {
bestMatch = {index: i, diff: difference};
} else if (bestMatch.diff === difference && Math.floor(Math.random() * 2) === 0) {
bestMatch = {index: i, diff: difference};
}
} else {
bestMatch = {index: i, diff: difference};
}
}
}
console.log(req.body);
friendData.push(req.body);
res.json(friendData[bestMatch.index]);
res.end();
});
}
|
67ca7a0b14ba3c9fa824e6c0eb82c1b47ca771af
|
[
"JavaScript"
] | 1 |
JavaScript
|
kpxt/friend-finder
|
5f79703625ff46da18249bdb6cf6567981bb6d0c
|
e1dbf2dbdeda7bba688a352e28138163524002f1
|
refs/heads/main
|
<file_sep>import { Component, OnInit } from '@angular/core';
import { MenuController, ModalController, Platform } from '@ionic/angular';
import { AddPage } from 'src/app/pages/materias/add/add.page';
import { ModalConfirmPage } from 'src/app/pages/modal-confirm/modal-confirm.page';
import { AuthService } from 'src/app/routes/auth.service';
import { MateriasService } from 'src/app/routes/materias.service';
import { UserService } from 'src/app/routes/user.service';
import { EventEmitterService } from 'src/app/services/event-emitter.service';
import { ExecuteService } from 'src/app/services/execute.service';
import { ModalService } from 'src/app/services/modal.service';
import { ToasterService } from 'src/app/services/toaster.service';
@Component({
selector: 'app-materias',
templateUrl: './materias.component.html',
styleUrls: ['./materias.component.scss'],
})
export class MateriasComponent implements OnInit {
private currentId : any = {};
private currentUser: any = {};
private materiasList : any[] = [];
private device : string = "desktop";
private gridOptions = {
materias: "2"
};
constructor(
private emitter : EventEmitterService,
private modal : ModalController,
private modals : ModalService,
private execute : ExecuteService,
private materias : MateriasService,
private toaster : ToasterService,
private platform : Platform,
private menu: MenuController,
private user : UserService,
private auth : AuthService
) { }
ngOnInit() {
this.currentId = JSON.parse(localStorage.getItem('user'));
console.log(this.currentId);
if (this.currentId) {
this.loadUser(this.currentId);
}
else {
this.toaster.presentToast('Falha ao carregar usuário', 'danger', 2000);
this.auth.SignOut();
}
if (this.platform.is('hybrid')) {
this.device = "hybrid";
this.gridOptions = {
materias: "12"
};
}
this.listarMaterias();
}
async loadUser(id : string){
(await this.user.getUsers()).subscribe(data => {
for (let i = 0; i < data.length; i++) {
if (data[i].id == id) {
this.currentUser = data[i].data;
console.error(this.currentUser);
}
}
});
}
async openMenu() {
await this.menu.open();
}
listarMaterias(){
this.materias.listar().subscribe(data => {
this.materiasList = data;
console.error(this.materiasList);
});
}
abrirMateria(materia : string){
console.log(materia);
this.execute.functionExecute('muralComponent', materia);
}
criarMateria(){
let css = "";
if (this.device === 'desktop') css = "fullscreen-modal";
this.modals.modalOpen(AddPage, {}, css);
}
async removerMateria(id : string){
const modal = await this.modal.create({
component: ModalConfirmPage,
mode: 'ios',
showBackdrop: true,
cssClass: 'alert-modal',
componentProps: {
data: 'Deseja excluir esta matéria?'
}
});
modal.onDidDismiss().then(dados => {
let data = dados.data.value;
if (data === true) {
this.materias.apagar(id)
.then(() => this.toaster.presentToast("Matéria excluída com sucesso!", "success", 2000))
.catch(err => this.toaster.presentToast(`Falha ao excluir matéria - ${err}`, "danger", 2000));
}
});
await modal.present();
}
getRandomColor() {
let letters = '0123456789ABCDEF';
let color = '#';
for (let i = 0; i < 6; i++) {
color += letters[Math.floor(Math.random() * 16)];
}
return color;
}
}
<file_sep>import { Component, Input, OnInit } from '@angular/core';
import { ModalController } from '@ionic/angular';
@Component({
selector: 'app-modal-confirm',
templateUrl: './modal-confirm.page.html',
styleUrls: ['./modal-confirm.page.scss'],
})
export class ModalConfirmPage implements OnInit {
@Input() data : any;
constructor(
private modal : ModalController
) { }
ngOnInit() {
}
modalResponse(response : boolean){
this.modal.dismiss({value: response});
}
}
<file_sep>import { TestBed } from '@angular/core/testing';
import { ComponentsRenderService } from './components-render.service';
describe('ComponentsRenderService', () => {
let service: ComponentsRenderService;
beforeEach(() => {
TestBed.configureTestingModule({});
service = TestBed.inject(ComponentsRenderService);
});
it('should be created', () => {
expect(service).toBeTruthy();
});
});
<file_sep>import { Component, Input, OnInit } from '@angular/core';
import { ModalController, Platform } from '@ionic/angular';
import { ChatPage } from 'src/app/pages/materias/chat/chat.page';
import { MuralService } from 'src/app/routes/mural.service';
import { ExecuteService } from 'src/app/services/execute.service';
import { ModalService } from 'src/app/services/modal.service';
import { AddPublicacaoPage } from 'src/app/pages/publicacao/add-publicacao/add-publicacao.page';
import { AlunosComponent } from '../../alunos/alunos.component';
import { AlunoPage } from 'src/app/pages/aluno/aluno.page';
import { UserService } from 'src/app/routes/user.service';
import { AuthService } from 'src/app/routes/auth.service';
import { ToasterService } from 'src/app/services/toaster.service';
@Component({
selector: 'app-mural',
templateUrl: './mural.component.html',
styleUrls: ['./mural.component.scss'],
})
export class MuralComponent implements OnInit {
@Input() data : any;
private currentId : any = {};
private currentUser: any = {};
private arquivos : any[];
private arquivoShow : boolean = false;
private posts : any[];
private postShow : boolean = false;
private alunos : any[];
private alunoShow : boolean = false;
private device : string = "desktop";
constructor(
private execute : ExecuteService,
private modals : ModalService,
private modal : ModalController,
private murals : MuralService,
private platform : Platform,
private user : UserService,
private auth : AuthService,
private toaster : ToasterService
) {
}
async ngOnInit() {
this.currentId = JSON.parse(localStorage.getItem('user'));
console.log(this.currentId);
if (this.currentId) {
this.loadUser(this.currentId);
}
else {
this.toaster.presentToast('Falha ao carregar usuário', 'danger', 2000);
this.auth.SignOut();
}
if(this.platform.is('hybrid')){
this.device = "hybrid";
}
if (this.data == undefined) this.data = this.execute.data;
await this.carregar();
}
async loadUser(id : string){
(await this.user.getUsers()).subscribe(data => {
for (let i = 0; i < data.length; i++) {
if (data[i].id == id) {
this.currentUser = data[i].data;
console.error(this.currentUser);
}
}
});
}
voltar(){
this.execute.functionExecute('materiasComponent', {});
}
visualizar(item : any, tipo : string){
let css = "";
if (this.device === 'desktop') css = "fullscreen-modal";
if (tipo === 'arquivo') {
this.modals.modalOpen(AddPublicacaoPage, { materia: this.data.id, action: 'view-file', item: item }, css);
}
else {
this.modals.modalOpen(AddPublicacaoPage, { materia: this.data.id, action: 'view-post', item: item }, css);
}
}
alunosAdd(){
let css = "";
if (this.device === 'desktop') css = "fullscreen-modal";
this.modals.modalOpen(AlunosComponent, {action: 'modal', id: this.data.id}, css);
}
publicar(){
let css = "";
if (this.device === 'desktop') css = "fullscreen-modal";
this.modals.modalOpen(AddPublicacaoPage, { materia: this.data.id, action: 'add' }, css);
}
async carregar(){
(await this.murals.listarArquivos(this.data.id)).subscribe(dados => this.arquivos = dados);
(await this.murals.listarPublicacoes(this.data.id)).subscribe(dados => this.posts = dados);
(await this.murals.listarAlunos(this.data.id)).subscribe(dados => {
if (!dados) return false;
this.alunos = dados;
});
}
editar(){
}
remover(){
}
visualizarAluno(dados : any){
let css = "";
if (this.device === 'desktop') css = "fullscreen-modal";
this.modals.modalOpen(AlunoPage, dados, css);
}
async abrirChat(){
let css = "";
if (this.device === 'desktop') css = "fullscreen-modal";
await this.modals.modalOpen(ChatPage, { id: this.data.id, nome: this.data.data.nome }, css)
}
showPosts(){
this.postShow ? this.postShow = false : this.postShow = true;
}
showArquivos(){
this.arquivoShow ? this.arquivoShow = false : this.arquivoShow = true;
}
showAlunos(){
this.alunoShow ? this.alunoShow = false : this.alunoShow = true;
}
}
<file_sep>import { Injectable } from '@angular/core';
import { PopoverController } from '@ionic/angular';
@Injectable({
providedIn: 'root'
})
export class PopoverService {
constructor(
private popover : PopoverController
) { }
async presentPopover(page : any, props : any, css : string) {
const popover = await this.popover.create({
component: page,
cssClass: 'my-custom-class',
translucent: true
});
return await popover.present();
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { AuthService } from 'src/app/routes/auth.service';
@Component({
selector: 'app-recuperar',
templateUrl: './recuperar.page.html',
styleUrls: ['./recuperar.page.scss'],
})
export class RecuperarPage implements OnInit {
private email : string;
constructor(
private auth : AuthService
) { }
ngOnInit() {
}
recuperar(){
this.auth.ForgotPassword(this.email);
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { ModalController } from '@ionic/angular';
import { MateriasService } from 'src/app/routes/materias.service';
import { ToasterService } from 'src/app/services/toaster.service';
@Component({
selector: 'app-add',
templateUrl: './add.page.html',
styleUrls: ['./add.page.scss'],
})
export class AddPage implements OnInit {
private materia = {
nome : '',
descricao: ''
};
constructor(
private modal : ModalController,
private materias : MateriasService,
private toaster : ToasterService
) { }
ngOnInit() {
}
closeModal(){
this.modal.dismiss();
}
criar(){
this.materias.adicionar(this.materia).then(() => {
this.toaster.presentToast('Matéria adicionada com sucesso.', 'success', 2000);
this.modal.dismiss();
})
}
}
<file_sep>import { Injectable } from '@angular/core';
import { AngularFireAuth } from '@angular/fire/auth';
import { AngularFirestore } from '@angular/fire/firestore';
import { AngularFireStorage } from '@angular/fire/storage';
import { map } from 'rxjs/operators';
@Injectable({
providedIn: 'root'
})
export class MuralService {
constructor(
private DB:AngularFirestore,
private afa:AngularFireAuth,
private storage: AngularFireStorage,
) { }
async listarArquivos(id : string){
return this.DB.collection('materias').doc(id).collection('arquivos').snapshotChanges().pipe(
map(action => action.map(a=>{
const dados = {
id: a.payload.doc.id,
data: a.payload.doc.data() as any,
};
return dados;
}))
)
}
async listarPublicacoes(id : string){
return this.DB.collection('materias').doc(id).collection('publicacoes').snapshotChanges().pipe(
map(action => action.map(a=>{
const dados = {
id: a.payload.doc.id,
data: a.payload.doc.data() as any,
};
return dados;
}))
)
}
async listarAlunos(id : string){
return this.DB.collection('materias').doc(id).collection('participantes').snapshotChanges().pipe(
map(action => action.map(a=>{
const dados = {
id: a.payload.doc.id,
data: a.payload.doc.data() as any,
};
return dados;
}))
)
}
adicionarAluno(id : string, dados : any){
return this.DB.collection('materias').doc(id).collection('participantes').add(dados);
}
adicionarMateriaAluno(id: string, materias : any){
return this.DB.collection('usuarios').doc(id).update({materias: materias});
}
enviarArquivo(id : string, dados : any){
return this.DB.collection('materias').doc(id).collection('arquivos').add(dados);
}
enviarPost(id : string, dados : any){
return this.DB.collection('materias').doc(id).collection('publicacoes').add(dados);
}
apagarArquivo(materia_id : string, arquivo_id : string){
return this.DB.collection('materias').doc(materia_id).collection('arquivos').doc(arquivo_id).delete()
}
apagarPost(materia_id : string, publicacao_id : string){
return this.DB.collection('materias').doc(materia_id).collection('publicacoes').doc(publicacao_id).delete();
}
}
<file_sep>import { Component, Input, OnInit } from '@angular/core';
import { ModalController } from '@ionic/angular';
import { AuthService } from 'src/app/routes/auth.service';
import { ModalService } from 'src/app/services/modal.service';
import { PopoverService } from 'src/app/services/popover.service';
import { ToasterService } from 'src/app/services/toaster.service';
import { MateriasPage } from '../materias/materias.page';
import { ModalConfirmPage } from '../modal-confirm/modal-confirm.page';
@Component({
selector: 'app-aluno',
templateUrl: './aluno.page.html',
styleUrls: ['./aluno.page.scss'],
})
export class AlunoPage implements OnInit {
@Input() data : any;
private user : any = {};
constructor(
private modals : ModalService,
private modal : ModalController,
private toaster : ToasterService,
private auths : AuthService,
private popovers : PopoverService
) { }
ngOnInit() {
if (!this.data) console.error("Falha ao carregar dados");
console.warn(this.data);
this.user = this.data;
}
async admin(event : any){
if (event.detail.checked) {
this.auths.setUserRole(this.data.id, {role: 'admin'}).then(() => {
this.toaster.presentToast('Administrador adicionado com sucesso', 'success', 2000);
})
}
else {
this.auths.setUserRole(this.data.id, {role: 'user'}).then(() => {
this.toaster.presentToast('Administrador removido com sucesso', 'success', 2000);
})
}
}
materias(){
this.popovers.presentPopover(MateriasPage, {}, '').then(dados => console.log(dados));
}
closeModal(){
this.modal.dismiss();
}
}
<file_sep>import { Injectable } from '@angular/core';
import { AlunosComponent } from '../components/alunos/alunos.component';
import { MateriasComponent } from '../components/materias/materias.component';
import { MuralComponent } from '../components/materias/mural/mural.component';
import { MeusDadosComponent } from '../components/meus-dados/meus-dados.component';
import { SobreComponent } from '../components/sobre/sobre.component';
@Injectable({
providedIn: 'root'
})
export class ComponentsRenderService {
constructor() { }
async resolveComponentsName(componentName : any) {
if (componentName === 'materiasComponent') {
return MateriasComponent;
}
else if (componentName === 'alunosComponent') {
return AlunosComponent;
}
else if (componentName === 'dadosComponent') {
return MeusDadosComponent;
}
else if (componentName === 'muralComponent') {
return MuralComponent;
}
else if (componentName === 'sobreComponent') {
return SobreComponent;
}
else
{
console.error('Componente não encontrado');
return false;
}
}
}
<file_sep>import { Injectable } from '@angular/core';
import { AngularFireAuth } from '@angular/fire/auth';
import { AngularFirestore } from '@angular/fire/firestore';
import { map } from 'rxjs/operators';
@Injectable({
providedIn: 'root'
})
export class UserService {
public user_id : any;
constructor(
private DB:AngularFirestore,
private afa:AngularFireAuth,
)
{
}
public dadosLogado:any;
async getUserData(){
let userUid = localStorage.getItem('user');
if (userUid) {
return this.DB.collection('usuarios', ref => ref.where('doc.id','==', userUid )).snapshotChanges().pipe(
map(action => action.map(a=>{
const dados = {
id: a.payload.doc.id,
data: a.payload.doc.data() as any,
};
return dados;
}))
)
}
}
async getUserDados(){
let userUid = localStorage.getItem('user');
if (userUid) {
// return this.DB.collection('usuarios').doc(userUid).get().subscribe(data => {
// return data.data();
// })
return this.DB.collection('usuarios').doc(userUid).collection('dados').snapshotChanges().pipe(
map(action => action.map(a=>{
const dados = {
id: a.payload.doc.id,
data: a.payload.doc.data() as any,
};
return dados;
}))
)
}
}
async getUsers(){
return this.DB.collection('usuarios').snapshotChanges().pipe(
map(action => action.map(a=>{
const dados = {
id: a.payload.doc.id,
data: a.payload.doc.data() as any,
};
return dados;
}))
)
}
userUpdate(id : string, dados : any){
return this.DB.collection('usuarios').doc(id).update(dados);
}
}
<file_sep>import { CommonModule } from '@angular/common';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { IonicModule } from '@ionic/angular';
import { ColorPickerModule } from 'ngx-color-picker';
import { AlunosComponent } from './alunos/alunos.component';
import { MateriasComponent } from './materias/materias.component';
import { MuralComponent } from './materias/mural/mural.component';
import { MeusDadosComponent } from './meus-dados/meus-dados.component';
import { SobreComponent } from './sobre/sobre.component';
const PAGES_COMPONENTS = [
MateriasComponent,
MeusDadosComponent,
SobreComponent,
MuralComponent,
AlunosComponent
];
@NgModule({
declarations: [
PAGES_COMPONENTS
],
exports: [
PAGES_COMPONENTS
],
imports: [
CommonModule,
IonicModule,
FormsModule,
ColorPickerModule
]
})
export class ComponentsModule { }<file_sep>import { Component, OnInit } from '@angular/core';
import { Platform } from '@ionic/angular';
@Component({
selector: 'app-sobre',
templateUrl: './sobre.component.html',
styleUrls: ['./sobre.component.scss'],
})
export class SobreComponent implements OnInit {
private device : string = "desktop";
constructor(
private platform : Platform
) { }
ngOnInit() {
if (this.platform.is('hybrid')) {
this.device = "hybrid";
}
}
}
<file_sep>import { EventEmitter, Injectable } from '@angular/core';
import { Subscription } from 'rxjs';
@Injectable({
providedIn: 'root'
})
export class EventEmitterService {
invokeFirstComponentFunction = new EventEmitter();
subsVar: Subscription;
public backcomponent:string = '';
constructor() {
this.subsVar = undefined
}
onFirstComponentButtonClick(param?:any) {
this.invokeFirstComponentFunction.emit(param);
}
}
<file_sep>import { Injectable, NgZone } from '@angular/core';
import { AngularFireAuth } from "@angular/fire/auth";
import { AngularFirestore, AngularFirestoreDocument } from '@angular/fire/firestore';
import { Router } from "@angular/router";
// import { auth } from 'firebase/app';
import { ToasterService } from '../services/toaster.service';
import { UserService } from './user.service';
@Injectable({
providedIn: 'root'
})
export class AuthService {
userData: any; // Save logged in user data
constructor(
public afs: AngularFirestore,
public afAuth: AngularFireAuth,
public router: Router,
public ngZone: NgZone,
public toast : ToasterService,
public users : UserService,
) {
/* Saving user data in localstorage when
logged in and setting up null when logged out */
this.afAuth.authState.subscribe(user => {
if (user) {
this.userData = user.uid;
localStorage.setItem('user', JSON.stringify(this.userData));
JSON.parse(localStorage.getItem('user'));
} else {
localStorage.setItem('user', null);
JSON.parse(localStorage.getItem('user'));
}
})
}
// Sign in with email/password
async SignIn(email : string, password : string) {
return await this.afAuth.signInWithEmailAndPassword(email, password)
.then((result) => {
if (result) {
if(result.user.emailVerified == true){
const dados = {
uid: result.user.uid || "",
name: result.user.displayName || "",
email: result.user.email || "",
photoURL: result.user.photoURL || ""
};
this.users.user_id = dados.uid;
localStorage.setItem('user', JSON.stringify(dados.uid));
this.router.navigate(['home-desktop']);
// this.SetUserData(dados);
}
else {
this.toast.presentToast('Conta não validada, acesse seu email e abra o link para concluir o cadastro', 'danger', 2000 )
}
}
else {
this.toast.presentToast('Falha ao consultar banco, tente novamente', 'danger', 2000 )
}
}).catch((error) => {
console.error(error);
this.toast.presentToast('Login ou senha inválidos', 'danger', 2000 )
})
}
// Sign up with email/password
async SignUp(email, password) {
return this.afAuth.createUserWithEmailAndPassword(email, password)
.then((result) => {
      /* Call the SendVerificationMail() function when a new user signs
      up and return the promise */
this.SendVerificationMail();
this.SetUserData(result.user);
}).catch((error) => {
window.alert(error.message)
})
}
  // Send email verification when a new user signs up
async SendVerificationMail() {
return (await this.afAuth.currentUser).sendEmailVerification()
.then(() => {
this.router.navigate(['verify-email-address']);
})
}
  // Reset forgotten password
ForgotPassword(passwordResetEmail) {
return this.afAuth.sendPasswordResetEmail(passwordResetEmail)
.then(() => {
window.alert('Password reset email sent, check your inbox.');
}).catch((error) => {
window.alert(error)
})
}
  // Returns true when the user is logged in and their email is verified
get isLoggedIn(): boolean {
const user = JSON.parse(localStorage.getItem('user'));
return (user !== null && user.emailVerified !== false) ? true : false;
}
// Sign in with Google
// GoogleAuth() {
// return this.AuthLogin(new auth.GoogleAuthProvider());
// }
// Auth logic to run auth providers
async AuthLogin(provider) {
return await this.afAuth.signInWithPopup(provider)
.then((result) => {
console.warn(result);
this.ngZone.run(() => this.router.navigate(['home-desktop']));
this.SetUserData(result.user);
}).catch((error) => {
window.alert(error)
})
}
  /* Set up the user document when signing in or signing up with email/password
  or with a social auth provider, stored in the Firestore database using the
  AngularFirestore + AngularFirestoreDocument services */
SetUserData(user : any) {
const userRef: AngularFirestoreDocument<any> = this.afs.doc(`usuarios/${user.uid}`);
const userData: any = {
uid: user.uid,
email: user.email || '',
displayName: user.displayName || 'Sem Nome',
photoURL: user.photoURL || '../../assets/images/avatar.png',
emailVerified: user.emailVerified || '',
role: user.role || 'user',
theme: {
color1: user.color1 || '#5d66d3',
color2: user.color2 || '#353a85',
color3: user.color3 || '#ffffff',
},
materias: user.materias || []
};
return userRef.set(userData, {
merge: true
})
}
// Sign out
async SignOut() {
return await this.afAuth.signOut().then(() => {
localStorage.removeItem('user');
this.router.navigate(['login']);
})
}
setUserRole(id : string, dados : any){
return this.afs.collection('usuarios').doc(id).update(dados);
}
}<file_sep>import { Component, Input, OnInit, ɵisDefaultChangeDetectionStrategy } from '@angular/core';
import { ModalController } from '@ionic/angular';
import { AuthService } from 'src/app/routes/auth.service';
import { ChatService } from 'src/app/routes/chat.service';
import { UserService } from 'src/app/routes/user.service';
import { ModalService } from 'src/app/services/modal.service';
import { ToasterService } from 'src/app/services/toaster.service';
@Component({
selector: 'app-chat',
templateUrl: './chat.page.html',
styleUrls: ['./chat.page.scss'],
})
export class ChatPage implements OnInit {
@Input() data : any;
private currentId : any = {};
private currentUser: any = {};
private messages : any[] = [];
private input : any;
constructor(
private modal : ModalController,
private modals : ModalService,
private chats : ChatService,
private user : UserService,
private toaster : ToasterService,
private auth : AuthService
) { }
ngOnInit() {
console.warn(this.data);
this.currentId = JSON.parse(localStorage.getItem('user'));
console.log(this.currentId);
if (this.currentId) {
this.loadUser(this.currentId);
}
else {
this.toaster.presentToast('Falha ao carregar usuário', 'danger', 2000);
this.auth.SignOut();
}
this.loadMessages();
}
async loadUser(id : string){
(await this.user.getUsers()).subscribe(data => {
for (let i = 0; i < data.length; i++) {
if (data[i].id == id) {
this.currentUser = data[i].data;
console.error(this.currentUser);
}
}
});
}
loadMessages(){
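    // snapshotChanges() re-emits the whole collection, so only push messages that are not already in the local list.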
this.chats.listar(this.data.id).subscribe(dados => {
for (let i = 0; i < dados.length; i++) {
let search = this.messages.some(message => message.id === dados[i].id);
!search ? this.messages.push(dados[i]) : console.log('mensagem já adicionada');
}
// this.messages.sort((a, b) => {
// let c = new Date(a.date);
// let d = new Date(b.date);
// return c-d;
// });
});
}
closeModal(){
this.modal.dismiss();
}
clickSend(){
this.sendMessage('click');
}
sendMessage(event : any){
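    // Send on button click or when Enter (keyCode 13) is pressed, ignoring empty messages.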
if (this.input.length <= 0) {
this.toaster.presentToast('Digite uma mensagem para enviar!', 'danger', 2000);
return false;
}
if (event !== 'click') {
if (event.keyCode != 13) {
return false;
}
}
let dadosMsg = {
id: this.currentUser.uid,
displayName: this.currentUser.displayName,
message: this.input,
createAt: new Date().getTime()
};
this.chats.enviar(this.data.id, dadosMsg).then(() => this.input = "");
}
}
<file_sep><h1 align="center">
  <img alt="LearnIn" title="Learnin" src=".github/logo.png" width="50%" />
</h1>
<p align="center">
  <a href="#-technologies">Technologies</a> |
  <a href="#-project">Project</a> |
  <a href="#-installation">Installation</a> |
  <a href="#-layout">Layout</a> |
  <a href="#memo-license">License</a>
</p>
<br>
<p align="center">
  <img alt="Happy" src=".github/learnin.png" width="100%">
</p>
## 🚀 Technologies
This project was developed with the following technologies:
- [Firebase](https://firebase.google.com/)
- [Angular](https://angular.io)
- [Capacitor](https://capacitorjs.com)
- [Ionic](https://ionicframework.com)
- [TypeScript](https://www.typescriptlang.org/)
## 💻 Project
LearnIn is a platform that connects students and teachers for sending assignments, study material and questions. 💜
The idea of the project was to develop a web application hosted on one of the cloud services available today; the one chosen was Firebase.
- [Working web version](https://projeto-sdv.web.app)
- [App version] To try the app (PWA) version of the site, follow the steps in the <a href="#-installation">Installation</a> section.
## 🔧 Installation
To run the project on your own machine/device, you will need the following resources installed:
- [Web layout](Coming soon)
- [Web layout](Coming soon)
- [Mobile layout](Coming soon)
## 🔖 Layout
In the links below you will find the layout of the web project and also the mobile one. Note that you need a [Figma](http://figma.com/) account to access them.
- [Web layout](Coming soon)
- [Mobile layout](Coming soon)
## :memo: License
This project is under the MIT license. See the [LICENSE](LICENSE.md) file for details.
---
Project made by <NAME> (Emeritvs) for the Networks and Distributed Systems course during the Computer Science degree :wave:
<file_sep>import { DOCUMENT } from '@angular/common';
import { Inject, Input } from '@angular/core';
import { Component, ComponentFactoryResolver, OnInit, ViewChild, ViewContainerRef } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { MenuController, Platform } from '@ionic/angular';
import { map } from 'rxjs/operators';
import { AuthService } from 'src/app/routes/auth.service';
import { UserService } from 'src/app/routes/user.service';
import { ComponentsRenderService } from 'src/app/services/components-render.service';
import { EventEmitterService } from 'src/app/services/event-emitter.service';
import { ProcessService } from 'src/app/services/process.service';
import { ToasterService } from 'src/app/services/toaster.service';
@Component({
selector: 'app-home-desktop',
templateUrl: './home-desktop.page.html',
styleUrls: ['./home-desktop.page.scss'],
})
export class HomeDesktopPage implements OnInit {
@Input() teste : any;
@ViewChild('content', { read: ViewContainerRef, static: true }) private content : ViewContainerRef;
private currentId : any = {};
private currentUser: any = {};
private device : string = "desktop";
constructor(
private platform : Platform,
private router : Router,
private auth : AuthService,
private components : ComponentsRenderService,
private resolver: ComponentFactoryResolver,
private toaster : ToasterService,
private emitter : EventEmitterService,
private user : UserService,
private route: ActivatedRoute,
private menu : MenuController,
@Inject(DOCUMENT) private document : Document
) {
}
ngOnInit() {
this.currentId = JSON.parse(localStorage.getItem('user'));
console.log(this.currentId);
if (this.platform.is('hybrid')) {
this.device = "hybrid";
}
if (this.currentId) {
this.loadUser(this.currentId);
}
else {
this.toaster.presentToast('Falha ao carregar usuário', 'danger', 2000);
this.auth.SignOut();
}
if (this.emitter.subsVar == undefined) {
this.emitter.subsVar = this.emitter.invokeFirstComponentFunction
.subscribe((param : any) => {
let data = param.data;
          // Open the requested component
this.abrirComponente(this.content, 'D',param.function ,data)
.catch(err => {
console.log(err);
this.toaster.presentToast('Houve um problema ao processar sua solicitação. Tente novamente mais tarde', 'danger', 0);
})
});
}
this.abrirComponente(this.content, 'D','materiasComponent',{});
}
async loadUser(id : string){
(await this.user.getUsers()).subscribe(data => {
for (let i = 0; i < data.length; i++) {
if (data[i].id == id) {
this.currentUser = data[i].data;
console.error(this.currentUser);
this.loadTheme(this.currentUser);
}
}
});
}
loadTheme(data : any){
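    // Apply the user's saved theme colours to the global Ionic CSS variables.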
document.documentElement.style.setProperty('--ion-color-learnin-primary', data.theme.color1);
document.documentElement.style.setProperty('--ion-color-learnin-secondary', data.theme.color2);
document.documentElement.style.setProperty('--ion-background-color', data.theme.color1);
document.documentElement.style.setProperty('--ion-item-background', data.theme.color1);
document.querySelector("body").style.setProperty('--ion-text-color', data.theme.color3);
}
async openContent(option : string){
this.menu.close();
await this.abrirComponente(this.content, 'D', option,{});
}
async abrirComponente(element : ViewContainerRef, container:string,componentName:string,data?:any): Promise<Boolean> {
return new Promise((resolve, reject) => {
      // Show a loading indicator while the component is resolved and rendered
this.toaster.presentLoading('Carregando...')
.then(async res => {
res.present();
try
{
let comp : any = await this.components.resolveComponentsName(componentName);
if(comp != false)
{
let newItem = new ProcessService(comp, data);
const factory = this.resolver.resolveComponentFactory(newItem.component);
            // Create the component in the target container
if(container == 'D')
{
element.clear();
// let componentRef = this.direito.createComponent(factory);
this.content.createComponent(factory);
}
else
{
console.error('Não existe um container ativo ('+container+')');
}
res.dismiss();
resolve(true)
}
else
{
console.error('Falha ao carregar '+componentName);
console.error('Componente nào esta instanciado');
reject(false);
}
}
catch(err)
{
reject(false);
console.error(err);
res.dismiss();
this.toaster.presentToast('Falha ao carregar tela', 'danger', 4000);
}
})
.catch()
.finally();
});
}
logout(){
this.auth.SignOut();
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { AuthService } from 'src/app/routes/auth.service';
import { ToasterService } from 'src/app/services/toaster.service';
@Component({
selector: 'app-cadastro',
templateUrl: './cadastro.page.html',
styleUrls: ['./cadastro.page.scss'],
})
export class CadastroPage implements OnInit {
private signData = {
email: '',
password: ''
};
constructor(
private auth : AuthService,
private toaster : ToasterService
) { }
ngOnInit() {
}
cadastrar(){
this.auth.SignUp(this.signData.email, this.signData.password)
.then(() => this.toaster.presentToast('Uma confirmação foi enviada para o seu email, confirme-a para concluir o cadastro.', 'success', 2000))
.catch(err => this.toaster.presentToast(`[ERRO]: ${err}`, 'danger', 2000));
}
}
<file_sep>import { Component, Input, OnInit } from '@angular/core';
import { ModalController, Platform } from '@ionic/angular';
import { AlunoPage } from 'src/app/pages/aluno/aluno.page';
import { MuralService } from 'src/app/routes/mural.service';
import { UserService } from 'src/app/routes/user.service';
import { ModalService } from 'src/app/services/modal.service';
@Component({
selector: 'app-alunos',
templateUrl: './alunos.component.html',
styleUrls: ['./alunos.component.scss'],
})
export class AlunosComponent implements OnInit {
@Input() data : any;
private usersList : any[] = [];
private materiaId : string;
private device : string = "desktop";
private userOptions : any = {
action: 'component',
device: 'desktop'
};
constructor(
private modals : ModalService,
private modal : ModalController,
private murals : MuralService,
private users : UserService,
private platform : Platform
) { }
ngOnInit() {
if (this.data) {
console.log(this.data);
this.userOptions.action = this.data.action;
this.materiaId = this.data.id;
}
if (this.platform.is('hybrid')) {
this.device = "hybrid";
}
this.list();
}
viewAluno(dados : any){
let css = "";
if (this.device === 'desktop') css = "fullscreen-modal";
dados.action = "modal";
this.modals.modalOpen(AlunoPage, dados, css);
}
addAluno(aluno : any){
const dados = {
user_id: aluno.id,
user_name: aluno.data.displayName,
user_photo: aluno.data.photoURL
};
this.murals.adicionarAluno(this.materiaId, dados).then(() => this.alunoMateria(aluno, this.materiaId));
}
alunoMateria(aluno : any, materia : string){
const usuario = aluno;
usuario.materias.push(materia);
this.murals.adicionarMateriaAluno(usuario.id, usuario.materias).then(() => this.modal.dismiss());
}
listarAlunosMateria(){
}
async list(){
(await this.users.getUsers()).subscribe(async data => {
if (this.userOptions.action != 'modal') {
this.usersList = data;
return false;
}
this.usersList = data;
(await this.murals.listarAlunos(this.data.id)).subscribe(dados => {
if (dados.length <= 0) {
this.usersList = data;
return false;
}
for (let i = 0; i < data.length; i++) {
for (let j = 0; j < dados.length; j++) {
console.log(dados[j])
console.log(`${dados[j].data.user_id} - ${data[i].id}`);
if (dados[j].data.user_id == this.usersList[i].id) {
this.usersList.splice(i, 1);
}
}
}
});
});
}
closeModal(){
this.modal.dismiss();
}
}
<file_sep>import { DOCUMENT } from '@angular/common';
import { Component, Inject, OnInit } from '@angular/core';
import { AngularFireAuth } from '@angular/fire/auth';
import { AngularFireStorage } from '@angular/fire/storage';
import { Platform } from '@ionic/angular';
import { finalize } from 'rxjs/operators';
import { UserService } from 'src/app/routes/user.service';
import { ToasterService } from 'src/app/services/toaster.service';
@Component({
selector: 'app-meus-dados',
templateUrl: './meus-dados.component.html',
styleUrls: ['./meus-dados.component.scss'],
})
export class MeusDadosComponent implements OnInit {
private currentId : string;
private nameInput : boolean = false;
private user = {
displayName: '',
photoURL: '',
theme: {
color1: '',
color2: '',
color3: '',
}
};
private downloadURL : any;
private dark_mode : boolean = false;
private cor : string;
private device : string = "desktop";
constructor(
private users : UserService,
private toaster : ToasterService,
private storage: AngularFireStorage,
private afa : AngularFireAuth,
private platform : Platform,
@Inject(DOCUMENT) private document : Document
) {
}
ngOnInit() {
this.currentId = JSON.parse(localStorage.getItem('user'));
this.loadUser();
if (this.platform.is('hybrid')) {
this.device = "hybrid";
}
}
async loadUser(){
// (await this.users.getUserDados()).subscribe(data => console.log(data))
(await this.users.getUsers()).subscribe(data => {
for (let i = 0; i < data.length; i++) {
if (data[i].id == this.currentId) {
console.log(data[i]);
this.user = data[i].data;
return true;
}
}
});
}
atualizarDados(){
console.log(this.user);
this.users.userUpdate(this.currentId, this.user).then(() => {
if (this.user.theme.color1 != "") document.documentElement.style.setProperty('--ion-color-learnin-primary', this.user.theme.color1);
if (this.user.theme.color2 != "") document.documentElement.style.setProperty('--ion-color-learnin-secondary', this.user.theme.color2);
if (this.user.theme.color3 != "") document.querySelector("body").style.setProperty('--ion-text-color', this.user.theme.color3);
this.toaster.presentToast('Dados atualizados com sucesso!', 'success', 2000);
})
}
edit(param : string){
this.nameInput ? this.nameInput = false : this.nameInput = true;
}
escuro(event : any){
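    // Toggle between a dark palette and the default (transparent) palette by rewriting the Ionic CSS variables.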
if (event.detail.checked) {
this.user.theme.color1 = '#0f0f0f';
this.user.theme.color2 = '#080808';
this.user.theme.color3 = '#ffffff';
document.documentElement.style.setProperty('--ion-color-learnin-primary', '#0f0f0f');
document.documentElement.style.setProperty('--ion-color-learnin-secondary', '#080808');
document.querySelector("body").style.setProperty('--ion-text-color', '#b5b5b5');
document.documentElement.style.setProperty('--color', '#ffffff');
}
else {
this.user.theme.color1 = 'transparent';
this.user.theme.color2 = 'transparent';
this.user.theme.color3 = '#000000';
document.documentElement.style.setProperty('--ion-color-learnin-primary', 'transparent');
document.documentElement.style.setProperty('--ion-color-learnin-secondary', 'transparent');
document.querySelector("body").style.setProperty('--ion-text-color', '#000000');
document.documentElement.style.removeProperty('--color');
}
}
async uploadFile(event) {
const file = event.target.files[0];
const randomId = Math.random().toString(36).substring(2);
const nomeArquivo = new Date().getTime()+randomId+file.name;
const filePath = nomeArquivo;
const user_id = localStorage.getItem('user');
const dadosRef = `usuarios/${user_id}/${filePath}`;
const fileRef = this.storage.ref(dadosRef);
const task = this.storage.upload(dadosRef, file);
// observe percentage changes
// this.uploadPercent = task.percentageChanges();
// get notified when the download URL is available
task.snapshotChanges().pipe(
finalize(() =>{
this.downloadURL = fileRef.getDownloadURL();
this.downloadURL.subscribe(resp=>{
if(resp != '')
{
this.toaster.presentLoading('Carregando Prévia').then(result => {
result.present();
this.getBackLink(resp, result);
});
}
else {
this.toaster.presentToast('Falha ao atualizar prévia, tente novamente.', 'danger', 2000);
}
})
})
)
.subscribe()
}
getBackLink(url:string, resloading : any) {
this.user.photoURL = url;
console.log(this.user);
resloading.dismiss();
this.toaster.presentToast('Prévia carregada, clique em salvar para confirmar as alterações.', 'secondary', 2000);
}
}
<file_sep>import { NgModule } from '@angular/core';
import { PreloadAllModules, RouterModule, Routes } from '@angular/router';
import { AuthGuard } from './guards/auth.guard';
const routes: Routes = [
{
path: '',
redirectTo: 'login',
pathMatch: 'full'
},
{
path: 'home-desktop',
loadChildren: () => import('./pages/home-desktop/home-desktop.module').then( m => m.HomeDesktopPageModule)
},
{
path: 'publicacao',
loadChildren: () => import('./pages/publicacao/publicacao.module').then( m => m.PublicacaoPageModule)
},
{
path: 'login',
loadChildren: () => import('./pages/login/login.module').then( m => m.LoginPageModule)
},
{
path: 'cadastro',
loadChildren: () => import('./pages/cadastro/cadastro.module').then( m => m.CadastroPageModule)
},
{
path: 'recuperar',
loadChildren: () => import('./pages/recuperar/recuperar.module').then( m => m.RecuperarPageModule)
},
{
path: 'verificacao',
loadChildren: () => import('./pages/verificacao/verificacao.module').then( m => m.VerificacaoPageModule)
},
{
path: 'modal-confirm',
loadChildren: () => import('./pages/modal-confirm/modal-confirm.module').then( m => m.ModalConfirmPageModule)
},
{
path: 'aluno',
loadChildren: () => import('./pages/aluno/aluno.module').then( m => m.AlunoPageModule)
},
{
path: 'materias',
loadChildren: () => import('./pages/materias/materias.module').then( m => m.MateriasPageModule)
},
];
@NgModule({
imports: [
RouterModule.forRoot(routes, { preloadingStrategy: PreloadAllModules })
],
exports: [RouterModule]
})
export class AppRoutingModule { }
<file_sep>import { Injectable } from '@angular/core';
import { ModalController } from '@ionic/angular';
import { ModalConfirmPage } from '../pages/modal-confirm/modal-confirm.page';
@Injectable({
providedIn: 'root'
})
export class ModalService {
public params : any;
constructor(
private modal : ModalController,
) { }
async modalOpen(page : any, props : any, css : string) {
const modal = await this.modal.create({
component: page,
mode: 'ios',
showBackdrop: true,
cssClass:`selector-modal ${css}`,
componentProps: {
data: props
}
});
modal.onDidDismiss().then((dados) => {
return dados;
});
await modal.present();
}
closeModal(){
this.modal.dismiss();
}
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { IonicModule } from '@ionic/angular';
import { PublicacaoPageRoutingModule } from './publicacao-routing.module';
import { PublicacaoPage } from './publicacao.page';
@NgModule({
imports: [
CommonModule,
FormsModule,
IonicModule,
PublicacaoPageRoutingModule
],
declarations: [PublicacaoPage]
})
export class PublicacaoPageModule {}
<file_sep>import { Component, Input, OnInit } from '@angular/core';
import { AngularFireStorage } from '@angular/fire/storage';
import { ModalController, Platform } from '@ionic/angular';
import { Observable } from 'rxjs';
import { finalize } from 'rxjs/operators';
import { User } from 'src/app/interfaces/user';
import { AuthService } from 'src/app/routes/auth.service';
import { MuralService } from 'src/app/routes/mural.service';
import { UserService } from 'src/app/routes/user.service';
import { ToasterService } from 'src/app/services/toaster.service';
@Component({
selector: 'app-add-publicacao',
templateUrl: './add-publicacao.page.html',
styleUrls: ['./add-publicacao.page.scss'],
})
export class AddPublicacaoPage implements OnInit {
@Input() data : any;
private currentId : any = {};
private currentUser: any = {};
private device : string = "desktop";
private tipo : string = "post";
private form : any = {
materia: '',
autor: '',
titulo: '',
descricao: '',
anexos: [],
createAt: ''
};
private fileUrl : string = "";
private arquivos : any[] = [];
selectedFile: File = null;
fb;
downloadURL: Observable<string>;
constructor(
private modal : ModalController,
private murals : MuralService,
private storage: AngularFireStorage,
private toaster : ToasterService,
private platform : Platform,
private user : UserService,
private auth : AuthService
) { }
ngOnInit() {
this.currentId = JSON.parse(localStorage.getItem('user'));
console.log(this.currentId);
if (this.platform.is('hybrid')) {
this.device = "hybrid";
}
if (this.currentId) {
this.loadUser(this.currentId);
}
else {
this.toaster.presentToast('Falha ao carregar usuário', 'danger', 2000);
this.auth.SignOut();
}
console.warn(this.data);
if (this.data.item) {
if(this.data.action === 'view-post') {
this.tipo = "post";
this.form.materia = this.data.materia;
this.form.autor = this.data.item.autor;
this.form.titulo = this.data.item.data.titulo;
this.form.descricao = this.data.item.data.descricao;
this.form.anexos = this.data.item.data.anexos;
this.form.createAt = this.data.item.data.anexos;
}
else {
this.tipo = "arquivo";
this.form.materia = this.data.materia;
this.form.autor = this.data.item.autor;
this.form.titulo = this.data.item.data.name;
this.form.createAt = this.data.item.data.createAt;
this.fileUrl = this.data.item.data.url;
this.arquivos.push(this.data.item.data);
}
}
}
async loadUser(id : string){
(await this.user.getUsers()).subscribe(data => {
for (let i = 0; i < data.length; i++) {
if (data[i].id == id) {
this.currentUser = data[i].data;
}
}
});
}
startDownload(url : string) {
window.open(url, "_blank")
}
async salvar(){
if (this.tipo === 'arquivo') {
console.log('arquivo!');
await this.uploadArquivos().then(() => {
this.form = {
materia: '',
autor: '',
titulo: '',
descricao: '',
anexos: [],
createAt: ''
};
this.modal.dismiss();
});
}
else {
console.log('post!');
await this.uploadArquivos().then(() => {
console.warn(this.form);
this.murals.enviarPost(this.data.materia, this.form)
.then(() => {
this.form = {
materia: '',
autor: '',
titulo: '',
descricao: '',
anexos: [],
createAt: ''
};
this.modal.dismiss();
})
}
);
}
}
async apagar(){
if (this.tipo === 'arquivo') {
this.murals.apagarArquivo(this.data.materia, this.data.item.id).then(() => {
this.toaster.presentToast('Arquivo apagado com sucesso', 'success', 2000);
this.modal.dismiss();
})
}
else {
this.murals.apagarPost(this.data.materia, this.data.item.id).then(() => {
this.toaster.presentToast('Post apagado com sucesso', 'success', 2000);
this.modal.dismiss();
})
}
}
selecionarArquivo(event : any){
if(event.target.files.length > 0){
let files = event.target.files;
for (let i = 0; i < files.length; i++) {
console.log(files[i])
this.arquivos.push(files.item(i));
}
}
}
removerArquivo(id : number){
console.log('remover arquivo '+id)
this.arquivos.splice(id, 1);
}
async uploadArquivos() {
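    // Upload each selected file to Storage under materias/<id>/ with a timestamp-prefixed name, then persist its metadata and download URL.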
if (this.arquivos.length > 0) {
const n = Date.now();
const files = this.arquivos;
for (let i = 0; i < files.length; i++) {
let fileName = files[i].name;
// fileName = fileName.split('.').shift();
const createName = `${n}_${fileName}`;
const filePath = `materias/${this.data.materia}/${n}_${fileName}`;
const fileRef = this.storage.ref(filePath);
const task = this.storage.upload(`materias/${this.data.materia}/${createName}`, files[i]);
const fileObj = {
lastModified: files[i].lastModified,
name: createName,
size: files[i].size,
type: files[i].type,
};
task.snapshotChanges().pipe(
finalize(() => {
this.downloadURL = fileRef.getDownloadURL();
this.downloadURL.subscribe(url => {
if (url){
fileObj['url'] = url;
fileObj['createAt'] = new Date().getTime();
this.murals.enviarArquivo(this.data.materia, fileObj);
}
});
this.toaster.presentToast('Arquivos enviados com sucesso!', 'success', 2000);
})
)
.subscribe(url => {});
this.form.anexos.push(fileObj);
}
}
else {
this.toaster.presentToast('Selecione arquivos para enviar!', 'danger', 2000);
}
}
closeModal(){
this.modal.dismiss();
}
}
<file_sep>import { Injectable } from '@angular/core';
import { LoadingController, ToastController } from '@ionic/angular';
@Injectable({
providedIn: 'root'
})
export class ToasterService {
constructor(
public toastController: ToastController,
public loadingController: LoadingController,
) {}
async presentLoading(message:string) {
const loading = await this.loadingController.create({
message
});
return loading
}
async presentToast(message : string, color:string, duration : number) {
const toast = await this.toastController.create({
message: message || 'Your settings have been saved.',
color: color || "primary",
duration: duration || 2000
});
toast.present();
}
async presentToastWithOptions(header : string, message : string, position : "top" | "bottom" | "middle", button1 : string, button2 : string) {
const toast = await this.toastController.create({
header: header,
message: message || 'Click to Close',
position: position || 'top',
buttons: [
{
side: 'start',
icon: 'star',
text: button1 || 'Favorite',
handler: () => {
console.log('Favorite clicked');
}
}, {
text: button2 || 'Done',
role: 'cancel',
handler: () => {
console.log('Cancel clicked');
}
}
]
});
toast.present();
}
}
<file_sep>import { Injectable } from '@angular/core';
import { AngularFireAuth } from '@angular/fire/auth';
import { AngularFirestore } from '@angular/fire/firestore';
import { map } from 'rxjs/operators';
@Injectable({
providedIn: 'root'
})
export class ChatService {
constructor(
private DB:AngularFirestore,
private afa:AngularFireAuth,
) { }
listar(id : string){
return this.DB.collection('materias').doc(id).collection('mensagens').snapshotChanges().pipe(
map(action => action.map(a=>{
const dados = {
id: a.payload.doc.id,
data: a.payload.doc.data() as any,
};
return dados;
}))
)
}
enviar(id : string, dados : any){
return this.DB.collection('materias').doc(id).collection('mensagens').add(dados);
}
}
<file_sep>import { Injectable } from '@angular/core';
import { AngularFireAuth } from '@angular/fire/auth';
import { AngularFirestore, AngularFirestoreCollection, QueryFn } from '@angular/fire/firestore';
import { map } from 'rxjs/operators';
@Injectable({
providedIn: 'root'
})
export class MateriasService {
private currentUser : string;
constructor(
private DB:AngularFirestore,
private afa:AngularFireAuth,
) {
this.currentUser = localStorage.getItem('user');
}
listar(){
return this.DB.collection('materias').snapshotChanges()
.pipe(
map(action => action.map(a=>{
const dados = {
id: a.payload.doc.id,
data: a.payload.doc.data() as any,
};
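          // Attach a random display colour so each subject card renders with a distinct background.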
dados.data.color = this.getRandomColor();
return dados;
}))
)
}
adicionar(data : any){
data.createAt = new Date().getTime();
return this.DB.collection('materias').add(data);
}
editar(){
}
apagar(id : string){
return this.DB.collection("materias").doc(id).delete();
}
getRandomColor() {
let letters = '0123456789ABCDEF';
let color = '#';
for (let i = 0; i < 6; i++) {
color += letters[Math.floor(Math.random() * 16)];
}
return color;
}
}
<file_sep>export const environment = {
production: true,
firebase: {
apiKey: "<KEY>",
authDomain: "projeto-sdv.firebaseapp.com",
databaseURL: "https://projeto-sdv.firebaseio.com",
projectId: "projeto-sdv",
storageBucket: "projeto-sdv.appspot.com",
messagingSenderId: "895614015555",
appId: "1:895614015555:web:1a1bd92b87a26d132f5cf6",
measurementId: "G-WX8VHX183B"
}
};
<file_sep>import { Injectable } from '@angular/core';
import { EventEmitterService } from './event-emitter.service';
@Injectable({
providedIn: 'root'
})
export class ExecuteService {
public data : any;
constructor(
private emitter : EventEmitterService
) { }
functionExecute(functionName:string,params:any)
{
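    // Stores the parameters locally, then re-emits them tagged with the target
    // function name through the shared EventEmitterService.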
console.log('execute')
this.carregarParametros(params);
const param = {
function:functionName,
data:params
}
this.emitter.onFirstComponentButtonClick(param);
}
carregarParametros(dados : any){
this.data = dados;
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { AuthService } from 'src/app/routes/auth.service';
import { ToasterService } from 'src/app/services/toaster.service';
@Component({
selector: 'app-login',
templateUrl: './login.page.html',
styleUrls: ['./login.page.scss'],
})
export class LoginPage implements OnInit {
userData = {
login : '',
password: ''
};
constructor(
private auth : AuthService,
private router : Router,
private toast : ToasterService
) { }
ngOnInit() {
}
async login(){
await this.auth.SignIn(this.userData.login, this.userData.password);
}
}
<file_sep>import { Injectable, Type } from '@angular/core';
@Injectable({
providedIn: 'root'
})
export class ProcessService {
constructor(public component: Type<any>, public desc: string) {}
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { IonicModule } from '@ionic/angular';
import { HomeDesktopPageRoutingModule } from './home-desktop-routing.module';
import { HomeDesktopPage } from './home-desktop.page';
import { ComponentsModule } from 'src/app/components/components.module';
@NgModule({
imports: [
CommonModule,
FormsModule,
IonicModule,
HomeDesktopPageRoutingModule,
ComponentsModule
],
declarations: [HomeDesktopPage]
})
export class HomeDesktopPageModule {}
<file_sep>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-verificacao',
templateUrl: './verificacao.page.html',
styleUrls: ['./verificacao.page.scss'],
})
export class VerificacaoPage implements OnInit {
constructor() { }
ngOnInit() {
}
}
|
e95f84ca24b8181713d8f1f62f3c3ef6f0d56d3e
|
[
"Markdown",
"TypeScript"
] | 34 |
TypeScript
|
Emeritvs/learnin
|
46345fc1af2262b7ea6c732dddbbe07b51abbbf2
|
4730197850a1fd72ca081f601a6d7023f4803df9
|
refs/heads/master
|
<file_sep>import os
import random
import logging
import discord
import queue
import asyncio
import threading
import time
from discord.ext import commands
crabs = ['./crabs/'+f for f in os.listdir('./crabs')]
files = ['./claps/'+f for f in os.listdir('./claps')]
claps = queue.Queue()
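# single shared queue: the workers spawned by all three bot instances pull queued sounds from it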
loop = asyncio.get_event_loop()
bot = commands.Bot(command_prefix='cl!')
proxy1 = commands.Bot(command_prefix='cl!')
proxy2 = commands.Bot(command_prefix='cl!')
logging.basicConfig(level=logging.INFO)
#discord.opus.load_opus()
def clap_worker(vc, queue):
    print('creating worker')
    # Poll the shared queue while the voice client is connected, playing one queued
    # sound at a time; task_done is only signalled once playback has finished.
    while vc.is_connected():
        if not vc.is_playing() and not queue.empty():
            try:
                clap = queue.get_nowait()
                # wrap the queued file path at play time; defer task_done to the after-callback
                vc.play(discord.FFmpegPCMAudio(clap), after=lambda err: queue.task_done())
            except Exception:
                # raced with another worker for the last item, or playback failed; skip this tick
                pass
        else:
            time.sleep(1)
    print('worker destroyed')
@bot.command()
async def connect(ctx):
print('connecting')
vc = await ctx.message.author.voice.channel.connect()
threading.Thread(target=clap_worker, args=(vc,claps)).start()
@proxy1.command()
async def connect(ctx):
print('connecting1')
vc = await ctx.message.author.voice.channel.connect()
threading.Thread(target=clap_worker, args=(vc,claps)).start()
@proxy2.command()
async def connect(ctx):
print('connecting2')
vc = await ctx.message.author.voice.channel.connect()
threading.Thread(target=clap_worker, args=(vc,claps)).start()
@bot.event
async def on_message(message):
triggers = ['clap',':clap:','👏']
if any(trigger in message.content.lower() for trigger in triggers):
claps.put_nowait(random.choice(files))
await message.add_reaction('👏')
triggers = ['carp',':fish:','🐟']
if any(trigger in message.content.lower() for trigger in triggers):
claps.put_nowait(random.choice(files))
await message.add_reaction('🐟')
triggers = ['crab', ':crab:', '🦀']
if any(trigger in message.content.lower() for trigger in triggers):
claps.put_nowait(crabs[0])
crabs.append(crabs.pop(0))
await message.add_reaction('🦀')
await bot.process_commands(message)
@bot.event
async def on_raw_reaction_add(payload):
if str(payload.emoji) == '👏':
for i in range(random.randint(1,5)):
claps.put_nowait(random.choice(files))
@bot.event
async def on_raw_reaction_remove(payload):
    if str(payload.emoji) == '👏':
        # queue a file path (not an audio source); the worker wraps it in FFmpegPCMAudio
        claps.put_nowait(random.choice(files))
task1 = loop.create_task(bot.start(os.getenv('TOKEN_1')))
task2 = loop.create_task(proxy1.start(os.getenv('TOKEN_2')))
task3 = loop.create_task(proxy2.start(os.getenv('TOKEN_3')))
gathered = asyncio.gather(task1, task2, task3, loop=loop)
loop.run_until_complete(gathered)<file_sep># Virtual Applause

# SETUP
## Option 1: running with Docker (recommended)
- Option 1a: running from Docker Hub
Run the built container from Docker Hub with the token environment variables:
`$ docker run -e TOKEN_1=DISCORD_TOKEN_HERE -e TOKEN_2=OTHER_DISCORD_TOKEN_HERE -e TOKEN_3=OTHER_OTHER_DISCORD_TOKEN_HERE oohwooh/virtual-applause:latest`
- Option 1b: building and running the Docker image from source
`git clone https://github.com/oohwooh/virtual-applause.git`
`cd virtual-applause`
`docker build . -t virtual-applause`
`$ docker run -e TOKEN_1=DISCORD_TOKEN_HERE -e TOKEN_2=OTHER_DISCORD_TOKEN_HERE -e TOKEN_3=OTHER_OTHER_DISCORD_TOKEN_HERE virtual-applause`
## Option 2: running from command line
sorry but you're on your own with this one, have fun
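If you do want to go manual, here is a rough sketch (it assumes Python 3.7+, ffmpeg on your PATH, and the same `TOKEN_1`/`TOKEN_2`/`TOKEN_3` environment variables the Docker image uses):
`git clone https://github.com/oohwooh/virtual-applause.git && cd virtual-applause`
`pip install -r requirements.txt`
`export TOKEN_1=DISCORD_TOKEN_HERE TOKEN_2=OTHER_DISCORD_TOKEN_HERE TOKEN_3=OTHER_OTHER_DISCORD_TOKEN_HERE`
`python3 bot.py`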
# Commands:
## cl!connect
`cl!connect` connects the bots to your current voice channel, for clapping. Once connected, any message containing `clap`/`:clap:`/👏, `carp`/`:fish:`/🐟, or `crab`/`:crab:`/🦀 queues a sound, and adding a 👏 reaction queues a few extra claps.
<file_sep>FROM python:3.7-slim-buster
RUN apt-get update && apt-get install ffmpeg -y
COPY requirements.txt /
RUN pip install -r /requirements.txt
COPY claps /app/claps
COPY crabs /app/crabs
COPY bot.py /app/bot.py
WORKDIR /app
CMD ["python3", "bot.py"]
|
9e6c70607924cec0e9ba477c520fd5041a5cd167
|
[
"Markdown",
"Python",
"Dockerfile"
] | 3 |
Python
|
lynnlo/virtual-applause
|
3bc89ee26f5177b8432ed8be16d07e5a8363299d
|
25b9e44cf842b7dc8094b9c6c8259898b8f2278c
|
refs/heads/master
|
<repo_name>jiawei103/RepData_PeerAssessment1<file_sep>/PA1_template.md
---
title: "Reproducible Research: Peer Assessment 1"
output:
html_document:
keep_md: true
---
## Loading and preprocessing the data
Place the data file activity.zip in your working directory and run the following to load the data
```r
data <- read.csv(unz("activity.zip", "activity.csv"), header = TRUE)
head(data)
```
```
## steps date interval
## 1 NA 2012-10-01 0
## 2 NA 2012-10-01 5
## 3 NA 2012-10-01 10
## 4 NA 2012-10-01 15
## 5 NA 2012-10-01 20
## 6 NA 2012-10-01 25
```
## What is mean total number of steps taken per day?
First we find the total number of steps for each day and display a histogram of the results
```r
sum_by_day <- tapply(data$steps, data$date, sum, na.rm = TRUE)
hist(sum_by_day, main = "Total Steps per Day", xlab = "Total Steps", ylab = "Days")
```
<!-- -->
The mean and the median of the daily totals are as follows
```r
print(mean(sum_by_day))
```
```
## [1] 9354.23
```
```r
print(median(sum_by_day))
```
```
## [1] 10395
```
## What is the average daily activity pattern?
We find the average number of steps for each time interval
```r
average_by_interval <- tapply(data$steps, data$interval, mean, na.rm = TRUE)
```
Converting the intervals into a list of time variables
```r
times <- unique(data$interval)
times <- formatC(times, width = 4, format = "d", flag = "0")
times <- strptime(times, "%H%M")
```
Plotting the time series of averages
```r
library(ggplot2)
data2 <- data.frame(interval = times, average = average_by_interval)
g <- ggplot(data2, aes(interval, average)) + geom_line() + scale_x_datetime(date_labels = "%H:%M")
print(g)
```
<!-- -->
Finding the maximum average number of steps and when it occurs
```r
library(lubridate)
```
```
##
## Attaching package: 'lubridate'
```
```
## The following object is masked from 'package:base':
##
## date
```
```r
maximum <- max(data2$average)
location <- grep(maximum, data2$average)
h <- hour(data2[location, 1])
m <- minute(data2[location, 1])
print(paste("Maximum average number of steps at", h, ":", m))
```
```
## [1] "Maximum average number of steps at 8 : 35"
```
## Imputing missing values
Finding the number of missing values in the rows of our dataframe
```r
#1
sum(is.na(data$steps))
```
```
## [1] 2304
```
```r
sum(is.na(data$date))
```
```
## [1] 0
```
```r
sum(is.na(data$interval))
```
```
## [1] 0
```
We see that only the first column has missing values.
Replace each missing value with the mean of that interval
```r
#2-3 Using mean of each interval to replace NA
data_imputed <- data
for(i in 1:dim(data_imputed)[1]) {
if(is.na(data_imputed$steps[i])) {
data_imputed$steps[i] <- data2[as.character(data_imputed$interval[i]),2]
}
}
```
Recalculating the total number of steps per day and displaying a new histogram
```r
#4
sum_by_day2 <- tapply(data_imputed$steps, data_imputed$date, sum)
hist(sum_by_day2, main = "Total Steps per Day", xlab = "Total Steps", ylab = "Days")
```
<!-- -->
Recomputing the mean and median of the totals
```r
print(mean(sum_by_day2))
```
```
## [1] 10766.19
```
```r
print(median(sum_by_day2))
```
```
## [1] 10766.19
```
## Are there differences in activity patterns between weekdays and weekends?
Now we add a factor indicating whether a particular day was a weekday or a weekend
```r
#Weekday vs Weekend
data_imputed$dayofweek <- weekdays(strptime(data_imputed$date, "%Y-%m-%d"))
wdays <- c("Monday", "Tuesday", "Wednesday", "Thursday", "Friday")
wend <- c("Saturday", "Sunday")
for(i in 1:dim(data_imputed)[1]) {
if(data_imputed$dayofweek[i] %in% wdays) {
data_imputed$dayofweek[i] <- 1
} else {
data_imputed$dayofweek[i] <- 2
}
}
data_imputed$dayofweek <- factor(data_imputed$dayofweek, levels = c(1, 2), labels = c("Weekday", "Weekend"))
```
Finally we make two time series plots to compare activity between weekdays and weekends.
```r
#Comparing plots
library(dplyr)
```
```
##
## Attaching package: 'dplyr'
```
```
## The following objects are masked from 'package:lubridate':
##
## intersect, setdiff, union
```
```
## The following objects are masked from 'package:stats':
##
## filter, lag
```
```
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
```
```r
data3 <- as.tbl(data_imputed)
data3 <- group_by(data3, dayofweek, interval)
data3 <- summarise(data3, mean = mean(steps))
library(lattice)
xyplot(mean ~ interval | dayofweek, data = data3, type = "l", xlab = "Interval", ylab = "Number of Steps", layout = c(1,2))
```
<!-- -->
<file_sep>/PA1_template.Rmd
---
title: "Reproducible Research: Peer Assessment 1"
output:
html_document:
keep_md: true
---
## Loading and preprocessing the data
Place the data file activity.zip in your working directory and run the following to load the data
```{r load}
data <- read.csv(unz("activity.zip", "activity.csv"), header = TRUE)
head(data)
```
## What is mean total number of steps taken per day?
First we find the total number of steps for each day and display a histogram of the results
```{r totals}
sum_by_day <- tapply(data$steps, data$date, sum, na.rm = TRUE)
hist(sum_by_day, main = "Total Steps per Day", xlab = "Total Steps", ylab = "Days")
```
The mean and the median of the daily totals are as follows
```{r mean total}
print(mean(sum_by_day))
print(median(sum_by_day))
```
## What is the average daily activity pattern?
We find the average number of steps for each time interval
```{r averages by interval}
average_by_interval <- tapply(data$steps, data$interval, mean, na.rm = TRUE)
```
Converting the intervals into a list of time variables
```{r}
times <- unique(data$interval)
times <- formatC(times, width = 4, format = "d", flag = "0")
times <- strptime(times, "%H%M")
```
Plotting the time series of averages
```{r time plot}
library(ggplot2)
data2 <- data.frame(interval = times, average = average_by_interval)
g <- ggplot(data2, aes(interval, average)) + geom_line() + scale_x_datetime(date_labels = "%H:%M")
print(g)
```
Finding the maximum average number of steps and when it occurs
```{r maximum}
library(lubridate)
maximum <- max(data2$average)
location <- grep(maximum, data2$average)
h <- hour(data2[location, 1])
m <- minute(data2[location, 1])
print(paste("Maximum average number of steps at", h, ":", m))
```
## Imputing missing values
Finding the number of missing values in the rows of our dataframe
```{r missing}
#1
sum(is.na(data$steps))
sum(is.na(data$date))
sum(is.na(data$interval))
```
We see that only the first column has missing values.
Replace each missing value with the mean of that interval
```{r replace}
#2-3 Using mean of each interval to replace NA
data_imputed <- data
for(i in 1:dim(data_imputed)[1]) {
if(is.na(data_imputed$steps[i])) {
data_imputed$steps[i] <- data2[as.character(data_imputed$interval[i]),2]
}
}
```
Recalculating the total number of steps per day and displaying a new histogram
```{r resum}
#4
sum_by_day2 <- tapply(data_imputed$steps, data_imputed$date, sum)
hist(sum_by_day2, main = "Total Steps per Day", xlab = "Total Steps", ylab = "Days")
```
Recomputing the mean and median of the totals
```{r re-average}
print(mean(sum_by_day2))
print(median(sum_by_day2))
```
## Are there differences in activity patterns between weekdays and weekends?
Now we add a factor indicating whether a particular day was a weekday or a weekend
```{r indicating wday or wend}
#Weekday vs Weekend
data_imputed$dayofweek <- weekdays(strptime(data_imputed$date, "%Y-%m-%d"))
wdays <- c("Monday", "Tuesday", "Wednesday", "Thursday", "Friday")
wend <- c("Saturday", "Sunday")
for(i in 1:dim(data_imputed)[1]) {
if(data_imputed$dayofweek[i] %in% wdays) {
data_imputed$dayofweek[i] <- 1
} else {
data_imputed$dayofweek[i] <- 2
}
}
data_imputed$dayofweek <- factor(data_imputed$dayofweek, levels = c(1, 2), labels = c("Weekday", "Weekend"))
```
Finally we make two time series plots to compare activity between weekdays and weekends.
```{r comparing plots}
#Comparing plots
library(dplyr)
data3 <- as.tbl(data_imputed)
data3 <- group_by(data3, dayofweek, interval)
data3 <- summarise(data3, mean = mean(steps))
library(lattice)
xyplot(mean ~ interval | dayofweek, data = data3, type = "l", xlab = "Interval", ylab = "Number of Steps", layout = c(1,2))
```
<file_sep>/script.R
data <- read.csv(unz("activity.zip", "activity.csv"), header = TRUE)
#sum of steps per day
sum_by_day <- tapply(data$steps, data$date, sum, na.rm = TRUE)
#hist(sum_by_day, main = "Total Steps per Day", xlab = "Total Steps", ylab = "Days")
print(mean(sum_by_day))
print(median(sum_by_day))
#average per interval across all the days
average_by_interval <- tapply(data$steps, data$interval, mean, na.rm = TRUE)
times <- unique(data$interval)
times <- formatC(times, width = 4, format = "d", flag = "0")
times <- strptime(times, "%H%M")
library(ggplot2)
data2 <- data.frame(interval = times, average = average_by_interval)
#g <- ggplot(data2, aes(interval, average)) + geom_line() + scale_x_datetime(date_labels = "%H:%M")
#print(g)
library(lubridate)
maximum <- max(data2$average)
location <- grep(maximum, data2$average)
h <- hour(data2[location, 1])
m <- minute(data2[location, 1])
print(paste("Maximum average number of steps at", h, ":", m))
#Imputing missing values
#1
sum(is.na(data$steps))
sum(is.na(data$date))
sum(is.na(data$interval))
#2-3 Using mean of each interval to replace NA
data_imputed <- data
for(i in 1:dim(data_imputed)[1]) {
if(is.na(data_imputed$steps[i])) {
data_imputed$steps[i] <- data2[as.character(data_imputed$interval[i]),2]
}
}
#4
sum_by_day2 <- tapply(data_imputed$steps, data_imputed$date, sum)
#hist(sum_by_day2, main = "Total Steps per Day", xlab = "Total Steps", ylab = "Days")
print(mean(sum_by_day2))
print(median(sum_by_day2))
#Weekday vs Weekend
data_imputed$dayofweek <- weekdays(strptime(data_imputed$date, "%Y-%m-%d"))
wdays <- c("Monday", "Tuesday", "Wednesday", "Thursday", "Friday")
wend <- c("Saturday", "Sunday")
for(i in 1:dim(data_imputed)[1]) {
if(data_imputed$dayofweek[i] %in% wdays) {
data_imputed$dayofweek[i] <- 1
} else {
data_imputed$dayofweek[i] <- 2
}
}
data_imputed$dayofweek <- factor(data_imputed$dayofweek, levels = c(1, 2), labels = c("Weekday", "Weekend"))
#Comparing plots
library(dplyr)
data3 <- as.tbl(data_imputed)
data3 <- group_by(data3, dayofweek, interval)
data3 <- summarise(data3, mean = mean(steps))
library(lattice)
xyplot(mean ~ interval | dayofweek, data = data3, type = "l", xlab = "Interval", ylab = "Number of Steps", layout = c(1,2))
|
e13e28e51683eeba09415bde567237361c6fed7f
|
[
"Markdown",
"R",
"RMarkdown"
] | 3 |
Markdown
|
jiawei103/RepData_PeerAssessment1
|
5abf5749ba090aa9382e82ddd4e85ef62d2838ba
|
e64e58f18637c696a8f2a089fba1a602716d843a
|
refs/heads/master
|
<repo_name>thejoshcruz/demoservice<file_sep>/DemoService/Data/IDataClient.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Couchbase.N1QL;
namespace DemoService.Data
{
/// <summary>
/// things you can do with a data client
/// </summary>
public interface IDataClient
{
List<dynamic> ExecuteQuery(string name, string query);
List<dynamic> ExecuteQuery(string name, IQueryRequest query);
void Upsert(string name, object content);
}
}
<file_sep>/DemoService/Cryptography/Hasher.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace DemoService.Cryptography
{
public static class Hasher
{
public static byte[] GetHash(string toHash)
{
            // delegate to the internal SHA-512 implementation in HashLogic
            return new HashLogic().ComputeHash(toHash);
}
}
}
<file_sep>/DemoService/Models/Enumerations.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace DemoService.Models
{
/// <summary>
/// possible account status
/// </summary>
public enum AccountStatus
{
/// <summary>
/// active
/// </summary>
Active = 0,
/// <summary>
/// not active (aka inactive, pointdexter)
/// </summary>
Inactive,
/// <summary>
/// someone gone and canceled this account, pa!
/// </summary>
Canceled
}
/// <summary>
/// possible error codes
/// </summary>
public enum ErrorCodes
{
/// <summary>
/// nobody knows what happened
/// </summary>
Unknown = 0,
/// <summary>
/// something broke while processing with couchbase
/// </summary>
CouchbaseProcessing,
/// <summary>
/// invalid input values
/// </summary>
InvalidInputParameters
}
}
<file_sep>/DemoService.Tests/Configuration/CouchbaseConfigTests.cs
using System;
using System.Collections.Generic;
using System.Text;
using NUnit.Framework;
using DemoService.Configuration;
using DemoService.Exceptions;
namespace DemoService.Tests.Configuration
{
[TestFixture]
public class CouchbaseConfigTests
{
private readonly string FAKEPORTFOLIO = "PortfolioState";
private readonly string FAKEACCOUNT = "AccountState";
private readonly string FAKEUSERS = "Users";
[SetUp]
public void SetTestEnvironmentVariables()
{
Environment.SetEnvironmentVariable("COUCHBASE_USER", "username");
Environment.SetEnvironmentVariable("COUCHBASE_PWD", "<PASSWORD>");
Environment.SetEnvironmentVariable("COUCHBASE_SERVERS", "server01;server02");
Environment.SetEnvironmentVariable("COUCHBASE_PORTFOLIO", FAKEPORTFOLIO);
Environment.SetEnvironmentVariable("COUCHBASE_ACCOUNT", FAKEACCOUNT);
Environment.SetEnvironmentVariable("COUCHBASE_USERS", FAKEUSERS);
}
[Test]
public void Initialize_WithInvalidUsername_ThrowsConfigurationException()
{
Environment.SetEnvironmentVariable("COUCHBASE_USER", string.Empty);
CouchbaseConfig config = new CouchbaseConfig();
bool result = false;
try
{
config.Initialize();
}
catch (ConfigurationException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void Initialize_WithInvalidPassword_ThrowsConfigurationException()
{
Environment.SetEnvironmentVariable("COUCHBASE_PWD", string.Empty);
CouchbaseConfig config = new CouchbaseConfig();
bool result = false;
try
{
config.Initialize();
}
catch (ConfigurationException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void Initialize_WithInvalidServerList_ThrowsConfigurationException()
{
Environment.SetEnvironmentVariable("COUCHBASE_SERVERS", string.Empty);
CouchbaseConfig config = new CouchbaseConfig();
bool result = false;
try
{
config.Initialize();
}
catch (ConfigurationException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void Initialize_WithInvalidPortfolioName_ThrowsConfigurationException()
{
Environment.SetEnvironmentVariable("COUCHBASE_PORTFOLIO", string.Empty);
CouchbaseConfig config = new CouchbaseConfig();
bool result = false;
try
{
config.Initialize();
}
catch (ConfigurationException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void Initialize_WithInvalidAccountName_ThrowsConfigurationException()
{
Environment.SetEnvironmentVariable("COUCHBASE_ACCOUNT", string.Empty);
CouchbaseConfig config = new CouchbaseConfig();
bool result = false;
try
{
config.Initialize();
}
catch (ConfigurationException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void Initialize_WithInvalidUsersBucket_ThrowsConfigurationException()
{
Environment.SetEnvironmentVariable("COUCHBASE_USERS", string.Empty);
CouchbaseConfig config = new CouchbaseConfig();
bool result = false;
try
{
config.Initialize();
}
catch (ConfigurationException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void Initialize_WithAllInvalidBuckets_ThrowsConfigurationException()
{
Environment.SetEnvironmentVariable("COUCHBASE_PORTFOLIO", string.Empty);
Environment.SetEnvironmentVariable("COUCHBASE_ACCOUNT", string.Empty);
Environment.SetEnvironmentVariable("COUCHBASE_USERS", string.Empty);
CouchbaseConfig config = new CouchbaseConfig();
bool result = false;
try
{
config.Initialize();
}
catch (ConfigurationException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void Initialize_WithValidInputs_ConfiguresCorrectly()
{
CouchbaseConfig config = new CouchbaseConfig();
bool result = false;
try
{
config.Initialize();
result = config.AccountBucketName == FAKEACCOUNT
&& config.PortfolioBucketName == FAKEPORTFOLIO
&& config.UserBucketName == FAKEUSERS;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void Initialize_WithValidInputs_InitializesClusterHelper()
{
CouchbaseConfig config = new CouchbaseConfig();
bool result = false;
try
{
config.Initialize();
result = Couchbase.ClusterHelper.Initialized;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
}
}
<file_sep>/DemoService/Controllers/AdminController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Couchbase;
using Couchbase.Configuration.Client;
using Couchbase.Authentication;
using DemoService.Data;
using DemoService.Exceptions;
using DemoService.Models;
namespace DemoService.Controllers
{
/// <summary>
/// the admin controller where all admin stuff lives
/// </summary>
[Route("api/[controller]")]
public class AdminController : BaseController
{
/// <summary>
/// default constructor
/// </summary>
/// <param name="dataProcessor">the processors to use when performing actions with data</param>
public AdminController(IDataProcessor dataProcessor)
:base(dataProcessor)
{ }
/// <summary>
/// provides the ability to check if the service is up and accepting requests
/// </summary>
/// <remarks>
/// Sample request:
///
        ///     GET /api/Admin/Ping?echo=woohoo!
///
/// </remarks>
/// <param name="echo">input string to return in the response</param>
/// <returns>Returns a response string that includes the input string</returns>
[HttpGet("Ping")]
public string Ping([FromQuery]string echo)
{
return $"Received {echo} at {DateTime.Now.ToString("HH:mm:ss")}";
}
}
}
<file_sep>/DemoService/Controllers/BaseController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using DemoService.Data;
namespace DemoService.Controllers
{
/// <summary>
/// base controller
/// </summary>
public class BaseController : Controller
{
/// <summary>
/// used for processing functions on data
/// </summary>
public IDataProcessor DataProcessor { get; set; }
/// <summary>
/// default constructor
/// </summary>
/// <param name="dataProcessor">the processors to use when performing actions with data</param>
public BaseController(IDataProcessor dataProcessor)
{
DataProcessor = dataProcessor;
}
}
}
<file_sep>/DemoService/Cryptography/HashLogic.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
namespace DemoService.Cryptography
{
internal class HashLogic
{
internal byte[] ComputeHash(string toHash)
{
byte[] hashed = null;
using (SHA512 hasher = SHA512.Create())
{
hashed = hasher.ComputeHash(Encoding.UTF8.GetBytes(toHash));
}
return hashed;
}
}
}
<file_sep>/DemoService.Tests/Controllers/BaseControllerTests.cs
using System;
using System.Collections.Generic;
using System.Text;
using NUnit.Framework;
using Moq;
using DemoService.Controllers;
using DemoService.Data;
namespace DemoService.Tests.Controllers
{
[TestFixture]
public class BaseControllerTests
{
[Test]
public void Constructor_WithDataProcessorInput_SetsDataProcessor()
{
CouchbaseProcessor proc = new CouchbaseProcessor(new CouchbaseDataClient());
BaseController controller = new BaseController(proc);
Assert.AreEqual(controller.DataProcessor, proc);
}
}
}
<file_sep>/DemoService/Configuration/CouchbaseConfig.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Couchbase;
using Couchbase.Configuration.Client;
using Couchbase.Authentication;
using DemoService.Exceptions;
namespace DemoService.Configuration
{
/// <summary>
/// configuration of the couchbase client; offers a singleton instance
/// </summary>
public class CouchbaseConfig
{
/// <summary>
/// the uris to the couchbase server
/// </summary>
private List<Uri> ServerUris { get; set; }
/// <summary>
/// the couchbase buckets to configure with the ClusterHelper
/// </summary>
private List<string> Buckets { get; set; }
/// <summary>
/// username to access couchbase
/// </summary>
private string Username { get; set; }
/// <summary>
/// password to access couchbase
/// </summary>
private string Password { get; set; }
/// <summary>
/// the couchbase cluster instance
/// </summary>
private Cluster Cluster { get; set; }
/// <summary>
/// the name of the account bucket
/// </summary>
public string AccountBucketName { get; set; }
/// <summary>
/// the name of the portfolio bucket
/// </summary>
public string PortfolioBucketName { get; set; }
/// <summary>
/// the name of the user bucket
/// </summary>
public string UserBucketName { get; set; }
/// <summary>
/// initialize the couchbase configuration values (for use when processing actions)
/// </summary>
public void Initialize()
{
ParseEnvironmentVariables();
VerifyEnvironmentVariables();
ClientConfiguration config = new ClientConfiguration();
config.BucketConfigs.Clear();
config.Servers = ServerUris;
// add all the buckets to the config
Buckets.ForEach(bucket => config.BucketConfigs.Add(bucket, new BucketConfiguration { BucketName = bucket, Username = Username, Password = <PASSWORD> }));
// set up cluster
Cluster = new Cluster(config);
PasswordAuthenticator authenticator = new PasswordAuthenticator(Username, Password);
Cluster.Authenticate(authenticator);
ClusterHelper.Initialize(Cluster.Configuration);
}
/// <summary>
/// close the ClusterHelper
/// </summary>
public void Close()
{
if (Cluster != null)
{
Cluster.Dispose();
}
ClusterHelper.Close();
}
/// <summary>
/// parse and import the env variables into the config for use while processing
/// </summary>
private void ParseEnvironmentVariables()
{
// get the couchbase details from the environment vars passed in
Username = Environment.GetEnvironmentVariable("COUCHBASE_USER");
Password = Environment.GetEnvironmentVariable("<PASSWORD>");
// get servers from the env variables
string servers = Environment.GetEnvironmentVariable("COUCHBASE_SERVERS");
if (!String.IsNullOrEmpty(servers))
{
string[] list = servers.Split(";", StringSplitOptions.RemoveEmptyEntries);
List<Uri> uris = new List<Uri>();
foreach (string uri in list)
{
string tmp = "http://" + uri;
uris.Add(new Uri(tmp));
}
ServerUris = uris;
}
// get the buckets to configure as well
PortfolioBucketName = Environment.GetEnvironmentVariable("COUCHBASE_PORTFOLIO");
AccountBucketName = Environment.GetEnvironmentVariable("COUCHBASE_ACCOUNT");
UserBucketName = Environment.GetEnvironmentVariable("COUCHBASE_USERS");
List<string> buckets = new List<string> { PortfolioBucketName, AccountBucketName, UserBucketName };
Buckets = buckets;
            // debug. prove to me you're working (the password is deliberately not echoed)
            Console.WriteLine($"Starting: {servers} : {Username}");
Console.WriteLine($"Buckets: {PortfolioBucketName}, {AccountBucketName}, {UserBucketName}");
}
private void VerifyEnvironmentVariables()
{
if (String.IsNullOrEmpty(Username))
{
throw new ConfigurationException("invalid username");
}
if (String.IsNullOrEmpty(Password))
{
throw new ConfigurationException("invalid password");
}
if (Buckets == null
|| Buckets.Count == 0
|| !Buckets.Exists(bucket => !String.IsNullOrEmpty(bucket)))
{
throw new ConfigurationException("bucket list cannot be null or empty");
}
if (String.IsNullOrEmpty(PortfolioBucketName))
{
throw new ConfigurationException("invalid portfolio bucket name");
}
if (String.IsNullOrEmpty(AccountBucketName))
{
throw new ConfigurationException("invalid account bucket name");
}
if (String.IsNullOrEmpty(UserBucketName))
{
throw new ConfigurationException("invalid user bucket name");
}
if (ServerUris == null || ServerUris.Count == 0)
{
throw new ConfigurationException("server list cannot be null or empty");
}
}
}
}
<file_sep>/DemoService.Tests/Controllers/UserControllerTests.cs
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.AspNetCore.Mvc;
using NUnit.Framework;
using Moq;
using DemoService.Controllers;
using DemoService.Data;
using DemoService.Exceptions;
using DemoService.Models;
namespace DemoService.Tests.Controllers
{
public class UserControllerTests : BaseTests
{
[Test]
public void Authenticate_WithInvalidUsername_ReturnsBadRequest()
{
UserController controller = new UserController(new CouchbaseProcessor(new CouchbaseDataClient()));
object result = controller.Authenticate(string.Empty, "not empty");
Assert.IsInstanceOf(typeof(BadRequestObjectResult), result);
}
[Test]
public void Authenticate_WithInvalidPassword_ReturnsBadRequest()
{
UserController controller = new UserController(new CouchbaseProcessor(new CouchbaseDataClient()));
object result = controller.Authenticate("not empty", string.Empty);
Assert.IsInstanceOf(typeof(BadRequestObjectResult), result);
}
[Test]
public void Authenticate_EncountersException_ReturnsBadRequest()
{
Mock<IDataProcessor> mock = new Mock<IDataProcessor>();
mock.Setup(m => m.AuthenticateUser(It.IsAny<string>(), It.IsAny<string>())).Throws(new Exception("dogs and cats, living together!"));
UserController controller = new UserController(mock.Object);
object result = controller.Authenticate("user", "pwd");
int code = ParseBadRequestForErrorCode(result);
Assert.AreEqual(code, (int)ErrorCodes.Unknown);
}
[Test]
public void Authenticate_EncountersCouchbaseException_ReturnsBadRequest()
{
Mock<IDataProcessor> mock = new Mock<IDataProcessor>();
mock.Setup(m => m.AuthenticateUser(It.IsAny<string>(), It.IsAny<string>())).Throws(new CouchbaseException("You keep using that word. I do not think it means what you think it means."));
UserController controller = new UserController(mock.Object);
object result = controller.Authenticate("user", "pwd");
int code = ParseBadRequestForErrorCode(result);
Assert.AreEqual(code, (int)ErrorCodes.CouchbaseProcessing);
}
}
}
<file_sep>/DemoService.Tests/Controllers/PortfolioControllerTests.cs
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.AspNetCore.Mvc;
using NUnit.Framework;
using Moq;
using DemoService.Controllers;
using DemoService.Data;
using DemoService.Exceptions;
using DemoService.Models;
namespace DemoService.Tests.Controllers
{
[TestFixture]
public class PortfolioControllerTests : BaseTests
{
[Test]
public void GetPortfolios_EncountersException_ReturnsBadRequest()
{
Mock<IDataProcessor> mock = new Mock<IDataProcessor>();
mock.Setup(m => m.GetPortfolios()).Throws(new Exception("I'm Henry the Eighth, I am. Henry the Eighth, I am, I am"));
PortfolioController controller = new PortfolioController(mock.Object);
object result = controller.GetPortfolios();
int code = ParseBadRequestForErrorCode(result);
Assert.AreEqual(code, (int)ErrorCodes.CouchbaseProcessing);
}
[Test]
public void GetPortfolios_WithValidInputs_ReturnsPortfolios()
{
PortfolioState portfolio = new PortfolioState
{
AccountCount = 2,
AsOfDate = DateTime.Now,
Number = "1",
Name = "Henry!",
TotalBalance = 100.0M
};
List<PortfolioState> list = new List<PortfolioState> { portfolio };
Mock<IDataProcessor> mock = new Mock<IDataProcessor>();
mock.Setup(m => m.GetPortfolios()).Returns(list as object);
PortfolioController controller = new PortfolioController(mock.Object);
object result = controller.GetPortfolios();
object portfolios = null;
if (result is OkObjectResult)
{
portfolios = ((OkObjectResult)result).Value;
}
Assert.AreEqual(portfolios, list as object);
}
}
}
<file_sep>/DemoService/Models/ErrorDetails.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace DemoService.Models
{
/// <summary>
/// details about an error returned from an incoming request
/// </summary>
public class ErrorDetails
{
/// <summary>
/// description of what went wrong
/// </summary>
public string Message { get; set; }
/// <summary>
/// the error code
/// </summary>
public int Code { get; set; }
}
}
<file_sep>/DemoService/Data/CouchbaseProcessor.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Couchbase;
using Couchbase.Core;
using Couchbase.Configuration.Client;
using Couchbase.Authentication;
using Couchbase.N1QL;
using Newtonsoft.Json;
using DemoService.Configuration;
using DemoService.Exceptions;
using DemoService.Models;
namespace DemoService.Data
{
/// <summary>
/// provides functions to perform on the couchbase cluster
/// </summary>
public class CouchbaseProcessor : IDataProcessor
{
private readonly string SELECTACCOUNT = "select AccountNumber, PortfolioName, CurrentBalance, AccountStatus, AsOfDate, LastPaymentDate, LastPaymentAmount, DaysDelinquent, Username, AccountInventory";
private IDataClient DataClient;
/// <summary>
/// default constructor
/// </summary>
/// <param name="dataClient">data client to use for data operations</param>
public CouchbaseProcessor(IDataClient dataClient)
{
DataClient = dataClient;
}
/// <summary>
/// gets all portfolios from the bucket
/// </summary>
/// <returns>Returns a list of portfolios as an object</returns>
public object GetPortfolios()
{
string name = CouchbaseConfigManager.Instance.PortfolioBucketName;
string query = $"select PortfolioName, AccountCount, Name, TotalBalance, AsOfDate, Debug from {name}";
return DataClient.ExecuteQuery(name, query);
}
/// <summary>
/// gets all portfolios using the aggregated data from the account bucket
/// </summary>
/// <returns>Returns a list of portfolios as an object</returns>
public object GetPortfoliosByAggregate()
{
string name = CouchbaseConfigManager.Instance.AccountBucketName;
string query = $"select PortfolioName, TRUNC(SUM(CurrentBalance),2) as TotalBalance, COUNT(AccountNumber) as AccountCount from {name} GROUP BY PortfolioName";
return DataClient.ExecuteQuery(name, query);
}
/// <summary>
/// gets all account for a given portfolio
/// </summary>
/// <param name="portfolioName">the name of the portfolio to retrieve accounts for</param>
/// <returns>Returns a list of accounts</returns>
public object GetAccountsByPortfolioName(string portfolioName)
{
if (String.IsNullOrEmpty(portfolioName))
{
throw new ArgumentException("invalid or null portfolio number");
}
string name = CouchbaseConfigManager.Instance.AccountBucketName;
string query = $"{SELECTACCOUNT} from {name} WHERE PortfolioName = '{portfolioName}'";
return DataClient.ExecuteQuery(name, query);
}
/// <summary>
/// gets all account for a given user
/// </summary>
/// <param name="username">the name of the user to retrieve accounts for</param>
/// <returns>Returns a list of accounts</returns>
public object GetAccountsByUsername(string username)
{
if (String.IsNullOrEmpty(username))
{
throw new ArgumentException("invalid or null username");
}
string name = CouchbaseConfigManager.Instance.AccountBucketName;
string query = $"{SELECTACCOUNT} from {name} WHERE Username = '{username}'";
return DataClient.ExecuteQuery(name, query);
}
/// <summary>
/// authenticates a user
/// </summary>
/// <param name="username">the username to authenticate</param>
/// <param name="password">the password to use in authentication</param>
/// <returns></returns>
public object AuthenticateUser(string username, string password)
{
if (String.IsNullOrEmpty(username)
|| String.IsNullOrEmpty(password))
{
throw new ArgumentException("invalid or empty credentials");
}
string name = CouchbaseConfigManager.Instance.UserBucketName;
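            // NOTE: interpolating user input straight into N1QL is injection-prone; the
            // IDataClient overload that accepts an IQueryRequest supports parameterized
            // (e.g. named-parameter) queries and would be the safer option here.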
string query = $"select username, lastLogin from {name} where username = '{username}' and pwd = '{<PASSWORD>}'";
List<dynamic> results = DataClient.ExecuteQuery(name, query);
if (results.Count != 1)
{
throw new CouchbaseException("invalid login");
}
return results.First();
}
private void AddAccount(AccountState account)
{
DataClient.Upsert(CouchbaseConfigManager.Instance.AccountBucketName, account);
}
private void AddPortfolioState(PortfolioState portfolio)
{
DataClient.Upsert(CouchbaseConfigManager.Instance.PortfolioBucketName, portfolio);
}
private void AddUsers(int count)
{
List<User> users = new List<User>
{
new User { Username = "admin", Password = "<PASSWORD>", LastLogin = DateTime.Now, ID = 0 }
};
for (int i=1; i <= count; i++)
{
users.Add(new User { ID = i, Username = $"user{i}", Password = "<PASSWORD>", LastLogin = DateTime.Now });
}
users.ForEach(user => DataClient.Upsert(CouchbaseConfigManager.Instance.UserBucketName, user));
}
}
}
<file_sep>/DemoService.Tests/Controllers/AdminControllerTests.cs
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.AspNetCore.Mvc;
using NUnit.Framework;
using Moq;
using DemoService.Controllers;
using DemoService.Data;
using DemoService.Exceptions;
using DemoService.Models;
namespace DemoService.Tests.Controllers
{
[TestFixture]
public class AdminControllerTests : BaseTests
{
[Test]
public void Ping_WithValidInput_ReturnsCorrectEcho()
{
AdminController controller = new AdminController(new CouchbaseProcessor(new CouchbaseDataClient()));
string echo = "HAHAHAHAHAHA";
string result = controller.Ping(echo);
Assert.IsTrue(result.Contains(echo));
}
}
}
<file_sep>/DemoService/Data/CouchbaseDataClient.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Couchbase;
using Couchbase.Core;
using Couchbase.N1QL;
namespace DemoService.Data
{
public class CouchbaseDataClient : IDataClient
{
public List<dynamic> ExecuteQuery(string name, string query)
{
return ClusterHelper
.GetBucket(name)
.Query<dynamic>(query)
.Rows;
}
public List<dynamic> ExecuteQuery(string name, IQueryRequest query)
{
return ClusterHelper
.GetBucket(name)
.Query<dynamic>(query)
.Rows;
}
public void Upsert(string name, object content)
{
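            // NOTE: a timestamp-based key is not collision-proof; two upserts within the same
            // millisecond would overwrite each other, so a GUID would be a safer document Id.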
var document = new Document<dynamic>
{
Id = DateTime.Now.ToString("MMddHHmmssfff"),
Content = content
};
IBucket bucket = ClusterHelper.GetBucket(name);
var upsert = bucket.Upsert(document);
if (!upsert.Success)
{
throw new Exception("failed to upsert record");
}
}
}
}
<file_sep>/DemoService.Tests/Data/CouchbaseProcessorTests.cs
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.AspNetCore.Mvc;
using NUnit.Framework;
using Moq;
using DemoService.Data;
using DemoService.Exceptions;
using DemoService.Models;
namespace DemoService.Tests.Data
{
[TestFixture]
public class CouchbaseProcessorTests
{
private readonly string FakeUsername = "InigoMontoya";
private readonly string FakePortfolioName = "Portfolio!";
private readonly string FakeAccountNumber = "8675309";
private List<dynamic> FakePortfolioList
{
get
{
dynamic d = new PortfolioState
{
AccountCount = 21,
AsOfDate = DateTime.Now,
Number = "1",
Name = FakePortfolioName,
TotalBalance = 100.0M
};
return new List<dynamic> { d };
}
}
private List<dynamic> FakeAccountList
{
get
{
dynamic d = new AccountState
{
AccountStatus = AccountStatus.Active,
AsOfDate = DateTime.Now,
CurrentBalance = 100.0M,
AccountNumber = FakeAccountNumber,
PortfolioName = "Portfolio01",
AccountInventory = "Inventory01"
};
return new List<dynamic> { d };
}
}
private List<dynamic> FakeUserList
{
get
{
dynamic d = new User {
ID = 0,
LastLogin = DateTime.Now,
Password = "<PASSWORD>",
Username = FakeUsername
};
return new List<dynamic> { d };
}
}
private List<dynamic> FakeEmptyUserList
{
get { return new List<dynamic>(); }
}
[Test]
public void GetPortfolios_WithValidInputs_ReturnsPortfolioList()
{
Mock<IDataClient> mock = new Mock<IDataClient>();
mock.Setup(m => m.ExecuteQuery(It.IsAny<string>(), It.IsAny<string>())).Returns(FakePortfolioList);
CouchbaseProcessor proc = new CouchbaseProcessor(mock.Object);
object result = proc.GetPortfolios();
string name = string.Empty;
try
{
name = ((PortfolioState)((List<dynamic>)result)[0]).Name;
}
catch
{
}
Assert.AreEqual(name, FakePortfolioName);
}
[Test]
public void GetAccountsByPortfolioName_WithInvalidPortfolioName_ThrowsArgumentException()
{
CouchbaseProcessor proc = new CouchbaseProcessor(new CouchbaseDataClient());
bool result = false;
try
{
object tmp = proc.GetAccountsByPortfolioName(null);
}
catch (ArgumentException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void GetAccountsByPortfolioName_WithValidInputs_ReturnsPortfolioList()
{
Mock<IDataClient> mock = new Mock<IDataClient>();
mock.Setup(m => m.ExecuteQuery(It.IsAny<string>(), It.IsAny<string>())).Returns(FakeAccountList);
CouchbaseProcessor proc = new CouchbaseProcessor(mock.Object);
object result = proc.GetAccountsByPortfolioName("1");
string accountNumber = string.Empty;
try
{
accountNumber = ((AccountState)((List<dynamic>)result)[0]).AccountNumber;
}
catch
{
}
Assert.AreEqual(accountNumber, FakeAccountNumber);
}
[Test]
public void GetAccountsByUsername_WithValidInputs_ReturnsPortfolioList()
{
Mock<IDataClient> mock = new Mock<IDataClient>();
mock.Setup(m => m.ExecuteQuery(It.IsAny<string>(), It.IsAny<string>())).Returns(FakeAccountList);
CouchbaseProcessor proc = new CouchbaseProcessor(mock.Object);
object result = proc.GetAccountsByUsername("user1");
string accountNumber = string.Empty;
try
{
accountNumber = ((AccountState)((List<dynamic>)result)[0]).AccountNumber;
}
catch
{
}
Assert.AreEqual(accountNumber, FakeAccountNumber);
}
[Test]
public void AuthenticateUser_WithInvalidUsername_ThrowsArgumentException()
{
CouchbaseProcessor proc = new CouchbaseProcessor(new CouchbaseDataClient());
bool result = false;
try
{
object tmp = proc.AuthenticateUser(null, "password");
}
catch (ArgumentException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void AuthenticateUser_WithInvalidPassword_ThrowsArgumentException()
{
CouchbaseProcessor proc = new CouchbaseProcessor(new CouchbaseDataClient());
bool result = false;
try
{
object tmp = proc.AuthenticateUser("username", null);
}
catch (ArgumentException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
[Test]
public void AuthenticateUser_WithValidInputsAndExistingUser_ReturnsUserDetails()
{
Mock<IDataClient> mock = new Mock<IDataClient>();
mock.Setup(m => m.ExecuteQuery(It.IsAny<string>(), It.IsAny<string>())).Returns(FakeUserList);
CouchbaseProcessor proc = new CouchbaseProcessor(mock.Object);
object result = proc.AuthenticateUser(FakeUsername, "<PASSWORD>");
string username = string.Empty;
try
{
username = ((User)result).Username;
}
catch
{
}
Assert.AreEqual(username, FakeUsername);
}
[Test]
public void AuthenticateUser_WithValidInputsAndNoUser_ThrowsCouchbaseException()
{
Mock<IDataClient> mock = new Mock<IDataClient>();
mock.Setup(m => m.ExecuteQuery(It.IsAny<string>(), It.IsAny<string>())).Returns(FakeEmptyUserList);
CouchbaseProcessor proc = new CouchbaseProcessor(mock.Object);
bool result = false;
try
{
object tmp = proc.AuthenticateUser(FakeUsername, "asecure<PASSWORD>");
}
catch (CouchbaseException)
{
result = true;
}
catch
{
result = false;
}
Assert.IsTrue(result);
}
}
}
<file_sep>/docker-compose.yml
version: '3'
services:
demoservice:
image: demoservice
container_name: demoservice
ports:
- 9119:80
build:
context: ./DemoService
dockerfile: Dockerfile
environment:
COUCHBASE_SERVERS: docker05.concordservicing.com;docker04.concordservicing.com;docker06.concordservicing.com
COUCHBASE_USER: admin
COUCHBASE_PWD: <PASSWORD>
COUCHBASE_PORTFOLIO: PortfolioState
COUCHBASE_ACCOUNT: AccountState
COUCHBASE_USERS: Users
<file_sep>/DemoService/Models/AccountState.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.ComponentModel.DataAnnotations;
namespace DemoService.Models
{
/// <summary>
/// account details as of certain date
/// </summary>
public class AccountState
{
/// <summary>
/// account identifier/number
/// </summary>
[Required]
public string AccountNumber { get; set; }
/// <summary>
/// portfolio name
/// </summary>
public string PortfolioName { get; set; }
/// <summary>
/// the name of the user to which this account belongs
/// </summary>
public string Username { get; set; }
/// <summary>
/// current balance of the account
/// </summary>
public decimal CurrentBalance { get; set; }
/// <summary>
/// status of the account
/// </summary>
public AccountStatus AccountStatus { get; set; }
/// <summary>
/// date this record represents
/// </summary>
public DateTime AsOfDate { get; set; }
/// <summary>
/// last payment date
/// </summary>
public DateTime LastPaymentDate { get; set; }
/// <summary>
/// amount of last payment
/// </summary>
public decimal LastPaymentAmount { get; set; }
/// <summary>
/// number of days delinquent
/// </summary>
public int DaysDelinquent { get; set; }
/// <summary>
/// description of the account inventory
/// </summary>
public string AccountInventory { get; set; }
/// <summary>
/// creates a random instance of AccountState
/// </summary>
/// <param name="id">the id of the new instance</param>
/// <param name="maxPortfolioId">the maximum portfolio id to use when creating the instance</param>
/// <param name="maxUserId">the maximum user id to use when creating the instance</param>
/// <returns></returns>
public static AccountState Create(int id, int maxPortfolioId, int maxUserId)
{
// to determine days delinq, let's do this:
// generate a last payment date between DateTime.Today and random (0-180) days from today
// subtract last payment date from 30 prior to today = days delinq
Random random = new Random();
// generate last payment date
DateTime lastPayDate = DateTime.Today.Subtract(new TimeSpan(random.Next(0, 180), 0, 0, 0));
int daysDelinq = DateTime.Today.Subtract(new TimeSpan(30, 0, 0, 0)).Subtract(lastPayDate).Days;
if (daysDelinq < 0)
{
// no negative days delinq
daysDelinq = 0;
}
AccountState account = new AccountState
{
AccountNumber = (10000000000 + id).ToString(),
PortfolioName = "Portfolio" + random.Next(1, maxPortfolioId + 1),
Username = "User" + random.Next(1, maxUserId + 1),
CurrentBalance = Math.Round(2000 * (decimal)random.NextDouble(), 2),
LastPaymentAmount = Math.Round(600 * (decimal)random.NextDouble(), 2),
LastPaymentDate = lastPayDate,
DaysDelinquent = daysDelinq,
AccountStatus = (AccountStatus)random.Next(0, 2),
AsOfDate = DateTime.Now,
AccountInventory = "Inventory" + id
};
return account;
}
}
}
<file_sep>/DemoService.Tests/Controllers/BaseTests.cs
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.AspNetCore.Mvc;
using DemoService.Models;
namespace DemoService.Tests.Controllers
{
/// <summary>
/// base class for tests; provides useful functions and properties for reuse by derived classes
/// </summary>
public class BaseTests
{
/// <summary>
        /// parse the error code from a bad request response object
/// </summary>
/// <param name="result">the bad request response from the service</param>
/// <returns>Returns the error code from the response; -1 if it fails to parse</returns>
public int ParseBadRequestForErrorCode(object result)
{
int code = -1;
if (result is BadRequestObjectResult)
{
object error = ((BadRequestObjectResult)result).Value;
if (error is ErrorDetails)
{
code = ((ErrorDetails)error).Code;
}
}
return code;
}
}
}
<file_sep>/DemoService/Startup.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.PlatformAbstractions;
using Swashbuckle.AspNetCore;
using Swashbuckle.AspNetCore.Swagger;
using DemoService.Configuration;
using DemoService.Data;
namespace DemoService
{
/// <summary>
/// startup service
/// </summary>
public class Startup
{
/// <summary>
/// This method gets called by the runtime. Use this method to add services to the container.
/// For more information on how to configure your application, visit https://go.microsoft.com/fwlink/?LinkID=398940
/// </summary>
/// <param name="services">services collection</param>
public void ConfigureServices(IServiceCollection services)
{
// set up dependency injection concrete services
services.AddTransient<IDataProcessor, CouchbaseProcessor>();
services.AddTransient<IDataClient, CouchbaseDataClient>();
services.AddCors();
services.AddMvc();
// register the Swagger generator
services.AddSwaggerGen(c =>
{
c.SwaggerDoc("v1", new Info
{
Title = "DemoService",
Version = "v1",
Description = "Service for CouchBase Demo"
});
// Set the comments path for the Swagger JSON and UI.
var basePath = PlatformServices.Default.Application.ApplicationBasePath;
var xmlPath = Path.Combine(basePath, "DemoService.xml");
c.IncludeXmlComments(xmlPath);
});
}
/// <summary>
/// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
/// </summary>
/// <param name="app">the application</param>
/// <param name="lifetime">application lifetime</param>
/// <param name="env">the application environment</param>
public void Configure(IApplicationBuilder app, IApplicationLifetime lifetime, IHostingEnvironment env)
{
lifetime.ApplicationStopping.Register(OnShutdown);
app.UseCors(builder => builder
.AllowAnyOrigin()
.AllowAnyMethod()
.AllowAnyHeader()
.AllowCredentials());
app.UseStaticFiles();
// set up the couchbase ClusterHelper and configuration
CouchbaseConfigManager.Instance.Initialize();
// enable middleware to serve generated Swagger as a JSON endpoint
app.UseSwagger();
// Enable middleware to serve swagger-ui (HTML, JS, CSS, etc.), specifying the Swagger JSON endpoint.
app.UseSwaggerUI(c =>
{
c.RoutePrefix = "help";
c.InjectStylesheet("/styles/custom.css");
//c.DocumentTitle = "Concord"; // doesn't work yet
c.SwaggerEndpoint("/swagger/v1/swagger.json", "DemoService V1");
});
app.UseMvc();
}
private void OnShutdown()
{
CouchbaseConfigManager.Instance.Close();
}
}
}
<file_sep>/DemoService/Data/IDataProcessor.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace DemoService.Data
{
/// <summary>
/// contract for data processing functions
/// </summary>
public interface IDataProcessor
{
/// <summary>
/// authenticates a user
/// </summary>
object AuthenticateUser(string username, string password);
/// <summary>
/// get all portfolios
/// </summary>
object GetPortfolios();
/// <summary>
/// get all portfolios by aggregating account data
/// </summary>
object GetPortfoliosByAggregate();
/// <summary>
/// gets the list of accounts for a given portfolio
/// </summary>
object GetAccountsByPortfolioName(string portfolioName);
/// <summary>
/// gets a list of accounts for a given username
/// </summary>
object GetAccountsByUsername(string username);
}
}
<file_sep>/DemoService/Controllers/AccountController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Newtonsoft.Json;
using DemoService.Data;
using DemoService.Models;
namespace DemoService.Controllers
{
/// <summary>
/// actions on accounts
/// </summary>
[Route("api/[controller]")]
public class AccountController : BaseController
{
/// <summary>
/// default constructor
/// </summary>
/// <param name="dataProcessor">the processors to use when performing actions with data</param>
public AccountController(IDataProcessor dataProcessor)
:base(dataProcessor)
{ }
/// <summary>
/// gets the list of accounts for a given portfolio
/// </summary>
/// <param name="portfolioName">the name of the portfolio for which to pull accounts</param>
/// <returns>Returns a list of <see cref="AccountState"/></returns>
/// <response code="200">Success</response>
/// <response code="400">Something failed</response>
[Produces("application/json")]
[Route("GetAccountsByPortfolioName")]
[HttpGet]
[ProducesResponseType(typeof(ErrorDetails), 400)]
public object GetAccountsByPortfolioName([FromQuery] string portfolioName)
{
if (String.IsNullOrEmpty(portfolioName))
{
return BadRequest(
new ErrorDetails {
Message = "portfolioName cannot be null or empty",
Code = (int)ErrorCodes.InvalidInputParameters}
);
}
object result = null;
try
{
result = Ok(DataProcessor.GetAccountsByPortfolioName(portfolioName));
}
catch (Exception ex)
{
result = BadRequest(
new ErrorDetails {
Message = ex.Message,
Code = (int)ErrorCodes.CouchbaseProcessing}
);
}
return result;
}
/// <summary>
/// gets the list of accounts for a given user id
/// </summary>
/// <param name="username">the name of the user for which to pull accounts</param>
/// <returns>Returns a list of <see cref="AccountState"/></returns>
/// <response code="200">Success</response>
/// <response code="400">Something failed</response>
[Produces("application/json")]
[Route("GetAccountsByUsername")]
[HttpGet]
[ProducesResponseType(typeof(ErrorDetails), 400)]
public object GetAccountsByUsername([FromQuery] string username)
{
if (String.IsNullOrEmpty(username))
{
return BadRequest(
new ErrorDetails
{
Message = "invalid or empty username",
Code = (int)ErrorCodes.InvalidInputParameters
}
);
}
object result = null;
try
{
result = Ok(DataProcessor.GetAccountsByUsername(username));
}
catch (Exception ex)
{
result = BadRequest(
new ErrorDetails
{
Message = ex.Message,
Code = (int)ErrorCodes.CouchbaseProcessing
}
);
}
return result;
}
}
}
<file_sep>/DemoService/Controllers/UserController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using DemoService.Data;
using DemoService.Exceptions;
using DemoService.Models;
namespace DemoService.Controllers
{
/// <summary>
/// actions on a user
/// </summary>
[Route("api/[controller]")]
public class UserController : BaseController
{
/// <summary>
/// default constructor
/// </summary>
/// <param name="dataProcessor">the processors to use when performing actions with data</param>
public UserController(IDataProcessor dataProcessor)
:base(dataProcessor)
{ }
/// <summary>
/// authenticates a user with the given credentials
/// </summary>
/// <remarks>
/// Sample request:
///
/// POST /Authenticate
/// {
/// "username": "admin",
/// "password": "<PASSWORD>"
/// }
///
/// </remarks>
/// <param name="username">the username</param>
/// <param name="password">the password</param>
/// <returns>Returns the user profile</returns>
[Route("Authenticate")]
[HttpPost]
public object Authenticate(string username, string password)
{
if (String.IsNullOrEmpty(username)
|| String.IsNullOrEmpty(password))
{
return BadRequest(
new ErrorDetails
{
Code = (int)ErrorCodes.InvalidInputParameters,
Message = "invalid credentials"
}
);
}
object result = null;
try
{
result = Ok(DataProcessor.AuthenticateUser(username, password));
}
catch (CouchbaseException cex)
{
result = BadRequest(
new ErrorDetails {
Code = (int)ErrorCodes.CouchbaseProcessing,
Message = cex.Message }
);
}
catch (Exception ex)
{
result = BadRequest(
new ErrorDetails
{
Message = $"Something failed: {ex.Message}",
Code = (int)ErrorCodes.Unknown
}
);
}
return result;
}
}
}
<file_sep>/DemoService.Tests/Controllers/AccountControllerTests.cs
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.AspNetCore.Mvc;
using NUnit.Framework;
using Moq;
using DemoService.Controllers;
using DemoService.Data;
using DemoService.Models;
namespace DemoService.Tests.Controllers
{
[TestFixture]
public class AccountControllerTests : BaseTests
{
private readonly string FakeAccountNumber = "8675309";
private AccountState FakeAccount
{
get
{
return new AccountState
{
AccountStatus = AccountStatus.Active,
AsOfDate = DateTime.Now,
CurrentBalance = 100.0M,
AccountNumber = FakeAccountNumber,
PortfolioName = "Portfolio01",
AccountInventory = "Inventory01",
LastPaymentAmount = 100.0M,
LastPaymentDate = DateTime.Now.Subtract(new TimeSpan(5,0,0,0)),
DaysDelinquent = 2
};
}
}
[Test]
public void GetAccountsByPortfolioName_InvalidInput_ReturnsBadRequest()
{
AccountController controller = new AccountController(new CouchbaseProcessor(new CouchbaseDataClient()));
object result = controller.GetAccountsByPortfolioName(null);
Assert.IsInstanceOf(typeof(BadRequestObjectResult), result);
}
[Test]
public void GetAccountsByPortfolioName_EncountersException_ReturnsBadRequest()
{
Mock<IDataProcessor> mock = new Mock<IDataProcessor>();
mock.Setup(m => m.GetAccountsByPortfolioName(It.IsAny<string>())).Throws(new Exception("dogs and cats, living together!"));
AccountController controller = new AccountController(mock.Object);
object result = controller.GetAccountsByPortfolioName("1");
int code = ParseBadRequestForErrorCode(result);
Assert.AreEqual(code, (int)ErrorCodes.CouchbaseProcessing);
}
[Test]
public void GetAccountsByPortfolioName_WithValidInputs_ReturnsAccounts()
{
List<AccountState> list = new List<AccountState> { FakeAccount };
Mock<IDataProcessor> mock = new Mock<IDataProcessor>();
mock.Setup(m => m.GetAccountsByPortfolioName(It.IsAny<string>())).Returns(list as object);
AccountController controller = new AccountController(mock.Object);
object result = controller.GetAccountsByPortfolioName("1");
object accounts = null;
if (result is OkObjectResult)
{
accounts = ((OkObjectResult)result).Value;
}
Assert.AreEqual(accounts, list as object);
}
[Test]
public void GetAccountsByUsername_InvalidInput_ReturnsBadRequest()
{
AccountController controller = new AccountController(new CouchbaseProcessor(new CouchbaseDataClient()));
object result = controller.GetAccountsByUsername(null);
Assert.IsInstanceOf(typeof(BadRequestObjectResult), result);
}
[Test]
public void GetAccountsByUsername_EncountersException_ReturnsBadRequest()
{
Mock<IDataProcessor> mock = new Mock<IDataProcessor>();
mock.Setup(m => m.GetAccountsByUsername(It.IsAny<string>())).Throws(new Exception("i'm you're huckleberry"));
AccountController controller = new AccountController(mock.Object);
object result = controller.GetAccountsByUsername("user1");
int code = ParseBadRequestForErrorCode(result);
Assert.AreEqual(code, (int)ErrorCodes.CouchbaseProcessing);
}
[Test]
public void GetAccountsByUsername_WithValidInputs_ReturnsAccounts()
{
List<AccountState> list = new List<AccountState> { FakeAccount };
Mock<IDataProcessor> mock = new Mock<IDataProcessor>();
mock.Setup(m => m.GetAccountsByUsername(It.IsAny<string>())).Returns(list as object);
AccountController controller = new AccountController(mock.Object);
object result = controller.GetAccountsByUsername("user1");
object accounts = null;
if (result is OkObjectResult)
{
accounts = ((OkObjectResult)result).Value;
}
Assert.AreEqual(accounts, list as object);
}
}
}
<file_sep>/DemoService/Exceptions/CouchbaseException.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace DemoService.Exceptions
{
/// <summary>
/// default exception type for couchbase operations
/// </summary>
public class CouchbaseException : Exception
{
/// <summary>
/// creates exception with the given message and inner exception
/// </summary>
/// <param name="message">the message to include</param>
/// <param name="innerException">the inner exception of the new exception</param>
public CouchbaseException(string message, Exception innerException)
: base(message, innerException)
{ }
/// <summary>
/// creates exception with the given message
/// </summary>
/// <param name="message">the message to include</param>
public CouchbaseException(string message)
: base(message)
{ }
}
}
<file_sep>/DemoService/Configuration/CouchbaseConfigManager.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace DemoService.Configuration
{
/// <summary>
/// contains the singleton for the couchbase config. logic is elsewhere so it can be tested
/// </summary>
public class CouchbaseConfigManager
{
private static readonly CouchbaseConfig _instance = new CouchbaseConfig();
/// <summary>
/// singleton instance
/// </summary>
public static CouchbaseConfig Instance
{
get { return _instance; }
}
}
}
<file_sep>/DemoService/Exceptions/ConfigurationException.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace DemoService.Exceptions
{
/// <summary>
/// exception type for configuration problems
/// </summary>
public class ConfigurationException : ApplicationException
{
/// <summary>
/// creates exception with the given message and inner exception
/// </summary>
/// <param name="message">the message to include</param>
/// <param name="innerException">the inner exception of the new exception</param>
public ConfigurationException(string message, Exception innerException)
: base(message, innerException)
{ }
/// <summary>
/// creates exception with the given message
/// </summary>
/// <param name="message">the message to include</param>
public ConfigurationException(string message)
: base(message)
{ }
}
}
<file_sep>/DemoService/Controllers/PortfolioController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Newtonsoft.Json;
using DemoService.Data;
using DemoService.Models;
namespace DemoService.Controllers
{
/// <summary>
/// actions on portfolios
/// </summary>
[Route("api/[controller]")]
public class PortfolioController : BaseController
{
/// <summary>
/// default constructor
/// </summary>
/// <param name="dataProcessor">the processors to use when performing actions with data</param>
public PortfolioController(IDataProcessor dataProcessor)
:base(dataProcessor)
{ }
/// <summary>
/// get all portfolios
/// </summary>
/// <remarks>
/// Sample request:
///
/// GET /GetPortfolios
/// {
/// }
///
/// </remarks>
/// <returns>Returns a list of portfolios</returns>
/// <response code="200">Success</response>
/// <response code="201">Demonstrating how to show more</response>
/// <response code="400">Something is null</response>
[Produces("application/json")]
[ProducesResponseType(typeof(object), 201)]
[ProducesResponseType(typeof(ErrorDetails), 400)]
[Route("GetPortfolios")]
[HttpGet]
public object GetPortfolios()
{
object result;
try
{
result = Ok(DataProcessor.GetPortfolios());
}
catch (Exception ex)
{
result = BadRequest(
new ErrorDetails {
Message = ex.Message,
Code = (int)ErrorCodes.CouchbaseProcessing}
);
}
return result;
}
/// <summary>
/// get all portfolios by aggregating account data
/// </summary>
/// <remarks>
/// Sample request:
///
/// GET /GetPortfoliosByAggregate
/// {
/// }
///
/// </remarks>
/// <returns>Returns a list of portfolios with data generated from aggregated accounts</returns>
[Produces("application/json")]
[Route("GetPortfoliosByAggregate")]
[HttpGet]
public object GetPortfoliosByAggregate()
{
object result;
try
{
result = Ok(DataProcessor.GetPortfoliosByAggregate());
}
catch (Exception ex)
{
result = BadRequest(
new ErrorDetails
{
Message = ex.Message,
Code = (int)ErrorCodes.CouchbaseProcessing
}
);
}
return result;
}
}
}
<file_sep>/DemoService/Models/PortfolioState.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace DemoService.Models
{
/// <summary>
/// portfolio state info
/// </summary>
public class PortfolioState
{
/// <summary>
/// portfolio number
/// </summary>
public string Number { get; set; }
/// <summary>
/// name of the portfolio
/// </summary>
public string Name { get; set; }
/// <summary>
/// total balance of the portfolio
/// </summary>
public decimal TotalBalance { get; set; }
/// <summary>
/// number of accounts in the portfolio
/// </summary>
public int AccountCount { get; set; }
/// <summary>
/// as of date for this state of the portfolio
/// </summary>
public DateTime AsOfDate { get; set; }
/// <summary>
/// creates a random portfolio state instance
/// </summary>
/// <param name="number">the number of the new instance</param>
public static PortfolioState Create(string number)
{
PortfolioState p = new PortfolioState
{
Number = number,
Name = "Portfolio " + number,
AsOfDate = DateTime.Now,
AccountCount = 0,
TotalBalance = decimal.Zero
};
return p;
}
}
}
|
a8c547c9d12c15e6aee899860bef7aa8b00c3271
|
[
"C#",
"YAML"
] | 29 |
C#
|
thejoshcruz/demoservice
|
86875d57c1dbcee0daed9bb64a7a11966be9f30d
|
4d8822e7ad2f11b7031e02757a20b4c831bc1688
|
refs/heads/master
|
<file_sep>package com.example.corikachu.popularmoviesapp.ui.movies;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.VolleyLog;
import com.android.volley.toolbox.JsonObjectRequest;
import com.example.corikachu.popularmoviesapp.ApplicationController;
import com.example.corikachu.popularmoviesapp.MovieData;
import com.example.corikachu.popularmoviesapp.R;
import com.example.corikachu.popularmoviesapp.ui.details.DetailFragment;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import static com.example.corikachu.popularmoviesapp.utils.MovieAPIConstants.API_BASE_URL;
import static com.example.corikachu.popularmoviesapp.utils.MovieAPIConstants.API_IMAGE_BASE_URL;
import static com.example.corikachu.popularmoviesapp.utils.MovieAPIConstants.POPULARITY;
import static com.example.corikachu.popularmoviesapp.utils.MovieAPIConstants.QUERY_API_KEY;
import static com.example.corikachu.popularmoviesapp.utils.MovieAPIConstants.QUERY_SORT_BY;
import static com.example.corikachu.popularmoviesapp.utils.MovieAPIConstants.THEMOVIEDB_API_KEY;
public class MovieListActivity extends Activity {
private final static String TAG = MovieListActivity.class.getSimpleName();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_movie_list);
if (findViewById(R.id.detailFragment) != null) {
MovieListFragments.adapter.setTwoPane(true);
}
}
}
<file_sep>package com.example.corikachu.popularmoviesapp;
import android.os.Parcel;
import android.os.Parcelable;
/**
* Movie Data
*/
public class MovieData implements Parcelable {
private int id;
private String title;
private String overview;
private double voteAverage;
private String releaseDate;
private String backdropPath;
public MovieData(){
}
public MovieData(Parcel in) {
id = in.readInt();
title = in.readString();
overview = in.readString();
voteAverage = in.readDouble();
releaseDate = in.readString();
backdropPath = in.readString();
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(id);
dest.writeString(title);
dest.writeString(overview);
dest.writeDouble(voteAverage);
dest.writeString(releaseDate);
dest.writeString(backdropPath);
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getOverview() {
return overview;
}
public void setOverview(String overview) {
this.overview = overview;
}
public double getVoteAverage() {
return voteAverage;
}
public void setVoteAverage(double voteAverage) {
this.voteAverage = voteAverage;
}
public String getReleaseDate() {
return releaseDate;
}
public void setReleaseDate(String releaseDate) {
this.releaseDate = releaseDate;
}
public String getBackdropPath() {
return backdropPath;
}
public void setBackdropPath(String backdropPath) {
this.backdropPath = backdropPath;
}
public static final Creator<MovieData> CREATOR = new Creator<MovieData>() {
@Override
public MovieData createFromParcel(Parcel in) {
return new MovieData(in);
}
@Override
public MovieData[] newArray(int size) {
return new MovieData[size];
}
};
}
<file_sep>package com.example.corikachu.popularmoviesapp.ui.details;
import android.app.ActionBar;
import android.app.Fragment;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.RatingBar;
import android.widget.TextView;
import android.widget.Toast;
import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.VolleyLog;
import com.android.volley.toolbox.JsonObjectRequest;
import com.bumptech.glide.Glide;
import com.example.corikachu.popularmoviesapp.ApplicationController;
import com.example.corikachu.popularmoviesapp.MovieData;
import com.example.corikachu.popularmoviesapp.R;
import com.example.corikachu.popularmoviesapp.utils.Favorite;
import org.json.JSONArray;
import org.json.JSONObject;
import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;
import static com.example.corikachu.popularmoviesapp.utils.MovieAPIConstants.API_MOVIE_URL;
import static com.example.corikachu.popularmoviesapp.utils.MovieAPIConstants.HIGHEST_RATED;
import static com.example.corikachu.popularmoviesapp.utils.MovieAPIConstants.POPULARITY;
import static com.example.corikachu.popularmoviesapp.utils.MovieAPIConstants.THEMOVIEDB_API_KEY;
/**
 * Fragment that shows the details of a selected movie: backdrop image, title,
 * rating, overview, release date, first review, trailer button and favorite toggle.
 */
public class DetailFragment extends Fragment {
@InjectView(R.id.detailBackdropImage)
ImageView backdropImage;
@InjectView(R.id.detailTitleTextView)
TextView title;
@InjectView(R.id.detailRatingBar)
RatingBar ratingBar;
@InjectView(R.id.detailReleaseDate)
TextView releaseDate;
@InjectView(R.id.detailOverviewTextView)
TextView overview;
@InjectView(R.id.detailReviewAuthorTextView)
TextView reviewAuthor;
@InjectView(R.id.detailReviewContentTextView)
TextView reviewContent;
@InjectView(R.id.detailVideoButton)
Button videoButton;
@InjectView(R.id.detailFavorite)
Button favorite;
final static String TAG = DetailFragment.class.getSimpleName();
private MovieData data;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_movie_detail, container, false);
ButterKnife.inject(this, rootView);
//Get Data from intent
Intent intent = getActivity().getIntent();
data = intent.getParcelableExtra(getString(R.string.movie_list_key));
Bundle bundle = this.getArguments();
if (bundle != null) {
data = bundle.getParcelable(getString(R.string.movie_list_key));
}
if(data == null){
return inflater.inflate(R.layout.fragment_movie_empty, container, false);
}
//Request Video API
requestVideoAPIQuery(String.valueOf(data.getId()));
//Set contents.
Glide.with(this).load(data.getBackdropPath()).into(backdropImage);
title.setText(data.getTitle());
ratingBar.setRating((float) data.getVoteAverage() / 2);
overview.setText(data.getOverview());
releaseDate.setText(data.getReleaseDate());
//Set actionbar title.
ActionBar actionBar = getActivity().getActionBar();
if(actionBar != null) {
actionBar.setTitle(data.getTitle());
}
favorite.setOnClickListener(new View.OnClickListener(){
@Override
public void onClick(View v) {
Favorite favorite = new Favorite(getActivity().getApplicationContext());
boolean save = favorite.save(String.valueOf(data.getId()));
if(save) {
Toast.makeText(getActivity().getApplicationContext(), "Add Favorite", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(getActivity().getApplicationContext(), "Remove Favorite", Toast.LENGTH_SHORT).show();
}
}
});
return rootView;
}
// Request video and review data for the movie from the API server using the API key.
private void requestVideoAPIQuery(String id) {
String queryUrlVideo = API_MOVIE_URL + id + "/videos?api_key=" + THEMOVIEDB_API_KEY;
JsonObjectRequest jsonObjectRequestVideo = new JsonObjectRequest(Request.Method.GET, queryUrlVideo, null,
new ResponseListenerVideo(), error ->
VolleyLog.d(TAG, "ERROR : " + error.getMessage())
);
String queryUrlReview = API_MOVIE_URL + id + "/reviews?api_key=" + THEMOVIEDB_API_KEY;
JsonObjectRequest jsonObjectRequestReview = new JsonObjectRequest(Request.Method.GET, queryUrlReview, null,
new ResponseListenerReview(), error ->
VolleyLog.d(TAG, "ERROR : " + error.getMessage())
);
ApplicationController.getInstance().addToRequestQueue(jsonObjectRequestVideo);
ApplicationController.getInstance().addToRequestQueue(jsonObjectRequestReview);
}
class ResponseListenerVideo implements Response.Listener<JSONObject> {
@Override
public void onResponse(JSONObject response) {
try {
JSONArray jsonArray = response.getJSONArray("results");
JSONObject jsonObject = jsonArray.getJSONObject(0);
String key = jsonObject.getString("key");
videoButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (key != null) {
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.youtube.com/watch?v=" + key)));
} else {
Toast.makeText(getActivity().getApplicationContext(), "No video", Toast.LENGTH_SHORT).show();
}
}
});
} catch (Exception e) {
e.printStackTrace();
}
}
}
class ResponseListenerReview implements Response.Listener<JSONObject> {
@Override
public void onResponse(JSONObject response) {
try {
JSONArray jsonArray = response.getJSONArray("results");
JSONObject jsonObject = jsonArray.getJSONObject(0);
String author = jsonObject.getString("author");
String content = jsonObject.getString("content");
reviewAuthor.setText(author);
reviewContent.setText(content);
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
<file_sep>package com.example.corikachu.myappportfolio;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
public class MainActivity extends Activity {
Button spotifyStreamerButton;
Button scoresAppButton;
Button libraryapp;
Button builtItBiggerButton;
Button xyzReaderButton;
Button capstoneButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
spotifyStreamerButton = (Button)findViewById(R.id.spotifyStreamerButton);
scoresAppButton = (Button)findViewById(R.id.scoreAppButton);
libraryapp = (Button)findViewById(R.id.libraryAppButton);
builtItBiggerButton = (Button)findViewById(R.id.builtItBiggerButton);
xyzReaderButton = (Button)findViewById(R.id.xyzReaderButton);
capstoneButton = (Button)findViewById(R.id.myOwnAppButton);
spotifyStreamerButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Toast.makeText(getApplicationContext(), "This button will launch SPOTIFY STREAMER", Toast.LENGTH_SHORT).show();
}
});
scoresAppButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Toast.makeText(getApplicationContext(), "This button will launch SCORES App", Toast.LENGTH_SHORT).show();
}
});
libraryapp.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Toast.makeText(getApplicationContext(), "This button will launch Library App", Toast.LENGTH_SHORT).show();
}
});
builtItBiggerButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Toast.makeText(getApplicationContext(), "This button will launch BUILT IT BIGGER", Toast.LENGTH_SHORT).show();
}
});
xyzReaderButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Toast.makeText(getApplicationContext(), "This button will launch XYZ READER", Toast.LENGTH_SHORT).show();
}
});
capstoneButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Toast.makeText(getApplicationContext(), "This button will launch my capstone app", Toast.LENGTH_SHORT).show();
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
}
<file_sep>package com.example.corikachu.popularmoviesapp.ui.details;
import android.app.Activity;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.transition.Explode;
import android.transition.Fade;
import android.transition.Slide;
import android.transition.Transition;
import android.view.Window;
import com.example.corikachu.popularmoviesapp.R;
/**
* Movie Detail Activity
*/
public class DetailActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_movie_detail);
}
}
|
3828111f405fe5d628fefd4511afa7ac43fb4eed
|
[
"Java"
] | 5 |
Java
|
Corikachu/UdacityAndroidNanodegree
|
d9d5c6e8e2af7166a3aeb23ae46dfb73d8a91fb4
|
163971cb2372cc32c29679b501f53d5a7094ab8d
|
refs/heads/master
|
<file_sep>git config --global user.email "<EMAIL>"
git config --global user.name "fengjilong123456"
git config --global push.default simple
git add .
git commit -m "$1"
git push
|
8ef8971982a66ece37f68d07237feac8fbbb737e
|
[
"Shell"
] | 1 |
Shell
|
fengjilong123456/fengjilong321.github.io
|
8b0a305963bdf6ed8c6e28e63b530db54ecf86a1
|
f2d62ba8681318a1a5da76b3eb263e9dcb37ba28
|
refs/heads/master
|
<file_sep>import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { CoursesComponent } from './courses/courses.component';
import { AuthorsComponent } from './authors/authors.component';
import {ServerComponent} from './server/server.component';
import { ServersComponent } from './servers/servers.component';
import { WarningComponent } from './warning/warning.component';
import {SuccessComponent} from './success/success.component';
import {FormsModule} from '@angular/forms';
import { PasswordComponent } from './password/password.component';
import {HeaderComponent} from './header/header.component';
import { RecipesComponent } from './components/recipes/recipes.component';
import { RecipeListComponent } from './components/recipes/recipe-list/recipe-list.component';
import { RecipeDetailComponent } from './components/recipes/recipe-detail/recipe-detail.component';
import { RecipeItemComponent } from './components/recipes/recipe-list/recipe-item/recipe-item.component';
import { ShoppingListComponent } from './components/shopping-list/shopping-list.component';
import { ShoppingEditComponent } from './components/shopping-list/shopping-edit/shopping-edit.component';
@NgModule({
declarations: [
AppComponent,
CoursesComponent,
AuthorsComponent,
ServerComponent,
ServersComponent,
WarningComponent,
SuccessComponent,
PasswordComponent,
HeaderComponent,
RecipesComponent,
RecipeListComponent,
RecipeDetailComponent,
RecipeItemComponent,
ShoppingListComponent,
ShoppingEditComponent
],
imports: [
BrowserModule,
AppRoutingModule,
FormsModule
],
providers: [],
bootstrap: [AppComponent]
})
export class AppModule { }
<file_sep>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-password',
templateUrl: './password.component.html',
styleUrls: ['./password.component.scss']
})
export class PasswordComponent implements OnInit {
hidden: boolean = false;
hiddenMoves = [];
colorful = [];
constructor() { }
ngOnInit() {
}
makeHidden() {
this.hidden = !this.hidden;
this.hiddenMoves.push(new Date());
}
}
<file_sep>import {Component, Injectable, OnInit} from '@angular/core';
import {CoursesService} from './courses.service';
import {AutoGrowDirective} from '../auto-grow.directive';
@Component({
selector: 'app-courses',
templateUrl: './courses.component.html',
styleUrls: ['./courses.component.scss'],
providers: [ AutoGrowDirective ],
})
export class CoursesComponent implements OnInit {
title = 'Naber iyilik senden';
courses;
constructor(private coursesService: CoursesService) {
this.courses = coursesService.getCourses();
}
ngOnInit() {
}
}
|
02c16a512a5aed79acb133c2b9a77cb3924950d5
|
[
"TypeScript"
] | 3 |
TypeScript
|
ugurmuslim/angular-lessons
|
2acc82778c5e424a3d0e7bb9002855bf1c02b784
|
3845619b443f7457208143b46d25576f8c610bd9
|
refs/heads/master
|
<repo_name>morgom94/erickmorales<file_sep>/practica/src/main/java/com/erick/morales/calculadora.java
package com.erick.morales;
public class calculadora
{
int num1;
int num2;
public calculadora(int n1, int n2)
{
this.num1=n1;
this.num2=n2;
}
public static int sumar(int n, int m){
return n+m;
}
public static int restar(int n, int m){
return n-m;
}
public static int dividir(int n, int m){
return n / m;
}
public static int multiplicar(int n, int m){
return n*m;
}
public boolean esPar(int n){
return n % 2 == 0;
}
public boolean esImpar(int n){
return n % 2 != 0;
}
}
|
1ccdcf0c1cb7352e95b7b8c332f0d351c4fa6230
|
[
"Java"
] | 1 |
Java
|
morgom94/erickmorales
|
78d99aa820e17be2676ae87afec301d3303a05c1
|
51c49d13b9f690d3cad95e30e8ad4ec25de6c88d
|
refs/heads/master
|
<file_sep># Predis
## About Redis
* Official website: http://redis.io/
* Two client extensions are recommended for PHP; the one used here is Predis: http://redis.io/clients
## About Predis
* Official Predis wiki: https://github.com/nrk/predis
* Installing Predis with Composer: https://packagist.org/packages/predis/predis
## Features implemented with Predis
* A few simple features, implemented in the style of the examples directory
* Use the list data type to implement a message queue
If there are multiple queues, BRPOP can fetch their contents in turn;
since keys are consumed in the order the queue names are listed, this behaves like a simple priority queue
* Use the sorted set data type to implement a priority queue
Entries can be fetched by score, but how to delete the corresponding entries afterwards is an issue that deserves attention
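The sketch below only illustrates the two ideas above. It is written with the Python client (redis-py) purely for illustration, since this repository's own examples use the PHP Predis client (see the example scripts that follow); it assumes a Redis server on localhost:6379 and made-up queue/job names.

```python
import redis  # redis-py, used here only to illustrate the idea

r = redis.Redis(host="localhost", port=6379)

# 1) a list as a message queue; BRPOP over several keys acts like a simple
#    priority queue, because the keys listed first are drained first
r.lpush("queue_list_high", "urgent-job")
r.lpush("queue_list", "normal-job")
key, value = r.brpop(["queue_list_high", "queue_list"], timeout=1)
print(key, value)  # the high-priority queue is served first

# 2) a sorted set as a priority queue; remember to remove what you consumed
r.zadd("priority_queue_list", {"job-1": 5, "job-2": 9})
top = r.zrevrangebyscore("priority_queue_list", "+inf", "-inf", start=0, num=1)
r.zrem("priority_queue_list", *top)
```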
<file_sep><?php
/*
* This file is part of the Predis package.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
require __DIR__.'/shared.php';
// Create a client and disable r/w timeout on the socket
$client = new Predis\Client($single_server + array('read_write_timeout' => 0));
// use the list data type to implement a queue
$queue_list = 'queue_list';
for($i = 0 ; $i < 10 ; $i ++){
$value = rand(100, 999);
$lpush = $client->lpush($queue_list, $value);
}
echo $queue_list.PHP_EOL;
$all_queue_list = $client->lrange($queue_list, 0 , -1);
var_dump($all_queue_list);
$queue_list_high = 'queue_list_high';
for($i = 0 ; $i < 5 ; $i ++){
$value = rand(1200, 1999);
$lpush = $client->lpush($queue_list_high, $value);
}
echo $queue_list_high.PHP_EOL;
$all_queue_list = $client->lrange($queue_list_high, 0 , -1);
var_dump($all_queue_list);
echo "BRPOP 依次获取队列数据".PHP_EOL;
for($j = 0 ; $j < 25 ; $j ++){
$brpop = $client->brpop([$queue_list_high, $queue_list], 1);
var_dump($brpop);
}
$client->del($queue_list);
$client->del($queue_list_high);
// Say goodbye :-)
$version = redis_version($client->info());
echo "Goodbye from Redis $version!", PHP_EOL;
<file_sep><?php
/*
* This file is part of the Predis package.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
require __DIR__.'/shared.php';
// Create a client and disable r/w timeout on the socket
$client = new Predis\Client($single_server + array('read_write_timeout' => 0));
// use the sorted set data type to implement a priority queue
$priority_queue_list = 'priority_queue_list';
for($i = 0 ; $i < 20 ; $i ++) {
$weight = rand(1, 10);
$value = rand(100, 999);
$unique_id = ceil(microtime(true)*1000).'_'.$i.'_'.$value;
var_dump($unique_id);
$action = $client->zadd($priority_queue_list, $weight, $unique_id);
}
$all_values = $client->zrange($priority_queue_list, 0, -1, ['withscores' => true]);
echo $priority_queue_list.PHP_EOL;
var_dump($all_values);
for($i = 0 ; $i < 10 ; $i ++) {
$limit = $client->zRevRangeByScore($priority_queue_list, '+inf', '-inf', array('withscores'=>false, 'limit'=>array(0,3)));
echo 'limit = '.$i.PHP_EOL;
var_dump($limit);
foreach($limit as $value){
$pid = pcntl_fork();
if ($pid == -1) {
die('could not fork');
} else if ($pid) {
// we are the parent
pcntl_wait($status); //Protect against Zombie children
echo 'parent pid='.$pid. " child status $status ".PHP_EOL;
} else {
// we are the child
var_dump($value);
$del = $client->zrem($priority_queue_list, $value);
exit;
}
}
}
$client->del($priority_queue_list);
// Say goodbye :-)
$version = redis_version($client->info());
echo "Goodbye from Redis $version!", PHP_EOL;
|
0feccc942d050fda8c9dc90a08b8d37d969c22f2
|
[
"Markdown",
"PHP"
] | 3 |
Markdown
|
yujipeng/predis
|
fe10cf1c9060afa3bebcf60fb3e40e14ec0f3fb1
|
894582a6a4d77eef22d27319e6efed48d0a50368
|
refs/heads/master
|
<repo_name>sadik491/Pokey-Ball<file_sep>/Pokey Ball/Assets/finish.cs
using UnityEngine;
public class finish : MonoBehaviour
{
public void OnCollisionEnter(Collision collision)
{
UiManager.instence.GameOver();
}
}
<file_sep>/Pokey Ball/Assets/FinishLevel.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class FinishLevel : MonoBehaviour
{
public void OnTriggerExit(Collider other)
{
UiManager.instence.nextLvl();
}
}
<file_sep>/Pokey Ball/Assets/GameManager.cs
using UnityEngine;
public class GameManager : MonoBehaviour
{
public static GameManager instence;
void Awake()
{
if (instence == null)
{
instence = this;
}
}
public void gameStart()
{
}
public void gameOver()
{
}
public void restart()
{
UiManager.instence.GameStart();
}
}
<file_sep>/Pokey Ball/Assets/Scripts/FollowCam.cs
using UnityEngine;
public class FollowCam : MonoBehaviour
{
public GameObject ball;
public Vector3 offset;
void Update()
{
transform.position = ball.transform.position + offset;
}
}
<file_sep>/Pokey Ball/Assets/Scripts/BallControl.cs
using UnityEngine;
public class BallControl : MonoBehaviour
{
public Rigidbody rb;
public float force;
public bool gameStart;
public Vector2 startTouchPos;
public Vector2 endTouchPos;
public bool triggerEnable;
void Start()
{
gameStart = false;
triggerEnable = false;
}
void Update()
{
if (Input.touchCount > 0)
{
Touch touch = Input.GetTouch(0);
if (touch.phase == TouchPhase.Began)
{
startTouchPos = Camera.main.ScreenToViewportPoint(touch.position);
}
else if (touch.phase == TouchPhase.Ended)
{
endTouchPos = Camera.main.ScreenToViewportPoint(touch.position);
}
if (touch.phase == TouchPhase.Ended && startTouchPos.y > endTouchPos.y && triggerEnable == false)
{
rb.AddForce(0, force * Time.deltaTime, 0);
rb.useGravity = true;
gameStart = true;
}
//Vector2 dis = startPos - endPos;
//Debug.Log(dis.ToString());
if (touch.phase == TouchPhase.Stationary && gameStart == true && triggerEnable == false)
{
rb.velocity = new Vector3(0, 0, 0);
rb.useGravity = false;
}
}
}
public void OnTriggerEnter(Collider other)
{
rb.useGravity = true;
triggerEnable = true;
}
public void OnTriggerExit(Collider other)
{
triggerEnable = false;
}
}
<file_sep>/Pokey Ball/Assets/UiManager.cs
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
public class UiManager : MonoBehaviour
{
public static UiManager instence;
public GameObject lvlClear;
public GameObject gameOver;
public BallControl ballControl;
public GameObject score;
void Awake()
{
if (instence == null)
{
instence = this;
}
}
public void GameStart()
{
SceneManager.LoadScene(SceneManager.GetActiveScene().buildIndex );
}
public void GamePlayAgain()
{
SceneManager.LoadScene(SceneManager.GetActiveScene().buildIndex - 3);
}
public void newlvl()
{
lvlClear.SetActive(false);
}
public void nextLvl()
{
lvlClear.SetActive(true);
Invoke("lvlLoadtym", 1f);
}
public void GameOver()
{
gameOver.SetActive(true);
ballControl.enabled = false;
score.SetActive(false);
}
public void lvlLoadtym()
{
SceneManager.LoadScene(SceneManager.GetActiveScene().buildIndex + 1);
}
}
|
114c991598ad549e97d09d6b8ef0cfb30ec81239
|
[
"C#"
] | 6 |
C#
|
sadik491/Pokey-Ball
|
8562d0fdbb169f2c99122a4eb2720765212392b3
|
acbd79831377ad08ae7925a63c37bf3cc1f52ec0
|
refs/heads/main
|
<repo_name>kumimochi/Weather-App<file_sep>/README.md
# Weather App Project
Weather application with icons from https://openweathermap.org
<file_sep>/app.js
const api = {
key: '<KEY>',
};
//input
const search = document.querySelector('#search');
search.addEventListener('keypress', setCity);
//getting input value
function setCity(e) {
if (e.keyCode === 13) {
getWeather(search.value);
console.log(search.value);
search.value = '';
}
}
//getting the input value when the search icon is clicked
const searchIcon = document
.querySelector('.fa-search')
.addEventListener('click', () => {
getWeather(search.value);
console.log(search.value);
search.value = '';
});
//getting data from the api
function getWeather(city) {
fetch(
`https://api.openweathermap.org/data/2.5/weather?q=${city}&units=metric&id=524901&appid=${api.key}`
)
.then((data) => {
return data.json();
})
.then(displayWeather);
}
function displayWeather(data) {
console.log(data);
//displays city and country
let city = document.querySelector('#city');
city.innerText = `${data.name},`;
let country = document.querySelector('#country');
country.innerText = `${data.sys.country}`;
// let location = document.querySelector("#city #country");
// location.innerText = `${data.name}, ${data.sys.country}`;
//displays the current temperature
let temp = document.querySelector('#current-temp');
temp.innerHTML = `${Math.floor(data.main.temp)}°c`;
//displays the minimum temp and maximum temp
let hilow = document.querySelector('.hi-low');
hilow.innerText = `${Math.round(data.main.temp_min)}°c / ${Math.round(
data.main.temp_max
)}°c`;
//displays the weather description
let weather = document.querySelector('#weather');
weather.innerText = `${data.weather[0].description}`;
//displays the humidity
let humidity = document.querySelector('#humidity');
humidity.innerText = `humidity: ${data.main.humidity}%`;
//displays the wind speed
let wind = document.querySelector('#wind-speed');
wind.innerText = `wind speed: ${data.wind.speed}km/h`;
//displays and updates the icon based on the weather description
let icon = document.querySelector('.icon');
icon.src = `https://openweathermap.org/img/wn/${data.weather[0].icon}@2x.png`;
}
|
073b97ba693c34a553b079fa6d7a892053f5a273
|
[
"Markdown",
"JavaScript"
] | 2 |
Markdown
|
kumimochi/Weather-App
|
dbfa9cd1886ca92c6515089d27eb6a26d415c4d3
|
c98befbe92c5334d825890b80c5dba5482de9d6e
|
refs/heads/master
|
<file_sep>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.home, name='home'),
url(r'^result', views.result, name='result'),
url(r'^intro', views.intro, name='intro'),
]
<file_sep>from django.db import models
# Create your models here.
class input_beer(models.Model):
first = models.CharField(max_length=20)
second = models.CharField(max_length=20, null=True)
third = models.CharField(max_length=20, null=True)
class Meta:
db_table = 'recommand_input_beer'
class Beer(models.Model):
feel = models.TextField(blank=True, null=True)
look = models.TextField(blank=True, null=True)
name = models.TextField(blank=True, null=True)
smell = models.TextField(blank=True, null=True)
taste = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = 'beer'
<file_sep>from django.shortcuts import render, redirect
from .models import input_beer, Beer
from .forms import beerForm
from django_pandas.io import read_frame
import pandas as pd
import numpy as np
import operator
def recommand(*args, data):
"""
:param args: names of the beers the user selected (one to three beer names)
:param data: DataFrame of beers with per-feature review keyword count dictionaries
:return: (list of [beer name, beer id] recommendations, success flag)
"""
# Pre-Processing
for col in data.columns:
if col == 'name' or col == 'id':
continue
data[col] = data[col].apply(lambda x: eval(x))
# lists per feature (feel/look/taste/smell)
feel = []
look = []
taste = []
smell = []
# each feature keyword is weighted by (its review count / total count for that beer)
# the weights of all features are accumulated in a single dictionary
# key = keyword name, value = accumulated weight
new_dict = {}
for arg in args:
if arg not in list(data.name):
return ["존재하지 않는 맥주입니다. 다시 입력해주세요."], False
for feature in ['feel', 'look', 'taste', 'smell']:
data_f = data[data['name'] == arg][feature].values[0]
for key in data_f.keys():
if key in new_dict.keys():
new_dict[key] = new_dict[key] + (data_f[key] / sum(data_f.values()))
else:
new_dict[key] = (data_f[key] / sum(data_f.values()))
result_dict = {}
for j in range(len(data)):
value = 0
for feature in ['feel', 'look', 'taste', 'smell']:
data_t = data.loc[j][feature]
name = data.loc[j]['name']
for key in data_t:
if key in new_dict.keys():
value += new_dict[key] * data_t[key] / sum(data_t.values())
result_dict[name] = value
# pick the highest-scoring beers (output 3)
a_list = sorted(result_dict.items(), key=operator.itemgetter(1), reverse=True)
result = []
cnt = 0
while cnt != 3:
beer = a_list.pop(0)[0]
if beer not in args:
cnt += 1
result.append([beer,str(int(data[data['name'] == beer].id))])
return result, True
def home(request):
if request.method == 'POST':
form = beerForm(request.POST)
if form.is_valid():
form.save()
return redirect('/result')
else:
form = beerForm()
return render(request, 'user/home.html', {'form': form})
def result(request):
beerquery = input_beer.objects.order_by('-id')
dataset = Beer.objects.all()
beerdf = read_frame(dataset)
recommand_result, chk = recommand(beerquery[0].first, beerquery[0].second, beerquery[0].third, data=beerdf)
return render(request, 'user/result.html', {'result': recommand_result, 'check': chk})
def intro(request):
return render(request, 'user/intro.html')
<file_sep># Generated by Django 2.2.4 on 2019-10-15 11:19
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Beer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('feel', models.TextField(blank=True, null=True)),
('look', models.TextField(blank=True, null=True)),
('name', models.TextField(blank=True, null=True)),
('smell', models.TextField(blank=True, null=True)),
('taste', models.TextField(blank=True, null=True)),
],
options={
'db_table': 'beer',
'managed': False,
},
),
migrations.CreateModel(
name='input_beer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first', models.CharField(max_length=20)),
('second', models.CharField(max_length=20, null=True)),
('third', models.CharField(max_length=20, null=True)),
],
options={
'db_table': 'recommand_input_beer',
},
),
]
<file_sep>from django import forms
from .models import input_beer
class beerForm(forms.ModelForm):
class Meta:
model = input_beer
fields = ['first', 'second', 'third']<file_sep># beer_web
## Overview
This is a review-based beer recommendation algorithm implemented with Django.
The reviews were scraped from https://www.beeradvocate.com/, preprocessed to fit the algorithm, and a beer recommendation model was designed on top of the PageRank algorithm.
This repository holds the Django source code for the recommendation web page and the team introduction page that use the algorithm; the preprocessing and the recommendation algorithm itself can be found through the GitHub link below.
https://github.com/pakupoko/NHE - team project source code (forked)
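As a rough illustration of the scoring idea only (the real implementation is the recommand() function in views.py above, which works on per-feature review keyword counts), the beers the user likes are turned into keyword weights and every other beer is then scored against those weights. The beer names and keyword counts below are made up:

```python
# Made-up beers and keyword counts, just to show the weighting scheme.
reviews = {
    "BeerA": {"hoppy": 8, "citrus": 2},
    "BeerB": {"hoppy": 1, "malty": 9},
    "BeerC": {"citrus": 5, "malty": 5},
}

def build_weights(liked):
    """Each keyword is weighted by (its count / total count) per liked beer."""
    weights = {}
    for beer in liked:
        counts = reviews[beer]
        total = sum(counts.values())
        for keyword, count in counts.items():
            weights[keyword] = weights.get(keyword, 0) + count / total
    return weights

def score(beer, weights):
    """Score a candidate beer by its keyword frequencies against the weights."""
    counts = reviews[beer]
    total = sum(counts.values())
    return sum(weights.get(k, 0) * c / total for k, c in counts.items())

w = build_weights(["BeerA"])
ranking = sorted((b for b in reviews if b != "BeerA"),
                 key=lambda b: score(b, w), reverse=True)
print(ranking)  # candidates most similar to BeerA first
```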
## Running
The database has to be configured in myweb/config/settings.py before this will run.
You also need to load the data into a local DB and additionally create a table for logging the user inputs.
The dataset is kept as CSV files in the beer_data folder of this git repository.
A few libraries have to be installed: django, pandas, and django-pandas. The latest versions should work without problems, but if anything breaks, let me know and I will look into it; if any additional libraries turn out to be required, this README will be updated.
Start the server with the "$ python manage.py runserver" command, open the generated address, and the initial screen below appears.

Type the beers you like into the three input boxes on the page.
Note that the selectable beers are mostly craft beers that are famous overseas, so the beers we know well are not in the list, and an error occurs if a name does not match exactly. This will be improved in a later update.
You can look the beer names up in the DB and enter them from there.
When you enter three beers, three similar beers are printed with their names and pictures.

The navigation bar at the top leads to a page with short introductions of the team members.

|
501ff1d87f8861887b10d09f779059740c31d313
|
[
"Markdown",
"Python"
] | 6 |
Python
|
Daikoku1/beer_web
|
1f800934387c242eb20bbde2fe9e2ac9e2a0fa96
|
ffe7e11e172828afe6a11ccf61589819fe1e1855
|
refs/heads/master
|
<repo_name>ahzavala/Aerolinea17<file_sep>/src/java/com/aerolinea/control/ControlUsuario.java
package com.aerolinea.control;
import com.aerolinea.dao.UsuarioDao;
import com.aerolinea.dao.UsuarioDaoImpl;
import com.aerolinea.entidad.Pais;
import com.aerolinea.entidad.Rol;
import com.aerolinea.entidad.Usuario;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
@Controller
public class ControlUsuario {
private UsuarioDaoImpl dao;
@Autowired
public void setDao(UsuarioDaoImpl dao) {
this.dao = dao;
}
@RequestMapping("/login")
public String logins(@RequestParam("txtUsuario")String usuario,
@RequestParam("txtClave") String clave, HttpServletRequest req){
Usuario u=new Usuario();
u.setIdusuario(usuario);
u.setClave(clave);
Usuario usuarioLogueado=dao.validarUsuario(u);
if (usuarioLogueado!=null){
req.getSession().setAttribute("usuario", usuarioLogueado.getIdusuario());
req.getSession().setAttribute("idrol", usuarioLogueado.getRol().getIdrol());
req.getSession().setAttribute("correo", usuarioLogueado.getEmail());
req.getSession().setAttribute("nombre", usuarioLogueado.getNombres()+" "+usuarioLogueado.getApellidos());
//req.getSession().setMaxInactiveInterval(10); // 10 seconds
return "redirect:/principal";
}else
return "redirect:/home?error=1";
}
@RequestMapping(value="/usuarios", method=GET)
public ModelAndView ListaUsuarios(){
ModelAndView mv = new ModelAndView("usuario");
String msg="Listado de usuarios";
try {
List<Usuario> lista = dao.consultarUsuarios();
List<Usuario> lista2 = dao.consultarUsuarios_backup();
mv.addObject("usuarios", lista);
mv.addObject("usuarios2", lista2);
} catch (Exception e) {
System.out.println(e.getMessage());
}
mv.addObject("mensaje", msg);
return mv;
}
@RequestMapping(value="/registrar", method=GET)
public String mostrarFormRegistro
(
@RequestParam(value="id", required = false)String id,
Map<String, Object> model
){
Usuario user;
if (id==null)
user = new Usuario();
else
user = dao.getUsuario(id);
model.put("userForm", user);
try {
List<Pais> p = dao.getPaises();
List<Rol> r = dao.getRoles();
model.put("paises", p);
model.put("roles", r);
} catch (Exception e) {
System.out.println(e.getMessage());
}
return "registrarse";
}
@RequestMapping(value="/addUsuario", method = RequestMethod.POST)
public String addUsuario(@Valid @ModelAttribute("userForm")Usuario u,
BindingResult result){
if (result.hasErrors())
return "registrarse";
try {
u.setClave(UsuarioDaoImpl.sha1(u.getClave()));
dao.guardarUsuario(u);
} catch (Exception e) {
System.out.println(e.getMessage());
}
return "redirect:/usuarios";
}
@RequestMapping(value="/editar", method=GET)
public String editar(@RequestParam("id")String id, Map<String, Object> model){
Usuario userForm = dao.getUsuario(id);
model.put("userForm", userForm);
try {
List<Pais> p = dao.getPaises();
List<Rol> r = dao.getRoles();
model.put("paises", p);
model.put("roles", r);
} catch (Exception e) {
System.out.println(e.getMessage());
}
return "registrarse";
}
}
|
25da0d0c11ffcb9732e5a8ed7db123b02237d9c6
|
[
"Java"
] | 1 |
Java
|
ahzavala/Aerolinea17
|
735f3118a13fa206efd49dd8b808a7bc8fcc1fa4
|
249d5d577087a7a99934e54c94f59dcdcce3df9c
|
refs/heads/master
|
<repo_name>dgswAlgorithm/KimDongGyun<file_sep>/lecture0102막대자르기드럼통/lecture0102막대자르기.c
#include<stdio.h>
#define MAX(X,Y) ((X)>(Y) ? (X):(Y))
#define N 50
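// Rod cutting DP: p[j] is the price of a piece of length j (prices above length 10
// are left at 0 by the initializer), and r[i] is the best revenue for a rod of
// length i, computed bottom-up as r[i] = max over 0 <= j <= i of (r[i-j] + p[j]).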
int max(int a, int b)
{
if (a > b)
return a;
return b;
}
int main()
{
int p[51] = { 0,1,5,8,9,10,17,17,20,24,30, };
int r[N + 1] = { 0, };
r[0] = p[0];
r[1] = p[1];
for (int i = 1; i <= N; i++)
{
int temp = 0;
for (int j = 0; j <=i; j++)
{
temp = MAX(temp, r[i - j] + p[j]);
}
r[i] = temp;
}
//printf("최대이익 : %d\n", r[N]);
for (int i = 1; i <= N; i++)
{
printf("길이 %3d : -> 최대이익 : %d\n", i, r[i]);
}
return 0;
}<file_sep>/lecture0102막대자르기드럼통/lecture0102막대자르기.cpp
#include<stdio.h>
#define MAX(X,Y) ((X)>(Y) ? (X):(Y))
#define N 7
int max(int a, int b)
{
if (a > b)
return a;
return b;
}
int main()
{
int p[51] = { 0,1,5,8,9,10,17,17,20,24,30, };
int r[N + 1] = { 0, };
r[0] = p[0];
r[1] = p[1];
for (int i = 1; i <= N; i++)
{
int temp = 0;
for (int j = 0; j <=i; j++)
{
temp = MAX(temp, r[i - j] + p[j]);
}
r[i] = temp;
}
//printf("최대이익 : %d\n", r[N]);
for (int i = 1; i <= N; i++)
{
printf("길이 %d : -> 최대이익 : %d\n",i, r[i]);
}
return 0;
}<file_sep>/lecture0100DP피보나치/lecture0100DP피보나치.c
//Store the answers found for small subproblems in an array
//and use them to solve the larger problem.
//Memoization
//Fibonacci sequence
#include<stdio.h>
int d[100] = { 0, };
int dp(int x)
{
if (x == 1) return 1;
if (x == 2) return 1;
if (d[x] != 0) return d[x];
return d[x]= dp(x - 1) + dp(x - 2);
}
int main()
{
int n = 5;
printf("%d != %d \n", dp(n));
}<file_sep>/사전테스트_암호/사전테스트_암호.c
//◦ Encrypt the input characters as follows.
//- Result 1 : input character ASCII + 2
//- Result 2 : input character (ASCII * 7) % 80 + 48
//◦ Input conditions
//- The input is 1 to 20 alphabetic characters.
//- The input contains no spaces.
//◦ Output conditions
//- Print both Result 1 and Result 2 as shown below.
//
//[Sample input/output]
//Input : TEST
// Result 1 : VGUV
// Result 2 : L3EL
//
// Input : Hello_world ^ ^;
// Result 1: Jgnnqayqtnf’‘ =
// Result 2 : HsTTiIQi~TIBB =
#include <stdio.h>
int main()
{
char pw[50];
int i;
printf("암호를 입력하시오. : ");
//scanf_s("%s", pw); //scanf와 gets 의 차이점을 말해보세요.
gets_s(pw,50);
printf("결과 1 : ");
for (i = 0; pw[i] != '\0'; i++)
printf("%c", pw[i] + 2);
printf("\n결과 2 : ");
for (i = 0; pw[i] != '\0'; i++)
printf("%c", (pw[i] * 7) % 80 + 48);
return 0;
}<file_sep>/lecture0100Quicksort/lecture0100QuicksortCall.cpp
#include<stdio.h>
#include<stdlib.h>
int test_bit[1000] = { 0 };
int bit_up(int index);
int make_word(int len, char* space); // build a word of the given length
int qcompare(const void* first, const void* second); // descending-order comparator
int main() {
int i, j;
int isfailed = 0;
int input;
int* input_arr, * sorted_arr;
char** dictionary;
scanf_s("%d", &input);
input_arr = (int*)malloc(input * sizeof(int)); // array holding the lengths in input order
sorted_arr = (int*)malloc(input * sizeof(int)); // array holding the lengths sorted in descending order
dictionary = (char**)malloc(input * sizeof(char*)); // array holding the generated words
for (i = 0; i < input; i++) {
scanf_s("%d", &(input_arr[i]));
sorted_arr[i] = input_arr[i];
}
qsort(sorted_arr, input, sizeof(int), qcompare);
dictionary[0] = (char*)malloc((sorted_arr[0] + 1) * sizeof(char)); // the longest word is handled separately so it can be filled entirely with 0s
make_word(sorted_arr[0] + 1, dictionary[0]); // fill it with 000000...
dictionary[0][sorted_arr[0]] = '\0';
for (i = 1; i < input; i++) { // for all but the longest word, keep counting the bit pattern up by 1
dictionary[i] = (char*)malloc((sorted_arr[i] + 1) * sizeof(char));
dictionary[i][sorted_arr[i]] = '\0';
if (make_word(sorted_arr[i], dictionary[i])) { // a return value of 1 means bit overflow, treated as failure
isfailed = 1;
// no break here, to keep the free() cleanup at the bottom simple
}
}
if (isfailed) { // bit overflow: failure
printf("-1");
}
else {
printf("1\n");
for (i = 0; i < input; i++) {
for (j = 0; j < input; j++) {
if (input_arr[i] == sorted_arr[j]) { // sorted_arr doubles as a visited array
printf("%s\n", dictionary[j]);
sorted_arr[j] = 0;
break;
}
}
}
}
for (i = 0; i < input; i++) {
free(dictionary[i]);
}
free(dictionary);
free(sorted_arr);
free(input_arr);
return 0;
}
int bit_up(int index) {
if (test_bit[index] == 0) {
test_bit[index] = 1;
return 0;
}
else {
if (index == 0) { // bit overflow == fail
return 1;
}
else {
test_bit[index] = 0;
bit_up(index - 1);
return 0;
}
}
}
int make_word(int len, char* space) {
int i;
if (bit_up(len - 1)) { // bit overflow == fail
return 1;
}
else {
for (i = 0; i < len; i++) {
space[i] = '0' + test_bit[i];
}
return 0;
}
}
int qcompare(const void* first, const void* second) { // descending-order comparator for qsort
if (*(int*)first < *(int*)second) {
return 1;
}
else if (*(int*)first > *(int*)second) {
return -1;
}
else {
return 0;
}
}<file_sep>/quiz_preTest/사전테스트_벽화마을.c
//Mural village
//◦ The car is 200cm wide.
//◦ Read the widths of three alleys.
//(e.g.: 200 170 180)
//◦ If the car passes through all three alleys safely, print "Passed." on the screen; otherwise print "A collision occurred at 170." (with the width where it collided).
//◦ If the car collides in the first alley, it cannot pass the later alleys.
#include<stdio.h>
int main()
{
int a, b, c;
printf("세 개의 골목길 너비를 입력하세요:");
scanf_s("%d %d %d", &a, &b, &c);
if (a < 200)
printf("충돌 %d", a);
else if (b < 200)
printf("충돌 %d", b);
else if (c < 200)
printf("충돌 %d", c);
else
printf("통과");
return 0;
}<file_sep>/lecture0204classTV/lecture0204classTV.cpp
#include <iostream>
#include <cstring>
using namespace std;
class Tv {
private:
int channel;
int volume;
public:
Tv() { // when the TV is turned on
cout << "TV On!" << endl;
init(0, 3);
}
void init(int c, int v) { // initialize the channel to 0 and the volume to 3
channel = c;
volume = v;
}
void channelUp() { // channels run 0~100; going up from channel 100 wraps around to channel 0
cout << "Channel Up!" << endl << endl;
channel++;
channel %= 101;
}
void channelDown() { // going down from channel 0 wraps around to channel 100
cout << "Channel Down!" << endl << endl;
if (channel == 0)
channel = 100;
else
channel--;
}
void volumeUp() { // volume ranges 0~10; it does not go above 10
cout << "Volume Up!" << endl << endl;
if (volume < 10)
volume++;
}
void volumeDown() { // the volume does not go below 0
cout << "Volume Down!" << endl << endl;
if (volume > 0)
volume--;
}
void getChannel(int c) { // jump to the channel given as a number
if (c > 0 && c < 101) {
channel = c;
cout << "채널 " << c << "로 이동했습니다." << endl << endl;
}
else {
cout << "잘못된 채널 입력" << endl;
}
}
void show_info() { // print the current channel and volume
cout << "채널: " << channel << ", 볼륨: " << volume << endl << endl;
}
};
int main() {
Tv* t = new Tv();
int menu,ch,exit=0;
while (!exit) {
cout << "1: 채널 ▲ \t2: 채널 ▼ \t3: 채널 선택 " << endl;
cout << "4: 볼륨 ▲ \t5: 볼륨 ▼ \t6: 확인 ◎ \t7: 종료" << endl;
cin >> menu;
switch (menu) {
case 1:
t->channelUp();
break;
case 2:
t->channelDown();
break;
case 3:
cout << "Input Channel (0-100): ";
cin >> ch;
t->getChannel(ch);
break;
case 4:
t->volumeUp();
break;
case 5:
t->volumeDown();
break;
case 6:
t->show_info();
break;
case 7:
exit = 1;
break;
}
}
}<file_sep>/퀴즈_거꾸로숫자/퀴즈_거꾸로숫자.c
//#include<cstdio>
#include<stdio.h>
int first = 1; // used to drop leading zeros, e.g. so 1000 prints 1 instead of 0001
void solve(int n) // e.g. n = 102 or 1020
{
if (n == 0) return;
if(first && (n%10)==0) // while nothing has been printed yet, skip trailing zeros
solve(n/10);
else
{
printf("%d", n % 10); //2
first = 0;
solve(n/10); //10
}
}
int main()
{
int n;
printf("숫자 입력 : ");
scanf_s("%d", &n);
printf("거꾸로 숫자 출력 : ");
solve(n);
return 0;
}<file_sep>/lecture0206Employee/Employee.h
#pragma once
class Employee
{
public:
Employee();
Employee(char* pName, char* pAddr); //char* --> string
~Employee();
void DisplayEmployee();
protected:
//private:
char* strName;
char* strAddr;
};
class Regular : public Employee
{
public:
Regular();
Regular(char* pName, char* pAddr, double dSalary);
~Regular();
double PayCheck() const;
private:
double salary;
};
class Temporary : public Employee
{
public:
Temporary();
Temporary(char* pName, char* pAddr, double dSalary, int ndays);
~Temporary();
double PayCheck() const;
private:
double dailyPayCheck;
int days;
};<file_sep>/lecture0206STLVectorStack/lecture0206STLVectorStack.cpp
#include <iostream>
#include <vector>
using namespace std;
int pop(vector<int>& v) {
if (v.size() > 0) {
int t = v[v.size() - 1]; // top of the stack
v.pop_back();
return t;
}
else {
cout << "오류 스택이 비었습니다." << endl;
return -1;
}
}
void push(vector<int>& v, int value) {
v.push_back(value);
cout << value << "를 넣었습니다.";
}
int main() {
int i, num;
vector<int> v;
while (true) {
cout << "[1]push [2]pop [3]print [4]exit" << endl;
cin >> num;
cout << "-----------------------" << endl;
switch (num) {
case 1:
cout << "값을 입력해주세요 : ";
cin >> i;
push(v, i);
cout << endl;
break;
case 2:
if (v.size() > 0)
cout << pop(v) << "를 꺼냈습니다." << endl;
break;
case 3:
for (int j = 0; j < v.size(); j++)
cout << v[j] << endl;
break;
default:
return 0;
}
cout << "-----------------------" << endl;
}
}<file_sep>/퀴즈바이러스백신/퀴즈바이러스백신2.c
#include <stdio.h>
int min(int a, int b)
{
if (a > b)
return b;
else
return a;
}
int main()
{
int v1, v2, v3, i;
scanf_s("%d %d %d", &v1, &v2, &v3);
for (i = min(v1, min(v2, v3)); i >= 1; i--) // largest common divisor of the three
{
if (v1 % i == 0 && v2 % i == 0 && v3 % i == 0)
break;
}
printf("%d\n", i);
return 0;
}
<file_sep>/lecture0009/lecture0009freadfwrite.c
#include <stdio.h>
int main(void)
{
FILE* stream;
char list[30];
int i, numread, numwritten;
// Open file in text mode:
if (fopen_s(&stream, "fread.out", "w+t") == 0)
{
for (i = 0; i < 25; i++)
list[i] = (char)('z' - i);
// Write 25 characters to stream
numwritten = fwrite(list, sizeof(char), 25, stream);
printf("Wrote %d items\n", numwritten);
fclose(stream);
}
else
printf("Problem opening the file\n");
if (fopen_s(&stream, "fread.out", "r+t") == 0)
{
// Attempt to read in 25 characters
numread = fread(list, sizeof(char), 25, stream);
printf("Number of items read = %d\n", numread);
printf("Contents of buffer = %.25s\n", list);
fclose(stream);
}
else
printf("File could not be opened\n");
}<file_sep>/시험코드따라기기0001/시험코드따라기기0001.cpp
#include<stdio.h>
int main() {
int i, j, t, a[3] = { 2,0,1 };
for (i = 0; i < 3; i++)
{
t = i;
for (j = 1; j <= 7777; j++) t = a[t];
printf("%d ", t);
}
return 0;
}
//2 0 1<file_sep>/k진법변환/k진법변환.c
#include<stdio.h>
char base[21] = "0123456789ABCDEFGHIJ";
void f(int n, int k)
{
if (n < k)
printf("%c", base[n]);
else
{
f(n / k, k);
printf("%c", base[n % k]);
//f(n / k, k), printf("%c", base[n % k]);
}
}
int main()
{
int n, k;
printf("Enter n, k : "), scanf_s("%d %d", &n, &k);
printf("%dÀÇ %dÁø¹ý : ",n, k);
f(n, k);
return 0;
}<file_sep>/사전테스트_앵무새/사전테스트_앵무새.c
//◦ There is a mischievous parrot. It repeats whatever it hears with the letters reversed.
//◦ If you say "Hello" to the parrot, it spells back "olleH".
//◦ The input is less than 100 characters long.
#include <stdio.h>
#include <string.h>
int main()
{
char str[100];
int i;
printf("단어를 말하세요 : ");
gets_s(str,100);
for (i = strlen(str) - 1; i >= 0; i--)
{
printf("%c", str[i]);
}
return 0;
}<file_sep>/lecture0003/소스.c
#include<stdio.h>
int main()
{
static int i = 7, *p, ** pp;
p = &i;
pp = &p;
printf("p\'s Address = %p\n", p);
printf("pp\'s Address = %p\n", pp);
(*p)++;
printf("i=%d \t", i);
printf("*p=%d \t", *p);
printf("**pp=%d \n", **pp);
(**pp)++;
printf("i=%d \t", i);
printf("*p=%d \t", *p);
printf("**pp=%d \n", **pp);
}<file_sep>/lecture0006/test.c
//#include<stdio.h>
//#include "source.h"
//
//int maina()
//{
// FILE* fp;
// char ch;
// fp = fopen("test.c", "r");
// while ((ch = getc(fp)) != EOF)
// putc(ch, stdout);
//
// fclose(fp);
//}
<file_sep>/lecture02100adjacent_find/lecture0210adjacent_find.cpp
// alg_adj_fnd.cpp
// compile with: /EHsc
#include <list>
#include <algorithm>
#include <iostream>
// Returns whether second element is twice the first
bool twice(int elem1, int elem2)
{
return elem1 * 2 == elem2;
}
int main()
{
using namespace std;
list<int> L;
list<int>::iterator Iter;
list<int>::iterator result1, result2;
L.push_back(50);
L.push_back(40);
L.push_back(10);
L.push_back(20);
L.push_back(20);
cout << "L = ( ";
for (Iter = L.begin(); Iter != L.end(); Iter++)
cout << *Iter << " ";
cout << ")" << endl;
result1 = adjacent_find(L.begin(), L.end());
if (result1 == L.end())
cout << "There are not two adjacent elements that are equal."
<< endl;
else
cout << "There are two adjacent elements that are equal.\n"
<< "They have a value of "
<< *(result1) << "." << endl;
result2 = adjacent_find(L.begin(), L.end(), twice);
if (result2 == L.end())
cout << "There are not two adjacent elements where the "
<< "second is twice the first." << endl;
else
{
cout << "There are two adjacent elements where "
<< "the second is twice the first.\n"
<< "They have values of " << *(result2++)
<< " & " << *result2 << "." << endl;
}
}
//Note: the original code printed the pair as *(result2++) and *result2 inside a single
//insertion chain; because the evaluation order of the operands is unspecified (before C++17),
//this could print "10 & 10" instead of the intended "10 & 20". Reading the two values
//separately, as above, makes the output deterministic.
//Expected output:
//L = ( 50 40 10 20 20 )
//There are two adjacent elements that are equal.
//They have a value of 20.
//There are two adjacent elements where the second is twice the first.
//They have values of 10 & 20.
//Reference:
//https://docs.microsoft.com/ko-kr/cpp/standard-library/algorithm-functions?f1url=https%3A%2F%2Fmsdn.microsoft.com%2Fquery%2Fdev15.query%3FappId%3DDev15IDEF1%26l%3DKO-KR%26k%3Dk(XUTILITY%2Fstd%3A%3Areverse);k(std%3A%3Areverse);k(reverse);k(SolutionItemsProject);k(DevLang-C%2B%2B);k(TargetOS-Windows)%26rd%3Dtrue&view=vs-2019
<file_sep>/lecture0015/lecture0015도수분포.c
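// Frequency distribution: each score is mapped to a decade bucket with score/10 and the 11 buckets (0..100) are counted and printed.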
#include <stdio.h>
#define Num 20
int main(void)
{
static int a[] = { 35,25,56,78,43,66,71,73,80,90,
0,73,35,65,100,78,80,85,35,50 };
int i, rank, histo[11];
for (i = 0; i <= 10; i++)
histo[i] = 0;
for (i = 0; i < Num; i++) {
		rank = a[i] / 10; /* map the score to its bucket */
if (0 <= rank && rank <= 10)
histo[rank]++;
}
for (i = 0; i <= 10; i++)
printf("%3d - :%3d\n", i * 10, histo[i]);
return 0;
}
<file_sep>/퀴즈바이러스백신/퀴즈바이러스백신1.cpp
#include <stdio.h>
int main()
{
int v1, v2, v3, i, sol,min;
scanf_s("%d %d %d", &v1, &v2, &v3);
min = (v1 > v2 ? ((v2 > v3) ? v3 : v2) : v1 > v3 ? v3 : v1);
for (i = 1; i <= min; i++)
{
if (v1 % i == 0 && v2 % i == 0 && v3 % i == 0)
sol = i;
}
printf("%d\n", sol);
return 0;
}<file_sep>/중간테스트_화폐/중간테스트_화폐.c
#include <stdio.h>
int main() {
	int sumofmoney, total;//amount
	int unitofmoney, sw;//denomination
int num;
do {
unitofmoney = 50000, sw = 1, sumofmoney = 0;
printf("금액을 입력하세요 : ");
scanf_s("%d", &sumofmoney);
if (sumofmoney == 0) break;
if (sumofmoney < 0)
printf("INPUT ERROR! (Quit 0) \n");
if (sumofmoney > 1000000)
printf("DATA RANGE(1-1000000), (Quit 0) \n");
else {
total = sumofmoney;
			total = ((total + 5) / 10) * 10;//round to the nearest 10
while (total != 0) {
if (sw == 1)
unitofmoney = unitofmoney / 5;
else
unitofmoney = unitofmoney / 2;
sw = -(sw);
num = total / unitofmoney;
total = total % unitofmoney;
if (num > 0) printf("%5d=%d \n", unitofmoney, num);
}
}
printf("\n");
} while (sumofmoney != 0);
return 0;
}<file_sep>/lecture0110cppTreeTraverse/lecture0110cppTreeTraverse.cpp
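// Binary search tree: keys <= the current node go left, larger keys go right; inOrder/preOrder/postOrder print the keys recursively.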
#include<iostream>
using namespace std;
class Node {
int key;
Node* left;
Node* right;
public:
Node()
{
key = -1;
left = NULL;
right = NULL;
};
void setKey(int key)
{
this->key = key;
}
void setLeft(Node* left)
{
this->left = left;
}
void setRight(Node* right)
{
this->right = right;
}
int getKey()
{
return key;
}
Node* getLeft()
{
return left;
}
Node* getRight()
{
return right;
}
};
class Tree
{
Node* root;
public:
Tree();
~Tree();
Node* getRoot()
{
return root;
}
void addNode(int key);
void inOrder(Node* n);
void preOrder(Node* n);
void postOrder(Node* n);
private:
void addNode(int key, Node* leaf);
void freeNode(Node* leaf);
};
Tree::Tree()
{
root = NULL;
}
Tree::~Tree()
{
freeNode(root);
}
void Tree::freeNode(Node* leaf)
{
if (leaf != NULL)
{
freeNode(leaf->getLeft());
freeNode(leaf->getRight());
delete leaf;
}
}
void Tree::addNode(int key)
{
if (root == NULL)
{
cout << "add root node ..." << key << endl;
Node* temp = new Node();
temp->setKey(key);
root = temp;
}
else
{
cout << "add other node ..." << key << endl;
addNode(key, root);
}
}
//add a node(private)
void Tree::addNode(int key, Node* leaf)
{
if (key <= leaf->getKey())
{
if (leaf->getLeft() != NULL)
addNode(key, leaf->getLeft());
else
{
Node* temp = new Node();
temp->setKey(key);
leaf->setLeft(temp);
}
}
else
{
if (leaf->getRight() != NULL)
addNode(key, leaf->getRight());
else
{
Node* temp = new Node();
temp->setKey(key);
leaf->setRight(temp);
}
}
}
void Tree::inOrder(Node* n)
{
if (n)
{
inOrder(n->getLeft());
cout << n->getKey() << "";
inOrder(n->getRight());
}
}
void Tree::preOrder(Node* n)
{
if (n)
{
cout << n->getKey() << "";
preOrder(n->getLeft());
preOrder(n->getRight());
}
}
void Tree::postOrder(Node* n)
{
if (n)
{
postOrder(n->getLeft());
postOrder(n->getRight());
cout << n->getKey() << "";
}
}
int main()
{
Tree* tree = new Tree();
tree->addNode(3);
tree->addNode(1);
tree->addNode(2);
tree->addNode(4);
tree->addNode(5);
cout << "InOrder Traversal " << endl;
tree->inOrder(tree->getRoot());
cout << endl;
cout << "preOrder Traversal " << endl;
tree->preOrder(tree->getRoot());
cout << endl;
cout << "postOrder Traversal " << endl;
tree->postOrder(tree->getRoot());
cout << endl;
}<file_sep>/lecture0101/lecture0101.c
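// Bubble sort: each outer pass bubbles the smallest remaining element toward the front by swapping adjacent out-of-order pairs.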
//bubble sort
#include <stdio.h>
#define N 6
int main(void)
{
static int a[] = { 80,41,35,90,40,20 };
int t, i, j;
for (i = 0; i < N - 1; i++) {
for (j = N - 1; j > i; j--) {
if (a[j] < a[j - 1]) {
t = a[j]; a[j] = a[j - 1]; a[j - 1] = t;
}
}
}
for (i = 0; i < N; i++)
printf("%d ", a[i]);
printf("\n");
return 0;
}<file_sep>/lecture0008/lecture0008.c
#include <stdio.h>
int main(void)
{
FILE* stream;
char line[100];
if (fopen_s(&stream, "test.txt", "r" ) == 0 )
{
while (fgets(line, 100, stream))
{
printf("%s", line);
}
}
fclose(stream);
return 0;
}
//#include <stdio.h>
//
//int main(void)
//{
// FILE* stream;
// char line[100];
// if (fopen_s(&stream, "test.txt", "r") == 0)
// {
// while (1)
// {
// if (fgets(line, 100, stream) == NULL)
// {
// printf("Error");
// break;
// }
// else
// {
// printf("%s", line);
// }
//
// }
//
// }
// fclose(stream);
// return 0;
//}
<file_sep>/Project입출력/lecture0200HelloWorld.cpp
#include <iostream>
#include <string>
using namespace std;
int main()
{
string song("Falling in love with you");
string elvis("<NAME>");
string singer;
cout << song + " 를 부른 가수는";
cout << "Hint : " << elvis[0] << "?"<<endl;
getline(cin, singer);
if (singer == elvis)
cout << "Correct" << endl;
else
cout << "Wrong" << endl;
return 0;
}
//#include <iostream>
//using namespace std;
//
//int main()
//{
// std::cout << "Hello, world!" << std::endl;
// return 0;
//}
//
//#include <iostream>
//using namespace std;
//
//int main()
//{
// cout << "Hello, world!\n";
// cout << "Hello, world!"<<endl;
// return 0;
//}
//
//#include<string>
//int main()
//{
// string str("Hello World");
//
// cout << str << ".\n";
// return 0;
//}
//#include <iostream>
//#include <fstream>
//
//using namespace std;
//
//void TestWide()
//{
// int i = 0;
// wcout << L"Enter a number(L): ";
// wcin >> i;
// wcout << "wcout="<<i<<endl;
// cout << "cout="<<i<<endl;
// wcerr << L"test for wcerr" << endl;
// wclog << L"test for wclog" << endl;
//}
//
//int main()
//{
// int i = 0;
// cout << "Enter a number: ";
// cin >> i;
// wcout << "wcout=" << i << endl;
// cout << "cout=" << i << endl;
// cerr << "test for cerr" << endl;
// clog << "test for clog" << endl;
// TestWide();
//}<file_sep>/lecture0203기초성적표관리/lecture0203기초성적표관리.cpp
#include <iostream>
#include <iomanip>
using namespace std;
int main()
{
	int S1_No, S2_No, S3_No;		// student numbers
	int S1_Kor, S2_Kor, S3_Kor;		// Korean scores
	int S1_Math, S2_Math, S3_Math;	// math scores
	int S1_Eng, S2_Eng, S3_Eng;		// English scores
	float S1_Ave, S2_Ave, S3_Ave;	// individual averages
	float TotalAve = 0.0f;			// overall average
	int NumberOfStudent = 0;		// number of students entered so far
while (1)
{
cout << " -------------메 뉴-------------" << endl;
cout << " 1. 학생 성적 추가" << endl;
cout << " 2. 전체 성적 보기" << endl;
cout << " Q. 프로그램 종료 " << endl;
cout << " -------------------------------" << endl;
cout << " 메 뉴의 번호를 입력하세요 : ";
		char select; // char variable holding the menu selection
cin >> select;
switch (select)
{
		/* add-grade case */
case '1':
{
if (NumberOfStudent==3)
{
cout << "\n더이상 입력할수 없습니다.\n";
cout << "다른메뉴를 선택해 주세요 \n\n";
continue;
}
int Kor, Eng, Math;
cout << endl;
cout << '[' << NumberOfStudent + 1 << "] 번째 학생의 " << "국어, 영어, 수학점수를 입력해 주세요 : ";
cin >> Kor >> Eng >> Math;
			float Ave = float(Kor + Eng + Math) / 3.0f; // individual average
if (NumberOfStudent==0)
{
S1_No = NumberOfStudent + 1;
S1_Kor = Kor;
S1_Eng = Eng;
S1_Math = Math;
S1_Ave = Ave;
TotalAve = S1_Ave;
}
else if (NumberOfStudent==1)
{
S2_No = NumberOfStudent + 1;
S2_Kor = Kor;
S2_Eng = Eng;
S2_Math = Math;
S2_Ave = Ave;
TotalAve = (S1_Ave + S2_Ave) / 2;
}
else
{
S3_No = NumberOfStudent + 1;
S3_Kor = Kor;
S3_Eng = Eng;
S3_Math = Math;
S3_Ave = Ave;
TotalAve = (S1_Ave + S2_Ave + S3_Ave) / 3;
}
NumberOfStudent++;
cout << '[' << NumberOfStudent << "] 번째 학생의 성적이 입력 되었습니다\n\n\n";
break;
}
		/* view-grades case */
case '2':
{
			cout.precision(2); // print reals with two digits after the decimal point
			cout << fixed;     // print reals with two digits after the decimal point
cout << "\n\n < 전체 성적보기 >\n\n";
cout << " 학번 국어 영어 수학 평균 \n";
int iCnt;
for (iCnt = 1; iCnt <= NumberOfStudent; ++iCnt)
{
if (1 == iCnt)
{
cout << setw(7) << '[' << S1_No << ']' << setw(7) << S1_Kor << setw(6) << S1_Eng << setw(6) << S1_Math << setw(8) << S1_Ave << endl;
}
if (2 == iCnt)
{
cout << setw(7) << '[' << S2_No << ']' << setw(7) << S2_Kor << setw(6) << S2_Eng << setw(6) << S2_Math << setw(8) << S2_Ave << endl;
}
if (3 == iCnt)
{
cout << setw(7) << '[' << S3_No << ']' << setw(7) << S3_Kor << setw(6) << S3_Eng << setw(6) << S3_Math << setw(8) << S3_Ave << endl;
}
}
cout << '\n' << setw(27) << "전체 평균 = " << TotalAve << endl;
			break;
		}
		/* quit case */
case 'Q':
{
cout << "\n ****프로그램을 종료 합니다.****\n" << endl;
return 0;
}
		/* invalid-input case */
default:
{
cout << "\n잘못된 입력입니다. 다시입력 해주세요.\n" << endl;
break;
}
}
}
return 0;
}
<file_sep>/lecture0201stringreverse/lecture0201stringreverse.cpp
#include <iostream>
#include <string>
#include <algorithm>
#include <sstream>
using namespace std;
//Reverses a string, and turns the number 12300 into 321
int main()
{
string s;
int n;
getline(cin, s); //12300
reverse(s.begin(), s.end()); //00321
stringstream(s) >> n; //321
cout <<n<< endl; //321
//cout << s;
return 0;
}<file_sep>/lecture0206Employee/lecture0206Employee.cpp
#include <iostream>
#include <cstring>
#include "Employee.h"
using namespace std;
Employee::Employee()
{
strName = NULL; //** nullptr
strAddr = NULL;
}
Employee::Employee(char* pName, char* pAddr)
{
cout << endl;
cout << "Employee(pName, pAddr)" << endl;
	strName = new char[strlen(pName) + 1]; //** strlen(pName)+1
	strAddr = new char[strlen(pAddr) + 1];
	strcpy_s(strName, strlen(pName) + 1, pName); // copy into the allocated buffers (assigning the pointers would leak them)
	strcpy_s(strAddr, strlen(pAddr) + 1, pAddr);
}
Employee ::~Employee()
{
}
void Employee::DisplayEmployee()
{
cout << "이름:" << strName << endl;
cout << "주소:" << strAddr << endl << endl;
}
Regular::Regular()
{
}
Regular::Regular(char* pName, char* pAddr, double dSalary)
{
cout << "Regular 인자가 3개인 생성자 호출" << endl;
strName = pName;
strAddr = pAddr;
salary = dSalary;
cout << "이름:" << strName << endl;
cout << "주소:" << strAddr << endl;
}
Regular ::~Regular()
{
}
double Regular::PayCheck() const
{
return salary;
}
Temporary::Temporary()
{
}
Temporary::Temporary(char* pName, char* pAddr, double dDailyPayCheck, int nDays)
{
cout << "Temporary 인자가 4개인 생성자 호출" << endl;
strName = pName;
strAddr = pAddr;
dailyPayCheck = dDailyPayCheck;
days = nDays;
cout << "이름:" << pName << endl;
cout << "주소:" << pAddr << endl;
}
Temporary ::~Temporary()
{
}
double Temporary::PayCheck() const
{
return dailyPayCheck * days;
}
int main()
{
Employee emp("David", "State of Oregon");
emp.DisplayEmployee();
Regular rgl("Kim", "Kimhae", 300);
cout << "급여:" << rgl.PayCheck() << endl << endl;
Temporary tmp("Joe", "Seoul", 10, 20);
cout << "급여:" << tmp.PayCheck() << endl << endl;
return 0;
}
<file_sep>/lecture0017암호과제제출/lecture0017.cpp
#define _CRT_SECURE_NO_WARNINGS
#include<stdio.h>
#include<stdlib.h>	// for system()
int main() {
int seed;
int key1, key2, key3, cir = 0;
char sen[100];
int choose;
printf("암호화하려면 0, 해독하려면 1을 입력해주세요 : ");
scanf("%d", &choose);
switch (choose) {
	case 0://encrypt
printf("암호화할 문장을 입력해주세요 : ");
scanf("%s", sen);
printf("암호를 풀기 위한 시드 값을 입력해주세요(1000미만) :");
scanf("%d", &seed);
printf("\n시드 %d로 암호화할 문장은 %s입니다", seed, sen);
key1 = seed / 100;
key2 = (seed - key1 * 100) / 10;
key3 = seed - key1 * 100 - key2 * 10;
for (int i = 0; sen[i] != 0; i++) {
cir++;
if (cir > 3) cir = 1;
switch (cir) {
case 1:
sen[i] = sen[i] + key1;
break;
case 2:
sen[i] = sen[i] - key2;
break;
case 3:
sen[i] = sen[i] + key3;
break;
}
}
printf("\n\n암호화된 문장 \n");
for (int i = 0; sen[i] != 0; i++)
printf("%c", sen[i]);
printf("\n\n");
system("pause");
break;
	case 1://decrypt
printf("해독할 문장을 입력해주세요 : ");
scanf("%s", sen);
printf("현재의 시드 값을 입력해주세요(1000미만) :");
scanf("%d", &seed);
printf("\n시드 %d로 해독할 문장은 %s입니다", seed, sen);
key1 = seed / 100;
key2 = (seed - key1 * 100) / 10;
key3 = seed - key1 * 100 - key2 * 10;
for (int i = 0; sen[i] != 0; i++) {
cir++;
if (cir > 3) cir = 1;
switch (cir) {
case 1:
sen[i] = sen[i] - key1;
break;
case 2:
sen[i] = sen[i] + key2;
break;
case 3:
sen[i] = sen[i] - key3;
break;
}
}
printf("\n\n해독된 암호 \n");
for (int i = 0; sen[i] != 0; i++)
printf("%c", sen[i]);
printf("\n\n");
system("pause");
break;
}
}<file_sep>/lecture0006파일입출력_합계/기존파일입출력.c
//#define _CRT_SECURE_NO_WARNINGS
//#include <stdio.h>
//int main() {
// FILE* fp, * fp2;
//
// int a;
//
// scanf("%d", &a);
//
// if ((fp = fopen("example.txt", "w")) == NULL) {
// printf("파일 읽기 오류! \n");
// return 0;
// }
//
// fputs("안녕하세요.\n 파일입출력테스트. \n", fp);
//
// if ((fp2 = fopen("example.txt", "a")) == NULL) {
// printf("파일 읽기 오류! \n");
// return 0;
// }
//
// fputs("이제 파일입출력이 이해됩니다.", fp2);
// return 0;
//}
<file_sep>/lecture0016/lecture0016.c
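// Improved ranking: counts how many times each score occurs, then accumulates from the highest score down so each score's rank can be read directly from the table (O(n + range) instead of the O(n^2) pairwise version below).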
//* -----------------------------
//*      Improved ranking       *
//* -----------------------------
#include <stdio.h>
#define Num 10
#define Max 100
#define Min 0
int main(void)
{
static int a[] = { 56,25,67,88,100,61,55,67,76,56 };
int i, rank[Max + 2];
	for (i = Min; i <= Max; i++) // initialize to 0
rank[i] = 0;
	for (i = 0; i < Num; i++) //add 1 at the array element indexed by each score
rank[a[i]]++;
	rank[Max + 1] = 1; // the loop below adds the value of the element just to the right
for (i = Max; i >= Min; i--)
rank[i] = rank[i] + rank[i + 1];
printf(" 점수 순위\n");
for (i = 0; i < Num; i++) {
printf("%6d%6d\n", a[i], rank[a[i]+1]); // 점수+1의 위치에 순위가 저장됨.
}
return 0;
}
///*
// * --------------------
// *      Ranking       *
// * --------------------
// */
//
//#include <stdio.h>
//
//#define Num 10
//
//int main(void) /* 순위 매김 */
//{
// static int a[] = { 56,25,67,88,100,61,55,67,76,56 };
// int rank[Num];
// int i, j;
//
// for (i = 0; i < Num; i++) {
// rank[i] = 1;
// for (j = 0; j < Num; j++) {
// if (a[j] > a[i])
// rank[i]++;
// }
// }
//
// printf(" 점수 순위\n");
// for (i = 0; i < Num; i++) {
// printf("%6d%6d\n", a[i], rank[i]);
// }
//
// return 0;
//}
<file_sep>/퀴즈 삼각형만들기/퀴즈삼각형만들기.cpp
#include<stdio.h>
int n;
int solve()
{
int cnt = 0;
scanf_s("%d", &n);
//a<=b<=c, a+b>c
for(int a=1;a<=n;a++)
for(int b=a;b<=n;b++)//중복 제거
for(int c=b;c<=n;c++)//중복 제거
if (a + b + c == n && a + b > c)
{
printf("(%d, %d, %d) ", a, b, c);
cnt++;
}
return cnt;
}
int main()
{
printf("%d개\n", solve());
}<file_sep>/Project입출력/입출력.cpp
#include <iostream>
using namespace std;
int main()
{
cout << "Hello, world!\n";
return 0;
}
//
//#include <iostream>
//#include <fstream>
//
//using namespace std;
//
//void TestWide()
//{
// int i = 0;
// wcout << L"Enter a number(L): ";
// wcin >> i;
// wcout << "wcout="<<i<<endl;
// cout << "cout="<<i<<endl;
// wcerr << L"test for wcerr" << endl;
// wclog << L"test for wclog" << endl;
//}
//
//int main()
//{
// int i = 0;
// cout << "Enter a number: ";
// cin >> i;
// wcout << "wcout=" << i << endl;
// cout << "cout=" << i << endl;
// cerr << "test for cerr" << endl;
// clog << "test for clog" << endl;
// TestWide();
//}<file_sep>/lecture0019/lecture0019.c
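// Computes x^n two ways: a simple O(n) loop and an O(log n) recursion that squares the base and halves the exponent.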
//Exponentiation: comparing recursive and iterative algorithms
#include <stdio.h>
double power_iteration(double x, int n) { // x is the base, n is the exponent
int i;
double r = 1.0;
for (i = 0; i < n; i++)
r = r * x;
return (r);
}
double power_recursion(double x, int n) { // x is the base, n is the exponent
	if (n == 0) return 1;
	else if ((n % 2) == 0) // when n is even
		return power_recursion(x * x, n / 2);
	else return x * power_recursion(x * x, (n - 1) / 2); // when n is odd
}
int main(void) {
double base = 2.0;
int exponent = 10;
double result;
result = power_iteration(base, exponent);
printf("계산 결과: %.2f\n", result);
result = power_recursion(base, exponent);
printf("계산 결과: %.2f\n", result);
return 0;
}
<file_sep>/lecture0020-1/lecture0020-1메모이제이션.c
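// Memoized Fibonacci: results are cached in a static array, so each fibonacci(n) is computed only once.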
#include<stdio.h>
#define MAX_SIZE 100
int fibonacci(int n) {
static int arr[MAX_SIZE];
if (n < 2) return n;
if (arr[n] > 0) return arr[n];
else return arr[n] = fibonacci(n - 1) + fibonacci(n - 2);
}
int main(void) {
int num = 10;
int result;
result = fibonacci(num);
printf("%d번째 fibonacci 수열 값: %d", num, result);
return 0;
}
<file_sep>/lecture0017/lecture0017암호해독.c
/*
 * --------------------
 *   Cipher decoding   *
 * --------------------
 */
#include <stdio.h>
int main(void)
{
static char table[] = { 'Q','W','E','R','T','Y','U','I','O','P',
'A','S','D','F','G','H','J','K','L','Z',
'X','C','V','B','N','M','?' };
char* p, * pass = "<PASSWORD>";
int index;
p = pass;
while (*p != '\0') {
if ('A' <= *p && *p <= 'Z')
index = *p - 'A';
else
index = 26;
putchar(table[index]);
p++;
}
printf("\n");
return 0;
}
<file_sep>/퀴즈회문암호/퀴즈회문암호.c
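// For every i in [100, 10000], prints i when i + rev(i) is a palindrome; rev() reverses the decimal digits of n.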
#include <stdio.h>
int rev(int n)
{
int s = 0;
while (n)
{
s = s * 10;
s = s + (n % 10);
n = n / 10;
}
return s;
}
int main()
{
	int sum, n;
	for (int i = 100; i <= 10000; i++)
	{
		sum = i + rev(i);
		if (sum == rev(sum))//palindrome
printf("%d ",i);
}
return 0;
}
<file_sep>/lecture0023/lecture0023전치행렬기초.c
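// In-place transpose of an n*n matrix: swaps a[i][j] with a[j][i] once per pair, using c[][] to mark pairs that were already swapped.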
#include <stdio.h>
int n;
int a[1001][1001], c[1001][1001];
int main() {
printf("n*n matrix : n=");
scanf_s("%d", &n);
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
scanf_s("%d", &a[i][j]);
}
}
printf("\n");
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
			if (!c[i][j]) // swap this pair only if it has not been swapped yet
{
int tmp = a[i][j];
a[i][j] = a[j][i];
a[j][i] = tmp;
c[j][i] = 1;
}
}
}
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
printf("%d ", a[i][j]);
}
printf("\n");
}
}<file_sep>/사전테스트_구간의수/사전테스트_구간의수.c
//[Sample input/output]
//Enter two numbers. : 9 2
//2 3 4 5 6 7 8 9
//The total is 44.
#include <stdio.h>
int main()
{
int a, b, temp, i,sum=0;
printf("***두 수 구간의 숫자 출력하기***\n");
printf("두 수를 입력하세요 :");
scanf_s("%d %d", &a, &b);
if (a > b)
{
temp = a;
a = b;
b = temp;
}
for (i = a; i <= b; i++)
{
printf("%d ", i);
sum += i;
}
printf("총합은 %d입니다.\n", sum);
return 0;
}<file_sep>/lecture0010/lecture0010.c
//Insertion into a sorted array
#include<stdio.h>
int main()
{
char arr[5] = { 'A','B','D','E'};
char c = 'C';
int i, tmp, tmp2;
for (i = 0; i < 5; i++)
{
printf("%2c", arr[i]);
}
printf("\n");
for (i = 0; i < 5; i++)
{
if (arr[i] > c) break;
}
printf("\n");
tmp = arr[i];
arr[i++] = c;
for (; i < 5; i++)
{
tmp2 = arr[i];
arr[i] = tmp;
tmp = tmp2;
}
printf("1. %c", tmp);
printf("2. %c", tmp2);
for (i = 0; i < 5; i++)
{
printf("%2c", arr[i]);
}
}<file_sep>/lecture0020/lecture0020.c
//Fibonacci sequence: comparing recursive and iterative algorithms
//Recursive algorithm
#include <stdio.h>
int fibonacci(int n) {
if (n == 0) return 0;
else if (n == 1) return 1;
else return (fibonacci(n - 1) + fibonacci(n - 2));
}
int main(void) {
int num = 7;
int result;
result = fibonacci(num);
printf("%d번째 fibonacci 수열 값: %d", num, result);
return 0;
}
// Iterative algorithm
//#include <stdio.h>
//
//int fibonacci1(int n) {
// if (n < 2) return n;
// else {
// int i, tmp, cur = 1, last = 0;
// for (i = 2; i <= n; i++) {
// tmp = cur;
// cur += last;
// last = tmp;
// }
// return cur;
// }
//}
//
//int fibonacci2(int n) {
// if (n < 2) return n;
// else {
// int i, tmp, cur = 1, last = 0;
// for (i = 2; i <= n; i++) {
// tmp = last;
// last = cur;
// cur += tmp;
// }
// return cur;
// }
//}
//
//int main(void) {
// int num = 7;
// int result1, result2;
//
// result1 = fibonacci1(num);
// result2 = fibonacci2(num);
// printf("%d번째 fibonacci1 수열 값: %d\n", num, result1);
// printf("%d번째 fibonacci2 수열 값: %d", num, result2);
// return 0;
//}
//Fibonacci with memoization
//#include <stdio.h>
//#define MAX_SIZE 100
//
//int fibonacci(int n) {
// static int arr[MAX_SIZE];
// if (n < 2) return n;
// if (arr[n] > 0) return arr[n];
// else return arr[n] = fibonacci(n - 1) + fibonacci(n - 2);
//}
//
//int main(void) {
// int num = 10;
// int result;
//
// result = fibonacci(num);
// printf("%d번째 fibonacci 수열 값: %d", num, result);
// return 0;
//}<file_sep>/lecture0200/lecture0200클래스circle.cpp
//#include <iostream>
//using namespace std;
//struct Circle {
//public:
// int radius;
// double getArea();
//};
//double Circle::getArea() {
// return 3.14 * radius * radius;
//}
//struct Rectangle {
//public:
// int width, height;
// double getArea() {
// return (double)width * height;
// }
//};
//int main()
//{
// Circle donut;
// donut.radius = 10;
// double area = donut.getArea();
// cout << "donut = " << area << endl;
//
// Circle pizza;
// pizza.radius = 30;
// area = pizza.getArea();
// cout << "pizza = " << area << endl;
// Rectangle rect;
// rect.width = 3;
// rect.height = 5;
// cout << "Rectangle = " << rect.getArea() << endl;
//}
//#include <iostream>
//using namespace std;
//class Circle {
//public:
// int radius;
// double getArea();
//};
//double Circle::getArea() {
// return 3.14 * radius * radius;
//}
//class Rectangle {
//public:
// int width, height;
// double getArea() {
// return width * height;
// }
//};
//int main()
//{
// Circle donut;
// donut.radius = 1;
// double area = donut.getArea();
// cout << "donut =" << area << endl;
//
// Circle pizza;
// pizza.radius = 30;
// area = pizza.getArea();
// cout << "pizza=" << area << endl;
// Rectangle rect;
// rect.width = 3;
// rect.height = 5;
// cout << "Rectangle=" << rect.getArea() << endl;
//}
<file_sep>/lecture0021setcursor/lecture0021setcursor.cpp
// This example demonstrates the
// Console.CursorLeft and
// Console.CursorTop properties, and the
// Console.SetCursorPosition and
// Console.Clear methods.
using System;
class Sample
{
protected static int origRow;
protected static int origCol;
protected static void WriteAt(string s, int x, int y)
{
try
{
Console.SetCursorPosition(origCol + x, origRow + y);
Console.Write(s);
}
catch (ArgumentOutOfRangeException e)
{
Console.Clear();
Console.WriteLine(e.Message);
}
}
public static void Main()
{
// Clear the screen, then save the top and left coordinates.
Console.Clear();
origRow = Console.CursorTop;
origCol = Console.CursorLeft;
// Draw the left side of a 5x5 rectangle, from top to bottom.
WriteAt("+", 0, 0);
WriteAt("|", 0, 1);
WriteAt("|", 0, 2);
WriteAt("|", 0, 3);
WriteAt("+", 0, 4);
// Draw the bottom side, from left to right.
WriteAt("-", 1, 4); // shortcut: WriteAt("---", 1, 4)
WriteAt("-", 2, 4); // ...
WriteAt("-", 3, 4); // ...
WriteAt("+", 4, 4);
// Draw the right side, from bottom to top.
WriteAt("|", 4, 3);
WriteAt("|", 4, 2);
WriteAt("|", 4, 1);
WriteAt("+", 4, 0);
// Draw the top side, from right to left.
WriteAt("-", 3, 0); // shortcut: WriteAt("---", 1, 0)
WriteAt("-", 2, 0); // ...
WriteAt("-", 1, 0); // ...
//
WriteAt("All done!", 0, 6);
Console.WriteLine();
}
}
/*
This example produces the following results:
+---+
| |
| |
| |
+---+
All done!
*/<file_sep>/lecture0201cincout/lecture0201cincout.cpp
//#include <iostream>
//using namespace std;
//
//int main()
//{
// int i;
// cout << "Please enter an integer value: ";
// cin >> i;
// cout << "The value you entered is " << i;
// cout << " and its double is " << i * 2 << ".\n";
// return 0;
//}
#include <iostream>
using namespace std;
int main()
{
string str;
cin >> str;
cout <<str << endl;
cout <<"count : "<<str.length() << endl;
return 0;
}
// stringstreams
//#include <iostream>
//#include <string>
//#include <sstream>
//using namespace std;
//
//int main()
//{
// string mystr;
// float price = 0;
// int quantity = 0;
//
// cout << "Enter price: ";
// getline(cin, mystr);
// stringstream(mystr) >> price;
// cout << "Enter quantity: ";
// getline(cin, mystr);
// stringstream(mystr) >> quantity;
// cout << "Total price: " << price * quantity << endl;
// return 0;
//}<file_sep>/lecture0000문자열입력/lecture0000문자열입력.cpp
#include <stdio.h>
int main()
{
char pw[50];
int i;
printf("암호를 입력하시오. : ");
scanf_s("%s", pw, sizeof(pw));
//gets_s(pw, 50);
printf("%s", pw);
}
<file_sep>/lecture0009/소스.c
//#include <stdio.h>
//#include <stdlib.h>
//
//int main(void)
//{
// int count, total = 0;
// char buffer[100] = { '\0', };
// FILE* stream;
//
// fopen_s(&stream, "test.txt", "r");
// if (stream == NULL)
// exit(1);
//
// while(!feof(stream))
// {
// // Attempt to read in 100 bytes:
// count = fread(buffer, sizeof(char), 10, stream);
// printf("%s\n", buffer);
// if (ferror(stream)) {
// perror("Read error");
// break;
// //return 0;
// }
//
// // Total up actual bytes read
// total += count;
// }
// printf("Number of bytes read = %d\n", total);
//
// fclose(stream);
//}<file_sep>/lecture0200/lecture0200클래스완성.cpp
#include <iostream>
using namespace std;
class Circle {
private:
int radius;
public:
Circle();
~Circle() {
cout << "I am dying";
};
Circle(int radius);
double getArea();
};
//Circle::Circle()
//{
// radius = 1;
// cout << "기본 반지름 " << radius << "인 원 생성" << endl;
//};
Circle::Circle() :Circle(1) {}
Circle::Circle(int radius)
{
this->radius = radius;
cout << "반지름 " << radius << "인 원 생성" << endl;
};
double Circle::getArea() {
return 3.14 * radius * radius;
}
class Rectangle {
int width, height;
public:
Rectangle(int width, int height)
{
this->width = width;
this->height = height;
}
double getArea() {
return (double)width * height;
}
};
int main()
{
int n;
cout << "도넛의 반지름을 입력하세요 : ";
cin >> n;
Circle donut(n);
double area = donut.getArea();
cout << "donut =" << area << endl;
cout << "피자의 반지름을 입력하세요 : ";
cin >> n;
Circle pizza(n);
area = pizza.getArea();
cout << "pizza=" << area << endl;
int w, h;
cout << "사각형의 가로 길이, 세로 길이를 입력하세요 :";
cin >> w >> h;
Rectangle rect(w,h);
cout << "Rectangle=" << rect.getArea() << endl;
return 0;
}
<file_sep>/퀴즈고기잡이/퀴즈고기잡이.cpp
#define _CRT_SECURE_NO_WARNINGS
#include<stdio.h>
int data[101], N, W, max,maxi;
int main()
{
printf("냇가의 길이와 그물의 폭 입력 : ");
scanf("%d %d", &N, &W);
printf("물고기 배열 입력\n");
for (int i = 0; i < N; i++)
{
scanf("%d", data + i);
//printf("%d ", data[i]);
}
	for (int i = 0; i + W <= N; i++) // keep the whole net inside the stream (avoids reading past the last fish)
{
int sum = 0, j;
for (j = 0; j < W; j++)
{
sum += data[i + j];
}
if (sum > max)
{
max = sum;
maxi = i;
}
}
printf("(");
for (int i = 0; i < W; i++)
{
printf("%d ", data[maxi + i]);
}
printf(")\n");
printf("최대 %d 마리", max);
return 0;
}
<file_sep>/lecture0006/source.c
#include<stdio.h>
int main()
{
	FILE* fp = NULL;
	errno_t err;
	int ch;	// int so that EOF can be detected reliably
err=fopen_s(&fp,"test.c", "r");//Text.txt
if (err) return 0;
while ((ch = getc(fp)) != EOF)
putc(ch, stdout);
err=fclose(fp);
if (err) return 0;
}<file_sep>/사전테스트_윷놀이/사전테스트_윷놀이.c
#include <stdio.h>
#include <time.h>
#include <stdlib.h>
int yoot[4];
void Rand_yoot()
{
srand((unsigned)time(NULL));
int i;
for (i = 0; i < 4; i++)
{
yoot[i] = rand() % 2;
//printf("%d", yoot[i]);
}
}
int count_yoot()
{
int i,cnt=0;
for (i = 0; i < 4; i++)
if (yoot[i] == 1) cnt++;
return cnt;
}
void show_yoot()
{
for (int i = 0; i < 4; i++)
{
printf("%d", yoot[i]);
}
putchar('\n');
}
int main()
{
int n, n1=1;
//srand((unsigned)time(NULL));
printf("숫자를 입력하고 엔터를 치면 윷가락을 던집니다.\n");
printf("윷을 던질 횟수:");
scanf_s("%d", &n);
do {
printf("%d번째 시도\n", n1++);
Rand_yoot();
switch (count_yoot()) {
case 1: printf("도\n");
show_yoot();
break;
case 2: printf("개\n");
show_yoot();
break;
case 3: printf("걸\n");
break;
case 4: printf("윷\n");
break;
default: printf("모\n");
}
} while (--n);
return 0;
}
//
//#include <stdio.h>
//int f(int a, int b, int c, int d)
//{
// return a + b + c + d;
//}
//main()
//{
// int a, b, c, d, s;
// printf("4개의 윷 상태를 입력하세요.\n");
// printf("(0:뒤집어지지 않은 상태, 1:뒤집어진 상태):");
// scanf_s("%d %d %d %d", &a, &b, &c, &d);
// switch (f(a, b, c, d))
// {
// case 1: printf("도\n");
// break;
// case 2: printf("개\n");
// break;
// case 3: printf("걸\n");
// break;
// case 4: printf("윷\n");
// break;
// default: printf("모\n");
// }
//}
<file_sep>/Project입학성적산출/입학성적산출-turboc.cpp
#include<stdio.h>
#include<conio.h>
#include<stdlib.h>
#include<ctype.h>
#include<windows.h>
#ifdef IN
#undef IN
#endif
void gotoxy(int x, int y)
{
COORD Pos = { x - 1,y - 1 };
SetConsoleCursorPosition(GetStdHandle(STD_OUTPUT_HANDLE), Pos);
}
static int count = -1;
typedef struct pyo
{
	int bunho;		//applicant number
	int p_jumsu;	//written test score
	int c_jumsu;	//physical test score
	int m_jumsu;	//interview score
	int tot;		//total score
	char* panjung;	//pass/fail decision
}PYO;
PYO IN[5];			//array holding up to five applicant records
void display()
{
gotoxy(5, 5); printf("수험번호 :\n");
gotoxy(5, 6); printf("필기시험점수 :\n");
gotoxy(5, 7); printf("체력시험점수 :\n");
gotoxy(5, 8); printf("면접시험점수 :\n");
gotoxy(40, 20); printf("[입력:1][출력:2]\n");
}
void input()
{
count++;
do
{
gotoxy(22, 5); clreol();
scanf("%d", &IN[count].bunho);//정수 한자리 입력
} while (IN[count].bunho > 9);
	do
	{
		gotoxy(22, 6); clreol();
		scanf("%d", &IN[count].p_jumsu);
	} while (IN[count].p_jumsu < 0 || IN[count].p_jumsu>100);
do
{
gotoxy(22, 7); clreol();
scanf("%d", &IN[count].c_jumsu);
} while (IN[count].c_jumsu < 0 || IN[count].c_jumsu>100);
do
{
gotoxy(22, 8); clreol();
scanf("%d", &IN[count].m_jumsu);
} while (IN[count].m_jumsu < 0 || IN[count].m_jumsu>100);
gotoxy(21, 5); clreol();
gotoxy(21, 6); clreol();
gotoxy(21, 7); clreol();
gotoxy(21, 8); clreol();
gotoxy(58, 20); clreol();
}
void pro()
{
IN[count].tot = IN[count].p_jumsu + IN[count].c_jumsu + IN[count].m_jumsu;
if (IN[count].tot >= 200)
IN[count].panjung = "Pass";
else
IN[count].panjung = "Fail";
IN[count].panjung = (IN[count].p_jumsu > 60 && IN[count].c_jumsu > 60 && IN[count].m_jumsu > 60) ? "Pass" : "Fail";
}
void output()
{
struct pyo temp;
int i, j;
clrscr();
gotoxy(5, 1); printf("비번호 : 1234\n");
gotoxy(5, 3); printf(" 번호 필기 체력 면접 총점 판정 \n");
gotoxy(5, 17); printf("입력자료\n");
for (i = 0; i <= count; i++)
{
gotoxy(5, 18 + i);
printf("%3d %3d %3d %3d\n", IN[i].bunho, IN[i].p_jumsu, IN[i].c_jumsu, IN[i].m_jumsu);
}
for (i = 0; i < count; i++)
{
for (j = i; j <= count; j++)
{
if (IN[i].bunho > IN[j].bunho)
{
temp = IN[i];
IN[i] = IN[j];
IN[j] = temp;
}
}
}
for (i = 0; i <= count; i++)
{
gotoxy(5, 5 + i);
printf("%5d %7d %7d %7d %7d %10s\n", IN[i].bunho, IN[i].p_jumsu, IN[i].c_jumsu, IN[i].m_jumsu, IN[i].tot, IN[i].panjung);
}
}
int main(void)
{
char ch;
clrscr();
display();
do
{
gotoxy(59, 20);
ch = getchar();
if (ch == '1')
{
input();
pro();
}
else if (ch == '2')
{
output();
exit(1);
}
		if (count >= 4)
{
gotoxy(45, 22);
printf("최대 레코드 수는 5개입니다.");
gotoxy(58, 20);
ch = getche();
output();
exit(1);
}
} while (1);
}
<file_sep>/lecture0007/소스.c
#include<stdio.h>
int main()
{
FILE* fp, * fp1;
errno_t err, err1;
	int ch;	// int so that EOF can be detected reliably
err = fopen_s(&fp, "test.txt", "r");
err1 = fopen_s(&fp1, "result.txt", "w");
if (err) return 0;
while ((ch = getc(fp)) != EOF)
fputc(ch, fp1);
err = _fcloseall();
if (err) return 0;
}<file_sep>/중간테스트_추의합/중간테스트_추의합.c
#include <stdio.h>
int main() {
int gram, totalWeight, count;
int i, j, k;
do {
count = 0, gram = 0;
printf("원하는 그램을 입력하세요 :");
scanf_s("%d", &gram);
if ((gram < 10 || gram>100) && gram != 0) {
printf("INPUT ERROR! (DATA RANGE 10 - 100) \n\n");
continue;
}
if (gram == 0)
break;
printf(" 2g 3g 5g");
for (i = 1; i <= 10; i++)
for (j = 1; j <= 10; j++)
for (k = 1; k <= 10; k++) {
totalWeight = (i * 2) + (j * 3) + (k * 5);
if (totalWeight == gram) {
printf("\n");
printf("%4d %4d %4d", i, j, k);
count++;
}
}
printf(" 경우의 수 = %d\n\n", count);
} while (gram != 0);
return 0;
}<file_sep>/lecture203클래스성적표관리/lecture0203클래스성적표관리생성자소멸자.cpp
#include<iostream>
#include<string>
#include<iomanip>
using namespace std;
class Name {
string name;
public:
Name() {
cout << "이름을 입력할 준비가 되었습니다." << endl;
}
~Name() {
cout << "이름이 소멸 되었습니다." << endl;
}
void setname(string name) {
this->name = name;
}
string getname() {
return name;
}
};
class Subject {
int subsco;
public:
void setsubsco(int subsco) {
this->subsco = subsco;
}
int getsubsco() {
return subsco;
}
};
class Score {
Name na;
Subject mat;
Subject eng;
Subject kor;
public:
void setmatsco(int sco) {
mat.setsubsco(sco);
}
int getmatsco() {
return mat.getsubsco();
}
void setengsco(int sco) {
eng.setsubsco(sco);
}
int getengsco() {
return eng.getsubsco();
}
void setkorsco(int sco) {
kor.setsubsco(sco);
}
int getkorsco() {
return kor.getsubsco();
}
void setname(string name) {
na.setname(name);
}
string getname() {
return na.getname();
}
int getsum() {
return getmatsco() + getengsco() + getkorsco();
}
float getavg() {
return this->getsum() / 3.f;
}
};
int main() {
Score* aa;
int num;
cout << "몇 명 입력하시겠습니까?";
cin >> num;
aa = new Score[num];
	for (int i = 0; i < num; i++) { //enter grades for num students
string sn;
cout << i + 1 << "번째 이름을 입력하시오 : ";
cin >> sn;
aa[i].setname(sn);
int sco;
cout << "국어성적을 입력하시오 : ";
cin >> sco;
aa[i].setkorsco(sco);
cout << "영어성적을 입력하시오 : ";
cin >> sco;
aa[i].setengsco(sco);
cout << "수학성적을 입력하시오 : ";
cin >> sco;
aa[i].setmatsco(sco);
}
cout << "====================================================================\n";
cout << "= 이름 == 국어 == 영어 == 수학 == 총점 == 평균 =\n";
cout << "====================================================================\n";
	for (int i = 0; i < num; i++) { //print grades for num students
cout << " " << setw(3) << aa[i].getname();
for (int j = 0; j < 3; j++) {
if (j == 0) {
cout << setw(11) << aa[i].getkorsco();
}
if (j == 1) {
cout << setw(11) << aa[i].getengsco();
}
if (j == 2) {
cout << setw(12) << aa[i].getmatsco();
}
}
cout << setw(11) << aa[i].getsum();
cout << setw(11) << aa[i].getavg() << " " << "\n";
}
	delete[] aa; //free the allocated memory
}
<file_sep>/시험코드따라기가0002반복패턴/시험코드따라기가0002반복패턴.cpp
#include<stdio.h>
int main()
{
int num = 1, cnt = 0;
while (num <= 2016)
{
num = num * 10 - 8;
cnt++;
}
printf("%d", cnt);
}
<file_sep>/퀴즈 약수/퀴즈약수.c
#include<stdio.h>
int cnt;
int solve(int n)
{
int ans = 0;
printf("약수는 ");
for (int i = 1; i <= n; i++)
{
if (n % i == 0)
{
printf("%d ", i);
ans += i;
cnt++;
}
}
return ans;
}
int main()
{
int n;
scanf_s("%d", &n);
printf("약수의 합=%d, 약수의 개수=%d\n", solve(n), cnt);
}<file_sep>/lecture0016-1/lecture0016-1.cpp
/*
 * -------------------------------------------
 *   Ranking that can handle negative data    *
 * -------------------------------------------
 */
#include <stdio.h>
#define Num 10
#define Max 36
#define Min -20
#define Bias (1-(Min)) /* places the minimum value at array index 1 */
int main(void)
{
static int a[] = { -3,2,3,-1,-2,-6,2,-1,1,5 };
int i, rank[Max + Bias + 1];
for (i = Min + Bias; i <= Max + Bias; i++)
rank[i] = 0;
for (i = 0; i < Num; i++)
rank[a[i] + Bias]++;
rank[0] = 1;
for (i = Min + Bias; i <= Max + Bias; i++)
rank[i] = rank[i] + rank[i - 1];
printf(" 점수 순위\n");
for (i = 0; i < Num; i++) {
printf("%6d%6d\n", a[i], rank[a[i] + Bias - 1]);
}
return 0;
}
<file_sep>/lecture0103DP벽돌타일/lecture0103DP벽돌타일.c
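// Top-down DP with memoization: dp(x) = dp(x-1) + dp(x-2), i.e. place one vertical brick or two horizontal bricks at the end.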
//Count the number of ways to fill a 2*n rectangle with 1*2 and 2*1 bricks.
//Input: 1<=n<=1000
//Output: 2 is printed when the answer is 2
#include<stdio.h>
int d[1001];
int dp(int x)
{
if (x == 1) return 1;
if (x == 2) return 2;
if (d[x] !=0) return d[x];
return d[x] = (dp(x - 1) + dp(x - 2));
}
int main()
{
int n;
printf("2*n : 구하고자 하는 n의 개수는? ");
scanf_s("%d", &n);
//scanf_s("%d", &n, sizeof(n));
printf("n= %d 인 경우 방법의 수는 %d이다.", n, dp(n));
}<file_sep>/lecture0011/lecture0011-1.c
//Linked list insertion and deletion
#include<stdio.h>
#include<stdlib.h>
typedef struct _NODE
{
char str;
struct _NODE* Next;
}NODE;
NODE* head = '\0', * end = '\0', * temp = '\0';
NODE* temp1= '\0', * temp2 = '\0', * temp3 = '\0', * temp4 = '\0';
void Initialize(void);
void InsertNode(NODE*);
void DeleteNode(NODE*);
int main()
{
NODE* ptr;
int i = 0;
Initialize();
	//print the data
ptr = head->Next;
for (i = 0; i < 4; i++)
{
printf("%2c", ptr->str);
ptr = ptr->Next;
}
printf("\n");
temp = (NODE*)malloc(sizeof(NODE));
temp->str = 'C';
InsertNode(temp);
ptr = head->Next;
for (i = 0; i < 5; i++)
{
printf("%2c", ptr->str);
ptr = ptr->Next;
}
DeleteNode(temp);
ptr = head->Next;
for (i = 0; i < 4; i++)
{
printf("%2c", ptr->str);
ptr = ptr->Next;
}
return 0;
}
void Initialize(void)
{
NODE* ptr;
head = (NODE*)malloc(sizeof(NODE));
end = (NODE*)malloc(sizeof(NODE));
temp1 = (NODE*)malloc(sizeof(NODE));
temp1->str = 'A';
head->Next = temp1;
temp1->Next = end;
end->Next = end;
ptr = temp1;
temp2 = (NODE*)malloc(sizeof(NODE));
temp2->str = 'B';
ptr->Next = temp2; //temp1->Next=temp2;
temp2->Next = end;
ptr = temp2;
temp3 = (NODE*)malloc(sizeof(NODE));
temp3->str = 'D';
ptr->Next = temp3;
temp3->Next = end;
ptr = temp3;
temp4 = (NODE*)malloc(sizeof(NODE));
temp4->str = 'E';
ptr->Next = temp4;
temp4->Next = end;
ptr = temp4;
}
void InsertNode(NODE* newNode)
{
NODE* indexptr;
for (indexptr = head; indexptr != end; indexptr = indexptr->Next)
{
if ((indexptr->Next->str) > (newNode->str))
break;
}
newNode->Next = indexptr->Next;
indexptr->Next = newNode;
}
void DeleteNode(NODE* delptr)
{
NODE* indexptr;
NODE* deleteptr='\0';
	for (indexptr = head; indexptr != end; indexptr = indexptr->Next)
{
if (indexptr->Next->str == delptr->str)
{
deleteptr = indexptr->Next;
break;
}
}
indexptr->Next = indexptr->Next->Next;
free(deleteptr);
}<file_sep>/사전테스트_배열_인덱스/사전테스트_배열_인덱스.c
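// Merges the two sorted arrays a[] and b[] into c[] by repeatedly copying the smaller front element, then appending whatever remains.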
#include <stdio.h>
#define M 10
#define N 5
int main(void) {
static int a[] = { 2,4,5,7,8,10,15,20,30,40 },
b[] = { 6,11,25,33,35 },
c[M + N];
int i, j, p;
i = j = p = 0;
while (i < M && j < N) {
if (a[i] <= b[j])
c[p++] = a[i++];
else
c[p++] = b[j++];
}
while (i < M)
c[p++] = a[i++];
while (j < N)
c[p++] = b[j++];
for (i = 0; i < M + N; i++)
printf("%d ", c[i]);
printf("\n");
return 0;
}<file_sep>/lecture0000기초2/lecture0000기초2.cpp
//Ctrl + F7, Ctrl + B, Ctrl + F5
#include<stdio.h>
int main()
{
float a, b;
//scanf_s("%f %e", &a, &b, sizeof(a), sizeof(b));
scanf_s("%f %e", &a, &b);
printf("a=%f, b=%e\n", a, b);
return 0;
}
<file_sep>/lecture0026queue/LinkedQueue.h
//#pragma once
//
// LinkedQueue.h
// SortingAlgorithm
#include<stdio.h>
#include <stdlib.h>
#ifndef LinkedQueue_h
#define LinkedQueue_h
//queue node
typedef struct QueueNode {
int item;
struct QueueNode* link;
} QueueNode;
//queue type
typedef struct {
QueueNode* front, * rear;
} QueueType;
//error-handling function (named error so it does not clash with the standard library puts)
void error(const char* message)
{
	fprintf(stderr, "%s\n", message);
	exit(1);
}
//initialization function
void initQueue(QueueType* q)
{
q->front = q->rear = 0;
}
//check for the empty state
int isEmpty(QueueType* q)
{
return (q->front == NULL);
}
//check for the full state (a linked queue never becomes full)
int isFull(QueueType* q)
{
return 0;
}
//enqueue (insert) function
void enqueue(QueueType* q, int item)
{
QueueNode* temp = (QueueNode*)malloc(sizeof(QueueNode));
if (temp == NULL)
{
puts("memory allocation is failed\n");
}
else
{
temp->item = item;
temp->link = NULL;
if (isEmpty(q))
{
q->front = temp;
q->rear = temp;
}
else
{
q->rear->link = temp;
q->rear = temp;
}
}
}
//dequeue (remove) function
int dequeue(QueueType* q)
{
QueueNode* temp = q->front;
int item;
if (isEmpty(q))
{
puts("queue is empty\n");
}
else
{
item = temp->item;
q->front = q->front->link;
if (q->front == NULL)
q->rear = NULL;
free(temp);
return item;
}
}
//peek function
int peek(QueueType* q)
{
if (isEmpty(q))
puts("queue is empty\n");
else
{
int item = q->front->item;
return item;
}
}
#endif
<file_sep>/lecture0100/lecture0100.c
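// Selection sort: each pass finds the minimum of the unsorted tail and swaps it into position i.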
#include <stdio.h>
#define N 6
int main(int argc, char* argv[])
{
static int a[] = { 80,41,35,90,40,20 };
int min, s, t, i, j;
for (i = 0; i < N - 1; i++) {
min = a[i];
s = i;
for (j = i + 1; j < N; j++) {
if (a[j] < min) {
min = a[j];
s = j;
}
}
t = a[i]; a[i] = a[s]; a[s] = t;
}
for (i = 0; i < N; i++)
printf("%d ", a[i]);
printf("\n");
return 0;
}<file_sep>/lecture0026queue/lecture0026queue.cpp
#include <stdio.h>
#include <stdlib.h>
typedef int element;
typedef struct QueueNode
{
element item;
QueueNode* link;
}QueueNode;
typedef struct
{
QueueNode* front, * rear;
}QueueType;
void Init(QueueType* q)
{
q->front = q->rear = NULL;
}
int is_empty(QueueType* q)
{
return (q->front == NULL);
}
int is_full(QueueType* q)
{
return 0;
}
void Enqueue(QueueType* q, element item)
{
QueueNode* temp = (QueueNode*)malloc(sizeof(QueueNode));
temp->item = item;
temp->link = NULL;
if (is_empty(q))
{
q->front = temp;
q->rear = temp;
}
else
{
q->rear->link = temp;
q->rear = temp;
}
}
element Dequeue(QueueType* q)
{
QueueNode* temp = q->front;
element item;
if (is_empty(q))
{
printf("큐가 비어있습니다\n");
return -1;
}
item = temp->item;
q->front = q->front->link;
if (q->front == NULL)
q->rear = NULL;
free(temp);
return item;
}
int main()
{
QueueType q;
int i;
Init(&q);
for (i = 0; i < 5; i++)
{
Enqueue(&q, i);
printf("큐 삽입 :%d \n", i);
}
for (i = 0; i < 6; i++)
printf("큐 삭제 :%d\n", Dequeue(&q));
return 0;
}<file_sep>/퀴즈 삼각형만들기/삼각형만들기재귀.c
#include<stdio.h>
int cnt, chk[21][21][21];
void solve(int n, int a, int b, int c)
{
if (a + b + c == n)
{
if (a <= b && b <= c && a + b > c && chk[a][b][c] == 0)
{
printf("(%d %d %d)", a, b, c);
cnt++;
chk[a][b][c] = 1;
}
return;
}
solve(n, a, b, c + 1);
solve(n, a, b + 1, c);
solve(n, a + 1, b, c);
}
int main()
{
int n;
scanf_s("%d", &n);
solve(n, 1, 1, 1);
printf("%d°³", cnt);
}<file_sep>/최대공약수구하기/최대공약수유클리드호제법.c
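// Euclid's algorithm: gcd(a, b) = gcd(b, a % b) until the remainder becomes 0.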
//the larger number first
#include<stdio.h>
int gcd(int a, int b);
int main()
{
int m, n;
scanf_s("%d %d", &m, &n);
	//(one could swap here so the larger number goes first; Euclid's algorithm corrects the order on the first step anyway)
printf("%d", gcd(m, n));
return 0;
}
int gcd(int a, int b) //the previously reported error was a missing return on the recursive call
{
	//if(!b) return a;
	if (b == 0) return a;
	return gcd(b, a % b);
}
//#include<stdio.h>
//
//int main()
//{
// int m, n, r, g;
// scanf_s("%d %d", &m, &n);
// g = m * n;
// for (; r = m % n; n = r, m = n);
// printf("최대공약수: %d, 최소공배수 %d", n, g / n);
//}<file_sep>/lecture0012/소스.c
//Binary search
#include<stdio.h>
#include<stdlib.h>
int S[100], n, k;
int find(int s, int e)
{
while (s <= e)
{
int m = (int)(s + e) / 2;//2
if (S[m] == k)
return m;
if (S[m] > k)
e = m - 1;
else s = m + 1;
}
return -1;
}
int main()
{
printf("Binary Search, n, k : 숫자 개수, 찾을 숫자");
scanf_s("%d %d", &n, &k); //n개의 배열, 찾을 수 k
for (int i = 0; i < n; i++)
scanf_s("%d", &S[i]);
printf("%d번째 위치에서 발견\n", find(0, n - 1)+1);
return 0;
}
|
bd63c0569dd910bc0f2fe0c27524f5c7923265d9
|
[
"C",
"C++"
] | 67 |
C
|
dgswAlgorithm/KimDongGyun
|
772bf553234eb695964397c881e6a41772f8dc31
|
2e6ab4ced59060639d9c036b66a621bd1425241e
|
refs/heads/master
|
<repo_name>pch8388/study-object<file_sep>/src/main/java/me/study/cooperationPhone/policy/FixedFeeCondition.java
package me.study.cooperationPhone.policy;
import me.study.cooperationPhone.Call;
import me.study.cooperationPhone.DateTimeInterval;
import java.util.List;
import static java.util.Collections.singletonList;
public class FixedFeeCondition implements FeeCondition {
@Override
public List<DateTimeInterval> findTimeIntervals(Call call) {
return singletonList(call.getInterval());
}
}
<file_sep>/src/test/java/me/study/cooperationPhone/PhoneTest.java
package me.study.cooperationPhone;
import me.study.cooperationPhone.policy.BasicRatePolicy;
import me.study.cooperationPhone.policy.FeePerDuration;
import me.study.cooperationPhone.policy.FeeRule;
import me.study.cooperationPhone.policy.FixedFeeCondition;
import me.study.movie.domain.Money;
import org.junit.jupiter.api.Test;
import java.time.Duration;
import java.time.LocalDateTime;
import static org.assertj.core.api.Assertions.assertThat;
class PhoneTest {
@Test
void calculateFee() {
Phone phone = new Phone(
new BasicRatePolicy(
new FeeRule(
new FixedFeeCondition(), new FeePerDuration(Money.wons(10), Duration.ofMinutes(10))
)));
assertThat(phone).isNotNull();
phone.call(new Call(
LocalDateTime.of(2020, 1, 10, 10, 10),
LocalDateTime.of(2020, 1, 10, 10, 20)));
final Money fee = phone.calculateFee();
assertThat(fee.isGreaterThanOrEqual(Money.wons(10))).isTrue();
}
}<file_sep>/src/main/java/me/study/cooperationPhone/policy/RatePolicy.java
package me.study.cooperationPhone.policy;
import me.study.cooperationPhone.Phone;
import me.study.movie.domain.Money;
public interface RatePolicy {
Money calculateFee(Phone phone);
}
<file_sep>/src/main/java/me/study/movie/domain/Customer.java
package me.study.movie.domain;
public class Customer {
}
<file_sep>/src/main/java/me/study/cooperationPhone/policy/FeeCondition.java
package me.study.cooperationPhone.policy;
import me.study.cooperationPhone.Call;
import me.study.cooperationPhone.DateTimeInterval;
import java.util.List;
// fee application condition
public interface FeeCondition {
List<DateTimeInterval> findTimeIntervals(Call call);
}
<file_sep>/src/main/java/me/study/cooperationPhone/policy/DayOfWeekFeeCondition.java
package me.study.cooperationPhone.policy;
import me.study.cooperationPhone.Call;
import me.study.cooperationPhone.DateTimeInterval;
import java.time.DayOfWeek;
import java.util.Arrays;
import java.util.List;
import static java.util.stream.Collectors.toList;
public class DayOfWeekFeeCondition implements FeeCondition {
private List<DayOfWeek> dayOfWeeks;
public DayOfWeekFeeCondition(DayOfWeek... dayOfWeeks) {
this.dayOfWeeks = Arrays.asList(dayOfWeeks);
}
@Override
public List<DateTimeInterval> findTimeIntervals(Call call) {
return call.getInterval().splitByDay()
.stream()
.filter(each -> dayOfWeeks.contains(each.getFrom().getDayOfWeek()))
.collect(toList());
}
}
<file_sep>/settings.gradle
rootProject.name = 'study-object'
|
067aaf9b9fa2c154a0689478070ebd365bc61bdd
|
[
"Java",
"Gradle"
] | 7 |
Java
|
pch8388/study-object
|
c9e81821a0a59cde5abaa09af33df57699930b36
|
964e5b33c137d9b3adde4bf960de7e5e312d976b
|
refs/heads/master
|
<repo_name>ShethJay/react-template<file_sep>/src/store/rootReducer.js
import { combineReducers } from 'redux';
import { reducer as formReducer } from 'redux-form';
import dashboardReducer from '../modules/redux/reducer';
const appReducer = combineReducers({
form: formReducer,
dashboard: dashboardReducer,
});
const rootReducer = (state, action) => {
// Flush redux store at logout
if (action.type === 'LOGOUT_SUCCESS') {
return appReducer(undefined, action);
}
return appReducer(state, action);
};
export default rootReducer;
<file_sep>/src/App.js
import React from 'react';
import { Provider } from 'react-redux';
import { ToastContainer } from 'react-toastify';
import './assets/styles/app.scss';
import ExtendedBrowserRouter from './ExtendedBrowserRouter';
import Routes from './Routes';
import reduxStore from './store';
function App() {
return (
<Provider store={reduxStore}>
<ExtendedBrowserRouter>
<div className="App">
<ToastContainer id="forToast" />
<Routes />
</div>
</ExtendedBrowserRouter>
</Provider>
);
}
export default App;
<file_sep>/src/modules/redux/reducer.js
import * as actionTypes from './actionTypes';
import RequestStates from '../../utils/request-states';
const INITIAL_STATE = {
text: '',
usersLoading: RequestStates.init,
users: [],
usersPageNo: 1,
usersPageSize: 5,
usersError: null,
};
export default (state = INITIAL_STATE, action) => {
switch (action.type) {
case actionTypes.SET_TEXT:
return {
...state,
text: action.payload,
};
case actionTypes.GET_USERS_LOADING:
return {
...state,
usersLoading: RequestStates.loading,
usersError: null,
};
case actionTypes.GET_USERS_SUCCESS: {
return {
...state,
usersLoading: RequestStates.success,
users: action.payload.data.data,
usersPageNo: action.payload.data.page,
usersPageSize: action.payload.data.per_page,
usersError: null,
};
}
case actionTypes.GET_USERS_ERROR:
return {
...state,
usersLoading: RequestStates.error,
usersError: action.payload,
};
default:
return state;
}
};
<file_sep>/src/utils/api.js
import axios from 'axios';
import { BASE_URL } from '../config';
import { showNotification } from './Notifications';
import { history } from '../ExtendedBrowserRouter';
function makeHeaders() {
let headerObj = {};
if (localStorage.getItem('access_token')) {
const token = `Bearer ${localStorage.getItem('access_token')}`;
headerObj = {
Authorization: token,
};
}
return headerObj;
}
const axiosApi = axios.create({
withCredentials: false, // use true for login apis
baseURL: `${BASE_URL}/`,
});
axiosApi.interceptors.request.use((request) => {
request.headers = makeHeaders();
return request;
});
axiosApi.interceptors.response.use((response) => response, (err) => {
if (err.response && err.response.status === 401) {
localStorage.clear();
showNotification(err.response.data && err.response.data.message, 'error', 5000);
history.push('/login');
// if you don't return here, then an error will be thrown and you will see a loader infinitely
return true;
}
if (err.response && err.response.status === 403) {
showNotification(err.response.data && err.response.data.message, 'error', 5000);
}
if (err.response && err.response.status === 500) {
showNotification('Unknown server error', 'error', 5000);
}
return Promise.reject(err);
});
export default axiosApi;
<file_sep>/src/ExtendedBrowserRouter.js
import { BrowserRouter } from 'react-router-dom';
import { createBrowserHistory } from 'history';
export const history = createBrowserHistory();
export default class ExtendedBrowserRouter extends BrowserRouter {
constructor() {
super();
this.returnHistory = () => history;
this.returnHistory();
}
}
<file_sep>/README.md
# React Template
## For Setup
```
1 create-react-app app-name
2 npm run eject
```
## For SCSS
```
1 npm i node-sass
2 Create `app.scss` file into src/assets/styles
3 import `app.scss` to `App.js`
```
## For Eslint
```
1 For Eslint copy `.eslintrc` file to root
2 npm i eslint eslint-config-airbnb
3 Add `/* eslint-disable */` at top of `serviceWorker.js`
```
## For Prop Types
```
1 npm i prop-types
USE :
ComponentName.propTypes = {
};
ComponentName.defaultProps = {
};
```
## For Routing
```
1 npm i react-router-dom react-router history
2 Create `ExtendedBrowserRouter.js` and add to `App.js`
3 Create `Routes.js` file and add to `App.js` inside `ExtendedBrowserRouter` component
```
## For Notification
```
1 npm i react-toastify
2 Add `ToastContainer` component to `App.js`
3 Create `Notifications.js` into utils folder
```
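A minimal sketch of what `Notifications.js` could contain (a hypothetical helper; it only assumes the `showNotification(message, type, autoClose)` signature already used by `utils/api.js` and the standard `react-toastify` `toast` API):
```
import { toast } from 'react-toastify';

// type is one of 'info' | 'success' | 'warning' | 'error' (react-toastify methods)
export const showNotification = (message, type = 'info', autoClose = 3000) => {
  toast[type](message, { autoClose });
};
```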
## For Axios
```
1 npm i axios
2 Create `index.js` into config folder
3 Create `api.js` into utils folder
```
## For Redux, Thunk and Promise middleware
```
1 npm i redux react-redux redux-promise-middleware redux-thunk
2 Add `Provider` component into `App.js`
3 Create `reduxStore and rootReducer` into store folder
4 Add reduxStore to `Provider` in `App.js`
```
## For Redux Form
```
1 npm i redux-form
2 Add redux form to `rootReducer.js`
```
## For Testing
```
npm i -D jest enzyme jest-enzyme enzyme-adapter-react-16
```
|
7522bb39d15540b1b0b345dbea249df23ec354b8
|
[
"JavaScript",
"Markdown"
] | 6 |
JavaScript
|
ShethJay/react-template
|
55cf31931d3bac052cdc0f1242ab6726a9402aa4
|
fdb1a40452b3e6345e75fb5159f48baee66b9f86
|
refs/heads/master
|
<file_sep># Based on guide by <NAME>
import json
import random
#------------------------------Definitions--------------------------------
warzone_maps = ["Verdansk", "Rebirth Island", "Contrato", "Rebirth", "Alcatraz", "Contratos", "Contract"]
locations_verdansk = ["Dam", "Military Base", "Quarry", "Airport", "TV Station", "Storage Town", "Superstore", "Stadium", "Lumber", "Farmland", "Boneyard",
"Train Station", "Hospital", "Downtown", "Promenade East", "Promenade West", "Hills", "Park", "Port", "Prison", "Aborda el tren"]
locations_rebirth = ["Bioweapon Labs", "Decon Zone", "Chemical Eng.", "Harbor", "Prison Block", "Shore", "Construction Site", "Headquarters",
"Nova 6 Factory", "Living Quarters", "Security Area"]
contract_list = ["Bounty", "Scavenger", "Supply Run", "Recon", "Most Wanted"]
#------------------------------Lambda--------------------------------
def lambda_handler(event, context):
if event['session']['new']:
on_start()
if event['request']['type'] == "LaunchRequest":
return on_launch(event)
elif event['request']['type'] == "IntentRequest":
return intent_scheme(event)
elif event['request']['type'] == "SessionEndedRequest":
return on_end()
#------------------------------Request Handler Functions--------------------------------
def on_start():
print("Session Started.")
def on_launch(event):
onlunch_MSG = "Bienvenido a Estrategias Warzone."
reprompt_MSG = "¿Quieres saber dónde iniciar o qué contrato tomar?"
card_TEXT = "Elige una modalidad, ya sea mapa o contrato."
card_TITLE = "Elegir modalidad"
return output_json_builder_with_reprompt_and_card(onlunch_MSG, card_TEXT, card_TITLE, reprompt_MSG, False)
def on_end():
print("Session Ended.")
#-----------------------------Intent Request-------------------------------
def intent_scheme(event):
intent_name = event['request']['intent']['name']
if intent_name == "startMap":
return start_map(event)
elif intent_name in ["AMAZON.NoIntent", "AMAZON.StopIntent", "AMAZON.CancelIntent"]:
return stop_the_skill(event)
elif intent_name == "AMAZON.HelpIntent":
return assistance(event)
elif intent_name == "AMAZON.FallbackIntent":
return fallback_call(event)
#---------------------------Intent Handler-------------------------------
def start_map(event):
name=event['request']['intent']['slots']['locWarzone']['value']
warzone_maps_lower=[w.lower() for w in warzone_maps]
if name.lower() in warzone_maps_lower:
reprompt_MSG = "¿Quieres saber dónde iniciar o qué contrato tomar?"
card_TEXT = "Elegiste " + name.lower()
card_TITLE = "Elegiste " + name.lower()
if name.lower() == "verdansk":
rNum = random.randint(0, 20)
mapResponse = locations_verdansk[rNum]
elif name.lower() == "rebirth island" or name.lower() == "rebirth" or name.lower() == "alcatraz":
rNum = random.randint(0, 10)
mapResponse = locations_rebirth[rNum]
elif name.lower() == "contrato" or name.lower() == "contratos" or name.lower() == "contract":
rNum = random.randint(0, 4)
mapResponse = contract_list[rNum]
return output_json_builder_with_reprompt_and_card(mapResponse, card_TEXT, card_TITLE, reprompt_MSG, True)
else:
wrongname_MSG = "Lo siento, no entendí. Por favor, vuelve a intentar."
reprompt_MSG = "¿Quieres saber dónde iniciar o qué contrato tomar?"
card_TEXT = "Elección no válida."
card_TITLE = "Elección no válida."
return output_json_builder_with_reprompt_and_card(wrongname_MSG, card_TEXT, card_TITLE, reprompt_MSG, False)
def stop_the_skill(event):
stop_MSG = "Gracias por usar Estrategias Warzone."
reprompt_MSG = ""
card_TEXT = "Gracias."
card_TITLE = "Gracias."
return output_json_builder_with_reprompt_and_card(stop_MSG, card_TEXT, card_TITLE, reprompt_MSG, True)
def assistance(event):
assistance_MSG = "Te puedo ayudar a elegir una ubicación dónde empezar o un contrato si tú y tu equipo están indecisos. Puedes elegir entre los mapas Verdansk y Rebirth Island, o bien preguntar por un contrato."
reprompt_MSG = "¿Quieres saber dónde iniciar o qué contrato tomar?"
card_TEXT = "Texto de referencia"
card_TITLE = "Ayuda"
return output_json_builder_with_reprompt_and_card(assistance_MSG, card_TEXT, card_TITLE, reprompt_MSG, False)
def fallback_call(event):
fallback_MSG = "Lo siento, no entendí. Por favor, vuelve a intentar."
reprompt_MSG = "¿Quieres saber dónde iniciar o qué contrato tomar?"
card_TEXT = "Elección no válida."
card_TITLE = "Elección no válida."
return output_json_builder_with_reprompt_and_card(fallback_MSG, card_TEXT, card_TITLE, reprompt_MSG, False)
#------------------------------Responses--------------------------------
def plain_text_builder(text_body):
text_dict = {}
text_dict['type'] = 'PlainText'
text_dict['text'] = text_body
return text_dict
def reprompt_builder(repr_text):
reprompt_dict = {}
reprompt_dict['outputSpeech'] = plain_text_builder(repr_text)
return reprompt_dict
def card_builder(c_text, c_title):
card_dict = {}
card_dict['type'] = "Simple"
card_dict['title'] = c_title
card_dict['content'] = c_text
return card_dict
def response_field_builder_with_reprompt_and_card(outputSpeach_text, card_text, card_title, reprompt_text, value):
speech_dict = {}
speech_dict['outputSpeech'] = plain_text_builder(outputSpeach_text)
speech_dict['card'] = card_builder(card_text, card_title)
speech_dict['reprompt'] = reprompt_builder(reprompt_text)
speech_dict['shouldEndSession'] = value
return speech_dict
def output_json_builder_with_reprompt_and_card(outputSpeach_text, card_text, card_title, reprompt_text, value):
response_dict = {}
response_dict['version'] = '1.0'
response_dict['response'] = response_field_builder_with_reprompt_and_card(outputSpeach_text, card_text, card_title, reprompt_text, value)
return response_dict
|
33f7b91700a0c623fcfdfdf6b321cfae0a6914ff
|
[
"Python"
] | 1 |
Python
|
CarlosCC07/AlexaLambdas
|
98ff2a1277668f42ca181b0dd6852b354990f14c
|
035186cd6fa0663821cd860ed6f118c4563c2878
|
refs/heads/master
|
<file_sep>/**
 * Fetches the 20 most recent tweets for the given screenName
*/
function getTweets(screenName, callback) {
$.getJSON('https://api.twitter.com/1/statuses/user_timeline.json?screen_name=' + screenName + '&callback=?', function (json) {
callback(json.reverse());
});
}
/**
 * Builds the follower network for the given screenName
*/
function getNetwork(screenName, callback) {
var nodes = {}, links = [];
$.getJSON('https://api.twitter.com/1/users/show.json?screen_name=' + screenName + '&callback=?').then(function (json) {
nodes[json.screen_name] = json
return getFollowers(screenName);
}).then(function (screenName, json) {
var deferredObjects = [];
for (var i = 0; i < json.slice(0, 4).length; i++) {
var node = json[i];
nodes[node.screen_name] = node;
links.push({source: nodes[screenName], target: node});
if (!node.protected) {
deferredObjects.push(getFollowers(node.screen_name));
}
}
return $.when.apply(null, deferredObjects);
}).done(function () {
for (var i = 0; i < arguments.length; i++) {
for (var j = 0; j < arguments[i][1].slice(0, 4).length; j++) {
var node = arguments[i][1][j];
nodes[node.screen_name] = nodes[node.screen_name] || node;
links.push({source: nodes[arguments[i][0]], target: nodes[node.screen_name]})
}
}
callback($.map(nodes, function(v, k) {return v;}), links);
})
}
/**
 * Fetches the followers of the given screenName (returns a deferred)
*/
function getFollowers(screenName) {
var d = new $.Deferred;
$.getJSON('https://api.twitter.com/1/statuses/followers.json?screen_name=' + screenName + '&callback=?', function(json) {
d.resolve(screenName, json);
});
return d.promise();
}
|
0b4e4368bdaca8cb635ef437a265e8176bcd4044
|
[
"JavaScript"
] | 1 |
JavaScript
|
kuzira55/html5etc-6
|
ce11d12628d0289ac3ff6dce3d6e0815cf996794
|
294ecf0478f20d66ada3ad822a1e8bcb5aae89da
|
refs/heads/master
|
<file_sep>export class Anuncio {
id;
titulo;
transaccion;
descripcion;
precio;
num_wc;
num_estacionamiento;
num_dormitorio;
constructor(object) {
this.id = object.id ? object.id : null;
this.titulo = object.titulo;
this.transaccion = object.transaccion;
this.descripcion = object.descripcion;
this.precio = object.precio;
this.num_wc = object.num_wc;
this.num_estacionamiento = object.num_estacionamiento;
this.num_dormitorio = object.num_dormitorio;
}
}
<file_sep>"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Mascota = void 0;
var Anuncio = /** @class */ (function () {
function Anuncio(object) {
this.id = object.id ? object.id : null;
this.titulo = object.titulo;
this.transaccion = "Venta";
this.descripcion = object.descripcion;
this.precio = object.precio;
}
return Anuncio;
}());
var Mascota = /** @class */ (function (_super) {
__extends(Mascota, _super);
function Mascota(object) {
var _this = _super.call(this, object) || this;
_this.animal = object.animal;
_this.raza = object.raza;
_this.fecha_nacimiento = object.fecha_nacimiento;
_this.vacuna = object.vacuna;
return _this;
}
return Mascota;
}(Anuncio));
exports.Mascota = Mascota;
<file_sep>export class Anuncio {
public id: number | null;
public titulo: string;
public transaccion: string;
public descripcion: string;
public precio: string;
constructor(object: Anuncio) {
this.id = object.id ? object.id : null;
this.titulo = object.titulo;
this.transaccion = "Venta";
this.descripcion = object.descripcion;
this.precio = object.precio;
}
}
export class Mascota extends Anuncio {
public animal: string;
public raza: string;
public fecha_nacimiento: string;
public vacuna: string;
constructor(object: Mascota) {
super(object);
this.animal = object.animal;
this.raza = object.raza;
this.fecha_nacimiento = object.fecha_nacimiento;
this.vacuna = object.vacuna;
}
}
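// Illustrative example only (all values are placeholders): a Mascota carries the
// Anuncio fields plus its own, and the Anuncio constructor above forces
// transaccion to "Venta".
export function ejemploMascota(): Mascota {
    return new Mascota({
        id: null,
        titulo: "Labrador puppy",
        transaccion: "Venta",
        descripcion: "Example listing",
        precio: "100",
        animal: "Dog",
        raza: "Labrador",
        fecha_nacimiento: "2020-01-01",
        vacuna: "Yes",
    });
}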
<file_sep>import { Anuncio } from "./entidades.js";
import { RESPONSE } from "../constates/constantes.js";
import { showSpinner, hideSpinner } from "./spinner.js";
let arrayData = [];
let btnGuardar = document.getElementById("btnGuardar");
let btnEliminar = document.getElementById("btnEliminar");
let btnTraer = document.getElementById("btnTraer");
let btnCancelar = document.getElementById("btnCancelar");
let btnModificar = document.getElementById("btnModificar");
let selectedItem = {};
document.forms[0].addEventListener("submit", (event) => {
event.preventDefault();
});
btnCancelar.addEventListener("click", reset);
btnTraer.addEventListener("click", traerFetch);
btnGuardar.addEventListener("click", altaFetch);
btnEliminar.addEventListener("click", bajaFetch);
btnModificar.addEventListener("click", modifyFetch);
btnEliminar.style.visibility = "hidden";
btnModificar.style.visibility = "hidden";
function alta() {
let xhr = new XMLHttpRequest();
showSpinner();
xhr.onreadystatechange = () => {
if (xhr.readyState !== 4 || xhr.status !== 200) return;
let dataToJson = JSON.parse(xhr.responseText);
if (dataToJson.message === RESPONSE.ALTA_EXITOSA) {
hideSpinner();
traer();
}
};
xhr.open("POST", "http://localhost:3000/alta");
xhr.setRequestHeader("content-type", "application/json");
const dataToSend = getFormValues();
if (dataToSend) return xhr.send(JSON.stringify(dataToSend));
console.error("Error al dar la alta, disculpa");
}
function altaFetch() {
const dataToSend = getFormValues();
if (!dataToSend) return;
showSpinner();
fetch("http://localhost:3000/alta", {
method: "POST",
headers: { "content-type": "application/json" },
body: JSON.stringify(dataToSend),
})
.then((responseText) => responseText.json())
.then((response) => {
if (response.message === RESPONSE.ALTA_EXITOSA) {
hideSpinner();
traer();
}
})
.catch((error) => console.log(error));
}
function baja() {
let xhr = new XMLHttpRequest();
showSpinner();
xhr.onreadystatechange = () => {
if (xhr.readyState !== 4 || xhr.status !== 200) return;
let dataToJson = JSON.parse(xhr.responseText);
if (dataToJson.message === RESPONSE.BAJA_EXITOSA) {
hideSpinner();
traer();
}
};
xhr.open("POST", "http://localhost:3000/baja");
xhr.setRequestHeader("content-type", "application/x-www-form-urlencoded");
if (selectedItem.id) return xhr.send(`id=${+selectedItem.id}`);
console.error("Error no selecciono ningun elemento para dar la baja");
}
function bajaFetch() {
const dataToSend = getFormValues();
if (!dataToSend) return;
showSpinner();
fetch("http://localhost:3000/baja", {
method: "POST",
headers: { "content-type": "application/x-www-form-urlencoded" },
body: `id=${+selectedItem.id}`,
})
.then((responseText) => responseText.json())
.then((response) => {
if (response.message === RESPONSE.BAJA_EXITOSA) {
hideSpinner();
traer();
}
})
.catch((error) => console.log(error));
}
function traer() {
reset();
let xhr = new XMLHttpRequest();
showSpinner();
xhr.onreadystatechange = () => {
if (xhr.readyState === 4 && xhr.status === 200) {
let response = JSON.parse(xhr.responseText);
arrayData = response.data;
makeTable(arrayData);
hideSpinner();
}
};
xhr.open("GET", "http://localhost:3000/traer");
xhr.send();
}
function traerFetch() {
reset();
showSpinner();
fetch("http://localhost:3000/traer", {
method: "GET",
})
.then((responseText) => responseText.json())
.then((response) => {
if (response.message === RESPONSE.CARGA_EXITOSA) {
arrayData = response.data;
makeTable(arrayData);
hideSpinner();
}
})
.catch((error) => console.log(error));
}
function modify() {
let xhr = new XMLHttpRequest();
xhr.onreadystatechange = () => {
if (xhr.readyState !== 4 || xhr.status !== 200) return;
let dataToJson = JSON.parse(xhr.responseText);
if (dataToJson.message === RESPONSE.MOD_EXITOSA) traer();
};
xhr.open("POST", "http://localhost:3000/modificar");
xhr.setRequestHeader("content-type", "application/json");
const formValues = getFormValues();
if (formValues && selectedItem.id) {
formValues.id = selectedItem.id;
return xhr.send(JSON.stringify(formValues));
}
console.error("Error al modificar, verifique los datos");
}
function modifyFetch() {
const formValues = getFormValues();
if (formValues && selectedItem.id) {
formValues.id = selectedItem.id;
} else {
return console.error("Error al querer modificar, chequear los datos ");
}
showSpinner();
fetch("http://localhost:3000/modificar", {
method: "POST",
headers: { "content-type": "application/json" },
body: JSON.stringify(formValues),
})
.then((responseText) => responseText.json())
.then((response) => {
if (response.message === RESPONSE.MOD_EXITOSA) {
hideSpinner();
traer();
}
})
.catch((error) => console.log(error));
}
function makeTable(array) {
let table = document.getElementById("table");
table.innerHTML = "";
table.appendChild(createHeaders(array));
for (let item of array) {
let row = document.createElement("tr");
for (let property in item) {
let cell = document.createElement("td");
cell.addEventListener("click", getItemId);
let text = document.createTextNode(item[property]);
cell.appendChild(text);
row.appendChild(cell);
}
table.appendChild(row);
}
}
function createHeaders(array) {
let row = document.createElement("tr");
for (let key in array[0]) {
let th = document.createElement("th");
let header = document.createTextNode(key);
th.appendChild(header);
row.appendChild(th);
}
return row;
}
function getItemId(event) {
const cell = event.target;
const row = cell.parentNode;
const id = row.firstElementChild.textContent;
btnEliminar.style.visibility = "visible";
btnModificar.style.visibility = "visible";
setFormData(id);
}
function setFormData(id) {
const object = arrayData.find((item) => +item.id === +id);
selectedItem = { ...object };
document.getElementById("titulo").value = object.titulo;
document.getElementById("radio").checked =
object.transaccion === "Venta" ? true : false;
document.getElementById("radio2").checked =
object.transaccion === "Venta" ? false : true;
document.getElementById("descripcion").value = object.descripcion;
let precio = object.precio.replace("$", "");
document.getElementById("precio").value = +precio.split(",").join("");
document.getElementById("baños").value = +object.num_wc;
document.getElementById("autos").value = +object.num_estacionamiento;
document.getElementById("dormitorios").value = +object.num_dormitorio;
}
function getFormValues() {
const radioVenta = document.getElementById("radio").checked;
const radioAlquiler = document.getElementById("radio2").checked;
if (radioVenta || radioAlquiler) {
const object = {
titulo: document.getElementById("titulo").value,
transaccion: radioVenta ? "Venta" : "Alquiler",
descripcion: document.getElementById("descripcion").value,
precio: document.getElementById("precio").value,
num_wc: document.getElementById("baños").value,
num_estacionamiento: document.getElementById("autos").value,
num_dormitorio: document.getElementById("dormitorios").value,
};
if (checkProperties(object)) return new Anuncio(object);
}
console.error("Error al rellenar el formulario.");
}
function reset() {
btnEliminar.style.visibility = "hidden";
btnModificar.style.visibility = "hidden";
selectedItem = {};
document.getElementById("titulo").value = "";
document.getElementById("radio").checked = false;
document.getElementById("radio2").checked = false;
document.getElementById("descripcion").value = "";
document.getElementById("precio").value = "";
document.getElementById("baños").value = "";
document.getElementById("autos").value = "";
document.getElementById("dormitorios").value = "";
}
function checkProperties(obj) {
for (let key in obj) {
if (obj[key] === null || obj[key] === "") return false;
}
return true;
}
<file_sep>export const RESPONSE = {
ALTA_EXITOSA: "Alta Exitosa",
CARGA_EXITOSA: "Carga Exitosa",
BAJA_EXITOSA: "Baja Exitosa",
MOD_EXITOSA: "Modificacion Exitosa",
};
<file_sep>const spinner = document.getElementById("spinner");
const img = document.createElement("img");
img.setAttribute("src", "../img/spinner.gif");
img.setAttribute("alt", "Loading...");
img.className = "loading-image";
export function showSpinner() {
spinner.className = "show-spinner";
spinner.appendChild(img);
}
export function hideSpinner() {
spinner.className = "";
spinner.removeChild(img);
}
<file_sep># UTN---Laboratorio-3
<file_sep>export class Anuncio {
constructor(object) {
this.id = object.id ? object.id : null;
this.titulo = object.titulo;
this.transaccion = "Venta";
this.descripcion = object.descripcion;
this.precio = object.precio;
}
}
export class Mascota extends Anuncio {
constructor(object) {
super(object);
this.animal = object.animal;
this.raza = object.raza;
this.fecha_nacimiento = object.fecha_nacimiento;
this.vacuna = object.vacuna;
}
}
|
9d2bc033f0dcea4c257120f1de5701f3b6cbf36b
|
[
"JavaScript",
"TypeScript",
"Markdown"
] | 8 |
JavaScript
|
ThiagoCorta/UTN---Laboratorio-3
|
9582d6684c7dc265cf6bc06c2340d33d1562b799
|
252f2ae6be7f64880362f2fc64129be7097d73bf
|
refs/heads/master
|
<file_sep>source "http://rubygems.org"
# Specify your gem's dependencies in mongoid_orderable.gemspec
gemspec
case version = ENV['MONGOID_VERSION'] || "~> 3.0.0.rc"
when /2/
gem "mongoid", "~> 2.4.0"
else
gem "mongoid", version
end
<file_sep>module MongoidOrderable
def self.mongoid2?
Mongoid.const_defined? :Contexts
end
end
require 'mongoid'
require 'mongoid_orderable/version'
if MongoidOrderable.mongoid2?
require 'mongoid_orderable/mongoid/contexts/mongo'
require 'mongoid_orderable/mongoid/contexts/enumerable'
require 'mongoid_orderable/mongoid/criteria'
else
require 'mongoid_orderable/mongoid/contextual/memory'
end
require 'mongoid/orderable'
<file_sep>module MongoidOrderable #:nodoc:
module Mongoid #:nodoc:
module Contextual #:nodoc:
module Memory #:nodoc:
def inc attribute, value
each do |document|
document.inc(attribute, value)
end
end
end
end
end
end
Mongoid::Contextual::Memory.send :include, MongoidOrderable::Mongoid::Contextual::Memory
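# Illustrative example only: with the patch above, an in-memory (embedded) context
# responds to #inc just like a Mongo context, e.g.
#
#   parent.items.where(:position.gt => 2).inc(:position, 1)
#
# where `items` is assumed to be an embedded relation using mongoid_orderable.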
|
05d5944ab974044826f81f16340a4806f4abd99c
|
[
"Ruby"
] | 3 |
Ruby
|
johnnyshields/mongoid_orderable
|
721f9592a128930787acd08041a469aba4bc660a
|
e4785e0dbc0e7ebe139af60903eccb5d9c4879e7
|
refs/heads/master
|
<repo_name>fnx-research/Repo-Downloader<file_sep>/RepoDownloader.php
<?PHP
class RepoDownloader{
private function exec_redirects($ch, &$redirects) {
    $data = curl_exec($ch);
    $http_code = curl_getinfo($ch, CURLINFO_HTTP_CODE);
    if ($http_code == 301 || $http_code == 302) {
        // Follow the redirect advertised in the Location/URI header.
        list($header) = explode("\r\n\r\n", $data, 2);
        $matches = array();
        if (preg_match("/(Location:|URI:)[^(\n)]*/", $header, $matches)) {
            $url = trim(str_replace($matches[1], "", $matches[0]));
            if (parse_url($url) !== false) {
                curl_setopt($ch, CURLOPT_URL, $url);
                $redirects++;
                return $this->exec_redirects($ch, $redirects);
            }
        }
    }
    // CURLOPT_HEADER is enabled in download(), so strip the response headers
    // before returning the body.
    list(, $body) = explode("\r\n\r\n", $data, 2);
    return $body;
}
public function download($opts){
    // $opts is expected to contain the keys: user, repo, token and saveAs.
    extract($opts);
    $file = $saveAs;
    $endpoint = 'https://api.github.com/repos/'.$user.'/'.$repo.'/zipball/master';
    $ch = curl_init($endpoint);
    curl_setopt($ch, CURLOPT_HTTPHEADER, array('Authorization: token '.$token));
    curl_setopt($ch, CURLOPT_HEADER, true);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
    $redirects = 0;
    $data = $this->exec_redirects($ch, $redirects);
    curl_close($ch);
    file_put_contents($file, $data);
}
}
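// Illustrative usage only (all values below are placeholders, not real credentials):
//
//   $downloader = new RepoDownloader();
//   $downloader->download(array(
//       'user'   => 'octocat',
//       'repo'   => 'Hello-World',
//       'token'  => 'ghp_xxxxxxxxxxxx',   // GitHub personal access token
//       'saveAs' => '/tmp/repo.zip',
//   ));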
|
018e973ddd5024594f6f155e389fafcd345b1945
|
[
"PHP"
] | 1 |
PHP
|
fnx-research/Repo-Downloader
|
017dc7393f8e0c6dc8fe7c395043fdb55f9222b7
|
64ae573e9dd6bfa53312edb4e8b931acb33d82e5
|
refs/heads/master
|
<repo_name>hoangganhap2/circleAndCylinder<file_sep>/src/CylinderTest.java
public class CylinderTest {
public static void main(String[] args) {
Cylinder cylinder = new Cylinder();
System.out.println(cylinder+ "Volume = "+cylinder.getVolume());
cylinder = new Cylinder(2.2,3.5,"red",true);
System.out.println(cylinder+" Volume = "+cylinder.getVolume());
}
}
|
c194e6a3952c0569872a55657af0de1b0b022676
|
[
"Java"
] | 1 |
Java
|
hoangganhap2/circleAndCylinder
|
454d71bff3700c58410d3e8beff8d69124638f1c
|
68dc05a438852cc976a67752e21fab751329dd27
|
refs/heads/master
|
<file_sep>import { Component } from '@angular/core';
import { AboutPage } from '../about/about';
import { PoliticiansPage } from '../politicians/politicians';
import { ProjectsPage } from '../projects/projects';
@Component({
templateUrl: 'tabs.html'
})
export class TabsPage {
// Candidate numbers
PoliticiansTab = PoliticiansPage;
// List of Projects
ProjectsTab = ProjectsPage;
// About the initiative
AboutTab = AboutPage;
constructor() {
}
}
<file_sep>import { Injectable } from '@angular/core';
/*
Generated class for the UserDataProvider provider.
See https://angular.io/guide/dependency-injection for more info on providers
and Angular DI.
*/
@Injectable()
export class UserDataProvider {
public static get() {
let user = JSON.parse(localStorage['auth']).user;
if(!user) user = {};
return user;
}
public static set(auth) {
localStorage['auth'] = JSON.stringify(auth);
}
public static getProfile() {
return JSON.parse(localStorage['profile']);
}
public static setProfile(profile) {
localStorage['profile'] = JSON.stringify(profile);
}
}
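// Illustrative usage only (the shape of the auth payload is an assumption based
// on the accessors above):
//
//   UserDataProvider.set({ user: { uid: 'abc123', name: 'Jane' } });
//   const currentUser = UserDataProvider.get();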
<file_sep># GovZero
Proposals for a government with little spending, or even at ZERO COST
## Set it up
This is an Ionic starter project, so you need to run the installation steps below; the installed dependencies are not versioned in git.
1. Install NPM dependencies
```shell
$ npm install
```
2. Install Bower dependencies
```shell
$ bower install
```
3. Install ionic cordova dependencies
```shell
$ ionic state restore
```
## Firebase cloud functions
Run all of these commands from the root folder (not inside the functions folder).
1. LogIn
```shell
$ firebase login
```
1. Skip this step if a functions folder already exists
```shell
$ firebase init functions
```
1. Run the functions locally (this can throw an error if a promise lacks a catch)
```shell
$ cd functions
$ npm run-script lint
$ npm run-script build
$ firebase serve --only functions
```
1. Deploy function changes (run outside the functions folder)
```shell
$ firebase deploy --only functions
```
## Production Commands
- If there is no release signing key, generate one (the signing password for this app (govzero) is zerougov):
```shell
$ keytool -genkey -v -keystore my-release-key.keystore -alias alias_name -keyalg RSA -keysize 2048 -validity 10000
```
- Release build command (run from project_root_path):
```shell
$ ionic cordova build --release android
```
- Signing command (run from root_path/platforms/android/app/build/outputs/apk/release):
```shell
$ cd platforms/android/app/build/outputs/apk/release/
$ jarsigner -verbose -sigalg SHA1withRSA -digestalg SHA1 -keystore release.keystore app-release-unsigned.apk alias_name
```
- Align the APK with zipalign (run from root_path/platforms/android/build/outputs/apk/):
```shell
$ rm -rf govzero.apk && zipalign -v 4 app-release-unsigned.apk govzero.apk
```
- If zipalign is not set up, add an alias for it to your bash profile (macOS):
```shell
$ nano ~/.bash_profile
>>> alias zipalign='~/Library/Android/sdk/build-tools/VERSION/zipalign'
$ source ~/.bash_profile
```
- Generate HashKeys
```shell
$ keytool -exportcert -alias alias_name -keystore release.keystore | openssl sha1 -binary | openssl base64
password: <PASSWORD> (zer<PASSWORD>)
$ keytool -exportcert -alias alias_name -keystore release.keystore | openssl sha1 -binary | openssl base64
password: <PASSWORD>
```
## String Formats
[Humanize Strings](https://github.com/HubSpot/humanize) makes strings more readable and helps with Firebase searches.
## Diagnostic Settings
[CordovaDiagnostic](https://github.com/dpa99c/cordova-diagnostic-plugin) automatically checks whether the required device settings are enabled.
## PUG
PUG is an HTML pre-compiler, which means it has its own syntax for generating the final HTML files.
[Check its documentation](https://pugjs.org)
## Set up Google Services
1. Download GoogleService-Info.plist & google-services.json from Firebase (native config)
1. Add google-services.json to platforms/android/app/
1. Add GoogleService-Info.plist to platforms/ios/
## GoogleMap PlugIn
1. create a project on [Console Developers Google](https://console.developers.google.com/apis/credentials)
2. Execute it command, with the generated keys
```bash
$ cordova plugin add cordova-plugin-googlemaps --variable API_KEY_FOR_ANDROID="ANDROID_KEY" --variable API_KEY_FOR_IOS="IOS_KEY"
```
3. If you get an error (e.g. on macOS), add JAVA_HOME:
```bash
$ echo "export JAVA_HOME=`/usr/libexec/java_home`" >> ~/.profile
```
## Cordova build error [SOLVED]
On macOS, install Gradle:
```bash
$ brew install gradle
```
Export the Cordova Gradle distribution URL as an environment variable:
```bash
$ nano ~/.bash_profile
>>> export CORDOVA_ANDROID_GRADLE_DISTRIBUTION_URL=http\\://services.gradle.org/distributions/gradle-4.4-all.zip
```
If it still fails with a Facebook strings error, add the following to platforms/android/app/src/main/res/values/strings.xml:
```xml
<android>
<string name="fb_app_id">314361069388630</string>
<string name="fb_app_name">GovZero</string>
</android>
```
<file_sep>import { NgModule } from '@angular/core';
import { GlobalGoalsComponent } from './global-goals/global-goals';
@NgModule({
declarations: [GlobalGoalsComponent],
imports: [],
exports: [GlobalGoalsComponent]
})
export class ComponentsModule {}
<file_sep>import { NgModule, ErrorHandler } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { IonicApp, IonicModule, IonicErrorHandler } from 'ionic-angular';
import { MyApp } from './app.component';
import { AboutPage } from '../pages/about/about';
import { PoliticiansPage } from '../pages/politicians/politicians';
import { ProjectsPage } from '../pages/projects/projects';
import { HomePage } from '../pages/home/home';
import { TabsPage } from '../pages/tabs/tabs';
import { StatusBar } from '@ionic-native/status-bar';
import { SplashScreen } from '@ionic-native/splash-screen';
import { ProjectCategoriesProvider } from '../providers/project-categories/project-categories';
import { GlobalGoalsComponent } from '../components/global-goals/global-goals';
import {LoginPage} from "../pages/login/login";
import {Facebook} from "@ionic-native/facebook";
import {GooglePlus} from "@ionic-native/google-plus";
import {AngularFireModule} from "@angular/fire";
import {AngularFireAuthModule} from "@angular/fire/auth";
import {AngularFirestoreModule} from "@angular/fire/firestore";
import {AngularFireStorageModule} from "@angular/fire/storage";
import {AlertServiceProvider} from "../providers/alert-service/alert-service";
let config = {
apiKey: "<KEY>",
authDomain: "govzero-app.firebaseapp.com",
databaseURL: "https://govzero-app.firebaseio.com",
projectId: "govzero-app",
storageBucket: "govzero-app.appspot.com",
messagingSenderId: "1016755266372"
};
@NgModule({
declarations: [
MyApp,
LoginPage,
PoliticiansPage,
ProjectsPage,
AboutPage,
HomePage,
TabsPage,
GlobalGoalsComponent
],
imports: [
BrowserModule,
IonicModule.forRoot(MyApp),
AngularFireModule.initializeApp(config),
AngularFireAuthModule,
AngularFirestoreModule,
AngularFireStorageModule,
AngularFirestoreModule.enablePersistence() // to work offline
],
bootstrap: [IonicApp],
entryComponents: [
MyApp,
AboutPage,
PoliticiansPage,
ProjectsPage,
LoginPage,
HomePage,
TabsPage
],
providers: [
Facebook,
GooglePlus,
StatusBar,
SplashScreen,
{provide: ErrorHandler, useClass: IonicErrorHandler},
ProjectCategoriesProvider,
AlertServiceProvider
]
})
export class AppModule {}
|
c6dbaa4c175d175802dd97d96685a3ca9c1592f1
|
[
"Markdown",
"TypeScript"
] | 5 |
TypeScript
|
TonGarcia/GovZero
|
2cb2a5ee52fadd8809f50950a16ebed2753bf87d
|
7acfa2ed7249fc3bfdfa84a63607151e1b7fa5f7
|
refs/heads/master
|
<file_sep>package com.example.flori.groupea07_mobile.Model;
public class SoldObject {
private int idObject;
private int finalPrice;
private String nameObject;
private String catObject;
public SoldObject(int idO, int price, String name, String cat){
this.idObject = idO;
this.finalPrice = price;
this.nameObject = name;
this.catObject = cat;
}
public int getIdObject(){ return idObject;}
public void setIdObject(int idO){ idObject = idO;}
public int getFinalPrice(){ return finalPrice;}
public void setFinalPrice(int price){ finalPrice = price;}
public String getNameObject(){ return nameObject;}
public void setNameObject(String name){ nameObject = name;}
public String getCatObject(){return catObject;}
public void setCatObject(String cat){ catObject = cat;}
}
<file_sep>package com.example.flori.groupea07_mobile.Model;
public class SellerUser {
private int idSeller;
private String username;
private int nbSales;
private int positiveVote;
private int negativeVote;
private int idUser;
public SellerUser(int idS, String user, int nb, int pVote, int nVote, int idU){
this.idSeller = idS;
this.username = user;
this.nbSales = nb;
this.positiveVote = pVote;
this.negativeVote = nVote;
this.idUser = idU;
}
public int getIdSeller(){ return idSeller;}
public void setIdSeller(int idS){ idSeller = idS;}
public String getUsername() { return username;}
public void setUsername(String user){ username = user;}
public int getNbSales(){ return nbSales;}
public void setNbSales(int nb){ nbSales = nb;}
public int getPositiveVote(){ return positiveVote;}
public void setPositiveVote(int pVote){ positiveVote = pVote;}
public int getNegativeVote(){ return negativeVote;}
public void setNegativeVote(int nVote){ negativeVote = nVote;}
public int getIdUser(){ return idUser;}
public void setIdUser(int idU){ idUser = idU;}
}
<file_sep>package com.example.flori.groupea07_mobile.Model;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
public class RetrofitInstance {
private static Retrofit retrofit;
private static final String BASE_URL = "https://groupea07.azurewebsites.net/";
/**
 * Creates (or returns) the shared Retrofit instance backed by OkHttp.
 * */
public static Retrofit getRetrofitInstance() {
    OkHttpClient client = new OkHttpClient.Builder()
            .build();
if (retrofit == null) {
retrofit = new retrofit2.Retrofit.Builder()
.baseUrl(BASE_URL)
.client(client)
.addConverterFactory(GsonConverterFactory.create())
.build();
}
return retrofit;
}
}
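// Illustrative usage only (GetDataService is the Retrofit service interface used
// elsewhere in this app, e.g. in LoginActivity):
//
//   GetDataService service = RetrofitInstance.getRetrofitInstance().create(GetDataService.class);
//   Call<List<Member>> call = service.groupList();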
<file_sep>package com.example.flori.groupea07_mobile.Model;
public class Moderator {
private int idModerator;
private int idUser;
public Moderator(int idM, int idU){
this.idModerator = idM;
this.idUser = idU;
}
public int getIdModerator(){ return idModerator;}
public void setIdModerator(int idM){ idModerator = idM;}
public int getIdUser(){ return idUser;}
public void setIdUser(int idU){ idUser = idU;}
}
<file_sep>package com.example.flori.groupea07_mobile;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Gravity;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import com.example.flori.groupea07_mobile.Model.Member;
import com.example.flori.groupea07_mobile.Model.RetrofitInstance;
import com.example.flori.groupea07_mobile.Service.GetDataService;
import java.util.List;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
public class LoginActivity extends AppCompatActivity implements View.OnClickListener{
private Button _btn_login, _btn_reset;
private EditText _et_username, _et_pwd;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
_btn_login = (Button) findViewById(R.id.bt_login_login);
_btn_login.setOnClickListener(this);
_btn_reset = (Button) findViewById(R.id.bt_log_reset);
_btn_reset.setOnClickListener(this);
_et_username = (EditText) findViewById(R.id.et_log_username);
_et_pwd = (EditText) findViewById(R.id.et_log_password);
// Toolbar
Toolbar toolBar = (Toolbar) findViewById(R.id.login_tool_bar);
toolBar.setTitle(getResources().getText(R.string.txt_login));
toolBar.setNavigationIcon(R.drawable.ic_arrow_back_black_24dp);
toolBar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
finish();
}
});
}
@Override
public void onClick(View v) {
switch(v.getId()){
case R.id.bt_login_login :
authentification();
break;
case R.id.bt_log_reset :
resetData();
break;
}
}
@Override
public void onResume(){
super.onResume();
//et_username.setText("test");
}
private void resetData(){
_et_username.setText("");
_et_pwd.setText("");
}
private void authentification(){
GetDataService service = RetrofitInstance.getRetrofitInstance().create(GetDataService.class);
Call<List<Member>> call = service.groupList();
call.enqueue(new Callback<List<Member>>() {
@Override
public void onResponse(Call<List<Member>> call, Response<List<Member>> response) {
Boolean found = false;
for(Member m : response.body()){
if(m.getUsername().toLowerCase().equals(_et_username.getText().toString().toLowerCase()) && m.getUserPwd().equals(_et_pwd.getText().toString())){
Toast toast = Toast.makeText(getApplicationContext(),"Login in...",Toast.LENGTH_LONG);
toast.setGravity(Gravity.CENTER,0,0);
toast.show();
found = true;
SharedPreferences loginData = getSharedPreferences("memberInfo", Context.MODE_PRIVATE);
SharedPreferences.Editor editor = loginData.edit();
editor.putInt("idUser", m.getIdUser());
editor.putString("userName", m.getUsername());
editor.putInt("admin", m.getUserAdmin());
editor.apply();
setResult(LoginActivity.RESULT_OK);
finish();
break;
}
}
if(found==false){
Toast toast = Toast.makeText(getApplicationContext(),"Username and password doesn't match !",Toast.LENGTH_LONG);
toast.setGravity(Gravity.CENTER,0,0);
toast.show();
}
}
@Override
public void onFailure(Call<List<Member>> call, Throwable t) {
Toast.makeText(LoginActivity.this, "Something went wrong...Error message: " + t.getMessage(), Toast.LENGTH_SHORT).show();
}
});
}
}
<file_sep>package com.example.flori.groupea07_mobile;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.widget.Toast;
import com.example.flori.groupea07_mobile.Model.AuctionedObject;
import com.example.flori.groupea07_mobile.Model.RetrofitInstance;
import com.example.flori.groupea07_mobile.Service.AuctionedObjectListAdapter;
import com.example.flori.groupea07_mobile.Service.GetDataService;
import java.util.ArrayList;
import java.util.List;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
public class AuctionedObjectActivity extends AppCompatActivity {
private AuctionedObjectListAdapter adapter;
private RecyclerView recyclerView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.card_layout_object_and_member);
GetDataService service = RetrofitInstance.getRetrofitInstance().create(GetDataService.class);
Call<List<AuctionedObject>> call = service.groupObjectList();
call.enqueue(new Callback<List<AuctionedObject>>() {
@Override
public void onResponse(Call<List<AuctionedObject>> call, Response<List<AuctionedObject>> response) {
generateAuctionedObjectList(new ArrayList<>(response.body()));
}
@Override
public void onFailure(Call<List<AuctionedObject>> call, Throwable t) {
Toast.makeText(AuctionedObjectActivity.this, "Something went wrong...Error message: " + t.getMessage(), Toast.LENGTH_SHORT).show();
}
});
}
/** Builds the list of auctioned objects using a RecyclerView with a custom adapter. */
private void generateAuctionedObjectList(ArrayList<AuctionedObject> auctionedObjectArrayList ) {
recyclerView = findViewById(R.id.recycler_view_object_list);
adapter = new AuctionedObjectListAdapter(auctionedObjectArrayList);
RecyclerView.LayoutManager layoutManager = new LinearLayoutManager(AuctionedObjectActivity.this);
recyclerView.setLayoutManager(layoutManager);
//recyclerView.setHasFixedSize(true);
recyclerView.setAdapter(adapter);
}
}
<file_sep>package com.example.flori.groupea07_mobile;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.InputType;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import com.example.flori.groupea07_mobile.Model.AuctionedObject;
import com.example.flori.groupea07_mobile.Model.RetrofitInstance;
import com.example.flori.groupea07_mobile.Model.SellerUser;
import com.example.flori.groupea07_mobile.Service.GetDataService;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
public class AddObjectActivity extends AppCompatActivity{
private EditText mNameObjectView, mPriceObjectView, mDescriptionView, mCatView;
private Button btnAddObject;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_add_object);
mNameObjectView = (EditText) findViewById(R.id.et_log_nameobject);
mPriceObjectView = (EditText) findViewById(R.id.et_log_priceobject);
mPriceObjectView.setInputType(InputType.TYPE_CLASS_NUMBER);
mDescriptionView = (EditText) findViewById(R.id.et_log_descriptionobject);
mCatView = (EditText) findViewById(R.id.et_log_catobject);
btnAddObject = (Button) findViewById(R.id.btn_addobject);
btnAddObject.setOnClickListener(new View.OnClickListener(){
@Override
public void onClick(View v) {
SharedPreferences loginData = getSharedPreferences("memberInfo", Context.MODE_PRIVATE);
final int idUser = loginData.getInt("idUser",0);
if(mNameObjectView.getText().toString().equals("") ||
mPriceObjectView.getText().toString().equals("") ||
mDescriptionView.getText().toString().equals("") ||
mCatView.getText().toString().equals("")
){
Toast.makeText(getApplicationContext(),"At least one field is empty", Toast.LENGTH_LONG).show();
}
else {
GetDataService service = RetrofitInstance.getRetrofitInstance().create(GetDataService.class);
Call<List<SellerUser>> call = service.groupSellerUserList();
call.enqueue(new Callback<List<SellerUser>>() {
@Override
public void onResponse(Call<List<SellerUser>> call, Response<List<SellerUser>> response) {
boolean selUser = false;
for (SellerUser m : response.body()) {
if (m.getIdUser() == idUser) {
GetDataService service = RetrofitInstance.getRetrofitInstance().create(GetDataService.class);
SellerUser sell = new SellerUser(m.getIdSeller(), m.getUsername(), m.getNbSales() + 1, m.getPositiveVote(), m.getNegativeVote(), m.getIdUser());
Call<ResponseBody> callUpdate = service.updateSale(Integer.toString(m.getIdSeller()), sell);
callUpdate.enqueue(new Callback<ResponseBody>() {
@Override
public void onResponse(Call<ResponseBody> call, Response<ResponseBody> response) {
}
@Override
public void onFailure(Call<ResponseBody> call, Throwable t) {
}
});
selUser = true;
break;
}
}
if (selUser == false) {
SharedPreferences loginData = getSharedPreferences("memberInfo", Context.MODE_PRIVATE);
String logUsername = loginData.getString("userName", null);
GetDataService serviceSellUser = RetrofitInstance.getRetrofitInstance().create(GetDataService.class);
Call<SellerUser> callSellUser = serviceSellUser.createSellerUser(new SellerUser(0, logUsername, 1, 0, 0, idUser));
callSellUser.enqueue(new Callback<SellerUser>() {
@Override
public void onResponse(Call<SellerUser> call, Response<SellerUser> response) {
}
@Override
public void onFailure(Call<SellerUser> call, Throwable t) {
}
});
}
}
@Override
public void onFailure(Call<List<SellerUser>> call, Throwable t) {
}
});
GetDataService serviceSold = RetrofitInstance.getRetrofitInstance().create(GetDataService.class);
Call<AuctionedObject> callSold = serviceSold.createAuctionedObject(new AuctionedObject(0, mNameObjectView.getText().toString(), mDescriptionView.getText().toString(), Integer.parseInt(mPriceObjectView.getText().toString()), idUser, mCatView.getText().toString()));
callSold.enqueue(new Callback<AuctionedObject>() {
@Override
public void onResponse(Call<AuctionedObject> call, Response<AuctionedObject> response) {
finish();
}
@Override
public void onFailure(Call<AuctionedObject> call, Throwable t) {
}
});
}
}
});
// Toolbar
Toolbar toolBar = (Toolbar) findViewById(R.id.add_object_toolbar);
toolBar.setTitle(getResources().getText(R.string.txt_add_object));
toolBar.setNavigationIcon(R.drawable.ic_arrow_back_black_24dp);
toolBar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
finish();
}
});
}
}
<file_sep>package com.example.flori.groupea07_mobile.Service;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import com.example.flori.groupea07_mobile.Model.AuctionedObject;
import com.example.flori.groupea07_mobile.Model.RetrofitInstance;
import com.example.flori.groupea07_mobile.Model.SoldObject;
import com.example.flori.groupea07_mobile.R;
import java.util.ArrayList;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
public class AuctionedObjectListAdapter extends RecyclerView.Adapter<AuctionedObjectListAdapter.ObjectViewHolder> {
private ArrayList<AuctionedObject> dataList;
public AuctionedObjectListAdapter(ArrayList<AuctionedObject> dataList) {
this.dataList = dataList;
}
@Override
public ObjectViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
LayoutInflater layoutInflater = LayoutInflater.from(parent.getContext());
View view = layoutInflater.inflate(R.layout.single_row_object, parent, false);
return new ObjectViewHolder(view);
}
@Override
public void onBindViewHolder(ObjectViewHolder holder, int position) {
holder.txtIdObject.setText(dataList.get(position).getIdObject()+"");
holder.txtNameObject.setText(dataList.get(position).getNameObject());
holder.txtPriceObject.setText(dataList.get(position).getPriceObject()+"");
holder.txtDescObject.setText(dataList.get(position).getDescriptionObject());
holder.txtCatObject.setText(dataList.get(position).getCatObject()+"");
}
@Override
public int getItemCount() {
return dataList.size();
}
class ObjectViewHolder extends RecyclerView.ViewHolder {
int positionMember, i;
TextView txtIdObject, txtNameObject, txtPriceObject, txtCatObject, txtDescObject;
TextView int_id, int_name, int_price, int_cat, int_desc;
Button btn_Buy;
ObjectViewHolder(final View itemView) {
super(itemView);
txtIdObject = itemView.findViewById(R.id.txt_id_object);
txtIdObject.setVisibility(View.GONE);
txtNameObject = itemView.findViewById(R.id.txt_name_object);
txtPriceObject = itemView.findViewById(R.id.txt_price_object);
txtDescObject = itemView.findViewById(R.id.txt_desc_object);
txtCatObject = itemView.findViewById(R.id.txt_cat_object);
btn_Buy = (Button) itemView.findViewById(R.id.bt_row_buy);
btn_Buy.setOnClickListener(new View.OnClickListener(){
@Override
public void onClick(View v) {
GetDataService serviceSold = RetrofitInstance.getRetrofitInstance().create(GetDataService.class);
Call<SoldObject> callSold = serviceSold.createSoldObject(new SoldObject(Integer.parseInt(txtIdObject.getText().toString()), Integer.parseInt(txtPriceObject.getText().toString()), txtNameObject.getText().toString(), txtCatObject.getText().toString()));
callSold.enqueue(new Callback<SoldObject>() {
@Override
public void onResponse(Call<SoldObject> call, Response<SoldObject> response) { }
@Override
public void onFailure(Call<SoldObject> call, Throwable t) { }
});
i = 0;
GetDataService service = RetrofitInstance.getRetrofitInstance().create(GetDataService.class);
Call<List<AuctionedObject>> call = service.groupObjectList();
call.enqueue(new Callback<List<AuctionedObject>>() {
@Override
public void onResponse(Call<List<AuctionedObject>> call, Response<List<AuctionedObject>> response) {
for(AuctionedObject m : response.body()){
if(Integer.toString(m.getIdObject()).equals(txtIdObject.getText().toString())){
setPositionMember(i);
break;
}
i++;
}
}
@Override
public void onFailure(Call<List<AuctionedObject>> call, Throwable t) {}
});
GetDataService serviceDelete = RetrofitInstance.getRetrofitInstance().create(GetDataService.class);
Call<ResponseBody> callDelete = serviceDelete.deleteObject(txtIdObject.getText().toString());
callDelete.enqueue(new Callback<ResponseBody>() {
@Override
public void onResponse(Call<ResponseBody> call1, Response<ResponseBody> response) {
dataList.remove(getPositionMember());
notifyItemRangeRemoved(getPositionMember(), 1);
notifyItemRemoved(getPositionMember());
notifyItemRangeChanged(getPositionMember(), dataList.size());
notifyDataSetChanged();
}
@Override
public void onFailure(Call<ResponseBody> call, Throwable t) {}
});
}
});
}
private void setPositionMember(int positionMember){
this.positionMember = positionMember;
}
private int getPositionMember(){
return positionMember;
}
}
}
<file_sep>package com.example.flori.groupea07_mobile;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
public class MenuActivity extends AppCompatActivity implements View.OnClickListener {
private Button _menu_login, _menu_register, _menu_objects, _menu_users, _menu_logout, _menu_sold_objects, _menu_addobjects, _menu_to_go_website;
private Toolbar toolbar;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_menu);
_menu_login = (Button) findViewById(R.id.menu_login);
_menu_login.setOnClickListener(this);
_menu_register = (Button) findViewById(R.id.menu_register);
_menu_register.setOnClickListener(this);
_menu_addobjects = (Button) findViewById(R.id.menu_addObject);
_menu_addobjects.setOnClickListener(this);
_menu_addobjects.setVisibility(View.GONE);
_menu_objects = (Button) findViewById(R.id.menu_objects);
_menu_objects.setOnClickListener(this);
_menu_objects.setVisibility(View.GONE);
_menu_users = (Button) findViewById(R.id.menu_users);
_menu_users.setOnClickListener(this);
_menu_users.setVisibility(View.GONE);
_menu_logout = (Button) findViewById(R.id.menu_logout);
_menu_logout.setOnClickListener(this);
_menu_logout.setVisibility(View.GONE);
_menu_sold_objects = (Button) findViewById(R.id.menu_sold_objects);
_menu_sold_objects.setOnClickListener(this);
_menu_to_go_website = (Button) findViewById(R.id.menu_go_to_website);
_menu_to_go_website.setOnClickListener(this);
toolbar = (Toolbar) findViewById(R.id.menu_toolbar);
toolbar.setTitle("Please login or register");
}
// Manage all buttons.onClick
@Override
public void onClick(View v) {
SharedPreferences loginData = getSharedPreferences("memberInfo", Context.MODE_PRIVATE);
Intent intent;
switch(v.getId()){
//Switch to LoginActivity
case R.id.menu_login :
intent = new Intent(this, LoginActivity.class);
startActivityForResult(intent,1);
break;
//Switch to registerActivity
case R.id.menu_register :
intent = new Intent(this, RegisterActivity.class);
startActivity(intent);
break;
//Switch to AddObjectActivity
case R.id.menu_addObject :
intent = new Intent(this, AddObjectActivity.class);
startActivity(intent);
break;
//Switch to AuctionedObjectActivity
case R.id.menu_objects :
intent = new Intent(this, AuctionedObjectActivity.class);
startActivity(intent);
break;
//Switch to SoldObjActivity
case R.id.menu_sold_objects:
intent = new Intent(this, SoldObjActivity.class);
startActivity(intent);
break;
//Switch to MembersActivity
case R.id.menu_users :
intent = new Intent(this,MemberActivity.class);
int admin = loginData.getInt("admin",0);
if (admin == 0) { Toast.makeText(this,"Access denied",Toast.LENGTH_LONG).show();}
else {startActivity(intent); }
break;
case R.id.menu_logout:
SharedPreferences.Editor editor = loginData.edit();
editor.clear();
editor.commit();
Toast toast = Toast.makeText(getApplicationContext(),"Successfully logged out !",Toast.LENGTH_LONG);//toast.setGravity(Gravity.CENTER,0,0);
toast.show();
_menu_logout.setVisibility(View.GONE);
_menu_login.setVisibility(View.VISIBLE);
_menu_register.setVisibility(View.VISIBLE);
_menu_addobjects.setVisibility(View.GONE);
_menu_objects.setVisibility(View.GONE);
_menu_users.setVisibility(View.GONE);
String logUsername = loginData.getString("userName",null);
toolbar.setTitle("Please login or register");
break;
case R.id.menu_go_to_website:
Uri webpage = Uri.parse("https://groupea07.azurewebsites.net");
intent = new Intent(Intent.ACTION_VIEW, webpage);
if (intent.resolveActivity(getPackageManager()) != null) { startActivity(intent);}
else{Toast.makeText(getApplicationContext(),"Web site doesn't exist", Toast.LENGTH_LONG).show();}
break;
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == 1) {
if(resultCode == Activity.RESULT_OK){
_menu_logout.setVisibility(View.VISIBLE);
_menu_login.setVisibility(View.GONE);
_menu_register.setVisibility(View.GONE);
_menu_addobjects.setVisibility(View.VISIBLE);
_menu_users.setVisibility(View.VISIBLE);
_menu_objects.setVisibility(View.VISIBLE);
SharedPreferences loginData = getSharedPreferences("memberInfo", Context.MODE_PRIVATE);
String logUsername = loginData.getString("userName",null);
toolbar.setTitle(logUsername);
}
}
}
}
<file_sep>package com.example.flori.groupea07_mobile.Model;
public class AuctionedObject {
private int idObject;
private String nameObject;
private String descriptionObject;
private int priceObject;
private int idUser;
private String catObject;
public AuctionedObject(int id, String nameO, String dObject, int price, int user, String cat){
this.idObject = id;
this.nameObject = nameO;
this.descriptionObject = dObject;
this.priceObject = price;
this.idUser = user;
this.catObject = cat;
}
public int getIdObject(){ return idObject;}
public void setIdObject(int id){ this.idObject = id;}
public String getNameObject(){ return nameObject;}
public void setNameObject(String name){ this.nameObject = name;}
public String getDescriptionObject(){return descriptionObject;}
public void setDescriptionObject(String descri){this.descriptionObject = descri;}
public int getPriceObject(){ return priceObject;}
public void setPriceObject(int price){ priceObject = price;}
public int getIdUser(){ return idUser;}
public void setIdUser(int id){ idUser = id;}
public String getCatObject(){ return catObject;}
public void setCatObject(String cat){ catObject = cat;}
@Override
public String toString() {
return "AuctionedObject{" +
"idObject=" + idObject +
", nameObject='" + nameObject + '\'' +
", descriptionObject='" + descriptionObject + '\'' +
", priceObject=" + priceObject +
", idUser=" + idUser +
", catObject='" + catObject + '\'' +
'}';
}
}
|
e3978552afb458fbb87a86c4648098a3ab9a7a33
|
[
"Java"
] | 10 |
Java
|
Drakwars/GroupeA07_Android
|
e248ee1f55d1086dcb713c1bfd3763d98982bb7b
|
49f97dbc6c7fc2f8ce0e307364fb46367629043e
|
refs/heads/main
|
<repo_name>blavkjay/Intent_Test<file_sep>/IntentTakeHomeTest/Networking/URLSessionHttpClient.swift
//
// URLSessionHttpClient.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import Foundation
final class URLSessionHttpClient: ApiClient {
let session = URLSession.shared
func get(url: String, params: [String:Any] ,completion: @escaping (Result<Data?, Error>) -> Void) {
guard var urlComponents = URLComponents(string: url) else {
return
}
//add params to the url
urlComponents.queryItems = params.map { (arg) -> URLQueryItem in
let (key, value) = arg
return URLQueryItem(name: key, value: "\(value)")
}
urlComponents.percentEncodedQuery = urlComponents.percentEncodedQuery?.replacingOccurrences(of: "+", with: "%2B")
guard let url = urlComponents.url else {
return
}
var urlRequest = URLRequest(url: url)
urlRequest.setValue("application/json", forHTTPHeaderField: "Content-Type")
urlRequest.setValue("application/vnd.github.v3+json", forHTTPHeaderField: "Accept")
//MARK:- ADD TOKEN
urlRequest.setValue("token ", forHTTPHeaderField: "Authorization")
urlRequest.httpMethod = "GET"
session.dataTask(with: urlRequest) { data, response, error in
if let httpResponse = response as? HTTPURLResponse {
print("error \(httpResponse.statusCode)")
}
if let error = error {
    completion(.failure(error))
    return // do not also report success after a failure
}
completion(.success(data))
}.resume()
}
}
<file_sep>/IntentTakeHomeTestTests/RepoSearchListServiceTest.swift
//
// RepoSearchListServiceTest.swift
// IntentTakeHomeTestTests
//
// Created by Admin on 09/10/2021.
//
import Foundation
import XCTest
@testable import IntentTakeHomeTest
class RepoSearchListServiceTest: XCTestCase {
func test_init_doesNotRequestDataFromURL() {
let (_, client) = makeSUT()
XCTAssertTrue(client.requestedURLs.isEmpty)
}
func test_getRepositoriesFromSearch_requestDataFromURLRequest() {
//Given
let (sut, client) = makeSUT()
//When
sut.getRepositoriesFromSearch(queryString: "word", page: 1, perPageNumber: 20) { _ in }
//Then
XCTAssertEqual(client.requestedURLs.count, 1, "client was called")
}
func test_sut_CalledClientWithCorrectData() {
let (sut, client) = makeSUT()
let passedQuery: [String: Any] = ["q": "word",
"page": 1,
"per_page": 20]
sut.getRepositoriesFromSearch(queryString: passedQuery["q"] as! String, page: passedQuery["page"] as! Int, perPageNumber: passedQuery["per_page"] as! Int) { _ in }
XCTAssertEqual(passedQuery["q"] as! String, client.paramPassed[0]["q"] as! String)
XCTAssertEqual(passedQuery["page"] as! Int, client.paramPassed[0]["page"] as! Int)
XCTAssertEqual(passedQuery["per_page"] as! Int, client.paramPassed[0]["per_page"] as! Int)
}
func test_getRepositoriesFromSearch_deliversResponseOn200Response() {
//Given
let (sut, client) = makeSUT()
//stub
let owner = Owner(login: "just saying", htmlUrl: "https://any-url.com", avatarURL: "https://any-url1.com")
let repo = Repository(name: "Juwon", owner: owner, description: "just describe")
let repoList = [repo]
//When
var expectedRepoList: RepoListResponse?
let exp = expectation(description: "wait for load completion")
sut.getRepositoriesFromSearch(queryString: "Swift", page: 1, perPageNumber: 20) { result in
switch result {
case .success(let response):
expectedRepoList = response
default:
XCTFail("expectation failed")
}
exp.fulfill()
}
//Then
let json1 : [String: Any] = [ "total_count": 7,
"incomplete_results": false, "items": [["name": "Juwon","description": "just describe", "owner": [
"login": "just saying", "html_url": "https://any-url.com", "avatar_url": "https://any-url1.com"
]]]]
let jsonData = makeRepoJSON(json1)
client.complete(withStatusCode: 200, data: jsonData)
wait(for: [exp], timeout: 5.0)
XCTAssertEqual(expectedRepoList?.items, repoList, "repo data gotten")
}
func test_getRepositoriesFromSearch_deliversErrorOnClientError() {
    let (sut, client) = makeSUT()
    let localError: NSError = NSError(domain: "test", code: 0)
    let expectedError: RepoSearchError = RepoSearchError(message: localError.localizedDescription)
    var returnedError: RepoSearchError?
    // Exercise the loader several times and complete each request at its own index.
    for index in 0..<5 {
        let exp = expectation(description: "wait for load completion")
        sut.getRepositoriesFromSearch(queryString: "Swift", page: 1, perPageNumber: 20) { result in
            switch result {
            case .failure(let error):
                returnedError = error
            default:
                XCTFail("expected failure, got \(result) instead")
            }
            exp.fulfill()
        }
        client.complete(with: localError, at: index)
        wait(for: [exp], timeout: 1.0)
    }
    XCTAssertEqual(expectedError.localizedDescription, returnedError?.localizedDescription)
}
private func makeSUT() -> (sut: RepoSearchLoader, client: HTTPClientSpy) {
let client = HTTPClientSpy()
let sut = RepoSearchListService(apiClient: client)
return (sut,client)
}
private func makeRepoJSON(_ repo: [String: Any]) -> Data {
let json = repo
return try! JSONSerialization.data(withJSONObject: json)
}
}
public class HTTPClientSpy: ApiClient {
private var messages = [(url: String, params: [String: Any], completion: (Result<Data?, Error>) -> Void)]()
var requestedURLs : [String] {
return messages.map { $0.url }
}
var paramPassed: [[String: Any]] {
return messages.map {$0.params}
}
public func get(url: String, params: [String : Any], completion: @escaping (Result<Data?, Error>) -> Void) {
messages.append((url, params,completion))
}
func complete(with error: Error, at index: Int = 0 ) {
messages[index].completion(.failure(error))
}
func complete(withStatusCode code: Int,data: Data, at index: Int = 0 ) {
messages[index].completion(.success(data))
}
}
<file_sep>/IntentTakeHomeTest/Search/api/RepoSearchListService.swift
//
// RepoSearchListService.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import Foundation
public final class RepoSearchListService: RepoSearchLoader {
private let apiClient: ApiClient
public init(apiClient: ApiClient) {
self.apiClient = apiClient
}
public func getRepositoriesFromSearch(queryString: String, page: Int, perPageNumber: Int, completion: @escaping (RepoSearchLoader.Result) -> Void) {
let url = "https://api.github.com/search/repositories"
let params: [String:Any] = ["q": queryString,
"page": page,
"per_page": perPageNumber]
apiClient.get(url: url, params: params) { result in
switch result {
case .success(let data):
guard let data = data else {
return
}
let decodeData = try? JSONDecoder().decode(RemoteRepoListResponse.self, from: data)
guard let response = decodeData else { return }
completion(.success(response.toModel()))
case .failure(let error):
completion(.failure(.init(message: error.localizedDescription)))
}
}
}
}
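// Illustrative usage only (this wiring is an assumption; the real composition is
// done in RepoSearchComposer):
//
//   let service = RepoSearchListService(apiClient: URLSessionHttpClient())
//   service.getRepositoriesFromSearch(queryString: "swift", page: 1, perPageNumber: 20) { result in
//       // handle .success(RepoListResponse) or .failure(RepoSearchError)
//   }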
<file_sep>/IntentTakeHomeTestTests/RepoSearchViewModelTest.swift
//
// RepoSearchViewModelTest.swift
// IntentTakeHomeTestTests
//
// Created by Admin on 09/10/2021.
//
import Foundation
import XCTest
@testable import IntentTakeHomeTest
class RepoSearchViewModelTest: XCTestCase {
func test_init_doesntMakeAnyCall() {
let (_, repoSearchLoaderSpy) = makeSUT()
XCTAssertTrue(repoSearchLoaderSpy.messageCount == 0)
}
func test_onLoad_isCalled() {
let (sut, _) = makeSUT()
var onLoadCalled = false
sut.onload = {
onLoadCalled = true
}
sut.searchGit(queryString: "swift")
XCTAssertTrue(onLoadCalled)
}
func test_onSuccess_calledTheCorrectCallbackInTheRightOrder() {
let (sut,repoSearchLoaderSpy) = makeSUT()
var invocationOrder: [String] = []
sut.onload = {
invocationOrder.append("load")
}
let exp = expectation(description: "wait for completion")
sut.onSuccess = { data in
invocationOrder.append("success")
exp.fulfill()
}
sut.searchGit(queryString: "swift")
repoSearchLoaderSpy.completeSuccess()
wait(for: [exp], timeout: 1.0)
XCTAssertEqual(invocationOrder, ["load", "success"])
}
func test_onError_calledTheCorrectCallbackInTheRightOrder() {
let (sut,repoSearchLoaderSpy) = makeSUT()
var invocationOrder: [String] = []
sut.onload = {
invocationOrder.append("load")
}
let exp = expectation(description: "wait for completion")
sut.onError = { data in
invocationOrder.append("error")
exp.fulfill()
}
sut.searchGit(queryString: "swift")
repoSearchLoaderSpy.completeWithError()
wait(for: [exp], timeout: 1.0)
XCTAssertEqual(invocationOrder, ["load", "error"])
}
private func makeSUT() -> (sut: RepoSearchViewModel, repoSearchLoaderSpy: RepoSearchLoaderSpy) {
let repoSearchLoaderSpy = RepoSearchLoaderSpy()
let sut = RepoSearchViewModel(repoSearchLoader: repoSearchLoaderSpy)
return (sut,repoSearchLoaderSpy)
}
}
public class RepoSearchLoaderSpy: RepoSearchLoader {
private var message = [(queryString: String,page: Int, perPageNumber: Int, completion: (RepoSearchLoader.Result) -> Void)]()
public var messageCount: Int {
return message.count
}
public func getRepositoriesFromSearch(queryString: String, page: Int, perPageNumber: Int, completion: @escaping (RepoSearchLoader.Result) -> Void) {
message.append((queryString,page,perPageNumber,completion))
}
func completeWithError(index: Int = 0 ) {
message[index].completion(.failure(.init(message: "")))
}
func completeSuccess(index: Int = 0) {
message[index].completion(.success(RepoListResponse(totalCount: 0, incompleteResults: false, items: [Repository(name: "", owner: Owner(login: "", htmlUrl: "", avatarURL: ""), description: "")])))
}
}
<file_sep>/IntentTakeHomeTest/Search/api/RemoteRepoListResponse.swift
//
// RemoteRepoListResponse.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import Foundation
struct RemoteRepoListResponse : Codable {
let totalCount: Int
let incompleteResults: Bool
let items: [RemoteRepository]
enum CodingKeys: String, CodingKey {
case totalCount = "total_count"
case incompleteResults = "incomplete_results"
case items
}
func toModel() -> RepoListResponse {
return RepoListResponse(totalCount: totalCount, incompleteResults: incompleteResults, items: items.map{ $0.toModel()})
}
}
struct RemoteRepository: Codable {
let name: String?
let owner: RemoteOwner?
let description: String?
enum CodingKeys: String, CodingKey {
case name
case owner
case description
}
func toModel() -> Repository {
return Repository(name: name, owner: owner?.toModel() , description: description)
}
}
struct RemoteOwner: Codable {
let login: String?
let avatarURL: String?
let htmlUrl: String?
enum CodingKeys: String, CodingKey {
case login
case avatarURL = "avatar_url"
case htmlUrl = "html_url"
}
func toModel() -> Owner {
return Owner(login: login,htmlUrl: htmlUrl, avatarURL: avatarURL)
}
}
<file_sep>/IntentTakeHomeTest/Search/presentation/ui/custom cell/SearchResultTableViewCell.swift
//
// SearchResultTableViewCell.swift
// IntentTakeHomeTest
//
// Created by Admin on 09/10/2021.
//
import Foundation
import UIKit
final class SearchResultTableViewCell: UITableViewCell {
let containerView: UIView = {
let v = UIView()
v.layer.cornerRadius = 8
v.layer.borderWidth = 1
v.layer.shadowOffset = CGSize(width: 0, height: 3)
v.layer.shadowRadius = 3
v.layer.shadowOpacity = 0.3
v.layer.shadowPath = UIBezierPath(roundedRect: v.bounds, byRoundingCorners: .allCorners, cornerRadii: CGSize(width: 8, height: 8)).cgPath
v.layer.shouldRasterize = true
v.layer.rasterizationScale = UIScreen.main.scale
v.translatesAutoresizingMaskIntoConstraints = false
return v
}()
let userImageView: UIImageView = {
let v = UIImageView()
v.translatesAutoresizingMaskIntoConstraints = false
return v
}()
var userNameLabel : UILabel = {
let v = UILabel()
v.translatesAutoresizingMaskIntoConstraints = false
v.text = "Nigeria"
v.textAlignment = .left
v.font = UIFont.systemFont(ofSize: 18, weight: .medium)
v.numberOfLines = 0
return v
}()
var titleLabel : UILabel = {
let v = UILabel()
v.translatesAutoresizingMaskIntoConstraints = false
v.text = "Nigeria"
v.textAlignment = .left
v.font = UIFont.systemFont(ofSize: 18, weight: .medium)
v.numberOfLines = 0
return v
}()
var descriptionLabel : UILabel = {
let v = UILabel()
v.translatesAutoresizingMaskIntoConstraints = false
v.text = "Nigeria"
v.textAlignment = .left
v.font = UIFont.systemFont(ofSize: 15, weight: .regular)
v.numberOfLines = 0
return v
}()
override init(style: UITableViewCell.CellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
setupViewLayout()
userImageView.backgroundColor = .lightGray
descriptionLabel.numberOfLines = 0
userImageView.contentMode = .scaleAspectFill
isAccessibilityElement = true
}
override func layoutSubviews() {
super.layoutSubviews()
userImageView.layer.cornerRadius = 15
userImageView.layer.masksToBounds = true
}
private func setupViewLayout(){
// Add to the cell's contentView (not the cell itself) so selection and editing lay out correctly.
contentView.addSubview(containerView)
containerView.addSubview(userImageView)
containerView.addSubview(userNameLabel)
containerView.addSubview(titleLabel)
containerView.addSubview(descriptionLabel)
NSLayoutConstraint.activate([
containerView.topAnchor.constraint(equalTo: contentView.topAnchor, constant: 10),
containerView.leadingAnchor.constraint(equalTo: contentView.leadingAnchor, constant: 10),
containerView.trailingAnchor.constraint(equalTo: contentView.trailingAnchor, constant: -10),
containerView.bottomAnchor.constraint(equalTo: contentView.bottomAnchor, constant: -10),
userImageView.topAnchor.constraint(equalTo: containerView.topAnchor,constant: 20),
userImageView.leadingAnchor.constraint(equalTo: containerView.leadingAnchor,constant: 20),
userImageView.heightAnchor.constraint(equalToConstant: 30),
userImageView.widthAnchor.constraint(equalToConstant: 30),
userNameLabel.leadingAnchor.constraint(equalTo: userImageView.trailingAnchor, constant: 15),
userNameLabel.trailingAnchor.constraint(equalTo: containerView.trailingAnchor, constant: -10),
userNameLabel.topAnchor.constraint(equalTo: userImageView.centerYAnchor, constant: -15),
titleLabel.leadingAnchor.constraint(equalTo: userImageView.leadingAnchor),
titleLabel.trailingAnchor.constraint(equalTo: userNameLabel.trailingAnchor),
titleLabel.topAnchor.constraint(equalTo: userImageView.bottomAnchor, constant: 15),
descriptionLabel.leadingAnchor.constraint(equalTo: titleLabel.leadingAnchor),
descriptionLabel.trailingAnchor.constraint(equalTo: titleLabel.trailingAnchor),
descriptionLabel.topAnchor.constraint(equalTo: titleLabel.bottomAnchor, constant: 10),
descriptionLabel.bottomAnchor.constraint(equalTo: containerView.bottomAnchor,constant: -15)
])
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func configureCell(with data: Repository?) {
// Clear any previous avatar so a reused cell does not briefly show the old row's image.
userImageView.image = nil
ImageCacheManager.fetchImageData(from: data?.owner?.avatarURL ?? "") { imageData in
DispatchQueue.main.async { [weak self] in
self?.userImageView.image = UIImage(data: imageData as Data) ?? UIImage()
}
}
userNameLabel.text = data?.name
titleLabel.text = data?.owner?.login
descriptionLabel.text = data?.description
}
}
public extension UITableView {
func setEmptyMessage(_ message: String) {
let emptyView = UIView(frame: CGRect(x: self.center.x, y: self.center.y, width: self.bounds.size.width, height: self.bounds.size.height))
let messageLabel = UILabel()
messageLabel.translatesAutoresizingMaskIntoConstraints = false
messageLabel.text = message
messageLabel.textColor = .darkGray
messageLabel.numberOfLines = 0
messageLabel.textAlignment = .center
//messageLabel.font = Font().getBoldFont(ofSize: 24)
messageLabel.sizeToFit()
emptyView.addSubview(messageLabel)
messageLabel.centerYAnchor.constraint(equalTo: emptyView.centerYAnchor).isActive = true
messageLabel.centerXAnchor.constraint(equalTo: emptyView.centerXAnchor).isActive = true
self.backgroundView = emptyView
}
func restore() {
self.backgroundView = nil
}
}
<file_sep>/IntentTakeHomeTest/Search/presentation/composer/RepoSearchComposer.swift
//
// RepoSearchComposer.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import Foundation
import UIKit
public final class RepoSearchComposer {
private init(){}
public static func composedWith(repoSearchLoader: RepoSearchLoader) -> RepoSearchViewController {
let viewModel = RepoSearchViewModel(repoSearchLoader: repoSearchLoader)
let viewController = RepoSearchViewController(viewModel: viewModel)
return viewController
}
}
<file_sep>/IntentTakeHomeTest/ImageCacheManager.swift
//
// ImageCacheManager.swift
// IntentTakeHomeTest
//
// Created by Admin on 09/10/2021.
//
import Foundation
import UIKit
class ImageCacheManager {
static let cache = NSCache<NSString, NSData>()
static func fetchImageData(from url: String, completion: @escaping (NSData) -> (Void)){
let key = NSString(string: url)
if let cachedVersion = cache.object(forKey: key) {
DispatchQueue.global(qos: .background).async {
completion(cachedVersion)
}
} else {
DispatchQueue.global(qos: .background).async {
guard let url = URL(string: url) else {return}
guard let ddata = try? Data(contentsOf: url) else {return}
let ns = NSData(data: ddata)
cache.setObject(ns, forKey: key)
completion(ns)
}
}
}
}
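// Usage sketch (illustrative; `avatarURL` and `imageView` are stand-ins, not part of this file):
// the cache is keyed by the URL string, so repeated fetches for the same avatar hit the in-memory
// NSCache and skip the Data(contentsOf:) call. The completion may run on a background queue, so
// callers hop back to the main queue before touching UIKit — exactly what
// SearchResultTableViewCell.configureCell(with:) does.
//
// ImageCacheManager.fetchImageData(from: avatarURL) { data in
//     DispatchQueue.main.async {
//         imageView.image = UIImage(data: data as Data)
//     }
// }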
<file_sep>/IntentTakeHomeTest/Search/presentation/ui/RepoSearchViewController.swift
//
// RepoSearchViewController.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import Foundation
import UIKit
final public class RepoSearchViewController: UIViewController {
var layout = RepoSearchLayout()
var viewModel: RepoSearchViewModel?
convenience init(viewModel: RepoSearchViewModel) {
self.init()
self.viewModel = viewModel
// The layout is added in viewDidLoad; adding it here as well would force the view to load
// early and add the same subview twice.
}
public override func viewDidLoad() {
super.viewDidLoad()
view.addSubview(layout)
layout.translatesAutoresizingMaskIntoConstraints = false
setupView()
title = "Search GitHub"
bindView()
}
private func bindView() {
layout.didSelectRepo = { url in
if let link = URL(string: url) {
UIApplication.shared.open(link)
}
}
layout.searchQueryEntered = { [weak self] query in
guard let self = self else { return }
self.viewModel?.searchGit(queryString: query)
}
viewModel?.onload = { [weak self] in
guard let self = self else {return}
self.layout.isLoading = true
}
viewModel?.onSuccess = { [weak self] data in
guard let self = self else { return }
self.layout.isLoading = false
self.layout.data = data
}
viewModel?.onError = { [weak self] errorMessage in
guard let self = self else { return }
self.layout.isLoading = false
DispatchQueue.main.async {
self.showErrorAlert(message: errorMessage)
}
}
}
private func setupView() {
NSLayoutConstraint.activate([
layout.topAnchor.constraint(equalTo: view.safeAreaLayoutGuide.topAnchor),
layout.leadingAnchor.constraint(equalTo: view.leadingAnchor),
layout.trailingAnchor.constraint(equalTo: view.trailingAnchor),
layout.bottomAnchor.constraint(equalTo: view.bottomAnchor)
])
}
}
extension UIViewController {
func showErrorAlert(message: String) {
let alert = UIAlertController(title: "Message", message: message, preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "ok", style: .default, handler: { [weak self] _ in
self?.dismiss(animated: true, completion: nil)
}))
self.present(alert, animated: true, completion: nil)
}
}
<file_sep>/IntentTakeHomeTest/Search/presentation/ui/RepoSearchLayout.swift
//
// RepoSearchLayout.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import Foundation
import UIKit
public final class RepoSearchLayout: UIView {
var searchQueryEntered: ((String) -> Void)?
var didSelectRepo: ((String) -> Void)?
var data : [Repository]?{
didSet {
DispatchQueue.main.async {
self.tableView.reloadData()
}
}
}
var isLoading: Bool = false {
didSet {
DispatchQueue.main.async {
if self.isLoading {
self.activityIndicator.startAnimating()
} else {
self.activityIndicator.stopAnimating()
}
}
}
}
var activityIndicator: UIActivityIndicatorView = UIActivityIndicatorView()
let searchInput : SearchInput = {
let v = SearchInput()
v.translatesAutoresizingMaskIntoConstraints = false
v.textInput.placeholder = "Enter GitHub repo"
return v
}()
let tableView: UITableView = UITableView()
override init(frame: CGRect) {
super.init(frame: frame)
searchInput.textInput.delegate = self
arrangeLayout()
addLayout()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func arrangeLayout() {
addSubview(activityIndicator)
addSubview(searchInput)
addSubview(tableView)
tableView.translatesAutoresizingMaskIntoConstraints = false
activityIndicator.center = self.center
activityIndicator.translatesAutoresizingMaskIntoConstraints = false
tableView.delegate = self
tableView.dataSource = self
tableView.register(SearchResultTableViewCell.self, forCellReuseIdentifier: "\(SearchResultTableViewCell.self)")
tableView.estimatedRowHeight = 50
tableView.rowHeight = UITableView.automaticDimension
tableView.separatorStyle = .none
}
func addLayout() {
NSLayoutConstraint.activate([
activityIndicator.centerXAnchor.constraint(equalTo: self.centerXAnchor),
activityIndicator.centerYAnchor.constraint(equalTo: self.centerYAnchor),
searchInput.topAnchor.constraint(equalTo: topAnchor),
searchInput.leadingAnchor.constraint(equalTo: leadingAnchor),
searchInput.trailingAnchor.constraint(equalTo: trailingAnchor),
searchInput.heightAnchor.constraint(equalToConstant: 60),
tableView.topAnchor.constraint(equalTo: searchInput.bottomAnchor),
tableView.leadingAnchor.constraint(equalTo: leadingAnchor),
tableView.trailingAnchor.constraint(equalTo: trailingAnchor),
tableView.bottomAnchor.constraint(equalTo: bottomAnchor)
])
}
}
extension RepoSearchLayout: UITextFieldDelegate {
public func textFieldDidChangeSelection(_ textField: UITextField) {
guard let text = textField.text else {
return
}
if text.count >= 5 {
searchQueryEntered?(text)
}
}
}
extension RepoSearchLayout: UITableViewDelegate, UITableViewDataSource {
public func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return data?.count ?? 0
}
public func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
guard let cell = tableView.dequeueReusableCell(withIdentifier: "\(SearchResultTableViewCell.self)", for: indexPath) as? SearchResultTableViewCell else {
return UITableViewCell()}
cell.configureCell(with: data?[indexPath.row])
return cell
}
public func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
tableView.deselectRow(at: indexPath, animated: true)
guard let url = data?[indexPath.row].owner?.htmlUrl else { return }
didSelectRepo?(url)
}
}
<file_sep>/IntentTakeHomeTest/Search/feature/RepoSearchLoader.swift
//
// RepoSearchLoader.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import Foundation
public struct RepoSearchError : Error {
let message : String
}
public protocol RepoSearchLoader {
typealias Result = Swift.Result<RepoListResponse,RepoSearchError>
func getRepositoriesFromSearch(queryString : String, page : Int , perPageNumber : Int, completion : @escaping (Result) -> Void)
}
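// Usage sketch (illustrative; `loader` stands for any conforming instance — the spy in the tests
// or the real RepoSearchListService): a caller passes a query plus paging values and switches on
// the Result alias defined above.
//
// loader.getRepositoriesFromSearch(queryString: "swift", page: 1, perPageNumber: 20) { result in
//     switch result {
//     case let .success(response): print(response.items.count)
//     case let .failure(error): print(error.message)
//     }
// }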
<file_sep>/IntentTakeHomeTest/Search/presentation/ui/component/SearchInput.swift
//
// SearchInput.swift
// IntentTakeHomeTest
//
// Created by Admin on 09/10/2021.
//
import Foundation
import UIKit
class SearchInput : UIView {
let textInput : UITextField = {
let v = UITextField()
v.translatesAutoresizingMaskIntoConstraints = false
v.font = UIFont.systemFont(ofSize: 16, weight: .medium)
v.returnKeyType = .done
v.textColor = UIColor.black
return v
}()
let wrapper : UIView = {
let v = UIView()
v.translatesAutoresizingMaskIntoConstraints = false
v.backgroundColor = .white
v.layer.borderWidth = 1
return v
}()
let searchIcon : UIImageView = {
let v = UIImageView()
v.translatesAutoresizingMaskIntoConstraints = false
v.contentMode = .scaleAspectFit
v.image = UIImage(named: "search icon")
return v
}()
override init(frame: CGRect) {
super.init(frame: frame)
addLayout()
arrangelayout()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
extension SearchInput {
func addLayout() {
addSubview(wrapper)
addSubview(searchIcon)
addSubview(textInput)
}
func arrangelayout() {
NSLayoutConstraint.activate([
wrapper.topAnchor.constraint(equalTo: topAnchor),
wrapper.leadingAnchor.constraint(equalTo: leadingAnchor),
wrapper.trailingAnchor.constraint(equalTo: trailingAnchor),
wrapper.bottomAnchor.constraint(equalTo: bottomAnchor),
searchIcon.leadingAnchor.constraint(equalTo: wrapper.leadingAnchor, constant: 16),
searchIcon.centerYAnchor.constraint(equalTo: wrapper.centerYAnchor),
searchIcon.heightAnchor.constraint(equalToConstant: 15),
searchIcon.widthAnchor.constraint(equalToConstant: 15),
textInput.leadingAnchor.constraint(equalTo: searchIcon.trailingAnchor, constant: 20),
textInput.trailingAnchor.constraint(equalTo: wrapper.trailingAnchor, constant: -10),
textInput.centerYAnchor.constraint(equalTo: searchIcon.centerYAnchor)
])
}
}
<file_sep>/IntentTakeHomeTest/Networking/ApiClient.swift
//
// ApiClient.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import Foundation
public protocol ApiClient {
func get(url: String, params: [String:Any] ,completion: @escaping (Result<Data?, Error>) -> Void)
}
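// Minimal conforming sketch (an assumption for illustration — this is not the project's
// URLSessionHttpClient, which lives outside this file; it only shows the shape a client must take):
//
// final class StubApiClient: ApiClient {
//     func get(url: String, params: [String: Any], completion: @escaping (Result<Data?, Error>) -> Void) {
//         completion(.success(nil)) // no networking; always succeeds with empty data
//     }
// }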
<file_sep>/IntentTakeHomeTest/Search/presentation/viewModel/RepoSearchViewModel.swift
//
// RepoSearchViewModel.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import Foundation
public class RepoSearchViewModel {
let repoSearchLoader: RepoSearchLoader
init(repoSearchLoader: RepoSearchLoader){
self.repoSearchLoader = repoSearchLoader
}
var onload: (() -> Void)?
var onSuccess: (([Repository]) -> Void)?
var onError: ((String) -> Void)?
func searchGit(queryString: String) {
onload?()
repoSearchLoader.getRepositoriesFromSearch(queryString: queryString, page: 1, perPageNumber: 20) { [weak self] result in
switch result {
case let .success(model):
self?.onSuccess?(model.items)
case let .failure(error):
self?.onError?(error.message)
}
}
}
}
<file_sep>/IntentTakeHomeTest/SceneDelegate.swift
//
// SceneDelegate.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import UIKit
class SceneDelegate: UIResponder, UIWindowSceneDelegate {
var window: UIWindow?
func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) {
guard let scene = (scene as? UIWindowScene) else { return }
window = UIWindow(frame: scene.coordinateSpace.bounds)
window?.windowScene = scene
window?.rootViewController = viewController()
window?.makeKeyAndVisible()
}
func viewController() -> UIViewController {
let loader = RepoSearchListService(apiClient: URLSessionHttpClient())
let viewController = RepoSearchComposer.composedWith(repoSearchLoader: loader)
let navigationController = UINavigationController(rootViewController: viewController)
navigationController.navigationBar.barTintColor = .white
return navigationController
}
}
<file_sep>/IntentTakeHomeTest/Search/feature/RepoListResponse.swift
//
// RepoListResponse.swift
// IntentTakeHomeTest
//
// Created by Admin on 08/10/2021.
//
import Foundation
public struct RepoListResponse {
let totalCount: Int
let incompleteResults: Bool
let items: [Repository]
}
public struct Repository: Equatable {
let name: String?
let owner: Owner?
let description: String?
public static func == (lhs: Repository, rhs: Repository) -> Bool {
return lhs.name == rhs.name && lhs.owner == rhs.owner && lhs.description == rhs.description
}
}
public struct Owner: Equatable {
let login: String?
let htmlUrl: String?
let avatarURL: String?
}
|
9a1402ae564f561e84f5474d183639ee720354bf
|
[
"Swift"
] | 16 |
Swift
|
blavkjay/Intent_Test
|
94df2ac531222fe5246376ab5e3e6c813fa5b351
|
f53455ce848bb8bda48a600c2f66f42ea99f5e5d
|
refs/heads/master
|
<file_sep>import { Component, OnInit } from '@angular/core';
import { DemoComponent } from '../classes/demoComponent.class';
import { Dropdown } from '@fourjs/ng-library';
@Component({
selector: 'app-alert-demo',
templateUrl: './alert-demo.component.html',
styleUrls: ['./alert-demo.component.scss']
})
export class AlertDemoComponent extends DemoComponent implements OnInit {
hideAlert: boolean;
dropdownOption: Dropdown[] = [
{
label: 'info',
value: 'info'
},
{
label: 'success',
value: 'success'
},
{
label: 'error',
value: 'error'
},
{
label: 'warn',
value: 'warn'
}
];
dropdownValue: string = 'info';
ngOnInit(): void {
this.compSyntax = [
`<t-alert type="success" message="message........"></t-alert>`,
`<t-alert type="error" title="Title Error" message="message........"></t-alert>`,
`<t-alert type="info" title="Title Info" message="message........" [enableClose]="true"></t-alert>`,
`<t-alert type="info" title="Title Info" message="message........" [hideIcon]="true"></t-alert>`
];
this.options = {
name: 't-alert',
options: [
{
parameter: 'type',
type: `success | error | warn | info`,
desc: 'Alert type'
},
{
parameter: 'title',
type: `string`,
desc: 'Alert title'
},
{
parameter: 'message',
type: `string`,
desc: 'Alert message'
},
{
parameter: 'hideIcon',
type: `boolean`,
desc: 'Hide alert icon',
default: false
},
{
parameter: 'enableClose',
type: `boolean`,
desc: 'Display close button',
default: false
}
],
methods: [
{
method: 'onCloseClick',
param: ['boolean'],
desc: `Event triggered on click of the close button;
the close button is only a placeholder, so the actual close logic needs to be implemented via this event`
}
]
};
}
onClose(status: boolean): void {
this.hideAlert = status;
}
}
<file_sep>import { Component } from '@angular/core';
import Detect from 'tutility/detect';
@Component({
selector: 'app-utility',
templateUrl: './utility.component.html',
styleUrls: ['./utility.component.scss']
})
export class UtilityComponent {
deviceDetail: any;
detecBrowserHandler(): void {
const detect = new Detect();
this.deviceDetail = detect.parse(navigator.userAgent);
console.info(this.deviceDetail);
}
}
<file_sep>import { IOptions } from './../../common/demo-wrapper/demo-wrapper.component';
import { Component, ViewEncapsulation } from '@angular/core';
import { ToastService } from '@fourjs/ng-library';
@Component({
selector: 'app-toast',
templateUrl: './toast.component.html',
styleUrls: ['./toast.component.scss'],
encapsulation: ViewEncapsulation.None
})
export class ToastComponent {
options: IOptions = {
name: 'ToastService',
componentType: 'Service',
methods: [
{
method: 'show',
param: ['params: ToastParameters'],
desc: 'Display single toast message'
},
{
method: 'showAll',
param: ['params: ToastParameters[]'],
desc: 'Display multiple toast messages'
}
],
options: [
{
parameter: 'title',
type: 'string',
desc: 'Notification title'
},
{
parameter: 'message',
type: 'string',
desc: 'Notification message'
},
{
parameter: 'type',
type: 'string',
default: 'success',
desc: 'Specifies type of notification to show'
},
{
parameter: 'timeOut',
type: 'number',
default: '4000',
desc: 'Timeout for toast auto close'
},
{
parameter: 'closeButton',
type: 'boolean',
default: false,
desc: 'Display close button'
},
{
parameter: 'id',
type: 'any',
desc: 'Identifier of the toast'
},
{
parameter: 'sticky',
type: 'boolean',
desc: 'Whether the toast should be closed automatically based on life property or kept visible.'
}
]
};
toastPositions: string[] = ['top-right', 'top-left', 'bottom-right', 'bottom-left', 'top-center', 'bottom-center', 'center'];
toastComp: string = '<t-toast baseZIndex="1050" position="top-right"></t-toast>';
parameterCol: any[] = [
{
label: 'Name',
value: 'name',
width: '20%'
},
{
label: 'Type',
value: 'type',
width: '20%'
},
{
label: 'Default',
value: 'default',
width: '20%'
},
{
label: 'Desc',
value: 'desc',
width: '40%'
}
];
paramData: any[] = [
{
name: 'position',
type: 'string',
default: 'top-right',
desc: `Position of the component, valid values are "top-right", "top-left",
"bottom-left", "bottom-right", "top-center, "bottom-center" and "center".`
},
{
name: 'baseZIndex',
type: 'number',
default: 0,
desc: 'Base zIndex value to use in layering.'
}
];
constructor(private toastService: ToastService) { }
showToast(type: 'success' | 'error' | 'warn' | 'info'): void {
this.toastService.show({ title: 'Title', message: 'message', type, closeButton: true });
}
multipleToast(): void {
this.toastService.showMultiple([
{ title: 'Title', message: 'message', type: 'success' },
{ title: 'Title', message: 'message', type: 'warn' },
{ title: 'Title', message: 'message', type: 'info' }
]);
}
}
<file_sep>import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { Common } from './demo/common/common.module';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { UtilityComponent } from './demo/utility/utility.component';
import { ReferenceComponent } from './demo/reference/reference.component';
import { HomeComponent } from './home/home.component';
import { ToastService } from '@fourjs/ng-library';
@NgModule({
declarations: [
AppComponent,
UtilityComponent,
ReferenceComponent,
HomeComponent
],
imports: [
BrowserModule,
BrowserAnimationsModule,
AppRoutingModule,
Common
],
providers: [
ToastService
],
bootstrap: [AppComponent]
})
export class AppModule { }
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { Routes, RouterModule } from '@angular/router';
import { Common } from '../common/common.module';
import { HttpComponent } from './http/http.component';
import { ToastComponent } from './toast/toast.component';
const routes: Routes = [
{ path: 'http', component: HttpComponent },
{ path: 'toast', component: ToastComponent },
{
path: '',
pathMatch: 'full',
redirectTo: 'http'
}
];
@NgModule({
declarations: [
HttpComponent,
ToastComponent
],
imports: [
CommonModule,
RouterModule.forChild(routes),
Common
]
})
export class ServicesModule { }
<file_sep>import { Component } from '@angular/core';
const packageJson = require('node_modules/@fourjs/ng-library/package.json');
@Component({
selector: 'app-setup',
templateUrl: './setup.component.html',
styleUrls: ['./setup.component.scss']
})
export class SetupComponent {
primeDependency: string;
angularCdkDependency: string;
constructor() {
console.info(packageJson);
try {
this.primeDependency = packageJson.peerDependencies.primeng;
this.angularCdkDependency = packageJson.peerDependencies['@angular/cdk'];
} catch (e) {
console.error(e);
}
}
}
<file_sep>import { Component } from '@angular/core';
import { IOptions } from '../../common/demo-wrapper/demo-wrapper.component';
@Component({
selector: 'app-checkbox',
templateUrl: './checkbox.component.html',
styleUrls: ['./checkbox.component.scss']
})
export class CheckboxComponent {
compSyntax: string[] =
[
`<t-checkbox name="checkbox" label="Checkbox" [(value)]="modelValue"></t-checkbox>`,
`<t-checkbox label="Checkbox" disabled="true"></t-checkbox>`
];
options: IOptions = {
name: 't-checkbox',
options: [
{
parameter: 'label',
type: 'string',
desc: 'Checkbox label'
},
{
parameter: 'name',
type: 'string',
desc: 'Checkbox name'
},
{
parameter: 'disabled',
default: 'false',
type: 'boolean',
desc: 'Disable component'
},
{
parameter: 'readonly',
default: 'false',
type: 'boolean',
desc: 'Component cannot be edited'
}
]
};
modelValue: boolean = true;
}
<file_sep><div class="home-container">
<div class="top-section">
<div class="content">
<div class="text1">The Custom Component Library</div>
<div class="text2">for Angular</div>
</div>
<button class="get-started" routerLink="/gettingStarted/setup">Get Started</button>
</div>
<div class="body-content">
<h5>Why NgLibrary</h5>
<p>NgLibrary is a collection of UI components for Angular. All components are open source and free to use under
the MIT License.
</p>
</div>
</div><file_sep>const gulp = require('gulp'),
del = require('del');
const buildPath = '../ui-lib/dist/ui-library-documentation';
function moveBuildFolder() {
return gulp.src(['dist/ui-library-demo/**.*'])
.pipe(gulp.dest(buildPath));
}
function clean() {
return del([buildPath], { force: true });
}
exports.default = gulp.series(clean, moveBuildFolder);
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { DemoWrapperComponent } from './demo-wrapper/demo-wrapper.component';
import { NgLibrary } from '@fourjs/ng-library';
import { HttpClientModule } from '@angular/common/http';
import { FormsModule } from '@angular/forms';
import { CommonNoteComponent } from './common-note/common-note.component';
@NgModule({
declarations: [
DemoWrapperComponent,
CommonNoteComponent
],
imports: [
CommonModule,
NgLibrary,
FormsModule,
HttpClientModule
],
exports: [
DemoWrapperComponent,
CommonNoteComponent,
NgLibrary,
FormsModule,
HttpClientModule
]
})
export class Common { }
<file_sep>import { Component } from '@angular/core';
import { IOptions } from '../../common/demo-wrapper/demo-wrapper.component';
import { Dropdown } from '@fourjs/ng-library';
@Component({
selector: 'app-tooltip-demo',
templateUrl: './tooltip-demo.component.html',
styleUrls: ['./tooltip-demo.component.scss']
})
export class TooltipDemoComponent {
compSyntax: string[] =
// tslint:disable-next-line: max-line-length
[`<div tTooltip="Content to be shown in the tooltip" [tooltipDisabled]="false" [tooltipAnimation]="true"> Element on which tooltip is added </div>`];
options: IOptions = {
name: 'tooltip',
componentType: 'Directive',
options: [
{
parameter: 'tooltip',
type: 'string | TooltipContent',
desc: 'Content to be displayed inside tooltip'
},
{
parameter: 'tooltipDisabled',
type: 'boolean',
desc: 'Disable tooltip'
},
{
parameter: 'tooltipAnimation',
type: 'boolean',
default: true,
desc: 'Enable animation for tooltip'
},
{
parameter: 'tooltipPlacement',
type: 'top | bottom | left | right',
default: 'bottom',
desc: 'Tooltip placement'
}
]
};
tooltipOption: Dropdown[] = [
{ label: 'top', value: 'top' },
{ label: 'right', value: 'right' },
{ label: 'bottom', value: 'bottom' },
{ label: 'left', value: 'left' }
];
tooltipPosition: string = 'top';
}
<file_sep>export const APPNAME = 'UI Library';
<file_sep>import { Component } from '@angular/core';
declare var require: any;
const packageJson = require('../../../../package.json');
@Component({
selector: 'app-reference',
templateUrl: './reference.component.html',
styleUrls: ['./reference.component.scss']
})
export class ReferenceComponent {
libLists: { name: string, url: string, icon: string }[] = [];
constructor() {
let angularversion = packageJson.dependencies['@angular/core'];
angularversion = angularversion.replace(/[~^]/gi, '');
this.libLists = [
{
name: `Angular (${angularversion})`,
url: 'https://angular.io/api',
icon: 'fa-font'
},
{
name: 'Grid system',
url: 'https://nimjetushar.github.io/grid-css/',
icon: 'fa-th'
},
{
name: 'Font awesome',
url: 'https://fontawesome.com/v4.7.0/icons/',
icon: 'fa-font-awesome'
},
{
name: 'ngx-toastr',
url: 'https://www.npmjs.com/package/ngx-toastr',
icon: 'fa-comments'
}
];
}
}
<file_sep>import { Routes, RouterModule } from '@angular/router';
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { Common } from '../common/common.module';
import { BadgeComponent } from './badge/badge.component';
import { ButtonComponent } from './button/button.component';
import { DynamicFieldsDemoComponent } from './dynamic-fields/dynamic-fields.component';
import { CheckboxComponent } from './checkbox/checkbox.component';
import { RadioDemoComponent } from './radio-demo/radio-demo.component';
import { DropdownDemoComponent } from './dropdown-demo/dropdown-demo.component';
import { ScrollTopDemoComponent } from './scroll-top-demo/scroll-top-demo.component';
import { AlertDemoComponent } from './alert-demo/alert-demo.component';
const routes: Routes = [
{ path: 'badge', component: BadgeComponent },
{ path: 'button', component: ButtonComponent },
{ path: 'dynamicFields', component: DynamicFieldsDemoComponent },
{ path: 'checkbox', component: CheckboxComponent },
{ path: 'radio', component: RadioDemoComponent },
{ path: 'dropdown', component: DropdownDemoComponent },
{ path: 'navigateTop', component: ScrollTopDemoComponent },
{ path: 'alert', component: AlertDemoComponent },
{
path: '',
pathMatch: 'full',
redirectTo: 'alert'
}
];
@NgModule({
declarations: [
BadgeComponent,
ButtonComponent,
DynamicFieldsDemoComponent,
CheckboxComponent,
RadioDemoComponent,
DropdownDemoComponent,
ScrollTopDemoComponent,
AlertDemoComponent
],
imports: [
CommonModule,
RouterModule.forChild(routes),
Common
]
})
export class ComponentsModule { }
<file_sep>import { Component } from '@angular/core';
import { IOptions } from '../../common/demo-wrapper/demo-wrapper.component';
@Component({
selector: 'app-http',
templateUrl: './http.component.html',
styleUrls: ['./http.component.scss']
})
export class HttpComponent {
options: IOptions = {
name: 'HttpService',
componentType: 'Service',
methods: [
{
method: 'getRequest',
param: ['url: string', 'param: any (optional)'],
desc: 'GET request takes url and param'
},
{
method: 'postRequest',
param: ['url: string', 'param: any'],
desc: 'POST request takes url and param'
},
{
method: 'putRequest',
param: ['url: string', 'param: any'],
desc: 'PUT request takes url and param'
},
{
method: 'deleteRequest',
param: ['url: string', 'param: any (optional)'],
desc: 'DELETE request takes url and param'
}
]
};
}
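// Usage sketch (illustrative only; based solely on the method names and parameters documented in
// the `options` object above — the real HttpService signatures and return types may differ, and
// the URL and params below are made up for the example):
//
//   constructor(private http: HttpService) {}
//
//   loadItems(): void {
//     this.http.getRequest('/api/items', { page: 1 });
//     this.http.postRequest('/api/items', { name: 'demo' });
//   }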
<file_sep>import { Component, OnInit } from '@angular/core';
import { IOptions } from '../../common/demo-wrapper/demo-wrapper.component';
@Component({
selector: 'app-badge',
templateUrl: './badge.component.html',
styleUrls: ['./badge.component.scss'],
})
export class BadgeComponent implements OnInit {
compSyntax: string[];
options: IOptions;
ngOnInit(): void {
this.compSyntax = [
`<t-badge badge="fa-home"></t-badge>`,
`<t-badge badge="fa-font-awesome"></t-badge>`
];
this.options = {
name: 't-badge',
options: [
{
parameter: 'badge',
type: 'string',
desc: 'Icon class to render badge'
}
]
};
}
}
<file_sep>import {
Component, Input, OnInit, ViewEncapsulation,
ViewChild, ElementRef, AfterViewInit
} from '@angular/core';
import { ToastService } from '@fourjs/ng-library';
declare var PR: any;
export interface IColumn {
label: string;
value: string;
class?: string;
width?: string;
}
interface IDocOptions {
parameter: string;
desc: string;
type: string;
default?: string | boolean;
}
interface IMethodOptions {
method: string;
param: string[];
desc: string;
}
export interface IOptions {
name: string;
componentType?: 'Service' | 'Component' | 'Directive';
options?: IDocOptions[];
methods?: IMethodOptions[];
}
@Component({
// tslint:disable-next-line:component-selector
selector: 'demo-wrapper',
templateUrl: './demo-wrapper.component.html',
styleUrls: ['./demo-wrapper.component.scss'],
encapsulation: ViewEncapsulation.None
})
export class DemoWrapperComponent implements OnInit, AfterViewInit {
@Input() header: string;
@Input() code: string[];
@Input() set options(docData: IOptions) {
if (docData) {
this.enableOptions = true;
this.name = docData.name;
this.docOptions = docData.options;
this.methodOptions = docData.methods;
if (docData.componentType) {
this.componentType = docData.componentType;
}
}
}
@ViewChild('output', { static: true }) outputWrapper: ElementRef;
@ViewChild('ref', { static: true }) refWrapper: ElementRef;
isDemoContainer: boolean;
codeEle: { content: string; class: string }[];
enableOutput: boolean = true;
enableDoc: boolean = true;
enableOptions: boolean;
name: string;
docOptions: IDocOptions[];
methodOptions: IMethodOptions[];
componentType: string = 'Component';
docColumns: IColumn[] = [
{ label: 'Name', value: 'parameter', width: '20%' },
{ label: 'Type', value: 'type', width: '20%' },
{ label: 'Default', value: 'default', width: '20%' },
{ label: 'Description', value: 'desc', width: '40%' }
];
methodColumns: IColumn[] = [
{ label: 'Name', value: 'method', width: '20%' },
{ label: 'Parameters', value: 'param', width: '20%' },
{ label: 'Description', value: 'desc', width: '60%' }
];
constructor(private toast: ToastService) { }
ngOnInit(): void {
this.enableOutput = !this.outputWrapper.nativeElement.childNodes.length;
this.enableDoc = !this.refWrapper.nativeElement.childNodes.length;
}
ngAfterViewInit(): void {
PR.prettyPrint();
}
copyToClipboard(): void {
const copyEle = document.createElement('input');
document.body.appendChild(copyEle);
// Seed reduce with '' so an empty code array does not throw.
const codeSample = this.code.reduce((accumulator, currentValue) => accumulator + currentValue, '');
copyEle.setAttribute('value', codeSample);
copyEle.select();
document.execCommand('copy');
document.body.removeChild(copyEle);
this.toast.show({ message: 'Copied...', type: 'info' });
}
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { Routes, RouterModule } from '@angular/router';
import { Common } from '../common/common.module';
import { DeveloperComponent } from './developer/developer.component';
import { SetupComponent } from './setup/setup.component';
const routes: Routes = [
{ path: 'developer', component: DeveloperComponent },
{ path: 'setup', component: SetupComponent },
{
path: '',
pathMatch: 'full',
redirectTo: 'setup'
}
];
@NgModule({
declarations: [
DeveloperComponent,
SetupComponent
],
imports: [
CommonModule,
RouterModule.forChild(routes),
Common
]
})
export class GettingStartedModule { }
<file_sep>import { Common } from 'src/app/demo/common/common.module';
import { RouterTestingModule } from '@angular/router/testing';
export const comModules = [Common, RouterTestingModule];
<file_sep>import { Component, ViewEncapsulation } from '@angular/core';
import { IOptions } from '../../common/demo-wrapper/demo-wrapper.component';
@Component({
selector: 'app-button',
templateUrl: './button.component.html',
styleUrls: ['./button.component.scss'],
encapsulation: ViewEncapsulation.None
})
export class ButtonComponent {
compSyntax: string[] = ['<t-button label="Home" type="primary" badge="fa-home"></t-button>'];
options: IOptions = {
name: 't-button',
options: [
{
parameter: 'label',
type: 'string',
desc: 'Button label'
},
{
parameter: 'type',
type: 'string',
desc: 'Type of button, valid values are primary, secondary, tertiary',
default: 'primary'
},
{
parameter: 'badge',
type: 'string',
desc: 'Icon class to render badge'
},
{
parameter: 'badgePosition',
type: 'string',
default: 'left',
desc: 'Badge position in button, valid values are right, left'
},
{
parameter: 'isLarge',
type: 'boolean',
default: false,
desc: 'Increases button size when true'
},
{
parameter: 'disabled',
default: 'false',
type: 'boolean',
desc: 'Disabled button'
}
]
};
clickHandler(): void {
alert('button clicked !!!!');
}
}
<file_sep># UI Library API documentation
[](https://nimjetushar.github.io/ui-lib/)
<br>
[](https://semaphoreci.com/nimjetushar/ui-library-document-website)
[](https://www.codacy.com/manual/tushar/ui-library-document-website?utm_source=github.com&utm_medium=referral&utm_content=nimjetushar/ui-library-document-website&utm_campaign=Badge_Grade)
[](https://dependabot.com)
<br>
[](https://github.com/nimjetushar/ui-library-document-website/blob/master/LICENSE)

Documentation for UI Library. Please visit site [Link](https://nimjetushar.github.io/ui-lib/) for details.
<file_sep>import { Component } from '@angular/core';
import { IOptions } from '../../common/demo-wrapper/demo-wrapper.component';
@Component({
selector: 'app-radio-demo',
templateUrl: './radio-demo.component.html',
styleUrls: ['./radio-demo.component.scss']
})
export class RadioDemoComponent {
groupVal: string = 'groupA';
individualVal: boolean;
compSyntax: string[] = [`<t-radio label="Radio" name="radio" [(model)]="model"></t-radio>`];
options: IOptions = {
name: 't-radio',
options: [
{
parameter: 'label',
type: 'string',
desc: 'Radio button label'
},
{
parameter: 'name',
type: 'string',
desc: 'Radio button name'
},
{
parameter: 'disabled',
default: 'false',
type: 'boolean',
desc: 'Disable component'
}
]
};
clearRadio(): void {
this.individualVal = undefined;
}
}
<file_sep>import { TestBed, async } from '@angular/core/testing';
import { RouterTestingModule } from '@angular/router/testing';
import { AppComponent } from './app.component';
import { Common } from './demo/common/common.module';
import { NgLibrary } from '@fourjs/ng-library';
import { sort, isDefined, deepCopy } from 'tutility/utils';
describe('AppComponent', () => {
beforeEach(async(() => {
TestBed.configureTestingModule({
imports: [
RouterTestingModule,
NgLibrary,
Common
],
declarations: [
AppComponent
],
}).compileComponents();
}));
it('should create the app', () => {
const fixture = TestBed.createComponent(AppComponent);
const app = fixture.debugElement.componentInstance;
expect(app).toBeTruthy();
});
it('should test Utilility methods', () => {
expect(sort).toBeTruthy();
expect(deepCopy).toBeTruthy();
expect(isDefined).toBeTruthy();
});
it('should test sort method of UTILITY', () => {
const list = [
{ name: 'z - test item', price: '99.99', priority: 0, reviews: 309, rating: 2 },
{ name: 'z - test item', price: '1.99', priority: 0, reviews: 11, rating: 0.5 },
{ name: 'y - test item', price: '99.99', priority: 1, reviews: 99, rating: 1 },
{ name: 'y - test item', price: '0', priority: 1, reviews: 394, rating: 3.5 },
{ name: 'x - test item', price: '0', priority: 2, reviews: 249, rating: 0.5 }
];
let sortedList = sort(list, ['reviews', 'price']);
expect(sortedList[0].reviews).toEqual(11);
expect(sortedList[0].price).toEqual('1.99');
sortedList = sort(list, ['price', 'reviews']);
expect(sortedList[0].reviews).toEqual(249);
expect(sortedList[0].price).toEqual('0');
});
});
<file_sep>import { Component, ViewChild } from '@angular/core';
import { IOptions as DemoOptions } from '../../common/demo-wrapper/demo-wrapper.component';
import {
DynamicFieldsComponent, DynamicFieldButtonOptions, DynamicFields,
DynamicFieldDropdownOptions, DynamicFieldDisabledOptions
} from '@fourjs/ng-library';
@Component({
selector: 'app-dynamic-fields',
templateUrl: './dynamic-fields.component.html',
styleUrls: ['./dynamic-fields.component.scss']
})
export class DynamicFieldsDemoComponent {
@ViewChild(DynamicFieldsComponent, { static: true }) dyFieldComp: DynamicFieldsComponent;
compSyntax: string[] =
// tslint:disable-next-line: max-line-length
[`<t-dynamic-fields [fields]="field" [data]="data" [dropdownOptions]="dropdownOptions" [disabledFields]="disabledOptions" [buttonOptions]="buttonOptions" (primaryHandler)="onSearch($event)" (secondaryHandler)="onReset($event)" ></t-dynamic-fields>`];
options: DemoOptions = {
name: 't-dynamic-fields',
options: [
{
parameter: 'fields',
type: 'DynamicFields[]',
desc: 'Array of object to display fields'
},
{
parameter: 'data',
type: 'DynamicFieldDataModel',
desc: 'Object which initializes the fields with default values.'
},
{
parameter: 'dropdownOptions',
type: 'DynamicFieldDropdownOptions',
desc: 'Dropdown options used to initialize the dropdown if present in fields'
},
{
parameter: 'disabledFields',
type: 'DynamicFieldDisabledOptions',
desc: 'Disables field if specified true'
},
{
parameter: 'disableDefaultAction',
type: 'boolean',
desc: 'Hides default action buttons'
},
{
parameter: 'buttonOptions',
type: 'DynamicFieldButtonOptions',
desc: 'Customize button labels and their properties.'
}
],
methods: [
{
method: 'primaryHandler',
param: ['DynamicFieldDataModel'],
desc: 'Emits fields data to parent component'
},
{
method: 'secondaryHandler',
param: ['DynamicFieldDataModel'],
desc: 'Emits fields data to parent component'
}
]
};
field: DynamicFields[] = [
{
label: 'Car Name',
type: 'text',
model: 'name'
},
{
label: 'New model',
type: 'checkbox',
model: 'isNew'
},
{
label: 'Select Brand',
type: 'select',
model: 'brand'
}
];
dropdownOptions: DynamicFieldDropdownOptions = {
brand: [
{
label: 'Maruti',
value: 'maruti'
},
{
label: 'Hyundai',
value: 'hyundai'
},
{
label: 'Ford',
value: 'ford'
}
]
};
disabledOptions: DynamicFieldDisabledOptions = {
isNew: true
};
buttonOptions: DynamicFieldButtonOptions = {
primaryLabel: 'Submit'
};
output: any = {
isNew: true
};
displayOutput: boolean;
onSearch(param: { [key: string]: any }): void {
console.info(param);
this.displayOutput = true;
this.output = Object.assign({}, param);
}
onReset(param: { [key: string]: any }): void {
console.info(param);
this.output = {};
this.dyFieldComp.reset();
}
}
<file_sep>import { IOptions } from '../../common/demo-wrapper/demo-wrapper.component';
export class DemoComponent {
compSyntax: string[];
options: IOptions;
}
|
0f2d6b71420fff71dd0321b0a5ecee152f439d00
|
[
"JavaScript",
"TypeScript",
"HTML",
"Markdown"
] | 25 |
TypeScript
|
nimjetushar/ui-library-document-website
|
60cd55e1ff5ab647f697398aacb96fd63877f5d0
|
255488e8c31af5f6dd83b80e5fc9e82c41587467
|
refs/heads/master
|
<repo_name>MehdiHennaoui/symfony-tuto<file_sep>/src/Url/MyBundle/Controller/DefaultController.php
<?php
namespace Url\MyBundle\Controller;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Route;
class DefaultController extends Controller
{
/**
* @Route("/testurl", name="test_url")
*/
public function indexAction()
{
$tab = ['Matthieu', 'Loic', 'Elodie'];
$name = $tab[rand(0, count($tab) - 1)];
return $this->render('UrlMyBundle:Default:index.html.twig', array(
"name"=>$name
));
}
/**
* @Route("/testurl/about", name="test_url_about")
*/
public function aboutAction()
{
return $this->render('UrlMyBundle:Default:about.html.twig');
}
/**
* @Route("/testurl/info/{name}", name="test_url_info")
*/
public function infoAction($name)
{
if($name == 'toto') {
return $this->redirectToRoute("test_url");
}
return $this->render('UrlMyBundle:Default:info.html.twig', array(
"name"=>$name
));
}
}
<file_sep>/src/Contact/Bundle/ContactBundle.php
<?php
namespace Contact\Bundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
class ContactBundle extends Bundle
{
}
<file_sep>/URL/URL/Bundle/URLBundle.php
<?php
namespace URL\Bundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
class URLBundle extends Bundle
{
}
<file_sep>/README.md
symfony-tuto
============
A Symfony project created on August 10, 2017, 9:58 am.
<file_sep>/src/AppBundle/Controller/FirstController.php
<?php
namespace AppBundle\Controller;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Route;
class FirstController extends Controller
{
/**
* @Route("/", name="homepage")
*/
public function indexAction() {
return $this->render('AppBundle:First:index.html.twig');
}
/**
* @Route("/about", name="about")
*/
public function aboutAction()
{
$name = "serge";
$numero = mt_rand(1,50);
return $this->render('AppBundle:First:about.html.twig', [
"name" => $name,
"numero" => $numero
]);
}
/**
* @Route("/info/{id}/{name}", name="info", requirements={"id":"\d+", "name":"[a-z]+"})
*
*/
public function infoAction($id=0, $name="")
{
return $this->render('AppBundle:First:info.html.twig', array(
"id"=>$id,
"name"=> $name
));
}
/**
* @Route("/list/{name}", name="list")
*/
public function listAction($name) {
// create an array of terms
//
// pass it to the view
$data = ["pseudo" =>"serge","nom" => "nico","surnom" => "flo", 2 => "mehdi"];
return $this->render('AppBundle:First:list.html.twig', array(
"data" => $data,
"name" => $name
));
}
}
|
d9a7609a3d65d61e674ae5920e04de08da503ccc
|
[
"Markdown",
"PHP"
] | 5 |
PHP
|
MehdiHennaoui/symfony-tuto
|
4efce7b837c67ca82c71ca1ad2f2b8ab29311e42
|
e32c4109b0b044cf875f025e888a79b014bcfe2e
|
refs/heads/master
|
<file_sep>package org.jnit.designpatterns.prototypeCreation;
import java.util.ArrayList;
import java.util.List;
public class EmployeePrototypeCreation implements Cloneable {
private List<String> empList;
public EmployeePrototypeCreation() {
empList = new ArrayList<String>();
}
public EmployeePrototypeCreation(List<String> list) {
this.empList = list;
}
public void loadData() {
empList.add("one");
empList.add("two");
empList.add("three");
empList.add("four");
empList.add("five");
}
public List<String> getEmpList() {
return empList;
}
@Override
public Object clone() throws CloneNotSupportedException {
List<String> temp = new ArrayList<String>();
for (String s : this.getEmpList()) {
temp.add(s);
}
return new EmployeePrototypeCreation(temp);
}
}
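// Usage sketch (illustrative, not part of the original file): clone() copies the backing list
// element by element, so mutating the clone's list leaves the original prototype untouched.
// Note that clone() is declared to throw CloneNotSupportedException, so a real caller needs a
// throws clause or a try/catch around it.
//
// EmployeePrototypeCreation original = new EmployeePrototypeCreation();
// original.loadData();
// EmployeePrototypeCreation copy = (EmployeePrototypeCreation) original.clone();
// copy.getEmpList().add("six");   // original.getEmpList() still holds five entries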
<file_sep>package org.jnit.designpatterns.dependencyInjectionTest;
import org.jnit.designpatterns.dependencyInjectionMisileneousCreation.Consumer;
import org.jnit.designpatterns.dependencyInjectionMisileneousCreation.EmailServiceInjector;
import org.jnit.designpatterns.dependencyInjectionMisileneousCreation.MessageServiceInjector;
import org.jnit.designpatterns.dependencyInjectionMisileneousCreation.SMSServiceInjector;
public class MyMessageDiTest {
public static void main(String args[]) {
String msg = "Hello World";
String rec = "<EMAIL>";
String recPhone = "6784642373";
MessageServiceInjector injector = null;
Consumer app = null;
injector = new EmailServiceInjector();
app = injector.getConsumer();
app.processMessage(rec, msg);
injector = new SMSServiceInjector();
app = injector.getConsumer();
app.processMessage(recPhone, msg);
}
}
<file_sep>package org.jnit.designpatterns.factoryAbstract;
public class ECEBooks extends Books {
private String bookName;
private String author;
private String title;
public ECEBooks(String bookName, String author, String title) {
this.bookName = bookName;
this.author = author;
this.title = title;
}
@Override
public String getBookName() {
return bookName;
}
@Override
public String getAuthor() {
return author;
}
@Override
public String getTitle() {
return title;
}
}
<file_sep>package org.jnit.designpatterns.abstractFactoryTest;
import org.jnit.designpatterns.factoryAbstract.Books;
import org.jnit.designpatterns.factoryAbstract.BooksFactory;
import org.jnit.designpatterns.factoryAbstract.CSCBooksFactory;
import org.jnit.designpatterns.factoryAbstract.ECEBooksFactory;
import org.jnit.designpatterns.factoryAbstract.ITBooksFactory;
public class AbstractDesignFactoryTest {
public static void main(String[] args){
testAbstractDesginFactory();
}
private static void testAbstractDesginFactory() {
Books CSE = BooksFactory.getBook(new CSCBooksFactory("c programming", "s chandh", "let's c"));
Books IT = BooksFactory.getBook(new ITBooksFactory("Information Technology", "s chandh", "growing technology"));
Books ECE = BooksFactory.getBook(new ECEBooksFactory("EDC", "SNR", "Electronic design system"));
System.out.println(CSE.getAuthor());
System.out.println(IT.getBookName());
System.out.println(ECE.getTitle());
}
}
<file_sep>package org.jnit.designpatterns.factoryAbstract;
public class ECEBooksFactory implements BooksAbstractFactory {
private String bookName;
private String author;
private String title;
public ECEBooksFactory(String bookName, String author, String title) {
this.bookName = bookName;
this.author = author;
this.title = title;
}
public Books createBooks() {
// TODO Auto-generated method stub
return new ECEBooks(bookName, author, title);
}
}
<file_sep>package org.jnit.designpatterns.factoryAbstract;
public class BooksFactory {
public static Books getBook(BooksAbstractFactory factory){
return factory.createBooks();
}
}
<file_sep>package org.jnit.designpatterns.factoryAbstract;
public class ITBooksFactory implements BooksAbstractFactory {
private String bookName;
private String author;
private String title;
public ITBooksFactory(String bookName, String author, String title) {
this.bookName = bookName;
this.author = author;
this.title = title;
}
public Books createBooks() {
return new ITBooks(bookName, author, title);
}
}
<file_sep>package org.jnit.designpatterns.dependencyInjectionExampleForSpring;
import java.util.List;
public interface Departments {
List<String> findEmployeeDetails();
}<file_sep>package org.jnit.designpatterns.dependencyInjectionMisileneousCreation;
public class SendMessageServiceImpl implements MessageService{
public void sendMessage(String msg, String rec) {
System.out.println("sms sent to rec"+rec+"sms message"+msg);
}
}
<file_sep>package org.jnit.designpatterns.dependencyInjectionExampleForSpring;
import java.util.ArrayList;
import java.util.List;
public class DevOpsDepartment implements Departments{
public static List<String> employees = new ArrayList<String>();
static {
employees.add("nitish devOps JNIT");
employees.add("chris devOps JNIT");
employees.add("charan buildAndRelease SINIST");
employees.add("shiva releaseDepartment JNIT");
}
/* (non-Javadoc)
* @see org.jnit.designpatterns.dependencyInjectionExampleForSpring.Departments#findEmployeeDetails()
*/
public List<String> findEmployeeDetails() {
return employees;
}
}
<file_sep>package org.jnit.designpatterns.withoutDependencyInjectionMisileneousCreation;
public class EmailService {
public void sendEmail(String msg, String rec){
System.out.println("Message is "+msg+"Receiver is "+rec);
}
}
<file_sep>package org.jnit.designpatterns.dependencyInjectionMisileneousCreation;
public class EmailServiceImpl implements MessageService{
public void sendMessage(String msg, String rec) {
System.out.println("Email sent to rec"+rec+"Email message"+msg);
}
}
<file_sep>package org.jnit.designpatterns.factoryAbstract;
public class CSCBooks extends Books {
private String bookName;
private String author;
private String title;
public CSCBooks(String bookName, String author, String title) {
this.bookName = bookName;
this.author = author;
this.title = title;
}
@Override
public String getBookName() {
return bookName;
}
@Override
public String getAuthor() {
return author;
}
@Override
public String getTitle() {
return title;
}
}
<file_sep>package org.jnit.designpatterns.factoryCreatetionTest;
import org.jnit.designpatterns.factoryCreation.Computer;
import org.jnit.designpatterns.factoryCreation.ComputerFactory;
public class ComputerFactoryTest {
public static void main(String args[]) {
Computer pc = ComputerFactory.getComputer("pc", "8GB", "500GB", "2.0Gz");
Computer server = ComputerFactory.getComputer("server", "16GB", "1TB", "3.5Gz");
System.out.println("PC Configuration"+pc);
System.out.println("Server Configuration"+server);
}
}
<file_sep>package org.jnit.designpatterns.factoryAbstract;
public interface BooksAbstractFactory {
public Books createBooks();
}
|
1881a37a974ff82852b0c15a2e3df481426a693d
|
[
"Java"
] | 15 |
Java
|
NitishVanaparthi/designPattersJava
|
7fda3f40de39509b4d78a950b523fe0b2dfbdcc7
|
529ecbb9b1e635bbc7411586c5cd34e7268bd4e7
|