branch_name (stringclasses, 15 values) | target (stringlengths, 26 to 10.3M) | directory_id (stringlengths, 40 to 40) | languages (sequencelengths, 1 to 9) | num_files (int64, 1 to 1.47k) | repo_language (stringclasses, 34 values) | repo_name (stringlengths, 6 to 91) | revision_id (stringlengths, 40 to 40) | snapshot_id (stringlengths, 40 to 40) | input (stringclasses, 1 value) |
---|---|---|---|---|---|---|---|---|---|
refs/heads/main | <file_sep># sistema_frete
Case study for the Software Engineering course, using Bean Validation, JUnit and Spring Boot
| fa14751869033ea6fb223b3b856f29eb5c4b9495 | ["Markdown"] | 1 | Markdown | vivialmeida/sistema_frete | e746915afe1895c3eeb1fb9ee7e45a331766685b | 478b7b315385637fb2295e15273a9ba163875ad3 | |
refs/heads/master | <file_sep>docker network create --scope=swarm proxy
echo 'Created docker proxy network'
echo 'Running chmod on acme.json'
chmod 600 ./acme.json
echo 'Starting services...'
docker stack deploy traefik --compose-file docker-compose.yml
echo 'All services up...i think...'
<file_sep>echo $(htpasswd -nb $1 $2) | sed -e s/\\$/\\$\\$/g
| 3503f6b3c5b126633ff46e6433607adbdcdb02c3 | ["Shell"] | 2 | Shell | dodevs/swarm-portainer-traefik | e4b815c7776f43b067bc033815d8deef62875f67 | 5a55699b047c8691f15a055aa7405fa775fcd292 | |
refs/heads/main | <repo_name>bedairhassan/Minimalistic-Social-Media<file_sep>/src/JS/tools.js
let shortDate = ()=>{
let date = new Date();
date = date+''
date = date.split(" ")
return date[0]+' '+date[1]+' '+date[2]+' '+date[3]+' '+date[4]
}
// export const shortDate;
module.exports={
shortDate
}<file_sep>/src/JS/firebase.js
import firebase from 'firebase/app'
import 'firebase/firestore'
var firebaseConfig = {
apiKey: "<KEY>",
authDomain: "facebook-9cc04.firebaseapp.com",
projectId: "facebook-9cc04",
storageBucket: "facebook-9cc04.appspot.com",
messagingSenderId: "542807902668",
appId: "1:542807902668:web:cd2a0d2b9b602ee3243569"
};
firebase.initializeApp(firebaseConfig);
export const db = firebase.firestore()<file_sep>/src/visuals/Admin/DeleteUser/tools.js
// import { db } from '../../../JS/firebase'
const deleteUser = (db,user) => {
db.collection("posts").onSnapshot((snap) => {
snap = snap.docs
.map((snap) => snap.data())
.filter((snap) => snap.user == user) // filter by selected user
.map((snap) => snap.id) // fetch array of ids
.map((doc) => db.collection("posts").doc(doc).delete());
});
db.collection("sign").onSnapshot((snap) => {
snap = snap.docs
.map((snap) => snap.data())
.filter((snap) => snap.username == user) // filter by selected user
.map((snap) => snap.username)
.map((doc) => db.collection("sign").doc(doc).delete());
});
db.collection("whisper").onSnapshot((snap) => {
snap = snap.docs
.map((snap) => snap.data())
.filter((snap) => snap.sender == user || snap.receiver == user) // filter by selected user
.map((snap) => snap.id)
.map((doc) => db.collection("whisper").doc(doc).delete());
});
db.collection("friends").onSnapshot((snap) => {
snap = snap.docs
.map((snap) => snap.data())
      .filter((snap) => snap.who.split(",").includes(user)) // filter by selected user
.map((snap) => snap.who)
.map((doc) => db.collection("friends").doc(doc).delete());
});
};
module.exports = {
deleteUser
}<file_sep>/src/store/routeStore.js
import { writable } from 'svelte/store';
const WHISPER = 'whisper'
const FRIEND = 'friend'
const POST = 'post'
const LOGIN = 'login'
const route = writable(
{
render: [
WHISPER,
FRIEND,
POST
], routes: {
WHISPER,
FRIEND,
POST,
LOGIN
}
}
// obj:{
// }
)
export default route
| 70c09b8f37df4f0012cfeea064a515db51a0e005 | ["JavaScript"] | 4 | JavaScript | bedairhassan/Minimalistic-Social-Media | 13b6d76242817c268dbfdda0c5127085a77bbf00 | 9a76d2234386eb620ba2080aadb1b560c406fa8e | |
refs/heads/master | <repo_name>jreiss98/365<file_sep>/csc365a1/Similarity.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package csc365a1;
import java.util.ArrayList;
/**
*
* @author Tok
*/
public class Similarity {
private JsonParser parser = new JsonParser();
private HashTable similarBusissnes;
private double similarmagit;
private HashTable userEntry;
private double usermagit;
double cosine(HashTable s, HashTable b) throws Exception {
similarBusissnes=s;
userEntry=b;
double ab = top();
similarmagit = magmake(similarBusissnes);
usermagit = magmake(userEntry);
double value = ab / (usermagit * similarmagit);
System.out.println("Cosine done");
return value;
}
private double magmake(HashTable v) {
ArrayList<Integer> fre = new ArrayList<>();
ArrayList<Word> words = v.getAll();
for (int k = 0; k < words.size(); k++) {
fre.add(words.get(k).getCount());
}
double b = magitude(fre);
return b;
}
private double magitude(ArrayList<Integer> a) {
double count = 0;
double finalcount;
for (int i = 0; i < a.size(); i++) {
double k = a.get(i);
count += Math.pow(k, 2);
}
finalcount = Math.sqrt(count);
return finalcount;
}
    private double top() { // computes the dot product A*B
ArrayList<String> setA = new ArrayList();
ArrayList<String> setB = new ArrayList();
ArrayList<String> union = new ArrayList();
ArrayList<Word> similar;
ArrayList<Word> user;
HashTable a = similarBusissnes;
HashTable b = userEntry;
double sum = 0;
similar = a.getAll();
user = b.getAll();
for (int i = 0; i < similar.size(); i++) {
String bet = similar.get(i).word;
union.add(bet);
setA.add(bet);
}
for (int i = 0; i < user.size(); i++) {
String bet = user.get(i).word;
union.add(bet);
setB.add(bet);
}
union.retainAll(setA);
union.retainAll(setB);//gets all words in a that are in b
if (!union.isEmpty()) {
for (int i = 0; i < union.size(); i++) {
Word temp = a.get(union.get(i));
Word tem = b.get(union.get(i));
sum += ((temp.getCount()) * (tem.getCount()));
            } // multiply the counts of each shared word in a and b and add them to the sum
} else {
}
return sum;
}
int numberWebsites = 0;
}
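// ---------------------------------------------------------------------------
// Illustrative note (not part of the repo): cosine(s, b) above computes the
// standard cosine similarity between two term-frequency tables,
// cos = (A.B) / (|A| * |B|), where top() is the dot product over the shared
// words and magmake() is the Euclidean norm of each table's counts.
// ---------------------------------------------------------------------------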
| bffc4dc56e61724db8bee7227f92dc81b90ab2ae | ["Java"] | 1 | Java | jreiss98/365 | 6322d81e6c855224ffb65ee629715b0f71f562a8 | e9d23c99b410336308f1150ffee2459453aac138 | |
refs/heads/master | <repo_name>aleksandr5rikin/online-banking<file_sep>/src/ua/nure/petrikin/OnlineBanking/web/command/client/ConfirmPayCommand.java
package ua.nure.petrikin.OnlineBanking.web.command.client;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import ua.nure.petrikin.OnlineBanking.db.AccountDao;
import ua.nure.petrikin.OnlineBanking.db.MySqlDaoFactory;
import ua.nure.petrikin.OnlineBanking.db.NoticeDao;
import ua.nure.petrikin.OnlineBanking.db.PaymentDao;
import ua.nure.petrikin.OnlineBanking.db.entity.Account;
import ua.nure.petrikin.OnlineBanking.db.entity.Notice;
import ua.nure.petrikin.OnlineBanking.db.entity.PayStatus;
import ua.nure.petrikin.OnlineBanking.db.entity.Payment;
import ua.nure.petrikin.OnlineBanking.db.entity.User;
import ua.nure.petrikin.OnlineBanking.exception.AppException;
import ua.nure.petrikin.OnlineBanking.web.Path;
import ua.nure.petrikin.OnlineBanking.web.command.Command;
import ua.nure.petrikin.OnlineBanking.web.websocket.ClientWebsocket;
public class ConfirmPayCommand extends Command {
private static final Logger LOG = Logger.getLogger(ConfirmPayCommand.class);
private static final long serialVersionUID = -3224495433136774938L;
@Override
public String execute(HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException, AppException {
LOG.debug("START");
String forward = Path.COMMAND_PAYMENT_LIST;
HttpSession session = request.getSession(false);
User user = (User) session.getAttribute("user");
int recAccId = Integer.parseInt(request.getParameter("recAccId"));
int payId = Integer.parseInt(request.getParameter("paymentId"));
AccountDao accountDao = MySqlDaoFactory.getInstance().getDao(AccountDao.class);
PaymentDao paymentDao = MySqlDaoFactory.getInstance().getDao(PaymentDao.class);
NoticeDao noticeDao = MySqlDaoFactory.getInstance().getDao(NoticeDao.class);
Account account = accountDao.get(recAccId);
Payment p = paymentDao.get(payId);
p.setPayStatusId(PayStatus.SUCEFULL.getId());
p = paymentDao.save(p);
if (p.getId() == null) {
forward = Path.PAGE_ERROR_PAGE;
} else {
Notice notice = new Notice();
notice.setUserId(account.getUserId());
notice.setMessage("TEST");
noticeDao.save(notice);
ClientWebsocket.notifySend(account.getUserId());
notice = new Notice();
notice.setUserId(user.getId());
notice.setMessage("TEST");
noticeDao.save(notice);
ClientWebsocket.notifySend(user.getId());
}
LOG.trace("Confirmed pay ID ====> " + payId);
LOG.debug("FINISH");
return forward;
}
}
<file_sep>/src/ua/nure/petrikin/OnlineBanking/web/websocket/ClientWebsocket.java
package ua.nure.petrikin.OnlineBanking.web.websocket;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.websocket.EncodeException;
import javax.websocket.OnClose;
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.server.PathParam;
import javax.websocket.server.ServerEndpoint;
import ua.nure.petrikin.OnlineBanking.db.MySqlDaoFactory;
import ua.nure.petrikin.OnlineBanking.db.NoticeDao;
import ua.nure.petrikin.OnlineBanking.exception.AppException;
import ua.nure.petrikin.OnlineBanking.exception.DBException;
@ServerEndpoint("/notifications/{userId}")
public class ClientWebsocket {
private Integer id;
private static final Map<Integer, SessionWarper> sessions = Collections.synchronizedMap(new HashMap<>());
public static void notifySend(Integer userId) throws AppException {
SessionWarper sessionWarper = sessions.get(userId);
if(sessionWarper != null){
try {
Session s = sessionWarper.getSession();
Integer count = sessionWarper.getCount();
s.getBasicRemote().sendObject(++count);
} catch (IOException | EncodeException ex) {
sessions.remove(userId);
throw new AppException(ex.getMessage());
}
}
}
@OnOpen
public void onOpen(@PathParam("userId") Integer userId, Session session) throws IOException, EncodeException, DBException {
id = userId;
SessionWarper sessionWarper = new SessionWarper();
NoticeDao noticeDao = MySqlDaoFactory.getInstance().getDao(NoticeDao.class);
int count = noticeDao.getCount(userId);
sessionWarper.setCount(count);
sessionWarper.setSession(session);
session.getBasicRemote().sendObject(++count);
sessions.put(userId, sessionWarper);
}
@OnClose
public void onClose() throws IOException, EncodeException {
sessions.remove(id);
}
}
<file_sep>/src/ua/nure/petrikin/OnlineBanking/web/websocket/SessionWarper.java
package ua.nure.petrikin.OnlineBanking.web.websocket;
import javax.websocket.Session;
public class SessionWarper {
private Integer count;
private Session session;
public Integer getCount() {
return count;
}
public void setCount(Integer count) {
this.count = count;
}
public Session getSession() {
return session;
}
public void setSession(Session session) {
this.session = session;
}
}
<file_sep>/src/ua/nure/petrikin/OnlineBanking/exception/ConnectionNotAvailableException.java
package ua.nure.petrikin.OnlineBanking.exception;
public class ConnectionNotAvailableException extends AppException {
private static final long serialVersionUID = 530988069619320888L;
public ConnectionNotAvailableException(){
super();
}
public ConnectionNotAvailableException(Throwable cause){
super(cause);
}
public ConnectionNotAvailableException(String message){
super(message);
}
public ConnectionNotAvailableException(String message, Throwable cause){
super(message, cause);
}
}
<file_sep>/src/ua/nure/petrikin/OnlineBanking/db/pool/ConnectionReleaser.java
package ua.nure.petrikin.OnlineBanking.db.pool;
import java.sql.SQLException;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.log4j.Logger;
public class ConnectionReleaser implements Runnable
{
public static final Logger LOG = Logger.getLogger(ConnectionReleaser.class);
public static final int TIMEOUT = 30000;
private ConnectionPool pool;
private LinkedBlockingQueue<PooledConnection> busyConnections;
private long lastRun;
public ConnectionReleaser(ConnectionPool pool, LinkedBlockingQueue<PooledConnection> busyConnections) {
this.pool = pool;
this.busyConnections = busyConnections;
}
@Override
public void run() {
this.lastRun = System.currentTimeMillis();
LOG.debug("Time between runs: " + TIMEOUT);
while (true) {
if (this.pool.isClosed()) {
return;
}
if(((System.currentTimeMillis() - this.lastRun) > TIMEOUT)) {
if (this.busyConnections != null && this.busyConnections.size() > 0) {
for (int i = 0; i < this.busyConnections.size(); i++) {
PooledConnection conn = this.busyConnections.peek();
try {
if (conn != null && conn.isClosed()) {
this.pool.returnConnection(conn);
LOG.debug("Connection Released by ConnectionReleaser: " + conn);
}
} catch (SQLException e) {
LOG.error("Connection could not be released to the pool", e);
}
}
this.lastRun = System.currentTimeMillis();
}
}
}
}
}
<file_sep>/src/ua/nure/petrikin/OnlineBanking/exception/Messages.java
package ua.nure.petrikin.OnlineBanking.exception;
public final class Messages {
private Messages() {
}
public static final String ERR_SQL_CONNECTION = "Cannot obtain a connection from the pool";
public static final String ERR_SQL_SAVE_USER = "Cannot save new user";
public static final String ERR_SQL_SAVE_ACCOUNT = "Cannot save new account";
public static final String ERR_SQL_SAVE_PAYMENT = "Cannot save payment";
public static final String ERR_SQL_FIND_USER = "Cannot find user";
public static final String ERR_SQL_OBTAIN_USERS = "Cannot obtain user list";
public static final String ERR_SQL_OBTAIN_ACCOUNTS = "Cannot obtain account list";
public static final String ERR_SQL_OBTAIN_PAYMENTS = "Cannot obtain payments";
public static final String ERR_SQL_LOCK_ACCOUNT = "Cannot lock account";
public static final String ERR_LOCK_ACCOUNT = "Account is locked";
public static final String ERR_LIMIT = "limit is exceeded";
    public static final String ERR_UNRESOLVED_PARAM = "Unresolved param!";
public static final String ERR_AUTH_PASS = "<PASSWORD>";
public static final String ERR_AUTH_UNCOMFIRMED = "User status ===> 'UNCOMFIRMED'";
    public static final String ERR_SECUR_HASH_ALG = "No such hash algorithm";
}
<file_sep>/src/ua/nure/petrikin/OnlineBanking/web/filter/ValidationFilter.java
package ua.nure.petrikin.OnlineBanking.web.filter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.annotation.WebFilter;
import org.apache.log4j.Logger;
import ua.nure.petrikin.OnlineBanking.exception.AppException;
import ua.nure.petrikin.OnlineBanking.exception.Messages;
import ua.nure.petrikin.OnlineBanking.web.Path;
import ua.nure.petrikin.OnlineBanking.web.util.Param;
import ua.nure.petrikin.OnlineBanking.web.util.ParamContainer;
@WebFilter(servletNames = "Controller")
public class ValidationFilter implements Filter{
private static final Logger LOG = Logger.getLogger(ValidationFilter.class);
@Override
public void destroy() {
LOG.debug("DESTROY");
}
@Override
public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain)
throws IOException, ServletException {
LOG.debug("START");
boolean valid = true;
String commandName = req.getParameter("command");
LOG.trace("Request command ====> " + commandName);
List<Param> requiredParams = ParamContainer.get(commandName);
LOG.debug("requiredParams ====> " + requiredParams);
try {
for(Param param : requiredParams){
String name = param.getName();
LOG.debug("Param ====> " + name);
String providetParam = req.getParameter(name);
if(providetParam == null){
                    LOG.warn("Provided param '" + name + "' == null");
throw new AppException(Messages.ERR_UNRESOLVED_PARAM);
}
Map<String, String> tests = param.getTests();
for(Map.Entry<String, String> entry : tests.entrySet()){
Pattern p = Pattern.compile(entry.getValue());
Matcher m = p.matcher(providetParam);
if(!m.find()){
String msg = entry.getKey() + ": " + providetParam;
LOG.warn(msg);
throw new AppException(msg);
}
}
                LOG.warn("Provided param ['" + name + "' : '" + providetParam + "'] is valid");
}
} catch (AppException ex) {
valid = false;
req.setAttribute("errorMessage", ex.getMessage());
LOG.warn("Forward address ====> " + Path.PAGE_ERROR_PAGE);
req.getRequestDispatcher(Path.PAGE_ERROR_PAGE).forward(req, resp);
}
if(valid){
LOG.debug("FINISHED SUCEFULL");
chain.doFilter(req, resp);
}
}
@Override
public void init(FilterConfig filterConfig) throws ServletException {
LOG.debug("INIT");
}
}
<file_sep>/src/ua/nure/petrikin/OnlineBanking/db/entity/AccStatus.java
package ua.nure.petrikin.OnlineBanking.db.entity;
import java.io.Serializable;
public enum AccStatus implements Serializable{
UNLOCKED, LOCKED;
public static AccStatus getAccStatus(Account account) {
int accStatusId = account.getAccStatusId() - 1;
return AccStatus.values()[accStatusId];
}
public String getName() {
return name().toLowerCase();
}
public int getId() {
return ordinal() + 1;
}
}
<file_sep>/src/ua/nure/petrikin/OnlineBanking/web/command/client/AddAccountCommand.java
package ua.nure.petrikin.OnlineBanking.web.command.client;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import ua.nure.petrikin.OnlineBanking.db.AccountDao;
import ua.nure.petrikin.OnlineBanking.db.MySqlDaoFactory;
import ua.nure.petrikin.OnlineBanking.db.entity.AccStatus;
import ua.nure.petrikin.OnlineBanking.db.entity.Account;
import ua.nure.petrikin.OnlineBanking.db.entity.User;
import ua.nure.petrikin.OnlineBanking.exception.AppException;
import ua.nure.petrikin.OnlineBanking.web.Path;
import ua.nure.petrikin.OnlineBanking.web.command.Command;
public class AddAccountCommand extends Command {
private static final Logger LOG = Logger.getLogger(AddAccountCommand.class);
private static final long serialVersionUID = -4060341422635798377L;
@Override
public String execute(HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException, AppException {
LOG.debug("START");
String forward = Path.PAGE_ERROR_PAGE;
HttpSession session = request.getSession(false);
String name = request.getParameter("name");
User user = (User) session.getAttribute("user");
Account account = new Account();
account.setName(name);
account.setUserId(user.getId());
account.setAccStatusId(AccStatus.UNLOCKED.getId());
AccountDao accountDao = MySqlDaoFactory.getInstance().getDao(AccountDao.class);
accountDao.save(account);
        if (account.getId() != null) {
forward = Path.COMMAND_ACCOUNT_LIST;
}
LOG.trace("New Account ID ====> " + account.getId());
LOG.debug("FINISH");
return forward;
}
}<file_sep>/src/ua/nure/petrikin/OnlineBanking/web/command/client/AddPaymentCommand.java
package ua.nure.petrikin.OnlineBanking.web.command.client;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.Date;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import ua.nure.petrikin.OnlineBanking.db.AccountDao;
import ua.nure.petrikin.OnlineBanking.db.MySqlDaoFactory;
import ua.nure.petrikin.OnlineBanking.db.PaymentDao;
import ua.nure.petrikin.OnlineBanking.db.entity.Account;
import ua.nure.petrikin.OnlineBanking.db.entity.PayStatus;
import ua.nure.petrikin.OnlineBanking.db.entity.Payment;
import ua.nure.petrikin.OnlineBanking.exception.AppException;
import ua.nure.petrikin.OnlineBanking.exception.Messages;
import ua.nure.petrikin.OnlineBanking.web.Path;
import ua.nure.petrikin.OnlineBanking.web.command.Command;
public class AddPaymentCommand extends Command {
private static final Logger LOG = Logger.getLogger(AddPaymentCommand.class);
private static final long serialVersionUID = -7506166075358571214L;
@Override
public String execute(HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException, AppException {
LOG.debug("START");
String forward = Path.PAGE_ERROR_PAGE;
LOG.debug(request.getParameter("accountId"));
LOG.debug(request.getParameter("reciverId"));
LOG.debug(request.getParameter("sum"));
LOG.debug(request.getParameter("found"));
LOG.debug(request.getParameter("comment"));
String found = request.getParameter("found");
int accId = Integer.parseInt(request.getParameter("accountId"));
int sum = Integer.parseInt(request.getParameter("sum"));
String comment = request.getParameter("comment");
AccountDao accountDao = MySqlDaoFactory.getInstance().getDao(AccountDao.class);
Account account = accountDao.get(accId);
if (account.getAccStatusId() == 2) {
throw new AppException(Messages.ERR_LOCK_ACCOUNT);
}
if (account.getLim() != 0) {
Timestamp t = account.getDate();
Calendar c = Calendar.getInstance();
c.setTimeInMillis(t.getTime());
c.add(Calendar.MONTH, 1);
long limitTime = c.getTimeInMillis();
long currentTime = new Date().getTime();
if (currentTime > limitTime) {
t = new Timestamp(limitTime);
account.setDate(t);
account.setSpent(0);
accountDao.save(account);
}
if (account.getLim() - account.getSpent() <= 0) {
throw new AppException(Messages.ERR_LIMIT);
}
}
Payment payment = new Payment();
payment.setAccId(accId);
payment.setSum(sum);
payment.setComment(comment);
if (found.equals("found")) {
payment.setReciverId(accId);
payment.setPayStatusId(PayStatus.SUCEFULL.getId());
} else {
int reciverId = Integer.parseInt(request.getParameter("reciverId"));
payment.setReciverId(reciverId);
payment.setPayStatusId(PayStatus.PREPEARED.getId());
}
PaymentDao paymentDao = MySqlDaoFactory.getInstance().getDao(PaymentDao.class);
paymentDao.save(payment);
if (payment.getId() != null) {
forward = Path.COMMAND_ACCOUNT_LIST;
}
LOG.trace("New Payment ID ====> " + payment.getId());
LOG.debug("FINISH");
return forward;
}
}
<file_sep>/src/ua/nure/petrikin/OnlineBanking/web/common/LogoutCommand.java
package ua.nure.petrikin.OnlineBanking.web.common;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import ua.nure.petrikin.OnlineBanking.web.Path;
import ua.nure.petrikin.OnlineBanking.web.command.Command;
public class LogoutCommand extends Command{
private static final long serialVersionUID = 3658358534929790935L;
@Override
public String execute(HttpServletRequest request, HttpServletResponse response) {
HttpSession session = request.getSession(false);
if (session != null) {
session.invalidate();
}
return Path.COMMAND_GET_LOGIN;
}
}
<file_sep>/src/ua/nure/petrikin/OnlineBanking/db/PaymentDao.java
package ua.nure.petrikin.OnlineBanking.db;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import ua.nure.petrikin.OnlineBanking.db.entity.Payment;
import ua.nure.petrikin.OnlineBanking.db.pool.ConnectionPool;
import ua.nure.petrikin.OnlineBanking.exception.DBException;
public class PaymentDao extends AbstractDao<Payment, Integer>{
private static final String SELECT_USER_PAY = "SELECT payments.* FROM payments, accounts "
+ "WHERE (payments.account_id = accounts.id "
+ "OR payments.reciver_id = accounts.id) AND accounts.user_id = ?";
public PaymentDao(ConnectionPool connection) {
super(connection);
}
@Override
protected String getSelectQuery() {
return "SELECT * FROM payments";
}
@Override
protected String getUpdateQuery() {
return "UPDATE payments SET pay_status_id = ? WHERE id = ?";
}
@Override
protected String getCreateQuery() {
return "INSERT INTO payments VALUE (DEFAULT, ?, ?, ?, ?, ?, DEFAULT)";
}
@Override
protected String getDeleteQuery() {
return "DELETE FROM payments WHERE id = ?";
}
@Override
protected Payment parseResultSet(ResultSet rs) throws SQLException {
Payment payment = new Payment();
payment.setId(rs.getInt(Fields.PAYMENT_ID));
payment.setAccId(rs.getInt(Fields.PAYMENT_ACCOUNT_ID));
payment.setReciverId(rs.getInt(Fields.PAYMENT_RECIVER_ID));
payment.setSum(rs.getInt(Fields.PAYMENT_SUM));
payment.setPayStatusId(rs.getInt(Fields.PAYMENT_STATUS_ID));
payment.setComment(rs.getString(Fields.PAYMENT_COMMENT));
payment.setDate(rs.getTimestamp(Fields.PAYMENT_DATE));
return payment;
}
@Override
protected void prepareUpdateStatement(Payment entity, PreparedStatement ps) throws SQLException {
ps.setInt(1, entity.getPayStatusId());
ps.setInt(2, entity.getId());
}
@Override
protected void prepareInsertStatement(Payment entity, PreparedStatement ps) throws SQLException {
ps.setInt(1, entity.getAccId());
ps.setInt(2, entity.getReciverId());
ps.setInt(3, entity.getSum());
ps.setInt(4, entity.getPayStatusId());
ps.setString(5, entity.getComment());
}
public List<Payment> findUserPayment(int userId) throws DBException{
Connection con = getConnectionFromPool();
List<Payment> payments = new ArrayList<>();
try {
PreparedStatement ps = con.prepareStatement(SELECT_USER_PAY);
ps.setInt(1, userId);
ResultSet rs = ps.executeQuery();
while(rs.next()){
payments.add(parseResultSet(rs));
}
} catch (SQLException ex) {
throw new DBException(ex.getMessage());
}finally {
try {
con.close();
} catch (SQLException e) {
throw new DBException(e.getMessage());
}
}
return payments;
}
} | 1dcc49a31fb7450f5cb2b0c9a9d884d14b634839 | ["Java"] | 12 | Java | aleksandr5rikin/online-banking | 5a1cb2bd04544fa72ebb3a8fa91289a6023e6888 | df115851d54fbb9a3474d62a43be980919c641e1 | |
refs/heads/master | <repo_name>jinzhu6/Realtime_Flaw_Object<file_sep>/Database.py
import sqlite3 as lite
from enum import Enum
from PyQt5.QtGui import QPixmap
class Database():
__instance = None
@staticmethod
def getInstance():
if Database.__instance is None:
Database()
return Database.__instance
def __init__(self):
if Database.__instance is not None:
raise Exception("This class is a singleton!")
else:
Database.__instance = self
self.con = lite.connect("database/Mainbase.db")
def getCaminfo(self,ID):
if ID is not None:
command = "select camid, coordinate_x, coordinate_y,rate,camip,camport,username,userpassword from camera where camid = '{}'".format(str(ID))
else:
command = "select camid, coordinate_x, coordinate_y,rate,camip,camport,username,userpassword from camera where camid = 'cam_01'"
cur = self.con.cursor()
cur.execute(command)
rows = cur.fetchall()
ret = [(row[4], row[5], row[6], row[7],row[0], row[3], row[1], row[2]) for row in rows]
cur.close()
return ret
def getCamurl(self,ID):
if ID is not None:
command = "select camurl from camera where camid = '{}'".format(str(ID))
else:
command = "select camurl from camera where camid = 'cam_01'"
cur = self.con.cursor()
cur.execute(command)
rows = cur.fetchall()
ret = [(row[0]) for row in rows]
cur.close()
return ret
def getOpenflag(self,ID):
if ID is not None:
command = "select isopen from camera where camid = '{}'".format(str(ID))
else:
command = "select isopen from camera where camid = 'cam_01'"
cur = self.con.cursor()
cur.execute(command)
rows = cur.fetchall()
ret = [(row[0]) for row in rows]
cur.close()
return ret
def updataCaminfo(self,camid='cam_01',coordinate_x=0.0,coordinate_y=0.0,rate=1,camip='',camport='',username='',userpassword=''):
sql= "update camera set coordinate_x=?,coordinate_y=?,rate=?,camurl=?,camip=?,camport=?,username=?,userpassword=? where camid='{}'".format(str(camid))
cur = self.con.cursor()
if camip=='':
camurl=''
else:
camurl="rtsp://" + str(username) + ":" + str(userpassword) + "@" + str(camip) + ":" + str(
camport) + "/h264/ch33/main/av_stream"
cur.execute(sql, (coordinate_x,coordinate_y,rate,camurl,camip,camport,username,userpassword))
cur.close()
self.con.commit()
def updataOpenflag(self,camid='cam_01',isopen = 0):
sql= "update camera set isopen= ? where camid='{}' ".format(str(camid))
cur = self.con.cursor()
cur.execute(sql, [(isopen)])
cur.close()
self.con.commit()
def insertIntoCamera(self,camid='cam_01',coordinate_x=0.0,coordinate_y=0.0,rate=1,camip='',camport='',username='',userpassword='',isopen=0):
sql = "INSERT INTO camera(camid,coordinate_x,coordinate_y,rate,camurl,camip,camport,username,userpassword,isopen) VALUES(?,?,?,?,?,?,?,?,?,?)"
cur = self.con.cursor()
camurl = ''
cur.execute(sql, (camid,coordinate_x,coordinate_y,rate,camurl,camip,camport,username,userpassword,isopen))
cur.close()
self.con.commit()
    # Operations on the Flaw table
def insertIntoFlaw(self,flaw_id=1000,flaw_type='',camera_id='',coordinate_x=0.0,coordinate_y=0.0,width=0.0,highth=0.0,flaw_time='0000-00-00 00:00:00',cloth_type=''):
sql = "INSERT INTO Flaw(flaw_id,flaw_type,camera_id,coordinate_x,coordinate_y,width,highth,flaw_time,cloth_type) VALUES(?,?,?,?,?,?,?,?,?)"
cur = self.con.cursor()
cur.execute(sql,(flaw_id,flaw_type,camera_id,coordinate_x,coordinate_y,width,highth,flaw_time,cloth_type))
cur.close()
self.con.commit()
def getFlawCount(self):
command = 'SELECT flaw_id FROM Flaw'
cur = self.con.cursor()
cur.execute(command)
rows = cur.fetchall()
ret = [(row[0]) for row in rows]
cur.close()
return len(ret)
def getDataFromFlaw(self,flaw_type=""):
command = "select flaw_id,flaw_type,camera_id,coordinate_x,coordinate_y,width,highth,flaw_time from Flaw where flaw_type='{}'".format(str(flaw_type))
cur = self.con.cursor()
cur.execute(command)
rows = cur.fetchall()
ret = [(row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7]) for row in rows]
cur.close()
return ret
def insertIntoFlawStatistic(self,flaw_type='',flaw_cont=0):
sql = "INSERT INTO FlawStatistic(flaw_type,flaw_cont) VALUES(?,?)"
cur = self.con.cursor()
cur.execute(sql,(flaw_type,flaw_cont))
cur.close()
self.con.commit()
def updateFlawStatistic(self,flaw_type='',flaw_cont=0):
sql = "UPDATE FlawStatistic SET flaw_cont= ? where flaw_type='{}'".format(str(flaw_type))
cur = self.con.cursor()
cur.execute(sql,[(flaw_cont)])
cur.close()
self.con.commit()
def getcntFromFlawStatistic(self,flaw_type=""):
command = "select flaw_cont from FlawStatistic where flaw_type = '{}'".format(str(flaw_type))
cur = self.con.cursor()
cur.execute(command)
rows = cur.fetchall()
ret = [(row[0]) for row in rows]
cur.close()
return ret[0]
def getallFromFlawStatistic(self):
command = "select flaw_type,flaw_cont from FlawStatistic"
cur = self.con.cursor()
cur.execute(command)
rows = cur.fetchall()
ret = [(row[0],row[1]) for row in rows]
cur.close()
return ret
    # Operations on the FlawStatistic table
# def insertIntoFlawStatistic(self,flaw_label=1,flaw_type=''):
# sql = "INSERT INTO FlawStatistic(flaw_label,flaw_type) VALUES(?,?)"
# cur = self.con.cursor()
# cur.execute(sql,(flaw_label,flaw_type))
# cur.close()
# self.con.commit()
# def getFlawCount(self):
# command = 'SELECT flaw_type,count(flaw_label) AS flaw_count FROM FlawStatistic GROUP BY flaw_label'
# cur = self.con.cursor()
# coursor =cur.execute(command)
# cur.close()
# return coursor
    # # Usage:
    # # for row in coursor:
    # #     row[0]  flaw type
    # #     row[1]  flaw count
    # Operations on the Setting table
def insertIntoSetting(self,model='yolo',width=0.0,highth=0.0,rate=1.0):
sql = "INSERT INTO Setting(model,width,highth,rate) VALUES(?,?,?,?)"
cur = self.con.cursor()
cur.execute(sql,(model,width,highth,rate))
cur.close()
self.con.commit()
def updateSetting(self,model='yolo',width=0.0,higth=0.0,rate=1.0):
sql = "UPDATE Setting SET width=?,highth=?,rate=? where model='{}'".format(model)
cur = self.con.cursor()
cur.execute(sql, (width,higth,rate))
cur.close()
self.con.commit()
def getSetting(self,model='yolo'):
command = "select width,highth,rate from Setting where model = '{}'".format(model)
cur = self.con.cursor()
cur.execute(command)
rows = cur.fetchall()
ret = [(row[0],row[1],row[2]) for row in rows]
cur.close()
return ret[0]
def deleteAllFlaw(self):
commad = "delete from Flaw"
cur = self.con.cursor()
cur.execute(commad)
cur.close()
self.con.commit()
def deleteAllFlawStatistic(self):
commad = "delete from FlawStatistic"
cur = self.con.cursor()
cur.execute(commad)
cur.close()
self.con.commit()
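# ---------------------------------------------------------------------------
# Minimal usage sketch of the singleton above (illustrative only): it assumes
# database/Mainbase.db already exists with the Flaw and FlawStatistic tables
# that the methods query, and that FlawStatistic has been seeded with one row
# per flaw type (as MainWindow does on startup). The flaw values are dummies.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    db = Database.getInstance()                     # always reuse the single instance
    next_id = db.getFlawCount()                     # number of rows already in Flaw
    db.insertIntoFlaw(flaw_id=next_id,
                      flaw_type='油污',              # must match a flaw_type used elsewhere
                      camera_id='cam_01',
                      coordinate_x=1.0, coordinate_y=2.0,
                      width=3.0, highth=4.0,
                      flaw_time='2020-01-01 00:00:00')
    print(db.getcntFromFlawStatistic(flaw_type='油污'))   # count stored in FlawStatistic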
<file_sep>/add_window/Statistics.py
from PyQt5.QtWidgets import QFileDialog
from PyQt5.QtWidgets import QMainWindow
from PyQt5.uic import loadUi
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtCore import QTimer
from PyQt5.QtGui import QImage, QPixmap
from PyQt5.QtWidgets import QMainWindow, QStatusBar, QListWidget, QAction, qApp, QMenu,QMessageBox
from PyQt5.uic import loadUi
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import QSizePolicy
import openpyxl
from Database import Database
class_name = {
'1':'停车痕紧',
'2':'停车痕松',
'3':'断经',
'4':'错花',
'5':'并纬',
'6':'缩纬',
'7':'缺纬',
'8':'糙纬',
'9':'折返',
'10':'断纬',
'11':'油污',
'12':'起机',
'13':'尽机',
'14':'经条',
'15':'擦白',
'16':'擦伤',
'17':'浆斑',
'18':'空织'
}
class AddstatisForm(QMainWindow):
def __init__(self, parent=None, ui=None):
super(AddstatisForm, self).__init__(parent)
loadUi("UI/Statistics.ui", self)
self.tablemodel = QStandardItemModel(10,2)
self.tablemodel.setHorizontalHeaderLabels(['瑕疵种类', '总数'])
self.tablemodel_2 = QStandardItemModel(10, 8)
self.tablemodel_2.setHorizontalHeaderLabels(['编号', '瑕疵种类', '相机', 'X坐标', 'Y坐标', 'W', 'H', '时间'])
self.s1 = 'all'
self.s2 = 'all'
self.comboBox.addItem('all')
self.comboBox.addItem('cam_01')
self.comboBox.addItem('cam_02')
self.comboBox.addItem('cam_03')
self.comboBox_2.addItems(['all', '停车痕紧', '停车痕松','断经','错花','并纬','缩纬','缺纬','糙纬','折返','断纬','油污','起机','尽机','经条','擦白','擦伤','浆斑','空织'])
self.pushButton_2.clicked.connect(self.getFile)
self.pushButton.clicked.connect(self.sta_work_all)
self.pushButton_3.clicked.connect(self.GetSeclectFile)
self.pushButton_4.clicked.connect(self.select_work_all)
self.comboBox.currentIndexChanged.connect(self.selectionchange)
self.comboBox_2.currentIndexChanged.connect(self.selectionchange_2)
self.cnt = -1
self.cnt_select=-1
def write_excel_xlsx(self,workbook,path, sheet, value):
try:
index = len(value)
# workbook = openpyxl.Workbook()
# sheet = workbook.active
# sheet = workbook.create_sheet(str(sheet_name))
# sheet.title = sheet_name
for i in range(0, index):
for j in range(0, len(value[i])):
sheet.cell(row=i+1, column=j+1, value=str(value[i][j]))
workbook.save(path)
except:
res = QMessageBox.question(self,'警告','路径冲突,请更换路径或关闭已打开的文件')
def identify_work(self, ID = 0, label = "0", camera_id = "cam_01", X = 1.0, Y = 1.0, W = 1.0, H = 1.0, flaw_time = 0):
ID = str(ID)
label = str(label)
camera_id = str(camera_id)
X = str(X)
Y = str(Y)
W = str(W)
H = str(H)
flaw_time = str(flaw_time)
self.cnt_select += 1
self.sm=self.tablemodel_2
self.sm.setItem(self.cnt_select, 0, QStandardItem(ID))
self.sm.setItem(self.cnt_select, 1, QStandardItem(label))
self.sm.setItem(self.cnt_select, 2, QStandardItem(camera_id))
self.sm.setItem(self.cnt_select, 3, QStandardItem(X))
self.sm.setItem(self.cnt_select, 4, QStandardItem(Y))
self.sm.setItem(self.cnt_select, 5, QStandardItem(W))
self.sm.setItem(self.cnt_select, 6, QStandardItem(H))
self.sm.setItem(self.cnt_select, 7, QStandardItem(flaw_time))
self.st_info.setModel(self.sm)
#QTableView
self.st_info.setColumnWidth(0,100)
self.st_info.setColumnWidth(1,200)
    def downallinfo(self, book_name_xlsx):
        print(book_name_xlsx)
workbook = openpyxl.Workbook()
sheet1 = workbook.active
sheet1.title = '停车痕紧'
if(len(book_name_xlsx)>=6 and book_name_xlsx[-5:] == '.xlsx'):
for i in range(19)[1:]:
li = [['编号', '类型', '相机ID', 'X轴坐标', 'Y轴坐标', '宽度', '高度', '时间戳']]
value3 = Database.getInstance().getDataFromFlaw(flaw_type=class_name[str(i)])
for j in value3:
li.append(j)
sheet_name_xlsx = class_name[str(i)]
if (i == 1):
self.write_excel_xlsx(workbook,book_name_xlsx, sheet1, li)
else:
sheet = workbook.create_sheet(str(sheet_name_xlsx))
self.write_excel_xlsx(workbook,book_name_xlsx, sheet, li)
li = []
def getFile(self):
book_name_xlsx = QFileDialog.getSaveFileName(self,
"另存为",
"",
"Excel工作簿(*.xlsx)")[0]
print(book_name_xlsx)
workbook = openpyxl.Workbook()
sheet1 = workbook.active
sheet1.title = '停车痕紧'
if(len(book_name_xlsx)>=6 and book_name_xlsx[-5:] == '.xlsx'):
for i in range(19)[1:]:
li = [['编号', '类型', '相机ID', 'X轴坐标', 'Y轴坐标', '宽度', '高度', '时间戳']]
value3 = Database.getInstance().getDataFromFlaw(flaw_type=class_name[str(i)])
for j in value3:
li.append(j)
sheet_name_xlsx = class_name[str(i)]
if (i == 1):
self.write_excel_xlsx(workbook,book_name_xlsx, sheet1, li)
else:
sheet = workbook.create_sheet(str(sheet_name_xlsx))
self.write_excel_xlsx(workbook,book_name_xlsx, sheet, li)
li = []
    def sta_work_all(self): # display in the UI
self.st_info.setModel(self.tablemodel)
value = Database.getInstance().getallFromFlawStatistic()
self.cnt = -1
for i in range(len(value)):
label = value[i][0]
num = value[i][1]
self.show_work(label=label, num=num)
def show_work(self, label = "0", num=0):
label = str(label)
num=str(num)
self.cnt += 1
self.sm=self.tablemodel
self.sm.setItem(self.cnt, 0, QStandardItem(label))
self.sm.setItem(self.cnt, 1, QStandardItem(num))
self.st_info.setModel(self.sm)
#QTableView
self.st_info.setColumnWidth(0,100)
self.st_info.setColumnWidth(1, 200)
    def select_work_all(self): # display in the UI
self.st_info.setModel(self.tablemodel_2)
str1 = self.s1
str2 = self.s2
li_select=[]
if (str2 == 'all'):
for i in range(19)[1:]:
value3_select = Database.getInstance().getDataFromFlaw(flaw_type=class_name[str(i)])
for j in value3_select:
if (j[2] == str1 or str1=='all'):
li_select.append(j)
else:
value3_select = Database.getInstance().getDataFromFlaw(flaw_type=str2)
for j in value3_select:
if (j[2] == str1 or str1=='all'):
li_select.append(j)
# self.cnt_select = -1
while (self.cnt_select != -1):
self.tablemodel_2.removeRow(self.cnt_select)
self.cnt_select -= 1
for i in range(len(li_select)):
ID = li_select[i][0]
label = li_select[i][1]
camer_id = li_select[i][2]
X = li_select[i][3]
Y = li_select[i][4]
W = li_select[i][5]
H = li_select[i][6]
flaw_time = li_select[i][7]
self.identify_work(ID=ID, label=label, camera_id=camer_id, X=X, Y=Y, W=W, H=H, flaw_time=flaw_time)
def selectionchange(self):
self.s1 = self.comboBox.currentText()
print(self.s1)
def selectionchange_2(self):
self.s2 = self.comboBox_2.currentText()
print(self.s2)
def GetSeclectFile(self):
book_name_xlsx = QFileDialog.getSaveFileName(self,
"另存为",
"",
"Excel工作簿(*.xlsx)")[0]
print(book_name_xlsx)
str1 = self.s1
str2 = self.s2
print(str1)
print(str2)
if (str2 == 'all'):
workbook = openpyxl.Workbook()
sheet1 = workbook.active
sheet1.title = '停车痕紧'
if(len(book_name_xlsx)>=6 and book_name_xlsx[-5:] == '.xlsx'):
for i in range(19)[1:]:
li = [['编号', '类型', '相机ID', 'X轴坐标', 'Y轴坐标', '宽度', '高度', '时间戳']]
value3 = Database.getInstance().getDataFromFlaw(flaw_type=class_name[str(i)])
for j in value3:
if (j[2] == str1 or str1=='all'):
li.append(j)
sheet_name_xlsx = class_name[str(i)]
if (i == 1):
self.write_excel_xlsx(workbook,book_name_xlsx, sheet1, li)
else:
sheet = workbook.create_sheet(str(sheet_name_xlsx))
self.write_excel_xlsx(workbook,book_name_xlsx, sheet, li)
li = []
else:
workbook = openpyxl.Workbook()
sheet1 = workbook.active
sheet1.title = str2
if (len(book_name_xlsx) >= 6 and book_name_xlsx[-5:] == '.xlsx'):
li = [['编号', '类型', '相机ID', 'X轴坐标', 'Y轴坐标', '宽度', '高度', '时间戳']]
value3 = Database.getInstance().getDataFromFlaw(flaw_type=str2)
for j in value3:
if (j[2] == str1 or str1=='all'):
li.append(j)
self.write_excel_xlsx(workbook,book_name_xlsx, sheet1, li)
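# ---------------------------------------------------------------------------
# Standalone sketch of the export pattern used by getFile()/GetSeclectFile()
# above, without the Qt dialogs: one worksheet per flaw class, a header row,
# then the matching rows from the Flaw table. "flaw_report.xlsx" is only a
# placeholder path; the real code gets the path from QFileDialog.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    header = ['编号', '类型', '相机ID', 'X轴坐标', 'Y轴坐标', '宽度', '高度', '时间戳']
    workbook = openpyxl.Workbook()
    first_sheet = True
    for flaw_type in class_name.values():           # one worksheet per flaw class
        rows = [header] + Database.getInstance().getDataFromFlaw(flaw_type=flaw_type)
        if first_sheet:
            sheet = workbook.active                 # reuse the default sheet
            first_sheet = False
        else:
            sheet = workbook.create_sheet(flaw_type)
        sheet.title = flaw_type
        for r, row in enumerate(rows, start=1):
            for c, value in enumerate(row, start=1):
                sheet.cell(row=r, column=c, value=str(value))
    workbook.save("flaw_report.xlsx")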
<file_sep>/database/test.py
from PyQt5.QtWidgets import QMainWindow
from PyQt5.uic import loadUi
from PyQt5.QtWidgets import QFileDialog
from PyQt5.QtGui import QPixmap
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
class AddMainWindow(QMainWindow):
def __init__(self, parent=None, ui=None):
super(AddMainWindow, self).__init__(parent)
loadUi(ui, self)
self.tablemodel = QStandardItemModel(10,6)
self.tablemodel.setHorizontalHeaderLabels(['ID','瑕疵种类','X坐标','Y坐标','时间'])
self.TV_info.setModel(self.tablemodel)
# self.add.clicked.connect(self.addToDatabase)
self.cancel.clicked.connect(self.close)
self.file_browse.clicked.connect(lambda: self.getFile(self.file))
self.PB_work.clicked.connect(self.identify_work)
def close(self):
self.destroy(True)
def addToDatabase(self):
pass
def getFile(self, lineEdit):
pic_map = QFileDialog.getOpenFileName()[0]
lineEdit.setText(pic_map)
# print(pic_map)
pixmap = QPixmap (pic_map)
self.LB_Pic.setPixmap(pixmap)
self.LB_Pic.setScaledContents(True)
self.LB_Pic.show()
def identify_work(self):
# def __init__(self, parent=None):
# super(MainWindow, self).__init__(parent)
# loadUi('qtdesigner.ui', self)
# self.pushButton.clicked.connect(self.say)
# self.showData()
# def showData(self):
        # # prepare the data model
self.sm=self.tablemodel
        # set the header column names
# self.sm.setHorizontalHeaderItem(0, QtGui.QStandardItem("Name"))
# self.sm.setHorizontalHeaderItem(1, QtGui.QStandardItem("NO."))
        # set the data entries
self.sm.setItem(0, 0, QStandardItem("刘董"))
self.sm.setItem(0, 1, QStandardItem("123"))
self.sm.setItem(1, 0, QStandardItem("杜阳阳"))
self.sm.setItem(1, 1, QStandardItem("456"))
self.sm.setItem(2, 0, QStandardItem("常家鑫"))
self.sm.setItem(2, 1, QStandardItem("789"))
        # set entry color and font
# self.sm.item(0, 0).setForeground(QBrush(QColor(255, 0, 0)))
# self.sm.item(0, 0).setFont(QFont("Times", 10, QFont.Black))
# self.sm.item(3, 1).setBackground(QBrush(QColor(255, 255, 0)))
        # sort by number
# self.sm.sort(1,Qt.DescendingOrder)
        # bind the data model to the QTableView
self.TV_info.setModel(self.sm)
#QTableView
self.TV_info.setColumnWidth(0,100)
self.TV_info.setColumnWidth(1,200)
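# ---------------------------------------------------------------------------
# Self-contained sketch of the same model/view pattern: the demo above needs
# the Designer .ui file loaded by loadUi, so this variant binds an equivalent
# QStandardItemModel to a plain QTableView instead. Runnable on its own.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import sys
    from PyQt5.QtWidgets import QApplication, QTableView
    from PyQt5.QtGui import QStandardItemModel, QStandardItem

    app = QApplication(sys.argv)
    model = QStandardItemModel(3, 2)
    model.setHorizontalHeaderLabels(['Name', 'NO.'])
    for row, (name, no) in enumerate([("刘董", "123"), ("杜阳阳", "456"), ("常家鑫", "789")]):
        model.setItem(row, 0, QStandardItem(name))
        model.setItem(row, 1, QStandardItem(no))
    view = QTableView()
    view.setModel(model)        # same binding as TV_info.setModel(self.sm) above
    view.show()
    sys.exit(app.exec_())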
<file_sep>/add_window/SetCamera.py
from PyQt5.QtWidgets import QFileDialog
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtWidgets import QMessageBox
from Database import Database
from add_window.AddMainWindow import AddMainWindow
class SetCamera(AddMainWindow):
def __init__(self, parent=None,camid='cam_01'):
self.clear = True
super().__init__(parent, "UI/SetCamera.ui")
#self.file_browse.clicked.connect(lambda: self.getFile(self.file))
self.ID.setText(camid)
self.set.clicked.connect(self.updataCaminfoToDatabase)
self.clear.clicked.connect(self.clearCaminfoToDatabase)
caminfo = Database.getInstance().getCaminfo(ID=camid)[0]
self.X.setText(str(caminfo[6]))
self.Y.setText(str(caminfo[7]))
self.rate.setText(str(caminfo[5]))
self.IP.setText(str(caminfo[0]))
self.Port.setText(str(caminfo[1]))
self.Username.setText(str(caminfo[2]))
self.Password.setText(str(caminfo[3]))
@QtCore.pyqtSlot()
def clearCaminfoToDatabase(self):
self.clear = True
camid=str(self.ID.text())
Database.getInstance().updataCaminfo(camid=camid)
Database.getInstance().updataOpenflag(camid,0)
caminfo = Database.getInstance().getCaminfo(ID=camid)[0]
self.X.setText(str(caminfo[6]))
self.Y.setText(str(caminfo[7]))
self.rate.setText(str(caminfo[5]))
self.IP.setText(str(caminfo[0]))
self.Port.setText(str(caminfo[1]))
self.Username.setText(str(caminfo[2]))
self.Password.setText(str(caminfo[3]))
@QtCore.pyqtSlot()
def updataCaminfoToDatabase(self):
self.clear = False
camid=str(self.ID.text())
coordinate_x=str(self.X.text())
coordinate_y=str(self.Y.text())
rate=str(self.rate.text())
camip=str(self.IP.text())
camport=str(self.Port.text())
username=str(self.Username.text())
userpassword=str(self.Password.text())
Database.getInstance().updataCaminfo(camid=camid,
coordinate_x=float(coordinate_x),
coordinate_y=float(coordinate_y),
rate=float(rate),
camip=camip,
camport=camport,
username=username,
                                             userpassword=userpassword)
Database.getInstance().updataOpenflag(camid,1)
# def closeEvent(self,QCloseEvent):
# res = QMessageBox.question(self,'提示','是否保存设置?',QMessageBox.Yes|QMessageBox.No,QMessageBox.No)
# if res == QMessageBox.Yes:
# camid=str(self.ID.text())
# coordinate_x=str(self.X.text())
# coordinate_y=str(self.Y.text())
# rate=str(self.rate.text())
# camip=str(self.IP.text())
# camport=str(self.Port.text())
# username=str(self.Username.text())
# userpassword=str(self.Password.text())
# Database.getInstance().updataCaminfo(camid=camid,
# coordinate_x=float(coordinate_x),
# coordinate_y=float(coordinate_y),
# rate=float(rate),
# camip=camip,
# camport=camport,
# username=username,
# userpassword=<PASSWORD>)
# Database.getInstance().updataOpenflag(camid,1)
# QCloseEvent.accept()
# else:
# QCloseEvent.accept()
# def addToDatabase(self):
# id = str(self.id.text())
# group = str(self.group.text())
# location = str(self.location.text())
# x = str(self.x_coord.text())
# y = str(self.y_coord.text())
# file = str(self.file.text())
# Database.getInstance().insertIntoCamera(id, location, x, y, group, file)
# self.destroy()
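# ---------------------------------------------------------------------------
# Sketch of the RTSP URL that Database.updataCaminfo() assembles from the
# fields entered in this dialog (the /h264/ch33/main/av_stream path is
# hard-coded in Database.py). The credentials and address below are
# placeholders, not values from the project.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    username, userpassword = "admin", "secret"      # placeholder credentials
    camip, camport = "192.168.1.64", "554"          # placeholder address and RTSP port
    camurl = ("rtsp://" + username + ":" + userpassword + "@"
              + camip + ":" + camport + "/h264/ch33/main/av_stream")
    print(camurl)   # this string is what MainWindow later passes to cv2.VideoCapture()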
<file_sep>/src/requirements-cpu.txt
numpy
scipy
Pillow
cython
matplotlib
scikit-image
tensorflow
keras==2.2.4
opencv-python
imageio
h5py
imgaug
argparse>=1.1
dill==0.2.7.1
fire >= 0.1.3
psutil >= 5.4.5
requests >= 2.18.4
slidingwindow >= 0.0.13
tqdm >= 4.23.4
tensorpack >= 0.8.5
pillow
h2o==3.18.0.8
PyQt5
<file_sep>/add_window/help.py
from PyQt5.QtWidgets import QFileDialog
from PyQt5.QtWidgets import QMainWindow
from PyQt5.uic import loadUi
#from Database import Database
class HelpForm(QMainWindow):
def __init__(self, parent=None, ui=None):
super(HelpForm, self).__init__(parent)
loadUi("UI/help.ui", self)
# def addToDatabase(self):
# id = str(self.id.text())
# group = str(self.group.text())
# location = str(self.location.text())
# x = str(self.x_coord.text())
# y = str(self.y_coord.text())
# file = str(self.file.text())
# Database.getInstance().insertIntoCamera(id, location, x, y, group, file)
# self.destroy()
<file_sep>/MainWindow.py
import time
import numpy as np
import cv2.cv2 as cv2
import qdarkstyle
from PyQt5 import QtCore, QtWidgets,QtGui
from PyQt5.QtCore import QTimer
from PyQt5.QtGui import QImage, QPixmap
from PyQt5.QtWidgets import QApplication,QSplashScreen
from PyQt5.QtWidgets import QMainWindow, QStatusBar, QListWidget, QAction, qApp, QMenu,QMessageBox
from PyQt5.uic import loadUi
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import QSizePolicy
from Database import Database
from add_window.SetCamera import SetCamera
from add_window.AddPicworkForm import AddPicworkForm
from add_window.Setsetting import SetSetting
from processor.MainProcessor import MainProcessor
from yolo3.yolo import YOLO
from yolo3.yolo_online import YOLO_ONLINE
from add_window.Statistics import AddstatisForm
from add_window.help import HelpForm
class_name = {
'1':'停车痕紧',
'2':'停车痕松',
'3':'断经',
'4':'错花',
'5':'并纬',
'6':'缩纬',
'7':'缺纬',
'8':'糙纬',
'9':'折返',
'10':'断纬',
'11':'油污',
'12':'起机',
'13':'尽机',
'14':'经条',
'15':'擦白',
'16':'擦伤',
'17':'浆斑',
'18':'空织'
}
class MainWindow(QMainWindow):
def __init__(self,model_flag='local'):
self.model_flag = model_flag
super(MainWindow, self).__init__()
loadUi("./UI/MainForm.ui", self)
self.cam_01 = False
self.cam_02 = False
self.cam_03 = False
self.run_flag = False
self.vs1 = None
self.feed1 = None
self.vs2 = None
self.feed2 = None
self.vs3 = None
self.feed3 = None
self.cnt = -1
res = QMessageBox.question(self,'提示','是否使用在线识别模式?',QMessageBox.Yes|QMessageBox.No,QMessageBox.No)
if res == QMessageBox.Yes:
self.model_flag = 'online'
else:
self.model_flag = 'local'
if (self.model_flag=='local'):
self.yolo = YOLO()
elif (self.model_flag == 'online'):
self.yolo = YOLO_ONLINE()
self.st = time.time()
self.coco = False
self.first = True
self.CameraA.setScaledContents(True)
self.CameraA.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Ignored)
self.CameraB.setScaledContents(True)
self.CameraB.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Ignored)
self.CameraC.setScaledContents(True)
self.CameraC.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Ignored)
self.cam_clear_gaurd = False
self.processor = MainProcessor(['cam_01','cam_02','cam_03'])
self.database = Database.getInstance()
        #---------- database initialization ------------------------#
# self.database.insertIntoCamera('cam_01')
# self.database.insertIntoCamera('cam_02')
# self.database.insertIntoCamera('cam_03')
# self.database.insertIntoSetting()
# for i in range(19)[1:]:
# flow_type=class_name[str(i)]
# self.database.insertIntoFlawStatistic(flaw_type=flow_type,flaw_cont=0)
# self.database.deleteAllFlaw()
#----------------------------------------------------------#
        #---------- test data ---------------------------------------#
# for i in range(19)[1:]:
# self.database.insertIntoFlaw(flaw_id=i-1,flaw_type=class_name[str(i)])
# for i in range(19)[1:]:
# self.database.updateFlawStatistic(flaw_type=class_name[str(i)],flaw_cont=1)
#----------------------------------------------------------#
self.statistic_cnt = {}
for i in range(19)[1:]:
self.statistic_cnt[class_name[str(i)]]=self.database.getcntFromFlawStatistic(flaw_type=class_name[str(i)])
self.database.updataCaminfo('cam_01')
self.database.updataCaminfo('cam_02')
self.database.updataCaminfo('cam_03')
self.database.updataOpenflag('cam_01',0)
self.database.updataOpenflag('cam_02',0)
self.database.updataOpenflag('cam_03',0)
        self.ID = self.database.getFlawCount()  # get the largest flaw index
self.updateCamInfo()
self.PB_CamAsettings.clicked.connect(self.setCameraA)
self.PB_CamBsettings.clicked.connect(self.setCameraB)
self.PB_CamCsettings.clicked.connect(self.setCameraC)
self.PB_picture.clicked.connect(self.picworkForm)
self.PB_start.clicked.connect(self.start)
self.PB_end.clicked.connect(self.stop)
self.User.triggered.connect(self.helpform)
self.PB_statistics.clicked.connect(self.statisForm)
self.PB_settings.clicked.connect(self.setting)
self.tablemodel = QStandardItemModel(10, 8)
self.tablemodel.setHorizontalHeaderLabels(['编号','瑕疵种类','相机','X坐标','Y坐标','瑕疵宽度','瑕疵高度','时间戳'])
self.tV_info.setModel(self.tablemodel)
self.timer = QTimer(self)
self.timer.timeout.connect(self.update_image)
self.timer.start(50)
def setCameraA(self):
if self.run_flag == False:
addWin = SetCamera(parent=self,camid='cam_01')
addWin.show()
else:
res = QMessageBox.question(self,'警告','请先停止实时识别')
def setCameraB(self):
if self.run_flag == False:
addWin = SetCamera(parent=self,camid='cam_02')
addWin.show()
else:
res = QMessageBox.question(self,'警告','请先停止实时识别')
def setCameraC(self):
if self.run_flag == False:
addWin = SetCamera(parent=self,camid='cam_03')
addWin.show()
else:
res = QMessageBox.question(self,'警告','请先停止实时识别')
def picworkForm(self):
#self.yolo.close_session()
if (self.model_flag=='local'):
addWin = AddPicworkForm(parent=self,yolo=self.yolo,model_flag=self.model_flag)
else:
addWin = AddPicworkForm(parent=self,yolo=self.yolo,model_flag=self.model_flag)
addWin.show()
def helpform(self):
addWin = HelpForm(parent=self)
addWin.show()
def statisForm(self):
for i in range(19)[1:]:
self.database.updateFlawStatistic(flaw_type=class_name[str(i)],flaw_cont=self.statistic_cnt[class_name[str(i)]])
addWin = AddstatisForm(parent=self)
addWin.show()
def setting(self):
addWin = SetSetting(parent=self)
addWin.show()
def update_image(self):
try:
if self.run_flag == True:
frames = {}
frames[0]=None
frames[1]=None
frames[2]=None
if self.cam_01 == False and self.cam_02 == False and self.cam_03 == False:
return
if self.cam_01 == True:
ret1, frame1 = self.vs1.read()
if ret1 == True:
frames[0]=frame1
if self.cam_02 == True:
ret2, frame2 = self.vs2.read()
if ret2 == True:
frames[1]=frame2
if self.cam_03 == True:
ret3, frame3 = self.vs3.read()
if ret3 == True:
frames[2]=frame3
packet = self.processor.getProcessedImage(yolo = self.yolo,frames = frames)
boxes1 = packet['boxes1']
boxes2 = packet['boxes2']
boxes3 = packet['boxes3']
if self.cam_01 and isinstance(packet['frame01'],np.ndarray):
qimg = self.toQImage(packet['frame01'])
self.CameraA.setPixmap(QPixmap.fromImage(qimg))
if self.cam_02 and isinstance(packet['frame02'],np.ndarray):
qimg = self.toQImage(packet['frame02'])
self.CameraB.setPixmap(QPixmap.fromImage(qimg))
if self.cam_03 and isinstance(packet['frame03'],np.ndarray):
qimg = self.toQImage(packet['frame03'])
self.CameraC.setPixmap(QPixmap.fromImage(qimg))
if self.cam_01:
for i in range(len(boxes1['label'])):
ID = self.ID
self.ID += 1
label = class_name[boxes1['label'][i]]
if self.coco:
label = boxes1['label'][i]
X = boxes1['X'][i]
Y = boxes1['Y'][i]
W = boxes1['W'][i]
H = boxes1['H'][i]
camer_id = 'cam_01'
timeStamp = int(time.time())
timeArray = time.localtime(timeStamp)
flaw_time = time.strftime("%Y-%m-%d %H:%M:%S", timeArray)
rate = Database.getInstance().getSetting()
wid_value = rate[0]
hight_value = rate[1]
rate_value = rate[2]
ft_time = time.time()
Y = (ft_time-self.st)*rate_value + Y
if(W>=wid_value and H>=hight_value):
self.identify_work(ID=ID, label=label, camera_id=camer_id, X=X, Y=Y, W=W, H=H, flaw_time=flaw_time)
self.database.insertIntoFlaw(flaw_id=int(ID),
flaw_type=label,
camera_id=camer_id,
coordinate_x=X,
coordinate_y=Y,
width=W,
highth=H,
flaw_time=flaw_time)
self.statistic_cnt[label]+=1
print('ID: '+str(ID))
if self.cam_02:
for i in range(len(boxes2['label'])):
ID = self.ID
self.ID += 1
label = class_name[boxes2['label'][i]]
if self.coco:
label = boxes2['label'][i]
X = boxes2['X'][i]
Y = boxes2['Y'][i]
W = boxes2['W'][i]
H = boxes2['H'][i]
camer_id = 'cam_02'
timeStamp = int(time.time())
timeArray = time.localtime(timeStamp)
flaw_time = time.strftime("%Y-%m-%d %H:%M:%S", timeArray)
rate = Database.getInstance().getSetting()
wid_value = rate[0]
hight_value = rate[1]
rate_value = rate[2]
ft_time = time.time()
Y = (ft_time-self.st)*rate_value + Y
if(W>=wid_value and H>=hight_value):
self.identify_work(ID=ID, label=label, camera_id=camer_id, X=X, Y=Y, W=W, H=H, flaw_time=flaw_time)
self.database.insertIntoFlaw(flaw_id=int(ID),
flaw_type=label,
camera_id=camer_id,
coordinate_x=X,
coordinate_y=Y,
width=W,
highth=H,
flaw_time=flaw_time)
self.statistic_cnt[label]+=1
print('ID: '+str(ID))
if self.cam_03:
for i in range(len(boxes3['label'])):
ID = self.ID
self.ID += 1
label = class_name[boxes3['label'][i]]
if self.coco:
                            label = boxes3['label'][i]
X = boxes3['X'][i]
Y = boxes3['Y'][i]
W = boxes3['W'][i]
H = boxes3['H'][i]
camer_id = 'cam_03'
timeStamp = int(time.time())
timeArray = time.localtime(timeStamp)
flaw_time = time.strftime("%Y-%m-%d %H:%M:%S", timeArray)
rate = Database.getInstance().getSetting()
wid_value = rate[0]
hight_value = rate[1]
rate_value = rate[2]
ft_time = time.time()
Y = (ft_time-self.st)*rate_value + Y
if(W>=wid_value and H>=hight_value):
self.identify_work(ID=ID, label=label, camera_id=camer_id, X=X, Y=Y, W=W, H=H, flaw_time=flaw_time)
self.database.insertIntoFlaw(flaw_id=int(ID),
flaw_type=label,
camera_id=camer_id,
coordinate_x=X,
coordinate_y=Y,
width=W,
highth=H,
flaw_time=flaw_time)
self.statistic_cnt[label]+=1
print('ID: '+str(ID))
except:
res = QMessageBox.question(self,'警告','链接网络摄像头失败,请核对摄像头参数')
self.run_flag=False
def identify_work(self, ID = 0, label = "0", camera_id = "cam_01", X = 1.0, Y = 1.0, W = 1.0, H = 1.0, flaw_time = 0):
ID = str(ID)
label = str(label)
camera_id = str(camera_id)
X = str(X)
Y = str(Y)
W = str(W)
H = str(H)
flaw_time = str(flaw_time)
self.cnt += 1
self.sm=self.tablemodel
self.sm.setItem(self.cnt, 0, QStandardItem(ID))
self.sm.setItem(self.cnt, 1, QStandardItem(label))
self.sm.setItem(self.cnt, 2, QStandardItem(camera_id))
self.sm.setItem(self.cnt, 3, QStandardItem(X))
self.sm.setItem(self.cnt, 4, QStandardItem(Y))
self.sm.setItem(self.cnt, 5, QStandardItem(W))
self.sm.setItem(self.cnt, 6, QStandardItem(H))
self.sm.setItem(self.cnt, 7, QStandardItem(flaw_time))
self.tV_info.setModel(self.sm)
#QTableView
self.tV_info.setColumnWidth(0,100)
self.tV_info.setColumnWidth(1,200)
def updateCamInfo(self):
self.feed1 = self.database.getCamurl('cam_01')[0]
self.feed2 = self.database.getCamurl('cam_02')[0]
self.feed3 = self.database.getCamurl('cam_03')[0]
self.cam_01 = self.database.getOpenflag('cam_01')[0]
self.cam_02 = self.database.getOpenflag('cam_02')[0]
self.cam_03 = self.database.getOpenflag('cam_03')[0]
if self.feed1 == "":
self.feed1 = 0
if self.feed2 == "":
self.feed2 = 0
if self.feed3 == "":
self.feed3 = 0
#self.processor = MainProcessor(self.cam_selector.currentText())
if self.first==False:
self.vs1 = cv2.VideoCapture(self.feed1)
self.vs2 = cv2.VideoCapture(self.feed2)
self.vs3 = cv2.VideoCapture(self.feed3)
else:
self.first = False
#def updateLog(self):
def updateCamFlagInfo(self):
self.cam_01 = self.database.getOpenflag('cam_01')[0]
self.cam_02 = self.database.getOpenflag('cam_02')[0]
self.cam_03 = self.database.getOpenflag('cam_03')[0]
def toQImage(self, raw_img):
from numpy import copy
img = copy(raw_img)
qformat = QImage.Format_Indexed8
if len(img.shape) == 3:
if img.shape[2] == 4:
qformat = QImage.Format_RGBA8888
else:
qformat = QImage.Format_RGB888
outImg = QImage(img.tobytes(), img.shape[1], img.shape[0], img.strides[0], qformat)
outImg = outImg.rgbSwapped()
return outImg
@QtCore.pyqtSlot()
def start(self):
if self.run_flag == True :
return
self.updateCamFlagInfo()
#self.database.updataOpenflag('cam_01',1)
#self.database.updataOpenflag('cam_02',1)
#self.database.updataOpenflag('cam_03',1)
if(self.cam_01 or self.cam_02 or self.cam_03):
ash = QSplashScreen(QtGui.QPixmap("UI/NUI.png"))
ash.setFont(QtGui.QFont('Microsoft YaHei UI',20))
ash.show()
ash.showMessage("摄像头链接中,请稍候",QtCore.Qt.AlignLeft | QtCore.Qt.AlignBottom, QtCore.Qt.green)
self.updateCamInfo()
self.run_flag = True
if (self.cam_01 == True and self.vs1.isOpened() == False) or (self.cam_02 == True and self.vs2.isOpened() == False) or (self.cam_03 == True and self.vs3.isOpened() == False):
self.run_flag = False
ash.close()
res = QMessageBox.question(self,'警告','摄像头链接失败,请核对摄像头参数')
else:
ash.close()
else:
res = QMessageBox.question(self,'警告','请先设置摄像头参数')
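    # Stop-button slot: release all captures, mark the camera previews as stopped, and optionally reset the camera open flags in the database.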
@QtCore.pyqtSlot()
def stop(self):
if self.run_flag == False:
return
self.run_flag = False
if (self.vs1 != None):
self.vs1.release()
self.CameraA.setText("已停止")
if (self.vs2 != None):
self.vs2.release()
self.CameraB.setText("已停止")
if (self.vs3 != None):
self.vs3.release()
self.CameraC.setText("已停止")
self.vs1 = None
self.vs2 = None
self.vs3 = None
res = QMessageBox.question(self,'提示','是否保留摄像头设置?',QMessageBox.Yes|QMessageBox.No,QMessageBox.No)
if res == QMessageBox.Yes:
return
else:
self.database.updataOpenflag('cam_01',0)
self.database.updataOpenflag('cam_02',0)
self.database.updataOpenflag('cam_03',0)
#self.updateCamInfo()
self.cam_01 = False
self.cam_02 = False
self.cam_03 = False
def closeEvent(self,QCloseEvent):
res = QMessageBox.question(self,'提示','是否退出系统?',QMessageBox.Yes|QMessageBox.No,QMessageBox.No)
if res == QMessageBox.Yes:
QCloseEvent.accept()
else:
QCloseEvent.ignore()
<file_sep>/app.py
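# Application entry point: shows a splash screen while the MainWindow is constructed, then enters the Qt event loop.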
import sys
# import qdarkstyle
from PyQt5.QtWidgets import QApplication,QSplashScreen
from MainWindow import MainWindow
from PyQt5 import QtGui,QtCore
def main():
app = QApplication(sys.argv)
ash = QSplashScreen(QtGui.QPixmap("UI/NUI.png"))
ash.setFont(QtGui.QFont('Microsoft YaHei UI',20))
ash.show()
ash.showMessage("系统启动中,请稍候",QtCore.Qt.AlignLeft | QtCore.Qt.AlignBottom, QtCore.Qt.green)
main_window = MainWindow()
# app.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
main_window.show()
ash.finish(main_window)
sys.exit(app.exec_())
if __name__ == '__main__':
main()
<file_sep>/yolo3/yolo_online.py
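# YOLOv3 client variant: posts the preprocessed image to an HTTP model-serving endpoint
# and decodes the returned feature maps locally through yolo_eval.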
# -*- coding: utf-8 -*-
"""
Class definition of YOLO_v3 style detection model on image and video
"""
import colorsys
import os
from timeit import default_timer as timer
import numpy as np
from keras import backend as K
from keras.models import load_model
from keras.layers import Input
from PIL import Image, ImageFont, ImageDraw
import tensorflow as tf
import requests
import json
from yolo3.yolo3.model import yolo_eval, yolo_body, tiny_yolo_body
from yolo3.yolo3.utils import letterbox_image
import os
from keras.utils import multi_gpu_model
class_name = {
'1':'停车痕紧',
'2':'停车痕松',
'3':'断经',
'4':'错花',
'5':'并纬',
'6':'缩纬',
'7':'缺纬',
'8':'糙纬',
'9':'折返',
'10':'断纬',
'11':'油污',
'12':'起机',
'13':'尽机',
'14':'经条',
'15':'擦白',
'16':'擦伤',
'17':'浆斑',
'18':'空织'
}
class YOLO_ONLINE(object):
_defaults = {
"model_path": 'yolo3/model_data/yolov3forflaw.h5',
"anchors_path": 'yolo3/model_data/yolo_anchors.txt',
"classes_path": 'yolo3/model_data/classes.txt',
"score" : 0.3,
"iou" : 0.45,
"model_image_size" : (416, 416),
"gpu_num" : 1,
}
# _defaults = {
# "model_path": 'yolo3/model_data/yolov3.h5',
# "anchors_path": 'yolo3/model_data/yolo_anchors.txt',
# "classes_path": 'yolo3/model_data/coco_classes.txt',
# "score" : 0.3,
# "iou" : 0.45,
# "model_image_size" : (416, 416),
# "gpu_num" : 1,
# }
@classmethod
def get_defaults(cls, n):
if n in cls._defaults:
return cls._defaults[n]
else:
return "Unrecognized attribute name '" + n + "'"
def __init__(self, **kwargs):
self.__dict__.update(self._defaults) # set up default values
self.__dict__.update(kwargs) # and update with user overrides
self.class_names = self._get_class()
self.anchors = self._get_anchors()
self._t1 = 0.3
self._t2 = 0.45
#self.generate()
self.sess = K.get_session()
self.depth = (5+len(self.class_names))*3
self.input1 = tf.placeholder(tf.float32,shape=(None,13,13,self.depth))
self.input2 = tf.placeholder(tf.float32,shape=(None,26,26,self.depth))
self.input3 = tf.placeholder(tf.float32,shape=(None,52,52,self.depth))
self.boxes, self.scores, self.classes = self.generate()
def _get_class(self):
classes_path = os.path.expanduser(self.classes_path)
with open(classes_path) as f:
class_names = f.readlines()
class_names = [c.strip() for c in class_names]
return class_names
def _get_anchors(self):
anchors_path = os.path.expanduser(self.anchors_path)
with open(anchors_path) as f:
anchors = f.readline()
anchors = [float(x) for x in anchors.split(',')]
return np.array(anchors).reshape(-1, 2)
def generate(self):
# Generate colors for drawing bounding boxes.
hsv_tuples = [(x / len(self.class_names), 1., 1.)
for x in range(len(self.class_names))]
self.colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
self.colors = list(
map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)),
self.colors))
np.random.seed(10101) # Fixed seed for consistent colors across runs.
np.random.shuffle(self.colors) # Shuffle colors to decorrelate adjacent classes.
np.random.seed(None) # Reset seed to default.
self.input_image_shape = K.placeholder(shape=(2, ))
boxes, scores, classes = yolo_eval([self.input1,self.input2,self.input3], self.anchors,
len(self.class_names), self.input_image_shape,
score_threshold=self.score, iou_threshold=self.iou)
return boxes,scores, classes
#"http://192.168.3.3:8500"
def detect_image(self, image=None,endpoint = "http://127.0.0.1:8500"):
start = timer()
ans = {}
ans['label']=[]
ans['X']=[]
ans['Y']=[]
ans['W']=[]
ans['H']=[]
if self.model_image_size != (None, None):
assert self.model_image_size[0]%32 == 0, 'Multiples of 32 required'
assert self.model_image_size[1]%32 == 0, 'Multiples of 32 required'
boxed_image = letterbox_image(image, tuple(reversed(self.model_image_size)))
else:
new_image_size = (image.width - (image.width % 32),
image.height - (image.height % 32))
boxed_image = letterbox_image(image, new_image_size)
image_data = np.array(boxed_image, dtype='float32')
print(image_data.shape)
image_data /= 255.
image_data = np.expand_dims(image_data, 0) # Add batch dimension.
print(image_data.shape)
inputda = np.array(image_data).tolist()
input_data = {
"model_name": "default",
"model_version": 1,
"data": {
"images": inputda}
}
result = requests.post(endpoint, json=input_data)
#print(result.elapsed.total_seconds())
end1 = timer()
print(end1-start)
new_dict = json.loads(result.text)
out_boxes = np.array(new_dict['out_boxes'])
out_scores = np.array(new_dict['out_scores'])
out_classes = np.array(new_dict['out_classes'])
output = [out_boxes,out_scores,out_classes]
out_boxes, out_scores, out_classes = self.sess.run(
[self.boxes, self.scores, self.classes],
feed_dict={
self.input1: out_boxes,
self.input2: out_scores,
self.input3: out_classes,
self.input_image_shape: [image.size[1], image.size[0]],
K.learning_phase(): 0
})
end2 = timer()
print(end2-end1)
print('Found {} boxes for {}'.format(len(out_boxes), 'img'))
font = ImageFont.truetype(font='yolo3/font/FiraMono-Medium.otf',
size=np.floor(3e-2 * image.size[1] + 0.5).astype('int32'))
thickness = (image.size[0] + image.size[1]) // 300
for i, c in reversed(list(enumerate(out_classes))):
predicted_class = self.class_names[c]
box = out_boxes[i]
score = out_scores[i]
label = '{} {:.2f}'.format(predicted_class, score)
draw = ImageDraw.Draw(image)
label_size = draw.textsize(label, font)
top, left, bottom, right = box
top = max(0, np.floor(top + 0.5).astype('int32'))
left = max(0, np.floor(left + 0.5).astype('int32'))
bottom = min(image.size[1], np.floor(bottom + 0.5).astype('int32'))
right = min(image.size[0], np.floor(right + 0.5).astype('int32'))
print(label, (left, top), (right, bottom))
ans['label'].append(str(predicted_class))
ans['X'].append((left+right)/2)
ans['Y'].append((top+bottom)/2)
ans['W'].append(right-left)
ans['H'].append(bottom-top)
if top - label_size[1] >= 0:
text_origin = np.array([left, top - label_size[1]])
else:
text_origin = np.array([left, top + 1])
# My kingdom for a good redistributable image drawing library.
for i in range(thickness):
draw.rectangle(
[left + i, top + i, right - i, bottom - i],
outline=self.colors[c])
draw.rectangle(
[tuple(text_origin), tuple(text_origin + label_size)],
fill=self.colors[c])
draw.text(text_origin, label, fill=(0, 0, 0), font=font)
del draw
end = timer()
print(end - start)
return image,ans
<file_sep>/add_window/old/Statistics.py
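# Statistics window: shows per-class flaw totals from the database and exports the
# stored detections to .xlsx workbooks (one sheet per flaw type).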
from PyQt5.QtWidgets import QFileDialog
from PyQt5.QtWidgets import QMainWindow
from PyQt5.uic import loadUi
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtCore import QTimer
from PyQt5.QtGui import QImage, QPixmap
from PyQt5.QtWidgets import QMainWindow, QStatusBar, QListWidget, QAction, qApp, QMenu
from PyQt5.uic import loadUi
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import QSizePolicy
import openpyxl
from Database import Database
book_name_xlsx = 'xlsx格式测试工作簿.xlsx'
class_name = {
'1':'停车痕紧',
'2':'停车痕松',
'3':'断经',
'4':'错花',
'5':'并纬',
'6':'缩纬',
'7':'缺纬',
'8':'糙纬',
'9':'折返',
'10':'断纬',
'11':'油污',
'12':'起机',
'13':'尽机',
'14':'经条',
'15':'擦白',
'16':'擦伤',
'17':'浆斑',
'18':'空织'
}
sheet_name_xlsx = 'xlsx格式测试表'
def write_excel_xlsx(workbook,path, sheet, value):
index = len(value)
# workbook = openpyxl.Workbook()
# sheet = workbook.active
# sheet = workbook.create_sheet(str(sheet_name))
# sheet.title = sheet_name
for i in range(0, index):
for j in range(0, len(value[i])):
sheet.cell(row=i+1, column=j+1, value=str(value[i][j]))
workbook.save(path)
class AddstatisForm(QMainWindow):
def __init__(self, parent=None, ui=None):
super(AddstatisForm, self).__init__(parent)
loadUi("UI/Statistics.ui", self)
self.tablemodel = QStandardItemModel(10,2)
self.tablemodel.setHorizontalHeaderLabels(['瑕疵种类','总数'])
self.st_info.setModel(self.tablemodel)
self.s1 = 'all'
self.s2 = 'all'
# self.cb = QComboBox()
self.comboBox.addItem('all')
self.comboBox.addItem('cam_01')
self.comboBox.addItem('cam_02')
self.comboBox.addItem('cam_03')
self.comboBox_2.addItems(['all', '停车痕紧', '停车痕松','断经','错花','并纬','缩纬','缺纬','糙纬','折返','断纬','油污','起机','尽机','经条','擦白','擦伤','浆斑','空织'])
# self.pushButton_2.clicked.connect(self.writoexc)
self.pushButton_2.clicked.connect(self.getFile)
self.pushButton.clicked.connect(self.sta_work_all)
self.pushButton_3.clicked.connect(self.GetSeclectFile)
self.comboBox.currentIndexChanged.connect(self.selectionchange)
self.comboBox_2.currentIndexChanged.connect(self.selectionchange_2)
self.cnt = -1
# def writoexc(self):
# write_excel_xlsx(book_name_xlsx, sheet_name_xlsx, value3)
def downallinfo(self):
print(book_name_xlsx)
workbook = openpyxl.Workbook()
sheet1 = workbook.active
sheet1.title = '停车痕紧'
if(len(book_name_xlsx)>=6 and book_name_xlsx[-5:] == '.xlsx'):
for i in range(19)[1:]:
li = [['编号', '类型', '相机ID', 'X轴坐标', 'Y轴坐标', '宽度', '高度', '时间戳']]
value3 = Database.getInstance().getDataFromFlaw(flaw_type=class_name[str(i)])
for j in value3:
li.append(j)
sheet_name_xlsx = class_name[str(i)]
if (i == 1):
write_excel_xlsx(workbook,book_name_xlsx, sheet1, li)
else:
sheet = workbook.create_sheet(str(sheet_name_xlsx))
write_excel_xlsx(workbook,book_name_xlsx, sheet, li)
li = []
def getFile(self):
book_name_xlsx = QFileDialog.getSaveFileName(self,
"另存为",
"",
"Excel工作簿(*.xlsx)")[0]
print(book_name_xlsx)
workbook = openpyxl.Workbook()
sheet1 = workbook.active
sheet1.title = '停车痕紧'
if(len(book_name_xlsx)>=6 and book_name_xlsx[-5:] == '.xlsx'):
for i in range(19)[1:]:
li = [['编号', '类型', '相机ID', 'X轴坐标', 'Y轴坐标', '宽度', '高度', '时间戳']]
value3 = Database.getInstance().getDataFromFlaw(flaw_type=class_name[str(i)])
for j in value3:
li.append(j)
sheet_name_xlsx = class_name[str(i)]
if (i == 1):
write_excel_xlsx(workbook,book_name_xlsx, sheet1, li)
else:
sheet = workbook.create_sheet(str(sheet_name_xlsx))
write_excel_xlsx(workbook,book_name_xlsx, sheet, li)
li = []
    def sta_work_all(self):  # display the flaw statistics in the UI table
value = Database.getInstance().getallFromFlawStatistic()
self.cnt = -1
for i in range(len(value)):
label = value[i][0]
num = value[i][1]
self.show_work(label=label, num=num)
def show_work(self, label = "0", num=0):
label = str(label)
num=str(num)
self.cnt += 1
self.sm=self.tablemodel
self.sm.setItem(self.cnt, 0, QStandardItem(label))
self.sm.setItem(self.cnt, 1, QStandardItem(num))
self.st_info.setModel(self.sm)
#QTableView
self.st_info.setColumnWidth(0,100)
self.st_info.setColumnWidth(1,200)
def selectionchange(self):
self.s1 = self.comboBox.currentText()
print(self.s1)
def selectionchange_2(self):
self.s2 = self.comboBox_2.currentText()
print(self.s2)
def GetSeclectFile(self):
book_name_xlsx = QFileDialog.getSaveFileName(self,
"另存为",
"",
"Excel工作簿(*.xlsx)")[0]
print(book_name_xlsx)
str1 = self.s1
str2 = self.s2
print(str1)
print(str2)
if (str2 == 'all'):
workbook = openpyxl.Workbook()
sheet1 = workbook.active
sheet1.title = '停车痕紧'
if(len(book_name_xlsx)>=6 and book_name_xlsx[-5:] == '.xlsx'):
for i in range(19)[1:]:
li = [['编号', '类型', '相机ID', 'X轴坐标', 'Y轴坐标', '宽度', '高度', '时间戳']]
value3 = Database.getInstance().getDataFromFlaw(flaw_type=class_name[str(i)])
for j in value3:
if (j[2] == str1 or str1=='all'):
li.append(j)
sheet_name_xlsx = class_name[str(i)]
if (i == 1):
write_excel_xlsx(workbook,book_name_xlsx, sheet1, li)
else:
sheet = workbook.create_sheet(str(sheet_name_xlsx))
write_excel_xlsx(workbook,book_name_xlsx, sheet, li)
li = []
else:
workbook = openpyxl.Workbook()
sheet1 = workbook.active
sheet1.title = str2
if (len(book_name_xlsx) >= 6 and book_name_xlsx[-5:] == '.xlsx'):
li = [['编号', '类型', '相机ID', 'X轴坐标', 'Y轴坐标', '宽度', '高度', '时间戳']]
value3 = Database.getInstance().getDataFromFlaw(flaw_type=str2)
for j in value3:
if (j[2] == str1 or str1=='all'):
li.append(j)
write_excel_xlsx(workbook,book_name_xlsx, sheet1, li)
<file_sep>/add_window/AddPicworkForm.py
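# Offline detection window: lets the user pick an image or video file, runs the YOLO
# model on it and lists the detected flaws in a table.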
from PyQt5.QtWidgets import QFileDialog
from PyQt5.uic import loadUi
from PyQt5.QtWidgets import QFileDialog
from PyQt5.QtGui import QPixmap
from PyQt5.QtGui import QImage
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import QtCore, QtWidgets
#from Database import Database
from add_window.AddMainWindow import AddMainWindow
import imghdr
import os.path
import cv2.cv2 as cv2
import numpy as np
from PIL import Image, ImageFont, ImageDraw
from yolo3.yolo import YOLO
class_name = {
'1':'停车痕紧',
'2':'停车痕松',
'3':'断经',
'4':'错花',
'5':'并纬',
'6':'缩纬',
'7':'缺纬',
'8':'糙纬',
'9':'折返',
'10':'断纬',
'11':'油污',
'12':'起机',
'13':'尽机',
'14':'经条',
'15':'擦白',
'16':'擦伤',
'17':'浆斑',
'18':'空织',
'19':'缺纬'
}
def file_extension(path):
return os.path.splitext(path)[1]
class AddPicworkForm(AddMainWindow):
def __init__(self, parent=None,yolo=None,model_flag='local'):
super().__init__(parent, "UI/PicworkForm.ui")
self.YOLO = yolo
self.model_flag=model_flag
self.shape = (640,480)
self.path = ''
self.cnt = -1
self.type = 'error'
self.tablemodel = QStandardItemModel(10,5)
self.tablemodel.setHorizontalHeaderLabels(['瑕疵种类','X坐标','Y坐标','宽度W','高度H'])
#self.tablemodels = QStandardItemModel(10,5)
#self.tablemodels.setHorizontalHeaderLabels(['瑕疵种类','X坐标','Y坐标','宽度W','高度H'])
self.TV_info.setModel(self.tablemodel)
# self.add.clicked.connect(self.addToDatabase)
self.cancel.clicked.connect(self.close)
self.file_browse.clicked.connect(lambda: self.getFile(self.file))
self.PB_work.clicked.connect(self.work)
def close(self):
#self.YOLO.close_session()
self.destroy(True)
def addToDatabase(self):
pass
def getFile(self, lineEdit):
pic_map = QFileDialog.getOpenFileName()[0]
self.path = pic_map
lineEdit.setText(pic_map)
# print(pic_map)
if pic_map!="":
if imghdr.what(pic_map)!=None:
pixmap = QPixmap (pic_map)
self.LB_Pic.setPixmap(pixmap)
self.LB_Pic.setScaledContents(True)
self.LB_Pic.show()
self.type = 'pic'
elif file_extension(pic_map) in ['.AVI','.avi','.MP4','.mp4','.mov','.MOV','.rmvb','.RMVB','.wmv','.WMV']:
vid = cv2.VideoCapture(pic_map)
if not vid.isOpened():
raise IOError("Couldn't open webcam or video")
return_value, frame = vid.read()
frame = cv2.resize(frame,(480,640))
qimg = self.toQImage(frame)
self.LB_Pic.setPixmap(QPixmap.fromImage(qimg))
self.type = 'mov'
vid.release()
else:
self.type = 'error'
def identify_work(self, label = "0", X = 1.0, Y = 1.0, W = 1.0, H = 1.0):
label = str(label)
X = str(X)
Y = str(Y)
W = str(W)
H = str(H)
self.cnt += 1
self.sm=self.tablemodel
self.sm.setItem(self.cnt, 0, QStandardItem(label))
self.sm.setItem(self.cnt, 1, QStandardItem(X))
self.sm.setItem(self.cnt, 2, QStandardItem(Y))
self.sm.setItem(self.cnt, 3, QStandardItem(W))
self.sm.setItem(self.cnt, 4, QStandardItem(H))
self.TV_info.setModel(self.sm)
#QTableView
self.TV_info.setColumnWidth(0,100)
self.TV_info.setColumnWidth(1,200)
def toQImage(self, raw_img):
from numpy import copy
img = copy(raw_img)
qformat = QImage.Format_Indexed8
if len(img.shape) == 3:
if img.shape[2] == 4:
qformat = QImage.Format_RGBA8888
else:
qformat = QImage.Format_RGB888
outImg = QImage(img.tobytes(), img.shape[1], img.shape[0], img.strides[0], qformat)
outImg = outImg.rgbSwapped()
return outImg
@QtCore.pyqtSlot()
def work(self):
if self.type == 'pic':
frame = cv2.imread(self.path)
self.cnt = -1
self.tablemodel = QStandardItemModel(10,5)
self.tablemodel.setHorizontalHeaderLabels(['瑕疵种类','X坐标','Y坐标','宽度W','高度H'])
self.TV_info.setModel(self.tablemodel)
try:
frame = cv2.resize(frame,(480,640))
pack = self.object_detecation_pic(frame)
frame = pack['frame']
box = pack['box']
for i in range(len(box['label'])):
self.identify_work(class_name[box['label'][i]], box['X'][i], box['Y'][i], box['W'][i], box['H'][i])
qimg = self.toQImage(frame)
self.LB_Pic.setPixmap(QPixmap.fromImage(qimg))
except:
print("image error")
elif self.type == 'mov':
vid = cv2.VideoCapture(self.path)
self.cnt = -1
self.tablemodel = QStandardItemModel(10,5)
self.tablemodel.setHorizontalHeaderLabels(['瑕疵种类','X坐标','Y坐标','宽度W','高度H'])
self.TV_info.setModel(self.tablemodel)
while True:
return_value, frame = vid.read()
if return_value:
try:
frame = cv2.resize(frame,(480,640))
pack = self.object_detecation_pic(frame)
frame = pack['frame']
box = pack['box']
for i in range(len(box['label'])):
self.identify_work(class_name[box['label'][i]], box['X'][i], box['Y'][i], box['W'][i], box['H'][i])
qimg = self.toQImage(frame)
self.LB_Pic.setPixmap(QPixmap.fromImage(qimg))
except:
print("image error")
if cv2.waitKey(1) & 0xFF == ord('q'):
break
vid.release()
else:
self.LB_Pic.setText("请选择图片或视频")
def object_detecation_pic(self,frame):
ans={}
if (isinstance(frame,np.ndarray)):
#image = cv2.resize(frame,self.shape)
image = Image.fromarray(frame)
image,ans = self.YOLO.detect_image(image)
#image,ans = yolo.detect_image(image)
frame = np.asarray(image)
else:
frame = None
pack = {'frame': frame,'box':ans}
return pack
def closeEvent(self,QCloseEvent):
res = QMessageBox.question(self,'提示','是否退出?',QMessageBox.Yes|QMessageBox.No,QMessageBox.No)
if res == QMessageBox.Yes:
QCloseEvent.accept()
else:
QCloseEvent.ignore()
# def addToDatabase(self):
# id = str(self.id.text())
# group = str(self.group.text())
# location = str(self.location.text())
# x = str(self.x_coord.text())
# y = str(self.y_coord.text())
# file = str(self.file.text())
# Database.getInstance().insertIntoCamera(id, location, x, y, group, file)
# self.destroy()
<file_sep>/processor/MainProcessor.py
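# Thin wrapper that forwards camera frames to DetectionProcessor for object detection.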
from Database import Database
from processor.DetectionProcessor import DetectionProcessor
class MainProcessor:
def __init__(self, camera_id):
self.cam_ids = camera_id
#CamAinfo = Database.getInstance().getCaminfo(camera_id[0])
#CamBinfo = Database.getInstance().getCaminfo(camera_id[1])
#CamCinfo = Database.getInstance().getCaminfo(camera_id[2])
self.processor = DetectionProcessor()
def getProcessedImage(self,yolo,frames=None):
dicti = self.processor.object_detection(frames,yolo)
return dicti<file_sep>/processor/DetectionProcessor.py
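# Runs YOLO detection on up to three camera frames and returns the annotated frames
# together with the detected bounding boxes.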
import sys
sys.path.append("..")
import cv2.cv2 as cv2
import numpy as np
#import datatime
from yolo3.yolo import YOLO
from PIL import Image, ImageFont, ImageDraw
class DetectionProcessor:
def __init__(self):
self.shape = (320,640)
self.cnt = 0
def object_detection(self,frames,yolo):
ans={}
ans1={}
ans2={}
if (isinstance(frames[0],np.ndarray)):
image = cv2.resize(frames[0],self.shape)
image = Image.fromarray(image)
image,ans = yolo.detect_image(image)
self.frame01 = np.asarray(image)
else:
self.frame01 = None
if (isinstance(frames[1],np.ndarray)):
#self.frame02 = cv2.resize(frames[1],self.shape)
image1 = cv2.resize(frames[1],self.shape)
image1 = Image.fromarray(image1)
image1,ans1 = yolo.detect_image(image1)
self.frame02 = np.asarray(image1)
else:
self.frame02 = None
if (isinstance(frames[2],np.ndarray)):
#self.frame03 = cv2.resize(frames[2],self.shape)
image2 = cv2.resize(frames[2],self.shape)
image2 = Image.fromarray(image2)
image2,ans2 = yolo.detect_image(image2)
self.frame03 = np.asarray(image2)
else:
self.frame03 = None
pack = {'frame01': self.frame01,'frame02': self.frame02,'frame03':self.frame03,'boxes1':ans,'boxes2':ans1,'boxes3':ans2}
return pack
<file_sep>/add_window/Setsetting.py
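# Settings dialog: reads and updates the minimum flaw width/height thresholds and the
# rate value stored in the database.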
from PyQt5.QtWidgets import QFileDialog
from PyQt5.QtWidgets import QMessageBox
from PyQt5 import QtCore, QtWidgets
from Database import Database
from add_window.AddMainWindow import AddMainWindow
class SetSetting(AddMainWindow):
def __init__(self, parent=None):
super().__init__(parent, "UI/Setting.ui")
#self.file_browse.clicked.connect(lambda: self.getFile(self.file))
self.setok.clicked.connect(self.updataSetToDatabase)
self.clearall.clicked.connect(self.clearSetToDatabase)
# caminfo = Database.getInstance().getCaminfo(ID=camid)[0]
self.hight.setText('')
self.wid.setText('')
self.rate_video.setText('')
self.model.setText('yolo')
model_name = str(self.model.text())
rate = Database.getInstance().getSetting(model=model_name)
wid_value = rate[0]
hight_value = rate[1]
rate_value = rate[2]
self.hight.setText(str(hight_value))
self.wid.setText(str(wid_value))
self.rate_video.setText(str(rate_value))
@QtCore.pyqtSlot()
def clearSetToDatabase(self):
model_name = str(self.model.text())
Database.getInstance().updateSetting(model_name)
rate = Database.getInstance().getSetting(model=model_name)
wid_value = rate[0]
hight_value = rate[1]
rate_value = rate[2]
self.hight.setText(str(hight_value))
self.wid.setText(str(wid_value))
self.rate_video.setText(str(rate_value))
#self.model.setText('')
@QtCore.pyqtSlot()
def updataSetToDatabase(self):
model_name = str(self.model.text())
hight_value = str(self.hight.text())
wid_value = str(self.wid.text())
rate_value = str(self.rate_video.text())
Database.getInstance().updateSetting(model_name,wid_value,hight_value,rate_value)
def closeEvent(self,QCloseEvent):
res = QMessageBox.question(self,'提示','是否保存设置?',QMessageBox.Yes|QMessageBox.No,QMessageBox.No)
if res == QMessageBox.Yes:
model_name = str(self.model.text())
hight_value = str(self.hight.text())
wid_value = str(self.wid.text())
rate_value = str(self.rate_video.text())
Database.getInstance().updateSetting(model_name,wid_value,hight_value,rate_value)
QCloseEvent.accept()
else:
QCloseEvent.accept()
<file_sep>/yolo3/yolo.py
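# Local YOLOv3 wrapper: loads the Keras .h5 model, detects flaws on PIL images and draws
# the boxes; also contains a pure-numpy decoder used by the online (HTTP) detection path.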
# -*- coding: utf-8 -*-
"""
Class definition of YOLO_v3 style detection model on image and video
"""
import colorsys
import os
from timeit import default_timer as timer
import numpy as np
from keras import backend as K
from keras.models import load_model
from keras.layers import Input
from PIL import Image, ImageFont, ImageDraw
import requests
import json
from yolo3.yolo3.model import yolo_eval, yolo_body, tiny_yolo_body
from yolo3.yolo3.utils import letterbox_image
import os
from keras.utils import multi_gpu_model
class_name = {
'1':'停车痕紧',
'2':'停车痕松',
'3':'断经',
'4':'错花',
'5':'并纬',
'6':'缩纬',
'7':'缺纬',
'8':'糙纬',
'9':'折返',
'10':'断纬',
'11':'油污',
'12':'起机',
'13':'尽机',
'14':'经条',
'15':'擦白',
'16':'擦伤',
'17':'浆斑',
'18':'空织'
}
class YOLO(object):
_defaults = {
"model_path": 'yolo3/model_data/yolov3forflaw.h5',
"anchors_path": 'yolo3/model_data/yolo_anchors.txt',
"classes_path": 'yolo3/model_data/classes.txt',
"score" : 0.3,
"iou" : 0.45,
"model_image_size" : (416, 416),
"gpu_num" : 1,
}
# _defaults = {
# "model_path": 'yolo3/model_data/yolov3.h5',
# "anchors_path": 'yolo3/model_data/yolo_anchors.txt',
# "classes_path": 'yolo3/model_data/coco_classes.txt',
# "score" : 0.3,
# "iou" : 0.45,
# "model_image_size" : (416, 416),
# "gpu_num" : 1,
# }
@classmethod
def get_defaults(cls, n):
if n in cls._defaults:
return cls._defaults[n]
else:
return "Unrecognized attribute name '" + n + "'"
def __init__(self, **kwargs):
self.__dict__.update(self._defaults) # set up default values
self.__dict__.update(kwargs) # and update with user overrides
self.class_names = self._get_class()
self.anchors = self._get_anchors()
self.sess = K.get_session()
self._t1 = 0.3
self._t2 = 0.45
self.boxes, self.scores, self.classes = self.generate()
def _get_class(self):
classes_path = os.path.expanduser(self.classes_path)
with open(classes_path) as f:
class_names = f.readlines()
class_names = [c.strip() for c in class_names]
return class_names
def _get_anchors(self):
anchors_path = os.path.expanduser(self.anchors_path)
with open(anchors_path) as f:
anchors = f.readline()
anchors = [float(x) for x in anchors.split(',')]
return np.array(anchors).reshape(-1, 2)
def generate(self):
model_path = os.path.expanduser(self.model_path)
assert model_path.endswith('.h5'), 'Keras model or weights must be a .h5 file.'
# Load model, or construct model and load weights.
num_anchors = len(self.anchors)
num_classes = len(self.class_names)
is_tiny_version = num_anchors==6 # default setting
try:
self.yolo_model = load_model(model_path, compile=False)
except:
self.yolo_model = tiny_yolo_body(Input(shape=(None,None,3)), num_anchors//2, num_classes) \
if is_tiny_version else yolo_body(Input(shape=(None,None,3)), num_anchors//3, num_classes)
self.yolo_model.load_weights(self.model_path) # make sure model, anchors and classes match
else:
assert self.yolo_model.layers[-1].output_shape[-1] == \
num_anchors/len(self.yolo_model.output) * (num_classes + 5), \
'Mismatch between model and given anchor and class sizes'
print('{} model, anchors, and classes loaded.'.format(model_path))
# Generate colors for drawing bounding boxes.
hsv_tuples = [(x / len(self.class_names), 1., 1.)
for x in range(len(self.class_names))]
self.colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
self.colors = list(
map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)),
self.colors))
np.random.seed(10101) # Fixed seed for consistent colors across runs.
np.random.shuffle(self.colors) # Shuffle colors to decorrelate adjacent classes.
np.random.seed(None) # Reset seed to default.
# Generate output tensor targets for filtered bounding boxes.
self.input_image_shape = K.placeholder(shape=(2, ))
if self.gpu_num>=2:
self.yolo_model = multi_gpu_model(self.yolo_model, gpus=self.gpu_num)
boxes, scores, classes = yolo_eval(self.yolo_model.output, self.anchors,
len(self.class_names), self.input_image_shape,
score_threshold=self.score, iou_threshold=self.iou)
return boxes, scores, classes
def detect_image(self, image):
start = timer()
ans = {}
ans['label']=[]
ans['X']=[]
ans['Y']=[]
ans['W']=[]
ans['H']=[]
if self.model_image_size != (None, None):
assert self.model_image_size[0]%32 == 0, 'Multiples of 32 required'
assert self.model_image_size[1]%32 == 0, 'Multiples of 32 required'
boxed_image = letterbox_image(image, tuple(reversed(self.model_image_size)))
else:
new_image_size = (image.width - (image.width % 32),
image.height - (image.height % 32))
boxed_image = letterbox_image(image, new_image_size)
image_data = np.array(boxed_image, dtype='float32')
print(image_data.shape)
image_data /= 255.
image_data = np.expand_dims(image_data, 0) # Add batch dimension.
print(image_data.shape)
out_boxes, out_scores, out_classes = self.sess.run(
[self.boxes, self.scores, self.classes],
feed_dict={
self.yolo_model.input: image_data,
self.input_image_shape: [image.size[1], image.size[0]],
K.learning_phase(): 0
})
print('Found {} boxes for {}'.format(len(out_boxes), 'img'))
font = ImageFont.truetype(font='yolo3/font/FiraMono-Medium.otf',
size=np.floor(3e-2 * image.size[1] + 0.5).astype('int32'))
thickness = (image.size[0] + image.size[1]) // 300
for i, c in reversed(list(enumerate(out_classes))):
predicted_class = self.class_names[c]
box = out_boxes[i]
score = out_scores[i]
label = '{} {:.2f}'.format(predicted_class, score)
draw = ImageDraw.Draw(image)
label_size = draw.textsize(label, font)
top, left, bottom, right = box
top = max(0, np.floor(top + 0.5).astype('int32'))
left = max(0, np.floor(left + 0.5).astype('int32'))
bottom = min(image.size[1], np.floor(bottom + 0.5).astype('int32'))
right = min(image.size[0], np.floor(right + 0.5).astype('int32'))
print(label, (left, top), (right, bottom))
ans['label'].append(str(predicted_class))
ans['X'].append((left+right)/2)
ans['Y'].append((top+bottom)/2)
ans['W'].append(right-left)
ans['H'].append(bottom-top)
if top - label_size[1] >= 0:
text_origin = np.array([left, top - label_size[1]])
else:
text_origin = np.array([left, top + 1])
# My kingdom for a good redistributable image drawing library.
for i in range(thickness):
draw.rectangle(
[left + i, top + i, right - i, bottom - i],
outline=self.colors[c])
draw.rectangle(
[tuple(text_origin), tuple(text_origin + label_size)],
fill=self.colors[c])
draw.text(text_origin, label, fill=(0, 0, 0), font=font)
del draw
end = timer()
print(end - start)
return image,ans
def online_detect_image(self, image=None,endpoint = "http://192.168.3.3:8500"):
start = timer()
ans = {}
ans['label']=[]
ans['X']=[]
ans['Y']=[]
ans['W']=[]
ans['H']=[]
if self.model_image_size != (None, None):
assert self.model_image_size[0]%32 == 0, 'Multiples of 32 required'
assert self.model_image_size[1]%32 == 0, 'Multiples of 32 required'
boxed_image = letterbox_image(image, tuple(reversed(self.model_image_size)))
else:
new_image_size = (image.width - (image.width % 32),
image.height - (image.height % 32))
boxed_image = letterbox_image(image, new_image_size)
image_data = np.array(boxed_image, dtype='float32')
print(image_data.shape)
image_data /= 255.
image_data = np.expand_dims(image_data, 0) # Add batch dimension.
print(image_data.shape)
inputda = np.array(image_data).tolist()
input_data = {
"model_name": "default",
"model_version": 1,
"data": {
"images": inputda}
}
result = requests.post(endpoint, json=input_data)
new_dict = json.loads(result.text)
out_boxes = np.array(new_dict['out_boxes'])
out_scores = np.array(new_dict['out_scores'])
out_classes = np.array(new_dict['out_classes'])
output = [out_boxes,out_classes,out_scores]
out_boxes, out_classes , out_scores= self._yolo_out(output,[image.size[1], image.size[0]])
print('Found {} boxes for {}'.format(len(out_boxes), 'img'))
font = ImageFont.truetype(font='yolo3/font/FiraMono-Medium.otf',
size=np.floor(3e-2 * image.size[1] + 0.5).astype('int32'))
thickness = (image.size[0] + image.size[1]) // 300
for i, c in reversed(list(enumerate(out_classes))):
predicted_class = self.class_names[c]
box = out_boxes[i]
score = out_scores[i]
label = '{} {:.2f}'.format(predicted_class, score)
draw = ImageDraw.Draw(image)
label_size = draw.textsize(label, font)
top, left, bottom, right = box
top = max(0, np.floor(top + 0.5).astype('int32'))
left = max(0, np.floor(left + 0.5).astype('int32'))
bottom = min(image.size[1], np.floor(bottom + 0.5).astype('int32'))
right = min(image.size[0], np.floor(right + 0.5).astype('int32'))
print(label, (left, top), (right, bottom))
ans['label'].append(str(predicted_class))
ans['X'].append((left+right)/2)
ans['Y'].append((top+bottom)/2)
ans['W'].append(right-left)
ans['H'].append(bottom-top)
if top - label_size[1] >= 0:
text_origin = np.array([left, top - label_size[1]])
else:
text_origin = np.array([left, top + 1])
# My kingdom for a good redistributable image drawing library.
for i in range(thickness):
draw.rectangle(
[left + i, top + i, right - i, bottom - i],
outline=self.colors[c])
draw.rectangle(
[tuple(text_origin), tuple(text_origin + label_size)],
fill=self.colors[c])
draw.text(text_origin, label, fill=(0, 0, 0), font=font)
del draw
end = timer()
print(end - start)
return image,ans
def close_session(self):
self.sess.close()
def _yolo_out(self, outs, shape):
"""Process output of yolo base net.
# Argument:
outs: output of yolo base net.
shape: shape of original image.
# Returns:
boxes: ndarray, boxes of objects.
classes: ndarray, classes of objects.
scores: ndarray, scores of objects.
"""
masks = [[6, 7, 8], [3, 4, 5], [0, 1, 2]]
anchors = [[10, 13], [16, 30], [33, 23], [30, 61], [62, 45],
[59, 119], [116, 90], [156, 198], [373, 326]]
boxes, classes, scores = [], [], []
for out, mask in zip(outs, masks):
b, c, s = self._process_feats(out, anchors, mask)
b, c, s = self._filter_boxes(b, c, s)
boxes.append(b)
classes.append(c)
scores.append(s)
boxes = np.concatenate(boxes)
classes = np.concatenate(classes)
scores = np.concatenate(scores)
# Scale boxes back to original image shape.
width, height = shape[1], shape[0]
image_dims = [width, height, width, height]
boxes = boxes * image_dims
nboxes, nclasses, nscores = [], [], []
for c in set(classes):
inds = np.where(classes == c)
b = boxes[inds]
c = classes[inds]
s = scores[inds]
keep = self._nms_boxes(b, s)
nboxes.append(b[keep])
nclasses.append(c[keep])
nscores.append(s[keep])
if not nclasses and not nscores:
return None, None, None
boxes = np.concatenate(nboxes)
classes = np.concatenate(nclasses)
scores = np.concatenate(nscores)
return boxes, classes, scores
def _nms_boxes(self, boxes, scores):
"""Suppress non-maximal boxes.
# Arguments
boxes: ndarray, boxes of objects.
scores: ndarray, scores of objects.
# Returns
keep: ndarray, index of effective boxes.
"""
x = boxes[:, 0]
y = boxes[:, 1]
w = boxes[:, 2]
h = boxes[:, 3]
areas = w * h
order = scores.argsort()[::-1]
keep = []
while order.size > 0:
i = order[0]
keep.append(i)
xx1 = np.maximum(x[i], x[order[1:]])
yy1 = np.maximum(y[i], y[order[1:]])
xx2 = np.minimum(x[i] + w[i], x[order[1:]] + w[order[1:]])
yy2 = np.minimum(y[i] + h[i], y[order[1:]] + h[order[1:]])
w1 = np.maximum(0.0, xx2 - xx1 + 1)
h1 = np.maximum(0.0, yy2 - yy1 + 1)
inter = w1 * h1
ovr = inter / (areas[i] + areas[order[1:]] - inter)
inds = np.where(ovr <= self._t2)[0]
order = order[inds + 1]
keep = np.array(keep)
return keep
def _sigmoid(self, x):
"""sigmoid.
# Arguments
x: Tensor.
# Returns
numpy ndarray.
"""
return 1 / (1 + np.exp(-x))
def _process_feats(self, out, anchors, mask):
"""process output features.
# Arguments
out: Tensor (N, N, 3, 4 + 1 +80), output feature map of yolo.
anchors: List, anchors for box.
mask: List, mask for anchors.
# Returns
boxes: ndarray (N, N, 3, 4), x,y,w,h for per box.
box_confidence: ndarray (N, N, 3, 1), confidence for per box.
box_class_probs: ndarray (N, N, 3, 80), class probs for per box.
"""
grid_h, grid_w, num_boxes = map(int, out.shape[1: 4])
anchors = [anchors[i] for i in mask]
anchors_tensor = np.array(anchors).reshape(1, 1, len(anchors), 2)
# Reshape to batch, height, width, num_anchors, box_params.
out = out[0]
box_xy = self._sigmoid(out[..., :2])
box_wh = np.exp(out[..., 2:4])
box_wh = box_wh * anchors_tensor
box_confidence = self._sigmoid(out[..., 4])
box_confidence = np.expand_dims(box_confidence, axis=-1)
box_class_probs = self._sigmoid(out[..., 5:])
col = np.tile(np.arange(0, grid_w), grid_w).reshape(-1, grid_w)
row = np.tile(np.arange(0, grid_h).reshape(-1, 1), grid_h)
col = col.reshape(grid_h, grid_w, 1, 1).repeat(3, axis=-2)
row = row.reshape(grid_h, grid_w, 1, 1).repeat(3, axis=-2)
grid = np.concatenate((col, row), axis=-1)
box_xy += grid
box_xy /= (grid_w, grid_h)
box_wh /= (416, 416)
box_xy -= (box_wh / 2.)
boxes = np.concatenate((box_xy, box_wh), axis=-1)
return boxes, box_confidence, box_class_probs
def _filter_boxes(self, boxes, box_confidences, box_class_probs):
"""Filter boxes with object threshold.
# Arguments
boxes: ndarray, boxes of objects.
box_confidences: ndarray, confidences of objects.
box_class_probs: ndarray, class_probs of objects.
# Returns
boxes: ndarray, filtered boxes.
classes: ndarray, classes for boxes.
scores: ndarray, scores for boxes.
"""
box_scores = box_confidences * box_class_probs
box_classes = np.argmax(box_scores, axis=-1)
box_class_scores = np.max(box_scores, axis=-1)
pos = np.where(box_class_scores >= self._t1)
boxes = boxes[pos]
classes = box_classes[pos]
scores = box_class_scores[pos]
return boxes, classes, scores
| 5665fa7572317d13be18b127c90a04db3570822a | [
"Python",
"Text"
] | 15 | Python | jinzhu6/Realtime_Flaw_Object | e963d8698d2db1a2d4e832cffea36aa8a008c27e | ba808f88f601e2df6f07d8c0febc6509bfc804fb | |
refs/heads/master | <repo_name>JohannesBuchner/flight-reservation-emails<file_sep>/testparser.py
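# Regression test harness: parses every message matched by a notmuch query and compares
# the formatted output with the files under <dbpath>/expected/.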
import notmuch
import os
import emailparser
import sys
import logging
dbpath = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test-private')
dbpath = sys.argv[1]
db = notmuch.Database(dbpath)
query = db.create_query(sys.argv[2])
if __name__ == '__main__':
emailparser.default_logging()
logging.getLogger('emailparser').setLevel(logging.DEBUG)
for m in query.search_messages():
r = emailparser.parse_email_message(m)
f = os.path.basename(m.get_filename())
txt = ""
for info in r:
txt += """
Flight %(departure)s --> %(arrival)s
Departing %(departureTimestr)s %(boardingTime)s
from %(departure)s %(departureTerminal)s %(departureGate)s
arriving %(arrivalTimestr)s
To %(arrival)s %(arrivalTerminal)s %(arrivalGate)s
Flight number %(flightNumber)s with %(airline)s%(operator)s
%(ticketNumber)s %(ticketText)s %(ticketDownload)s %(ticketPrint)s
""" % info
if txt != "":
with open(os.path.join(dbpath, 'parsed', f), 'w') as fout:
fout.write(txt.encode('utf8'))
ftest = os.path.join(dbpath, 'expected', f)
if os.path.exists(ftest):
test = open(ftest).read()
if txt == "":
print "no parsing output for %s" % f
print "expected:", ftest
print test
break
elif txt != test:
print "parsing difference for %s" % f
print "expected:", ftest
print test
print "actual:", os.path.join(dbpath, 'parsed', f)
print txt
print 'to compare: meld %s %s' % (ftest, os.path.join(dbpath, 'parsed', f))
break
else:
print "result for %s" % f
print txt
print 'ok'
else:
if txt != "":
print "unexpected parsing output for %s" % f
print "actual:", os.path.join(dbpath, 'parsed', f)
print txt
break
else:
print "result for %s" % f
print txt
print 'ok'
<file_sep>/emailparser.py
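# Core parser: extracts flight reservations from HTML emails, preferring schema.org
# FlightReservation markup and falling back to heuristic table parsing.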
#!/usr/bin/python
# -*- coding: utf-8 -*-
import notmuch
import BeautifulSoup
import datetime
import dateutil.parser
import dateparser
import logging
from tzlocal import get_localzone
# this makes everything utf8
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
# this disables relative dates
import dateparser.date
dateparser.date._DateLanguageParser._try_freshness_parser = lambda self: False
def get_value(reservation, element, itemprop, default):
node = reservation.find(element, itemprop=itemprop)
if node is None:
return default
else:
attrs = dict(node.attrs)
if element == 'link':
return attrs.get('href', default)
elif element == 'meta':
return attrs.get('content', default)
def get_name(parent, itemprop, itemtype, default, childitemprop='name'):
node = parent.find('div', itemprop=itemprop, itemtype=itemtype)
if node is None:
return default
return get_value(node, 'meta', childitemprop, default)
def get_code(parent, itemprop, itemtype, default):
return get_name(parent, itemprop, itemtype, default, childitemprop='iataCode')
def prepend(info, k, prefix):
if info[k] and info[k] != '':
info[k] = prefix + info[k]
from six.moves.html_parser import HTMLParser
h = HTMLParser()
def nicefy_htmltext(txt):
el = h.unescape(txt.strip())
el = el.strip('.').replace('\n', ' ').replace('\t', ' ').replace(' ', ' ').replace(' ', ' ').replace(' ', ' ').strip()
return el
def parse_field(v):
# split td field into components
vs = []
#for el in list(v) + list(v.findChildren()):
for el in list(v.recursiveChildGenerator()) + list(v):
if hasattr(el, 'text'):
el = el.text
el = nicefy_htmltext(el)
if '<' in el or '>' in el:
continue
if len(el) != 0 and len(el) < 200:
vs.append(el)
return vs
def shorten_airport(name):
name = nicefy_htmltext(name).strip('#')
if len(name) < 8:
return name
if '(' not in name:
return name
part = name.split('(')[1]
if ')' not in part:
return name
part = part.split(')')[0]
if len(part) != 3:
return name
return part
def parsedate_simple(s):
logf = logging.getLogger('emailparser.parsedate')
logf.info('simple date parsing "%s"...' % s)
if s == '':
return ''
return dateutil.parser.parse(s)
def parsedate(s, default, languages=None):
logf = logging.getLogger('emailparser.parsedate')
logf.info('date parsing "%s"...' % s)
try:
return dateutil.parser.parse(s, default=default.replace(second=0, minute=0, hour=0))
except ValueError as e:
r = dateparser.parse(s, languages=languages, settings=
dateparser.conf.Settings().replace(
RELATIVE_BASE=default.replace(second=0, minute=0, hour=0)))
if r is None:
raise e
else:
return r
previous_dates = {}
def parsedate_cached(s, default, languages=None):
logf = logging.getLogger('emailparser.parsedate_cached')
s = s.replace('\n', ' ').replace('\t', ' ').replace(' ', ' ').replace(' ', ' ').replace(' ', ' ')
if len(s) > 50:
raise ValueError('too long for a date')
if len(s) < 4:
raise ValueError('too short for a date')
if not any([n in s for n in '0123456789']):
raise ValueError('numbers expected in a date')
k = (s, default)
if k not in previous_dates:
try:
d = parsedate(s, default=default, languages=languages)
if d.year < 1980 or d.year > datetime.datetime.now().year + 2:
logf.warn('problem parsing "%s": outside reasonable date range' % (s))
d = None
previous_dates[k] = d
except OverflowError as e:
logf.warn('problem parsing "%s": %s' % (s, e))
previous_dates[k] = None
except Exception as e:
logf.warn('problem parsing "%s": %s' % (s, e))
previous_dates[k] = None
if previous_dates[k] is None:
raise ValueError(s)
else:
return previous_dates[k]
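# Build a flight dict from one table row by matching the column headers against known
# departure/arrival/date/flight keywords.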
def parse_flight(columns, values, global_info, languages=None):
logf = logging.getLogger('emailparser.parse_flight')
info = {}
defaultdate = global_info['emailTime']
logf.info('defaultdate(email) <- %s' % defaultdate)
for c, v in zip(columns, values):
logf.debug('parsing row: column %s: "%s"' % (c, v))
if c.lower() in ['departs', 'departure', 'departure city and time', 'from', 'salida']:
logf.debug('parsing departure: "%s"' % v)
for vi in parse_field(v):
logf.debug('parsing departure component: "%s"' % vi)
# try to parse as time
try:
time = parsedate_cached(vi, default=defaultdate, languages=languages)
if time.hour != 0 or time.minute != 0:
logf.info('departureTime <- %s' % time)
info['departureTime'] = time
except ValueError as e:
# could be a location
v = shorten_airport(vi)
if is_airport(v):
logf.info('departure (location) <- %s (date: %s)' % (v, e))
info['departure'] = v
elif c.lower() in ['arrives', 'arrival', 'arrival city and time', 'to', 'llegada']:
logf.debug('parsing arrival: "%s"' % v)
for vi in parse_field(v):
logf.debug('parsing arrival component: "%s"' % vi)
# try to parse as time
try:
time = parsedate_cached(vi, default=defaultdate, languages=languages)
if time.hour != 0 or time.minute != 0:
logf.info('arrivalTime <- %s' % time)
info['arrivalTime'] = time
except ValueError as e:
# could be a location
v = shorten_airport(vi)
if is_airport(v):
logf.info('arrival (location) <- %s (date: %s)' % (v, e))
info['arrival'] = v
elif c.lower() in ['day, date']:
day = nicefy_htmltext(v.text)
if day != '':
logf.debug('parsing day "%s"' % day)
try:
defaultdate = parsedate_cached(day, default=defaultdate, languages=languages)
logf.info('defaultdate <- %s' % defaultdate)
except ValueError as e:
try:
defaultdate = datetime.datetime.strptime(day, '%a, %d%b%y')
logf.info('defaultdate <- %s' % defaultdate)
except ValueError as e:
logf.debug('failed to parse as day "%s" (%s)' % (day[:100], e))
pass
elif c.lower() in ['flight', 'flights', 'vuelo \xe2\x84\x96']:
flight = nicefy_htmltext(v.text.strip())
#if flight.startswith('Seat'):
# logf.info('airplaneSeat <- "%s"' % flight)
# info['airplaneSeat'] = flight
#else:
logf.info('flightNumber <- "%s"' % flight)
info['flightNumber'] = flight
for k in 'operado por', 'operated by':
if k in flight.lower():
i = flight.lower().index(k)
flightNumber = flight[:i].strip()
operator = flight[i+len(k):].strip()
logf.info('flightNumber <- "%s"' % flightNumber)
info['flightNumber'] = flightNumber
logf.info('operator <- "%s"' % operator)
info['operator'] = operator
elif c.lower() in ['airline']:
airline = v.text.strip()
try:
airline, flightNumber = airline.split('#')
logf.info('airline <- "%s"' % airline.strip())
info['airline'] = airline.strip()
logf.info('flightNumber <- "%s"' % flightNumber.strip())
info['flightNumber'] = flightNumber.strip()
except:
logf.info('airline <- "%s"' % airline.strip())
info['airline'] = airline.strip()
else:
logf.debug('unhandled column "%s" with content: "%s"' % (c, v.text))
if len(info) > 0:
logf.info('learned flight info: %s' % info)
info.update(global_info)
all_keys = ['reservationNumber', 'checkinUrl', 'ticketNumber', 'ticketDownload',
'ticketPrint', 'ticketText', 'airplaneSeat', 'boardingGroup', 'flightNumber',
'airline', 'operator', 'departure', 'boardingTime', 'departureTime', 'departureGate',
'departureTerminal', 'arrival', 'arrivalTime', 'arrivalGate', 'arrivalTerminal']
for k in all_keys:
if k not in info:
info[k] = ''
return info
def is_airport(v):
if len(v) < 3:
return False
if any([n in v for n in '0123456789']):
return False
if len(v.split()) > 5:
return False
if v.startswith('('):
return False
invalid_symbols = ':@#'
if any(s in v for s in invalid_symbols) or v.startswith('/'):
return False
return True
stopwords = ['estimated', 'total',
'cabin: ', 'take this survey', 'callout panel', 'award miles',
'passengers', 'right column',
'foreign affairs', 'security', 'book now', 'see deal',
'enjoy', 'experience', 'entertainment', 'footer', 'awards', '®',
'twitter', ' your ', 'requires', 'approval', 'www',
'@', '#',
] + ['january', 'february', 'march', 'april', 'july', 'august', 'september', 'october', 'november', 'december']
def is_flight(info):
required_keys = ['departureTime', 'departure', 'arrivalTime', 'arrival']
logf = logging.getLogger('emailparser.is_flight')
logf.info('checking if is a flight: %s' % info)
if not all([k in info and info[k] != '' for k in required_keys]):
return False
for k in 'departure', 'arrival':
v = info[k].lower()
if v in ['manage flight', 'airport Lounges', 'total estimated cost', 'here']:
return False
if any([stopword in v for stopword in stopwords]):
return False
# if not is_airport(info[k]):
# return False
logf.info('yes, is a flight: %s' % info)
return True
def replace_booking_number(info, key, number):
if key not in info or info[key] == number:
info[key] = number
return
info[key] = info[key] + ', ' + number
def parse_flight_info(columns, values):
global_info = {}
logf = logging.getLogger('emailparser.parse_flight_info')
logf.debug('parsing row: %s %s' % (columns, [str(v)[:200] for v in values]))
for c, v in zip(columns, values):
number = v.text
if c.lower() in ['eticket number', 'flight confirmation number', 'airline booking number', 'reservation number', 'código de reserva', 'código de reservación', 'buchungsnummer', 'pnr #']:
logf.info('found ticketNumber key "%s" -> %s' % (c, number))
if is_airline_booking_number(number):
replace_booking_number(global_info, 'ticketNumber', number)
logf.info('ticketNumber <- %s' % number)
for c, v in zip(columns, values):
if c.lower() in ['eticket number', 'booking id', 'booking number', 'e-ticket #']:
number = v.text
logf.info('found booking number key "%s" -> %s' % (c, number))
if is_ticket_number(number) and 'bookingNumber' not in global_info:
replace_booking_number(global_info, 'bookingNumber', number)
global_info['bookingNumber'] = number
logf.info('bookingNumber <- %s' % number)
if is_airline_booking_number(number) and 'ticketNumber' not in global_info:
replace_booking_number(global_info, 'ticketNumber', number)
global_info['ticketNumber'] = number
logf.info('ticketNumber <- %s' % number)
if c.lower() in ['seats']:
global_info['airplaneSeat'] = v.text
if len(global_info) > 0:
logf.info('learned new global info: %s' % global_info)
return global_info
def is_airline_booking_number(number):
if len(number) != 6:
return False
for d in number:
if d not in '0123456789QWERTZUIOPASDFGHJKLYXCVBNM':
return False
return True
def is_ticket_number(number):
if len(number) < 6 or len(number) > 20:
return False
for d in number:
if d not in '0123456789QWERTZUIOPASDFGHJKLYXCVBNM':
return False
return True
def default_logging():
logging.basicConfig(filename='emailparser.log',level=logging.DEBUG)
logFormatter = logging.Formatter("[%(name)s %(levelname)s]: %(message)s")
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
consoleHandler.setLevel(logging.INFO)
logging.getLogger().addHandler(consoleHandler)
def parse_email_html_message(t, languages = None, email_info={}):
logf = logging.getLogger('emailparser.html')
reservations = []
if 'schema.org' in t:
logf.info('parsing with schema.org information')
b = BeautifulSoup.BeautifulSoup(t)
for fr in b.findAll('div', itemtype="http://schema.org/FlightReservation"):
fl = fr.find(itemprop="reservationFor", itemtype="http://schema.org/Flight")
if fl is None:
logf.debug('no reservationFor element in %s' % fr)
continue
info = dict(
reservationNumber = get_value(fr, 'meta', "reservationNumber", ''),
checkinUrl = get_value(fr, 'link', "checkinUrl", ''),
ticketNumber = get_value(fr, 'meta', "ticketNumber", ''),
ticketDownload = get_value(fr, 'link', "ticketDownloadUrl", ''),
ticketPrint = get_value(fr, 'link', "ticketPrintUrl", ''),
ticketText = get_value(fr, 'meta', "additionalTicketText", ''),
airplaneSeat = get_value(fr, 'meta', "airplaneSeat", ''),
boardingGroup = get_value(fr, 'meta', "boardingGroup", ''),
flightNumber = get_value(fl, 'meta', 'flightNumber', ''),
airline = get_name(fl, 'airline', 'http://schema.org/Airline', ''),
operator = get_name(fl, 'operatedBy', 'http://schema.org/Airline', ''),
departure = get_code(fl, 'departureAirport', 'http://schema.org/Airport', ''),
boardingTime = parsedate_simple(get_value(fl, 'meta', 'boardingTime', '')),
departureTime = parsedate_simple(get_value(fl, 'meta', 'departureTime', '')),
departureGate = get_value(fl, 'meta', 'departureGate', ''),
departureTerminal = get_value(fl, 'meta', 'departureTerminal', ''),
arrival = get_code(fl, 'arrivalAirport', 'http://schema.org/Airport', ''),
arrivalTime = parsedate_simple(get_value(fl, 'meta', 'arrivalTime', '')),
arrivalGate = get_value(fl, 'meta', 'arrivalGate', ''),
arrivalTerminal = get_value(fl, 'meta', 'arrivalTerminal', '')
)
if info['departureTime'] == '':
logf.warn('skipping: not enough schema.org information, missing departureTime')
logf.debug(str(info))
continue
# add email subject and date
info.update(email_info)
reservations.append(info)
if len(reservations) == 0:
b = BeautifulSoup.BeautifulSoup(t)
logf.info('parsing html email')
global_info = dict(email_info)
txt = b.text
txtlower = txt.lower()
for key, languages1 in ('flight confirmation number', ['en']), ('airline booking number', ['en']), ('confirmation', ['en']), ('digo de reserva', ['es']):
if key in txtlower:
logf.debug('found key: "%s"' % key)
languages = languages1
try:
i = txtlower.index(key) + len(key)
for number in txt[i:i+1000].split(':')[1:4:2]:
logf.debug('parsing flight confirmation number: %s: %s' % (key, number))
number = number.strip()[:6]
if is_airline_booking_number(number):
global_info['ticketNumber'] = number
logf.info('ticketNumber <- "%s"' % number)
except Exception as e:
logf.warn('parsing %s failed: %s' % (key, e))
for key, languages1 in ('booking id', ['en']), ('booking number', ['en']), ('buchungsnummer', ['de']):
if key in txtlower:
languages = languages1
logf.debug('found key: "%s"' % key)
try:
i = txtlower.index(key) + len(key)
number = txt[i:i+1000].split(':')[1]
logf.debug('parsing booking number: %s: %s' % (key, number))
for j in range(len(number)):
if number[j] not in '0123456789QWERTZUIOPASDFGHJKLYXCVBNM':
break
if j == 0:
continue
number = number[:j]
if is_ticket_number(number) and 'bookingNumber' not in global_info:
global_info['bookingNumber'] = number
logf.info('bookingNumber <- "%s"' % number)
if is_airline_booking_number(number) and 'ticketNumber' not in global_info:
global_info['ticketNumber'] = number
logf.info('ticketNumber <- "%s"' % number)
except Exception as e:
logf.warn('parsing %s failed: %s' % (key, e))
if languages is None:
if any([v in txtlower for v in 'confirmacion', 'itinerario', 'viaje']):
languages = ['es']
logf.info('language detected: es')
if any([v in txtlower for v in 'buchung', 'rechnung', 'flugzeug', 'stunden', 'danke', 'vielen dank']):
languages = ['de']
logf.info('language detected: de')
if any([v in txtlower for v in 'passenger information', 'traveler information', 'departs', 'arrives', 'reconfirm', 'payment', 'itinerary', 'receipt', 'confirmation', 'thank you']):
logf.info('language detected: en')
languages = ['en']
if languages is None:
logf.warn('language unsure')
for table in b.findAll('table'):
# make dictionaries for vertical tables
header = []
#logf.debug('')
#logf.debug('')
#logf.debug('found table: %s' % (table))
#logf.debug('')
#logf.debug('')
rows = table.findChildren('tr')
override_header = True
for row in rows:
# build header column
newheader = []
for th in row.findChildren('th'):
newheader.append(nicefy_htmltext(th.text.strip().strip(':')))
override_header = True
if len(newheader) == 0:
for td in row.findChildren('td'):
# has h1,h2,h3,h4 as child? probably a header
header_names = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6']
if any([c.name in header_names
for c in td.recursiveChildGenerator() if hasattr(c, 'name')]):
override_header = True
newheader.append(nicefy_htmltext(td.text.strip().strip(':')))
if len(newheader) == 1 or all([len(h) > 100 for h in newheader]):
newheader = []
if override_header:
logf.debug('table header: %s' % newheader)
if override_header and len(newheader) > 0 and any([len(h) > 0 for h in newheader]):
header = newheader
logf.debug('new header assigned')
override_header = False
continue
if len(header) != 0:
override_header = False
# deal with content
values = []
for td in row.findChildren('td'):
values.append(td)
if len(header) > 0:
info = parse_flight(header, values, global_info, languages=languages)
if is_flight(info):
if info not in reservations:
reservations.append(info)
else:
global_info.update(parse_flight_info(header, values))
else:
logf.info('skipping row, no header found so far, in "%s"' % row)
continue
if len(reservations) == 0:
for table in b.findAll('table'):
# make dictionaries for vertical tables
rows = table.findChildren('tr')
for row in rows:
values = row.findChildren('td')
# could be this information:
logf.info('no header, trying something with "%s"' % values)
testheader = ['Day, date', 'Departure', 'Arrival', 'Flight']
info = parse_flight(testheader, values, global_info, languages=languages)
if is_flight(info):
if info not in reservations:
reservations.append(info)
if len(reservations) == 0:
# try making bullet points out of all horizontal tables
header = []
values = []
logf.info('horizontal parsing')
info = {}
for row in b.findAll('tr'):
cells = row.findChildren('td')
if len(cells) == 0:
continue
cellheaders = row.findChildren('th')
logf.debug('learning from row: [%s] %s' % (
[c.text[:100] for c in cellheaders],
[c.text[:100] for c in cells]))
if len(cellheaders) == 0:
cell = cells[0].text.strip()
cells = cells[1:]
else:
cell = cellheaders[0].text.strip()
if len(cells) > 0 and cell.endswith(':'):
key = cell.rstrip(':').strip()
for v in cells:
if len(v.text) > 0 and len(v.text) < 100:
logf.info('learned fact: %s = %s' % (key, v.text))
header.append(nicefy_htmltext(key))
values.append(v)
elif ' to ' in cell and len(cell) < 150 and '.' not in cell and ' you ' not in cell and ' do ' not in cell:
parts = cell.split(' to ')
if len(parts) == 2:
logf.info('learned from-to: %s' % parts)
departure, arrival = parts
info['departure'] = departure
info['arrival'] = arrival
else:
logf.info('strange from-to: %s' % cell)
elif ':' in cell and len(cell) < 150 and '.' not in cell and ' you ' not in cell and ' do ' not in cell:
parts = cell.split(':')
if len(parts) == 2:
key, v = parts
key, v = key.strip(), v.strip()
v = BeautifulSoup.BeautifulSoup(v)
logf.info('learned fact: %s = %s' % (key, v))
header.append(nicefy_htmltext(key))
values.append(v)
else:
logf.info('strange fact: %s' % cell)
#for k, v in items:
# logf.info('learned following fact: %s = %s' % (k, v))
logf.info('finding global info %s -> %s' % (header, [v.text for v in values]))
global_info.update(parse_flight_info(header, values))
info.update(global_info)
logf.info('finding flight info %s -> %s' % (header, [v.text for v in values]))
info = parse_flight(header, values, info, languages=languages)
if is_flight(info):
if info not in reservations:
reservations.append(info)
for info in reservations:
for datekey in ['departureTime', 'arrivalTime', 'boardingTime']:
if info[datekey] != '':
info[datekey + 'str'] = info[datekey].strftime('%Y-%m-%d %H:%M')
else:
info[datekey + 'str'] = ''
return reservations
def parse_email_message(m, languages = None):
logf = logging.getLogger('emailparser')
email_info = {}
email_info['emailTime'] = datetime.datetime.fromtimestamp(m.get_date())
email_info['emailSubject'] = m.get_header('Subject')
reservations = []
for mp in m.get_message_parts():
logf.info('Subject:' + m.get_header('Subject'))
t = mp.get_payload(decode=True)
if mp.get_content_type() == 'text/html':
reservations += parse_email_html_message(t, languages=languages, email_info=email_info)
elif mp.get_content_type() == 'text/plain':
logf.debug('message: text/plain: %s' % t)
else:
logf.debug('message: other content type: "%s"' % mp.get_content_type())
return reservations
def parse_multiple_email_messages(query_results, languages=None):
all_reservations = []
import joblib
call = joblib.delayed(parse_email_html_message)
calls = []
messages = list(query_results)
logf = logging.getLogger('emailparser')
for i, m in enumerate(messages):
logf.info('handling %d/%d: "%s" from %s' % (i, len(messages), m.get_header('Subject'),
datetime.datetime.fromtimestamp(m.get_date()).strftime('%Y-%m-%d')))
email_info = {}
email_info['emailTime'] = datetime.datetime.fromtimestamp(m.get_date())
email_info['emailSubject'] = m.get_header('Subject')
email_info['emailId'] = m.get_message_id()
for mp in m.get_message_parts():
t = mp.get_payload(decode=True)
if mp.get_content_type() == 'text/html':
calls.append(call(t, languages=languages, email_info=email_info))
elif mp.get_content_type() == 'text/plain':
logf.debug('message: text/plain: %s' % t)
else:
logf.debug('message: other content type: %s' % mp.get_content_type())
logf.info('processing in parallel...')
for reservations_part in joblib.Parallel(n_jobs=-1)(calls):
all_reservations += reservations_part
logf.info('%d reservations' % len(all_reservations))
return all_reservations
<file_sep>/thunderbird-notmuch-import.py
import configparser
import os
import glob
import subprocess
thunderbirdpath = os.path.join(os.environ['HOME'], '.thunderbird')
cf = configparser.ConfigParser()
cf.read(os.path.join(thunderbirdpath, 'profiles.ini'))
paths = []
for s in cf.sections():
if cf.has_option(s, 'Default') and cf.getint(s, 'Default') == 1:
path = cf.get(s, 'Path')
if cf.getint(s, 'IsRelative') == 1:
path = os.path.join(thunderbirdpath, path)
paths += glob.glob(os.path.join(path, 'ImapMail/*/INBOX*'))
paths += glob.glob(os.path.join(path, 'Mail/*/Inbox'))
print '# getting notmuch path ...'
o = None
try:
p = subprocess.Popen(['notmuch', 'config', 'get', 'database.path'], stdout=subprocess.PIPE)
if p.wait() == 0:
o = p.stdout.read().strip()
except OSError:
print 'ERROR could not check notmuch config'
print ' make sure notmuch is installed and configured with "notmuch config"'
print '# will export maildir into %s' % o
print '# execute the following commands:'
for p in paths:
if p.endswith('.msf') or 'trash' in p.lower():
continue
print "perl mb2md-3.20.pl -s '%s' -d '%s'" % (p, o)
print
print 'notmuch new'
<file_sep>/README.rst
Flight & Hotel reservation email parser
========================================
Searches emails for flight tickets and hotel reservations.
Builds a brief summary view of all your reservations over time.
--------
Usage
--------
1. Adding emails to the email database
People store emails in various ways.
Here we support the notmuch database (https://notmuchmail.org/)
It is trivial to include maildir emails into notmuch with "notmuch new".
For email programs that store mail in mbox format, mb2md (http://batleth.sapienti-sat.org/projects/mb2md/) can be run to convert to maildir, followed by "notmuch new".
For Thunderbird, the thunderbird-notmuch-import.py script is provided,
which finds the relevant folders automatically.
2. Building the report
run with some email search keywords::
$ python summary.py 'schema.org/FlightReservation OR ticket OR flight OR flug OR viaje OR booking OR confirmation OR confirmacion'
It will give you some idea of what it finds, for example::
2015-11-28 Flight HOUSTON, TX --> WASHINGTON, DC
Departing 2015-11-28 19:10
from HOUSTON, TX
arriving 2015-11-28 23:05
To WASHINGTON, DC
Flight number UA1955
3. View report
For an example report see "summary.html"!
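
The same parsing step can also be driven directly from your own Python code,
mirroring what summary.py does internally (the notmuch query string and the
language list below are just examples)::

    import notmuch
    import emailparser

    db = notmuch.Database()
    query = db.create_query('schema.org/FlightReservation OR flight OR booking')
    reservations = emailparser.parse_multiple_email_messages(
        query.search_messages(), languages=['en', 'de'])
    for info in reservations:
        print info['departureTimestr'], info['departure'], '->', info['arrival']
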
Features implemented
----------------------
* Summary of all flights, with crucial information (when, from-to, ...)
* Including PDF eticket files, extracted from emails.
* Parallel parsing for speed-up.
* Parsing of the flight reservations schema following https://developers.google.com/gmail/markup/reference/flight-reservation
* Some heuristic parsing of html emails in English, Spanish and German.
If you have emails that can serve as additional test cases, please submit
them. Contributions to the parsing are welcome!
To Do
------------
* More heuristic parsing rules.
* Implement hotel bookings (https://developers.google.com/gmail/markup/reference/hotel-reservation). Booking.com and some others produce the json version.
<file_sep>/summary.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
import notmuch
import BeautifulSoup
import datetime
import dateutil.parser
import emailparser
import logging
from tzlocal import get_localzone
import sys
import os
logging.basicConfig(filename='emailparser.log',level=logging.DEBUG)
logFormatter = logging.Formatter("[%(name)s %(levelname)s]: %(message)s")
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
consoleHandler.setLevel(logging.WARN)
logging.getLogger().addHandler(consoleHandler)
if len(sys.argv) < 2:
sys.stderr.write("""SYNOPSIS: %(exe)s <query>
query: query to use. Has to be in quotes.
Example usage:
%(exe)s 'schema.org/FlightReservation OR ticket OR flight OR flug OR viaje OR booking OR confirmation OR confirmacion'
To speed up date parsing, you can specify the languages to consider with the
LANGUAGES environment variable:
LANGUAGES="en de es" <cmd>
Author: <NAME> (c) 2017
""" % dict(exe=sys.argv[0]))
sys.exit(1)
db = notmuch.Database()
query = sys.argv[1]
query = db.create_query(query)
#'schema.org/FlightReservation OR ticket OR flight OR flug OR viaje OR booking OR confirmation OR confirmacion')
languages = os.environ.get('LANGUAGES', None)
if languages is not None:
languages = languages.split()
#query = db.create_query('schema.org/FlightReservation OR eticket OR flight')
#languages = ['en']
#query = db.create_query('schema.org/FlightReservation')
all_reservations = emailparser.parse_multiple_email_messages(query.search_messages(), languages=languages)
#all_reservations = []
#messages = list(query.search_messages())
#for i, m in enumerate(messages[::-1]):
# print('handling %d/%d: "%s" from %s' % (i, len(messages), m.get_header('Subject'),
# datetime.datetime.fromtimestamp(m.get_date()).strftime('%Y-%m-%d')))
# reservations = emailparser.parse_email_message(m, languages = languages)
# print('got %d reservations' % len(all_reservations))
# all_reservations += reservations
print('got %d reservations' % len(all_reservations))
def prepend(info, k, prefix):
if info[k] and info[k] != '':
info[k] = prefix + info[k]
def dateConverter(day):
#day = dateutil.parser.parse(dateText)
if day.tzinfo is not None:
return day
print 'Warning: Using local time zone to order %s' % day
local_tz = get_localzone()
return day.replace(tzinfo=local_tz)
# sort by departure time
all_reservations.sort(key=lambda info: dateConverter(info['departureTime']))
previous = None
fout = open('summary.html', 'w')
fout.write("""<!doctype html><html lang="en">
<head>
<meta charset=utf-8>
<title>Flight summary</title>
<link rel="stylesheet" type="text/css" href="theme.css">
</head>
<body>
<h1>Flights</h1>
<table>
""")
file_id = 1
for info in all_reservations:
prepend(info, 'departureGate', 'Gate ')
prepend(info, 'arrivalGate', 'Gate ')
prepend(info, 'arrivalTerminal', 'Terminal ')
prepend(info, 'departureTerminal', 'Terminal ')
prepend(info, 'ticketNumber', 'Ticket#')
prepend(info, 'operator', ' operated by ')
flightday = info['departureTime'].date()
prepend(info, 'boardingTimestr', 'Boarding ')
filenames = []
msg_id = info['emailId']
for m in db.create_query('id:%s' % msg_id).search_messages():
for mp in m.get_message_parts():
if mp.get_content_type() == 'application/pdf' or (mp.get_content_type() == 'application/octet-stream' and mp.get_filename().lower().endswith('.pdf')):
filename = 'file_id%d.pdf' % file_id
with open(filename, 'w') as f:
f.write(mp.get_payload(decode=True))
filenames.append((mp.get_filename(), filename))
file_id += 1
info['pdffiles'] = ' | '.join(['<a class="pdffile" href="%s">%s</a>' % (filename, origfilename) for (origfilename, filename) in filenames])
if previous is not None and (flightday - previous).days > 14:
delta = (flightday - previous).days
print '=============', delta, 'days later'
fout.write("""
<tr>
<td colspan="3" class="gaplater">%d days later
</tr>
""" % delta)
else:
fout.write("""
<tr>
<td colspan="3" class="gap">
</tr>
""")
previous = flightday
info['departureDay'] = flightday.strftime('%Y-%m-%d')
info['departureJustTime'] = info['departureTime'].strftime('%H:%M')
info['emailday'] = info['emailTime'].date().strftime('%Y-%m-%d')
print """
%(departureDay)s Flight %(departure)s --> %(arrival)s
Departing %(departureTimestr)s %(boardingTime)s
from %(departure)s %(departureTerminal)s %(departureGate)s
arriving %(arrivalTimestr)s
To %(arrival)s %(arrivalTerminal)s %(arrivalGate)s
Flight number %(flightNumber)s with %(airline)s%(operator)s
%(ticketNumber)s %(ticketText)s %(ticketDownload)s %(ticketPrint)s
Email %(emailday)s "%(emailSubject)s"
""" % info
fout.write(("""
<tr><td class="left">
<h5>From</h5>
%(departure)s
%(departureTerminal)s
%(departureGate)s
<td class="middle" rowspan="2" >✈
<td class="right">
<h5>Destination</h5>
%(arrival)s
%(arrivalTerminal)s
%(arrivalGate)s
</tr>
<tr>
<td class="left">
<h5>Depart</h5>
%(departureJustTime)s
<td class="right">
<h5>Date</h5>
%(departureDay)s
</tr>
<tr>
<td colspan="3" class="details">
<h5>Arriving</h5>
%(arrivalTimestr)s
<h5>Flight number</h5>
Flight number %(flightNumber)s with %(airline)s%(operator)s
<h5>Ticket</h5>
%(ticketNumber)s %(ticketText)s %(ticketDownload)s %(ticketPrint)s
<div>%(boardingTime)s</div>
<div>%(pdffiles)s</div>
</td>
</tr>
<tr>
<td colspan="3" class="email">
<h5>Email</h5>
%(emailday)s "%(emailSubject)s"
</td>
""" % info).encode('utf-8'))
| c3f9cc53bfe032eddb321f1db2ae8a03512e045c | [
"Python",
"reStructuredText"
] | 5 | Python | JohannesBuchner/flight-reservation-emails | f741dec74e3c86e3b580f8df09aa95dee95b3c39 | 51b931b949fea9734dc40ac7b4a56ef91aaba89c | |
refs/heads/master | <repo_name>temitope101/Time-Series-Prediction-with-LSTM-Recurrent-Neural-Networks-in-Python-with-Keras<file_sep>/TensorFlow-Tutorials-for-Time-Series.py
# Setup (run these commands in a shell, not in Python):
# sudo -H pip install sklearn
# sudo -H pip install lstm_predictor
# sudo -H pip install keras
# sudo -H pip install readline
# readline doesn't work on Windows; pip reported for lstm_predictor:
#   Could not find a version that satisfies the requirement lstm_predictor (from versions: )
#   No matching distribution found for lstm_predictor
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from tensorflow.contrib import learn
from sklearn.metrics import mean_squared_error, mean_absolute_error
from lstm_predictor import generate_data, load_csvdata, lstm_model
LOG_DIR = './ops_logs'
TIMESTEPS = 10
RNN_LAYERS = [{'steps': TIMESTEPS}]
DENSE_LAYERS = [10, 10]
TRAINING_STEPS = 100000
BATCH_SIZE = 100
PRINT_STEPS = TRAINING_STEPS / 100
dateparse = lambda dates: pd.datetime.strptime(dates, '%d/%m/%Y %H:%M')
rawdata = pd.read_csv("./TotalLoad.csv",parse_dates={'timeline':['date', '(UTC)']},index_col='timeline', date_parser=dateparse)
rawdata = pd.read_csv("./TotalLoad.csv",index_col=['date', '(UTC)'])
# parsing errors in pandas
df = pd.read_csv("./TotalLoad.csv")
df
X, y = load_csvdata(rawdata, TIMESTEPS, seperate=False)
regressor = learn.TensorFlowEstimator(model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS),n_classes=0,verbose=1,steps=TRAINING_STEPS,optimizer='Adagrad',learning_rate=0.03, batch_size=BATCH_SIZE)
regressor = learn.Estimator(model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS),n_classes=0,verbose=1,steps=TRAINING_STEPS,optimizer='Adagrad',learning_rate=0.03, batch_size=BATCH_SIZE)
# See https://www.tensorflow.org/api_guides/python/contrib.learn
regressor = learn.Estimator(model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS))
# Log output observed when constructing the Estimator:
# WARNING:tensorflow:Using temporary folder as model directory: C:\Users\aroug\AppData\Local\Temp\tmp3nwb8sg9
# INFO:tensorflow:Using default config.
# INFO:tensorflow:Using config: {'_keep_checkpoint_max': 5, '_task_type': None, '_master': '', '_tf_config': gpu_options {
#   per_process_gpu_memory_fraction: 1
# }
# , '_environment': 'local', '_cluster_spec': <tensorflow.python.training.server_lib.ClusterSpec object at 0x0000014792A352E8>, '_save_checkpoints_secs': 600, '_task_id': 0, '_keep_checkpoint_every_n_hours': 10000, '_evaluation_master': '', '_tf_random_seed': None, '_num_ps_replicas': 0, '_save_checkpoints_steps': None, '_save_summary_steps': 100, '_is_chief': True}

# ++++++++++++++++
import numpy
import pandas
import tkinter
import matplotlib.pyplot as plt
dataset = pandas.read_csv('./TotalLoad.csv', usecols=[2], engine='python', skipfooter=3)
dataset
plt.plot(dataset)
plt.show()
import math
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error
dataframe = pandas.read_csv('./TotalLoad.csv', usecols=[2], engine='python', skipfooter=3)
dataset = dataframe.values
dataset = dataset.astype('float32')
# normalize the dataset
scaler = MinMaxScaler(feature_range=(0, 1))
dataset = scaler.fit_transform(dataset)
# split into train and test sets
train_size = int(len(dataset) * 0.67)
test_size = len(dataset) - train_size
train, test = dataset[0:train_size,:], dataset[train_size:len(dataset),:]
print(len(train), len(test))
# convert an array of values into a dataset matrix
def create_dataset(dataset, look_back=1):
    dataX, dataY = [], []
    for i in range(len(dataset)-look_back-1):
        a = dataset[i:(i+look_back), 0]
        dataX.append(a)
        dataY.append(dataset[i + look_back, 0])
    return numpy.array(dataX), numpy.array(dataY)
#readline fails on Windows
<file_sep>/lstm-for-sine-wave.py
# coding: utf-8
# In[13]:
# get_ipython().magic('matplotlib inline')
import numpy as np
from matplotlib import pyplot as plt
from tensorflow.contrib import learn
from sklearn.metrics import mean_squared_error, mean_absolute_error
from lstm_predictor import generate_data, lstm_model
# ## Libraries
#
# - numpy: package for scientific computing
# - pandas: data structures and data analysis tools
# - tensorflow: open source software library for machine intelligence
# - matplotlib: 2D plotting library
#
#
# - **learn**: Simplified interface for TensorFlow (mimicking Scikit Learn) for Deep Learning
# - mse: "mean squared error" as evaluation metric
# - **lstm_predictor**: our lstm class
#
# In[14]:
LOG_DIR = './ops_logs'
TIMESTEPS = 5
RNN_LAYERS = [{'steps': TIMESTEPS}]
DENSE_LAYERS = [10, 10]
TRAINING_STEPS = 100000
BATCH_SIZE = 100
PRINT_STEPS = TRAINING_STEPS / 100
# ## Parameter definitions
#
# - LOG_DIR: log directory
# - TIMESTEPS: RNN time steps
# - RNN_LAYERS: RNN layer information
# - DENSE_LAYERS: DNN layer sizes; [10, 10] means two dense layers with 10 hidden units each
# - TRAINING_STEPS: number of training steps
# - BATCH_SIZE: mini-batch size used for training
# - PRINT_STEPS: interval for intermediate output during training (printed every 1% of the total steps)
# In[15]:
regressor = learn.TensorFlowEstimator(model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS),
n_classes=0,
verbose=1,
steps=TRAINING_STEPS,
optimizer='Adagrad',
learning_rate=0.03,
batch_size=BATCH_SIZE)
# ## Create a regressor with TF Learn
#
# : Creates the model used for prediction, using the TensorFlowEstimator provided by the TF Learn library.
#
# **Parameters**:
#
# - model_fn: model used for training and prediction
# - n_classes: number of label classes (0: regression/prediction, 1 or more: classification) -- needs verification
# - verbose: print progress
# - steps: number of training steps
# - optimizer: optimization method ("SGD", "Adam", "Adagrad")
# - learning_rate: learning rate
# - batch_size: batch size
#
#
# In[ ]:
X, y = generate_data(np.sin, np.linspace(0, 100, 10000), TIMESTEPS, seperate=False)
# create a lstm instance and validation monitor
validation_monitor = learn.monitors.ValidationMonitor(X['val'], y['val'],
every_n_steps=PRINT_STEPS,
early_stopping_rounds=1000)
# ## Generate a dataset
#
# 1. generate_data: builds the data used for training from a given function.
# - fct: function used to generate the data
# - x: positions at which the function values are observed
# - time_steps: number of observations (time steps)
# - seperate: check multimodal
# 1. ValidationMonitor: monitors the validation process during training
# - x
# - y
# - every_n_steps: interval for intermediate status output
# - early_stopping_rounds
# In[16]:
regressor.fit(X['train'], y['train'], monitors=[validation_monitor], logdir=LOG_DIR)
# ## Train and validation
#
# - fit: trains on the training data, recording progress via the monitor and the log directory
#
#
# In[17]:
predicted = regressor.predict(X['test'])
mse = mean_squared_error(y['test'], predicted)
print ("Error: %f" % mse)
# ## Evaluate using test set
#
# Evaluate our hypothesis using test set. The mean squared error (MSE) is used for the evaluation metric.
#
#
# In[18]:
plot_predicted, = plt.plot(predicted, label='predicted')
plot_test, = plt.plot(y['test'], label='test')
plt.legend(handles=[plot_predicted, plot_test])
# ## Plotting
#
# Then, plot both predicted values and original values from test set.
<file_sep>/README.md
Derived from:
- http://machinelearningmastery.com/time-series-prediction-lstm-recurrent-neural-networks-python-keras/
- https://github.com/tgjeon
# TensorFlow Tutorial for Time Series Prediction
This tutorial is designed to make it easy to learn TensorFlow for time series prediction.
Each tutorial topic includes both code and a notebook with descriptions.
## Tutorial Index
#### MNIST classification using Recurrent Neural Networks (RNN)
- Classification for MNIST using RNN ([notebook](https://github.com/tgjeon/TensorFlow-Tutorials-for-Time-Series/blob/master/mnist-rnn.ipynb))
#### Time series prediction using Recurrent Neural Networks (RNN)
- Prediction for sine wave function using Gaussian process ([code](https://github.com/tgjeon/TensorFlow-Tutorials-for-Time-Series/blob/master/gp-for-sine-wave.py) / [notebook](https://github.com/tgjeon/TensorFlow-Tutorials-for-Time-Series/blob/master/gp-for-sine-wave.ipynb))
- Prediction for sine wave function using RNN ([code](https://github.com/tgjeon/TensorFlow-Tutorials-for-Time-Series/blob/master/lstm-for-sine-wave.py) / [notebook](https://github.com/tgjeon/TensorFlow-Tutorials-for-Time-Series/blob/master/lstm-for-sine-wave.ipynb))
- Prediction for electricity price ([code](https://github.com/tgjeon/TensorFlow-Tutorials-for-Time-Series/blob/master/lstm-for-epf.py) / notebook)
## Slide materials
- [Slides on slideshare (TensorFlow-KR Meetup)](http://www.slideshare.net/TaegyunJeon1/electricity-price-forecasting-with-recurrent-neural-networks)
- [Slides on github (KSC 2016 Tutorial)](https://github.com/tgjeon/TensorFlow-Tutorials-for-Time-Series/raw/master/KSC2016%20-%20Recurrent%20Neural%20Networks.pptx)
#### Dependencies
```
Python (3.4.4)
TensorFlow (r0.9)
numpy (1.11.1)
pandas (0.16.2)
cuda (to run examples on GPU)
sudo apt install python3-tk (for tcl/tk)
sudo apt install tk
```
#### Dataset
- Energy Price Forecast 2016: http://complatt.smartwatt.net
- Or use the uploaded csv file for price history for 2015.
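
The price CSV is loaded with a pandas date parser; a minimal sketch, taken from `lstm-for-epf.py` in this repository (adjust the path to wherever you placed the file):

```
import pandas as pd

dateparse = lambda dates: pd.datetime.strptime(dates, '%d/%m/%Y %H:%M')
rawdata = pd.read_csv("./input/ElectricityPrice/RealMarketPriceDataPT.csv",
                      parse_dates={'timeline': ['date', '(UTC)']},
                      index_col='timeline', date_parser=dateparse)
```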
<file_sep>/lstm-for-epf.py
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from tensorflow.contrib import learn
from sklearn.metrics import mean_squared_error, mean_absolute_error
from lstm_predictor import generate_data, load_csvdata, lstm_model
LOG_DIR = './ops_logs'
TIMESTEPS = 10
RNN_LAYERS = [{'steps': TIMESTEPS}]
DENSE_LAYERS = [10, 10]
TRAINING_STEPS = 100000
BATCH_SIZE = 100
PRINT_STEPS = TRAINING_STEPS / 100
dateparse = lambda dates: pd.datetime.strptime(dates, '%d/%m/%Y %H:%M')
rawdata = pd.read_csv("./input/ElectricityPrice/RealMarketPriceDataPT.csv",
parse_dates={'timeline': ['date', '(UTC)']},
index_col='timeline', date_parser=dateparse)
X, y = load_csvdata(rawdata, TIMESTEPS, seperate=False)
regressor = learn.TensorFlowEstimator(model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS),
n_classes=0,
verbose=1,
steps=TRAINING_STEPS,
optimizer='Adagrad',
learning_rate=0.03,
batch_size=BATCH_SIZE)
validation_monitor = learn.monitors.ValidationMonitor(X['val'], y['val'],
every_n_steps=PRINT_STEPS,
early_stopping_rounds=1000)
regressor.fit(X['train'], y['train'], monitors=[validation_monitor], logdir=LOG_DIR)
predicted = regressor.predict(X['test'])
mse = mean_absolute_error(y['test'], predicted)
print ("Error: %f" % mse)
plot_predicted, = plt.plot(predicted, label='predicted')
plot_test, = plt.plot(y['test'], label='test')
plt.legend(handles=[plot_predicted, plot_test])
| 741a0e16d27d60b7b375d14b320cc540d10d4d72 | [
"Markdown",
"Python"
] | 4 | Python | temitope101/Time-Series-Prediction-with-LSTM-Recurrent-Neural-Networks-in-Python-with-Keras | 71937e6b25736c17bdc68abea0519f88f7410077 | 4bb08bca1fbff84ee0f80ee0be63437114fbeb9d | |
refs/heads/master | <repo_name>Mateus-magarinus/FACULDADE-Desenvolvimento_de_aplicacoes_Web<file_sep>/Desenvolvimento_de_aplicacoes_Web/TaskPoints/ListaExercicios/php/ex01.php
<html>
<head>
<title>Exercicio 01</title>
<meta charset="utf-8">
</head>
<body>
<h2>Ordenação de 5 numeros</h2>
<form method="post" action="">
<input type="number" name="num01"><br>
<input type="number" name="num02"><br>
<input type="number" name="num03"><br>
<input type="number" name="num04"><br>
<input type="number" name="num05">
<br><br>
<input type="submit" name="enviar" value="enviar">
</form>
</body>
</html>
<?php
if(isset($_POST['enviar'])){
$numeros = [
$_POST['num01'],
$_POST['num02'],
$_POST['num03'],
$_POST['num04'],
$_POST['num05']
];
sort($numeros);
echo "Ordenados: ";
print_r($numeros);
}
?><file_sep>/Desenvolvimento_de_aplicacoes_Web/TaskPoints/ListaExercicios/php/ex08.php
<html>
<head>
<title>Exercicio 08</title>
<meta charset="utf-8">
</head>
<body>
<h2>Conta letras de txt</h2>
<form method="post" action="" enctype="multipart/form-data">
<input type="file" name="arquivo" id="arquivo"><br><br>
<br><br>
<input type="submit" name="enviar" value="enviar">
</form>
</body>
</html>
<?php
function contaLetras($txt, $letra){
$aux = 0;
$aux += substr_count($txt, strtoupper($letra));
$aux += substr_count($txt, strtolower($letra));
return $aux;
}
if(isset($_POST['enviar'])){
$array = file($_FILES["arquivo"]["tmp_name"]);
$texto = "";
for ($i=0; $i < count($array); $i++) {
$texto .= $array[$i];
}
$alfabeto = str_split('ABCDEFGHIJKLMNOPQRSTUVWXYZ');
foreach ($alfabeto as $i => $v) {
print_r($v . ' => ' . contaLetras($texto, $v) . '<br>');
}
}
?><file_sep>/Desenvolvimento_de_aplicacoes_Web/TaskPoints/ListaExercicios/php/ex10.php
<html>
<head>
<title>Exercicio 10</title>
<meta charset="utf-8">
</head>
<body>
<h2>Inverte Números</h2>
<form method="post" action="">
<input type="text" name="palavra"><br>
<br><br>
<input type="submit" name="enviar" value="enviar">
</form>
</body>
</html>
<?php
if (isset($_POST['enviar'])) {
print_r($_POST['palavra'] . ' => ' . strrev($_POST['palavra']));
}
?><file_sep>/Desenvolvimento_de_aplicacoes_Web/TaskPoints/ListaExercicios/php/ex06.php
<html>
<head>
<title>Exercicio 06</title>
<meta charset="utf-8">
</head>
<body>
<h2>Numeros</h2>
<form method="post" action="">
<input type="number" name="num01"><br>
<br><br>
<input type="submit" name="enviar" value="enviar">
</form>
</body>
</html>
<?php
if(isset($_POST['enviar'])){
$numero = $_POST['num01'];
$fat=1;
for ($i=1; $i <= $numero; $i++) {
$fat *= $i;
}
echo "Fatorial de $numero é $fat";
}
?><file_sep>/Desenvolvimento_de_aplicacoes_Web/TaskPoints/ListaExercicios/php/ex03.php
<html>
<head>
<title>Exercicio 03</title>
<meta charset="utf-8">
</head>
<body>
<h2>Abastecer</h2>
<form method="post" action="">
<p>Valor Gasolina</p><input type="number" name="num01" step="any"><br>
<p>Valor Álcool</p><input type="number" name="num02" step="any"><br>
<br><br>
<input type="submit" name="enviar" value="enviar">
</form>
</body>
</html>
<?php
if(isset($_POST['enviar'])){
$gasolina = $_POST['num01'];
$alcool = $_POST['num02'];
$media = $gasolina * 0.7;
if($media > $alcool)
echo "abasteça com álcool";
else
echo "abasteça com gasolina";
}
?><file_sep>/Desenvolvimento_de_aplicacoes_Web/TaskPoints/ListaExercicios/php/ex09.php
<html>
<head>
<title>Exercicio 09</title>
<meta charset="utf-8">
</head>
<body>
<h2>Gera combinações</h2>
<form method="post" action="">
<input type="text" name="palavra"><br>
<br><br>
<input type="submit" name="enviar" value="enviar">
</form>
</body>
</html>
<?php
function gen_nos(&$set, &$results)
{
for ($i = 0; $i < count($set); $i++) {
$results[] = $set[$i];
$tempset = $set;
array_splice($tempset, $i, 1);
$tempresults = array();
gen_nos($tempset, $tempresults);
foreach ($tempresults as $res) {
$results[] = $set[$i] . $res;
}
}
}
if (isset($_POST['enviar'])) {
$palavra = $_POST['palavra'];
$results = array();
$set = str_split($palavra);
gen_nos($set, $results);
$array = array();
foreach ($results as $i => $v) {
if(strlen($v) == strlen($palavra)) array_push($array, $v);
}
print_r($array);
}
?><file_sep>/Desenvolvimento_de_aplicacoes_Web/TaskPoints/ListaExercicios/php/ex07.php
<html>
<head>
<title>Exercicio 07</title>
<meta charset="utf-8">
</head>
<body>
<h2>Entre numeros</h2>
<form method="post" action="">
<input type="number" name="num01"><br>
<input type="number" name="num02"><br>
<br><br>
<input type="submit" name="enviar" value="enviar">
</form>
</body>
</html>
<?php
if(isset($_POST['enviar'])){
$numeros = [$_POST['num01'], $_POST['num02']];
for ($i=$numeros[0]+1; $i < $numeros[1]; $i++) {
echo "$i ";
}
}
?><file_sep>/Desenvolvimento_de_aplicacoes_Web/TaskPoints/ListaExercicios/php/ex02.php
<html>
<head>
<title>Exercicio 02</title>
<meta charset="utf-8">
</head>
<body>
<h2>Média Aritmética</h2>
<form method="post" action="">
<input type="number" name="num01"><br>
<input type="number" name="num02"><br>
<input type="number" name="num03"><br>
<input type="number" name="num04"><br>
<br><br>
<input type="submit" name="enviar" value="enviar">
</form>
</body>
</html>
<?php
if(isset($_POST['enviar'])){
$numeros = [
$_POST['num01'],
$_POST['num02'],
$_POST['num03'],
$_POST['num04']
];
$media = 0;
foreach ($numeros as $n => $v) {
$media += $v;
}
$media = $media / 4;
echo "Media: $media <br>";
if($media >= 7)
echo "Aluno aprovado";
else if($media >= 5 && $media < 7)
echo "Aluno em recuperação";
else if($media < 5)
echo "Aluno reprovado";
}
?><file_sep>/Desenvolvimento_de_aplicacoes_Web/TaskPoints/ListaExercicios/php/ex04.php
<html>
<head>
<title>Exercicio 04</title>
<meta charset="utf-8">
</head>
<body>
<h2>IMC</h2>
<form method="post" action="">
<input type="number" name="num01" step="any"><br>
<input type="radio" id="sexo" name="num02" value="1">Homem<br>
<input type="radio" id="sexo" name="num02" value="2">Mulher<br>
<br><br>
<input type="submit" name="enviar" value="enviar">
</form>
</body>
</html>
<?php
if(isset($_POST['enviar'])){
$a = $_POST['num01'];
$sexo = $_POST['num02'];
if($sexo == 1){
echo "Peso Ideal Homem: ";
echo (72.7 * $a) - 58;
}
else{
echo "Peso Ideal Mulher: ";
echo (62.1 * $a) - 44;
}
}
?> | 8b51dc5d1f6016a9552a88abd3cc455a8ac55767 | [
"PHP"
] | 9 | PHP | Mateus-magarinus/FACULDADE-Desenvolvimento_de_aplicacoes_Web | 0d8377416a16985d1b9167809d97ca1c27030585 | af2288319c692c8b56e06e3176d70afd09190a57 | |
refs/heads/master | <file_sep>
# `bash_ec2_completion`
Bash helper script that fetches ec2 instances via aws-cli
and adds their Name=>PublicIp pairs to ~/.ssh/config
and thus allows you to see them with SSH completion
and to connect by their names
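
For illustration, the section appended to `~/.ssh/config` comes out roughly like this
(the `# AWS BEGIN` / `# AWS END` markers come from the script; host name, address and
user below are made-up placeholders):

```
# AWS BEGIN
# This section is created automatically at <date>
Host my-ec2-instance
    Hostname 203.0.113.10
    User myuser
# AWS END
```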
## The benefits
`bash_ec2_completion` is written in pure shell script and
depends only on aws-cli and perl.
## Configuration
Autofetch is performed daily.
To set the default user name for SSH connection,
change the `$_EC2_DEFAULT_SSH_USER` variable.
## Dependencies:
* bash_completion (with SSH completion of course)
* aws-cli (must be configured)
* perl
## Installation
Put `bash_ec2_completion.sh` somewhere on your system
and then source it in your `~/.bash_profile`:
```
. /path/to/bash_ec2_completion.sh
```
For OS X + brew users it may look like this:
```
if [ -f $(brew --prefix)/etc/bash_completion ]; then
. $(brew --prefix)/etc/bash_completion
. /path/to/bash_ec2_completion.sh
fi
```<file_sep>#!/bin/bash
#
# bash_ec2_completion
# Bash helper script that fetches ec2 instances via aws-cli
# and adds their Name=>PublicIp pairs to ~/.ssh/config
# and thus allows you to see them with SSH completion
# and to connect by their names
#
# Autofetch is performed daily.
# To set the default user name for SSH connections
# change the $_EC2_DEFAULT_SSH_USER variable below
#
# Dependencies:
# * aws-cli (must be configured)
# * perl
#
# Default user name for ssh connections to EC2 instances
# You can change it if you use a username different from your local one
_EC2_DEFAULT_SSH_USER=$(whoami)
# Reload period of EC2 instance list
_EC2_UPDATE_INTERVAL="1440"
_EC2_DEBUG=""
# Path to ssh_config file
_SSH_USER_CONFIG_PATH="$HOME/.ssh/config"
_EC2_LOCK="/tmp/_ec2_completion_reload.lock.d"
# Fetches PublicIp=>InstanceName pairs and prints an ssh_config-ready text
_ec2_completion_fetch()
{
aws ec2 describe-instances \
--query 'Reservations[].Instances[].[PublicIpAddress,Tags[?Key==`Name`].Value[]]' \
--output text \
| sed '$!N;s/\n/ /' \
| while read line; do \
cols=($line)
if [[ "${cols[1]}" != "" ]]; then
cat << EOT
Host ${cols[1]}
Hostname ${cols[0]}
User ${_EC2_DEFAULT_SSH_USER}
EOT
fi
done
}
_ec2_completion_debug()
{
if [[ "${_EC2_DEBUG}" != "" ]]; then
echo "[`date` $$] $1" >> /tmp/_ec2_completion_debug.log;
fi
}
# Updates the contents of ssh_config file
_ec2_completion_reload()
{
_ec2_completion_debug "_ec2_completion_reload started"
rm -f ${_SSH_USER_CONFIG_PATH}_bak
rm -f ${_SSH_USER_CONFIG_PATH}_tmp
cp ${_SSH_USER_CONFIG_PATH} ${_SSH_USER_CONFIG_PATH}_bak
cp ${_SSH_USER_CONFIG_PATH} ${_SSH_USER_CONFIG_PATH}_tmp
perl -0pi -e 's/\s*# AWS BEGIN.+AWS END//sg' ${_SSH_USER_CONFIG_PATH}_tmp
echo '# AWS BEGIN' >> ${_SSH_USER_CONFIG_PATH}_tmp
echo '# This section is created automatically at ' `date` >> ${_SSH_USER_CONFIG_PATH}_tmp
_ec2_completion_fetch >> ${_SSH_USER_CONFIG_PATH}_tmp
echo '# AWS END' >> ${_SSH_USER_CONFIG_PATH}_tmp
rm -f ${_SSH_USER_CONFIG_PATH}
mv ${_SSH_USER_CONFIG_PATH}_tmp ${_SSH_USER_CONFIG_PATH}
rm -rf ${_EC2_LOCK};
_ec2_completion_debug "_ec2_completion_reload finished"
}
_ec2_completion_run()
{
_ec2_completion_debug "Call _ec2_completion_run"
if mkdir ${_EC2_LOCK} 2>/dev/null; then
_ec2_completion_debug " lock acked"
_ec2_completion_reload >/dev/null 2>/dev/null &
elif [[ -n $(find ${_EC2_LOCK} -mmin +${_EC2_UPDATE_INTERVAL}) ]]; then
_ec2_completion_debug " stale lock"
rm -rf ${_EC2_LOCK};
else
_ec2_completion_debug " lock not acked"
fi
}
# Check if it is time to refresh the list of EC2 instances, and updates
# the list if necessary. Then calls the default ssh completion script
_ec2_completion_complete()
{
# refresh at most once per update interval (daily by default)
_ec2_completion_debug "Call _ec2_completion_complete"
if [[ -n $(find ${_SSH_USER_CONFIG_PATH} -mmin +${_EC2_UPDATE_INTERVAL}) ]]
then
_ec2_completion_debug " run!"
_ec2_completion_run
fi
# proxy call to the default ssh completion handler
_ssh "$@"
}
# attach to ssh
complete -o default -F _ec2_completion_complete ssh
| 728cba0174946aaf0884263cf40e95afc4de61c1 | [
"Markdown",
"Shell"
] | 2 | Markdown | phoebus/bash_ec2_completion | 82c25311d8e94f6efa13161eef9017f934a62e48 | a1bb295b350cef378fd663de948a899a61069721 | |
refs/heads/master | <repo_name>johnsonchon/dotimg<file_sep>/home.js
// search
const homeForm = document.querySelector('.home-form');
const nav = document.querySelector('.nav')
window.onscroll = () => {
const top = window.scrollY
if (top >= 100) {
nav.classList.add('nav-shadow')
} else {
nav.classList.remove('nav-shadow')
}
}
homeForm.addEventListener('submit', function (e) {
// e.target.el
e.preventDefault();
let searchedPic = e.target.elements.searchHome.value;
console.log(searchedPic);
localStorage.clear();
localStorage.setItem('Search', searchedPic);
window.location.href = './results.html';
})
// Put random pixa images on homepage
const getPixa = function () {
const pixaApiKey = "<KEY>";
let heroImgA = [
'forest',
'mountain',
'ocean',
'rain'
]
let mainImg = [
'paris',
'rome',
'new york',
'san francisco',
'japan'
]
let topImg = [
'love',
'friends',
'family'
]
let botImg = [
'cat',
'dog',
'elephant',
'wolf'
]
let heroImgAR = heroImgA[Math.floor(Math.random() * heroImgA.length)];
let mainImgR = mainImg[Math.floor(Math.random() * mainImg.length)];
let topImgR = topImg[Math.floor(Math.random() * topImg.length)];
let botImgR = botImg[Math.floor(Math.random() * botImg.length)];
// pick a random index into the returned hits (Pixabay returns up to 20 results per page by default)
let randomNumF = Math.floor(Math.random() * 20);
fetch('https://pixabay.com/api/?key=' + pixaApiKey + '&q=' + heroImgAR + '&image_type=photo&', {
method: 'GET',
body: JSON.stringify()
}).then(function (response) {
return response.json();
}).then(function (data) {
// console.log(data);
const heroPhoto = document.querySelector('.hero-img');
const heroPhotoLink = document.querySelector('.hero-img-link');
heroPhoto.src = data.hits[randomNumF].largeImageURL;
});
fetch('https://pixabay.com/api/?key=' + pixaApiKey + '&q=' + mainImgR + '&image_type=photo&', {
method: 'GET',
body: JSON.stringify()
}).then(function (response) {
return response.json();
}).then(function (data) {
// console.log(data);
const leftPhoto = document.querySelector('.main-img');
const leftPhotoLink = document.querySelector('.main-img-link');
leftPhoto.src = data.hits[randomNumF].largeImageURL;
leftPhotoLink.href = data.hits[randomNumF].pageURL;
});
fetch('https://pixabay.com/api/?key=' + pixaApiKey + '&q=' + topImgR + '&image_type=photo&', {
method: 'GET',
body: JSON.stringify()
}).then(function (response) {
return response.json();
}).then(function (data) {
// console.log(data);
const topPhoto = document.querySelector('.photo-top');
const topPhotoLink = document.querySelector('.photo-top-link');
topPhoto.src = data.hits[randomNumF].largeImageURL;
topPhotoLink.href = data.hits[randomNumF].pageURL;
});
fetch('https://pixabay.com/api/?key=' + pixaApiKey + '&q=' + botImgR + '&image_type=photo&', {
method: 'GET',
body: JSON.stringify()
}).then(function (response) {
return response.json();
}).then(function (data) {
// console.log(data);
const botPhoto = document.querySelector('.photo-bot');
const botPhotoLink = document.querySelector('.photo-bot-link');
botPhoto.src = data.hits[randomNumF].largeImageURL;
botPhotoLink.href = data.hits[randomNumF].pageURL;
// console.log(data.hits)
});
}
getPixa(); | 2601eb034b5093affb2b0a7618aa82823ee5a486 | [
"JavaScript"
] | 1 | JavaScript | johnsonchon/dotimg | 1875ed95202dbdd1df6684b6572e084ec0afc3ea | fa04fc64bddd5555ccf705d36728acf241533785 | |
refs/heads/master | <file_sep>def cipher(message, rot):
ciphertxt = ""
polishw = ["zero", "jeden", "dwa", "trzy", "cztery", "pięć", "sześć", "siedem", "osiem", "dziewięć"]
numberspl = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
for polishwords, polishnumbers in zip(polishw, numberspl):
if rot == polishwords:
rot = rot.replace(polishwords, polishnumbers)
rot = int(rot)
break
englishw = ["zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]
numbersen = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
for englishwords, englishnumbers in zip(englishw, numbersen):
if rot == englishwords:
rot = rot.replace(englishwords, englishnumbers)
rot = int(rot)
break
if len(message) == 0:
return "No message to cipher!"
if len(message) > 200:
return "Message must be maximum 200 characters long"
if not isinstance(rot, int):
return "Invalid rot type!"
elif rot < 0:
return "Cannot cipher a message by negative rot value!"
elif rot == 0:
return "Message not ciphered: " + message
else:
for character in message:
if character == " ":
ciphertxt += character
continue
polishch = ["ó", "ą", "ę", "ć", "ń", "ś", "ż", "ź", "ł"]
standardch = ["o", "a", "e", "c", "n", "s", "z", "z", "l"]
for polish_char, standard_char in zip(polishch, standardch):
character = character.replace(polish_char, standard_char)
character = ord(character)
if character < 32:
return "Invalid character in the message!"
elif character > 126:
return "Invalid character in the message!"
else:
character = (character - 32 + rot) % (127 - 32) + 32
ciphertxt += chr(character)
return "Ciphered message: " + ciphertxt
def decipher(text, rot):
deciphertxt = ""
polishw = ["zero", "jeden", "dwa", "trzy", "cztery", "pięć", "sześć", "siedem", "osiem", "dziewięć"]
numberspl = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
for polishwords, polishnumbers in zip(polishw, numberspl):
if rot == polishwords:
rot = rot.replace(polishwords, polishnumbers)
rot = int(rot)
break
englishw = ["zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]
numbersen = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
for englishwords, englishnumbers in zip(englishw, numbersen):
if rot == englishwords:
rot = rot.replace(englishwords, englishnumbers)
rot = int(rot)
break
if len(text) == 0:
return "No ciphered message to decode!"
if len(text) > 200:
return "Ciphered message must be maximum 200 characters long"
if not isinstance(rot, int):
return "Invalid rot type!"
elif rot < 0:
return "Rot value cannot be a negative number"
elif rot == 0:
return "Message not deciphered: " + text
for character in text:
if character == " ":
deciphertxt += character
continue
character = ord(character)
if character < 32:
return "Invalid character in the ciphered message!"
elif character > 126:
return "Invalid character in the ciphered message!"
else:
character = (character - 32 - rot) % (127 - 32) + 32
deciphertxt += chr(character)
return "Deciphered message: " + deciphertxt
<file_sep>import unittest
import sys, os
PACKAGE_PARENT = '..'
SCRIPT_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__))))
sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, PACKAGE_PARENT)))
import caesar_cipher
class TestCipher(unittest.TestCase):
def test_cipher_alpha_only_should_return_ciphered_msg(self):
expected_result="Ciphered message: oruhp"
self.assertEqual(expected_result, caesar_cipher.cipher("lorem", 3), msg="Does not return ciphered msg")
def test_cipher_negative_num_rot_should_return_error_msg(self):
expected_result="Cannot cipher a message by negative rot value!"
self.assertEqual(expected_result,caesar_cipher.cipher("lorem",-3), msg="Does not return expected error msg")
def test_cipher_empty_str_should_return_error_msg(self):
expected_result="No message to cipher!"
self.assertEqual(expected_result,caesar_cipher.cipher("",3), msg="Does not return expected error msg")
def test_cipher_special_char_should_return_ciphered_msg(self):
expected_result="Ciphered message: $C&'(a)-"
self.assertEqual(expected_result,caesar_cipher.cipher("!@#$%^&*",3), msg="Does not return ciphered msg")
def test_cipher_num_should_return_ciphered_msg(self):
expected_result="Ciphered message: 4567"
self.assertEqual(expected_result,caesar_cipher.cipher("1234",3), msg="Does not return ciphered msg")
def test_cipher_ASCII_range_should_rot_from_32(self):
expected_result="Ciphered message: !"
self.assertEqual(expected_result, caesar_cipher.cipher("~",2), msg="Does not rot from the beginning of range")
def test_cipher_polish_alphabet_should_change_to_standard(self):
expected_result="Ciphered message: oruhp"
self.assertEqual(expected_result, caesar_cipher.cipher("lóręm",3), msg="Does not change polish letters to standard")
def test_cipher_input_out_of_defined_ASCII_range_should_return_error_msg(self):
expected_result="Invalid character in the message!"
self.assertEqual(expected_result, caesar_cipher.cipher("λφα",3), msg="Does not return Error msg")
def test_cipher_rot_equals_zero_should_return_error_message(self):
expected_result="Message not ciphered: lorem"
self.assertEqual(expected_result, caesar_cipher.cipher("lorem",0), msg="does not return message and ciphered text")
def test_cipher_rot_other_than_integer_should_return_error_msg(self):
expected_result="Invalid rot type!"
self.assertEqual(expected_result,caesar_cipher.cipher("lorem",0.5))
self.assertEqual(expected_result, caesar_cipher.cipher("lorem","#"))
def test_cipher_rot_string_type_number_as_word_in_PL_should_return_ciphered_msg(self):
expected_result="Ciphered message: oruhp"
self.assertEqual(expected_result,caesar_cipher.cipher("lorem","trzy"))
def test_cipher_rot_string_type_number_as_word_in_EN_should_return_ciphered_msg(self):
expected_result="Ciphered message: oruhp"
self.assertEqual(expected_result,caesar_cipher.cipher("lorem","three"))
def test_cipher_message_longer_than_200_char_should_return_error_msg(self):
expected_result="Message must be maximum 200 characters long"
self.assertEqual(expected_result,caesar_cipher.cipher("Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",3))
def test_cipher_spaces_should_be_left_unciphered(self):
expected_result="Ciphered message: oruhp lsvxp"
self.assertEqual(expected_result,caesar_cipher.cipher("lorem ipsum",3))
class TestDecipher(unittest.TestCase):
def test_decipher_ciphered_message_should_decipher_by_rot(self):
expected_result="Deciphered message: lorem"
self.assertEqual(expected_result, caesar_cipher.decipher("oruhp",3))
def test_decipher_ciphered_message_should_decipher_only_in_defined_ASCII_range(self):
expected_result="Deciphered message: ~"
self.assertEqual(expected_result,caesar_cipher.decipher("!",2))
def test_decipher_ciphered_message_character_out_of_defined_ASCII_scope_should_return_error_message(self):
expected_result="Invalid character in the ciphered message!"
self.assertEqual(expected_result,caesar_cipher.decipher("λφα",3))
def test_decipher_rot_equals_zero_should_return_error_message(self):
expected_result="Message not deciphered: oruhp"
self.assertEqual(expected_result,caesar_cipher.decipher("oruhp",0))
def test_decipher_negative_rot_value_should_return_error_message(self):
expected_result="Rot value cannot be a negative number"
self.assertEqual(expected_result,caesar_cipher.decipher("oruhp",-3))
def test_decipher_empty_str_should_return_error_msg(self):
expected_result="No ciphered message to decode!"
self.assertEqual(expected_result,caesar_cipher.decipher("",3), msg="Does not return expected error msg")
def test_decipher_rot_other_than_integer_should_return_error_msg(self):
expected_result="Invalid rot type!"
self.assertEqual(expected_result,caesar_cipher.decipher("lorem",0.5))
self.assertEqual(expected_result, caesar_cipher.decipher("lorem","#"))
def test_decipher_rot_string_type_number_as_word_in_PL_should_return_deciphered_msg(self):
expected_result="Deciphered message: lorem"
self.assertEqual(expected_result,caesar_cipher.decipher("oruhp","trzy"))
def test_decipher_rot_string_type_number_as_word_in_EN_should_return_deciphered_msg(self):
expected_result="Deciphered message: lorem"
self.assertEqual(expected_result,caesar_cipher.decipher("oruhp","three"))
def test_decipher_alpha_only_should_return_ciphered_msg(self):
expected_result="Deciphered message: lorem"
self.assertEqual(expected_result, caesar_cipher.decipher("oruhp", 3), msg="Does not return ciphered msg")
def test_decipher_num_should_return_deciphered_msg(self):
expected_result="Deciphered message: 1234"
self.assertEqual(expected_result,caesar_cipher.decipher("4567",3), msg="Does not return ciphered msg")
def test_decipher_special_char_should_return_deciphered_msg(self):
expected_result="Deciphered message: !@#$%^&*"
self.assertEqual(expected_result,caesar_cipher.decipher("$C&'(a)-",3), msg="Does not return ciphered msg")
def test_decipher_message_longer_than_200_char_should_return_error_msg(self):
expected_result="Ciphered message must be maximum 200 characters long"
self.assertEqual(expected_result,caesar_cipher.decipher("Oruhp#lsvxp#groru#vlw#dphw,#frqvhfwhwxu#dglslvflqj#holw,#vhg#gr#hlxvprg#whpsru#lqflglgxqw#xw#oderuh#hw#groruh#pdjqd#doltxd.#Xw#hqlp#dg#plqlp#yhqldp#txlv#qrvwuxg#hahuflwdwlrq#xoodpfr#oderulv#qlvl#xw#doltxls#ha#hd#frpprgr#frqvhtxdw.#Gxlv#dxwh#luxuh#groru#lq#uhsuhkhqghulw#lq#yroxswdwh#yholw#hvvh#flooxp#groruh#hx#ixjldw#qxood#sduldwxu.#Hafhswhxu#vlqw#rffdhfdw#fxslgdwdw#qrq#surlghqw,#vxqw#lq#fxosd#txl#riilfld#ghvhuxqw#proolw#dqlp#lg#hvw#oderuxp.",3))
def test_decipher_spaces_should_be_left_undeciphered(self):
expected_result="Deciphered message: lorem ipsum"
self.assertEqual(expected_result,caesar_cipher.decipher("oruhp lsvxp",3))
if __name__ == "__main__":
unittest.main() | 9cb6886863626d59e844141c8fa49b3b40cd284a | [
"Python"
] | 2 | Python | monika-voy/caesar-cipher | c124c6e99e2beecc6e5cdeecfd2553d94489b19d | e002e78b5e1dfd3bde382cb04f3357be4f1e174c | |
refs/heads/master | <repo_name>maxnawa31/C-T-C-I<file_sep>/README.md
# C-T-C-I<file_sep>/algos.py
def fibonacci(n):
if n < 2:
return n
else:
return fibonacci(n - 1) + fibonacci(n - 2)
# stairs combination: number of distinct ways to climb n steps taking 1, 2 or 3 at a time
# (the mutable default dict also acts as a cache that persists across calls)
def davis(n, memo={1: 1, 2: 2, 3: 4}):
if n in memo:
return memo[n]
memo[n] = davis(n - 1, memo) + davis(n - 2, memo) + davis(n - 3, memo)
return memo[n]<file_sep>/algos.js
///Fibonacci
function fibonacci(n) {
if (n < 2) {
return n;
}
return fibonacci(n - 1) + fibonacci(n - 2);
}
//Fibonacci memoization
function fibonacci(n, memo = []) {
if (n < 2) {
return n;
} else if (memo[n]) {
return memo[n];
} else {
memo[n] = fibonacci(n - 2, memo) + fibonacci(n - 1, memo);
return memo[n];
}
}
// stairs combination: number of distinct ways to climb n steps taking 1, 2 or 3 at a time
function davis(n, memo = { 1: 1, 2: 2, 3: 4 }) {
if (n in memo) {
return memo[n];
}
memo[n] = davis(n - 1, memo) + davis(n - 2, memo) + davis(n - 3, memo);
return memo[n];
}
| a631618b2c1c80b5fd7a16ba86102105c9b039de | [
"Markdown",
"Python",
"JavaScript"
] | 3 | Markdown | maxnawa31/C-T-C-I | c35dc2fdbdeac0fb496f7c6e36d8fe5d7357dd42 | a3a9711536ab64576346f86a7bccdd57f4184bc4 | |
refs/heads/master | <file_sep>package center.rodrigo.main;
import java.io.IOException;
import com.leapmotion.leap.Controller;
import center.rodrigo.core.Core;
public class Main {
public static void main(String[] args) {
Core core = new Core();
Controller controller = new Controller();
controller.addListener(core);
System.out.println("Press Enter to quit...");
try {
System.in.read();
} catch (IOException e) {
e.printStackTrace();
}
controller.removeListener(core);
}
}
<file_sep>package center.rodrigo.core;
import com.leapmotion.leap.*;
import com.leapmotion.leap.Gesture.State;
public class Core extends Listener {
private Frame frame;
public void onConnect(Controller controller) {
System.out.println("Connected");
controller.enableGesture(Gesture.Type.TYPE_SWIPE);
controller.enableGesture(Gesture.Type.TYPE_CIRCLE);
controller.enableGesture(Gesture.Type.TYPE_SCREEN_TAP);
controller.enableGesture(Gesture.Type.TYPE_KEY_TAP);
}
public void onFrame(Controller controller) {
frame = controller.frame();
GestureList gestures = frame.gestures();
// Hands
for (Hand hand : frame.hands()) {
Arm arm = hand.arm();
Vector normal = hand.palmNormal();
Vector direction = hand.direction();
System.out.println(hand.isLeft() ? "Mao Esquerda" : "Mao Direita");
// System.out.println("\n id: " + hand.id()
// + "\n Palma: " + hand.palmPosition());
// System.out.println("\n pitch: " + Math.toDegrees(direction.pitch())
// + "\n roll: " + Math.toDegrees(normal.roll())
// + "\n yaw: " + Math.toDegrees(direction.yaw()));
// System.out.println("\n Brašo: " + arm.direction()
// + "\n Pulso: " + arm.wristPosition()
// + "\n Cotovelo: " + arm.elbowPosition());
// Fingers
for (Finger finger : hand.fingers()) {
// System.out.println(finger.type()
// + " id: " + finger.id()
// + ", length: " + finger.length()
// + "mm, width: " + finger.width() + "mm");
// Bones
for (Bone.Type boneType : Bone.Type.values()) {
Bone bone = finger.bone(boneType);
// System.out.println(" " + bone.type()
// + ", start: " + bone.prevJoint()
// + ", end: " + bone.nextJoint()
// + ", direction: " + bone.direction());
}
}
}
// Tools
for (Tool tool : frame.tools()) {
// System.out.println(" Tool id: " + tool.id()
// + ", position: " + tool.tipPosition()
// + ", direction: " + tool.direction());
}
for (int i = 0; i < gestures.count(); i++) {
Gesture gesture = gestures.get(i);
switch (gesture.type()) {
case TYPE_CIRCLE:
CircleGesture circle = new CircleGesture(gesture);
String sentido;
if (circle.pointable().direction().angleTo(circle.normal()) <= Math.PI / 2)
sentido = "horario";
else
sentido = "anti-horario";
double sweptAngle = 0;
if (circle.state() != State.STATE_START) {
CircleGesture previousUpdate = new CircleGesture(controller.frame(1).gesture(circle.id()));
sweptAngle = (circle.progress() - previousUpdate.progress()) * 2 * Math.PI;
}
// System.out.println("id: " + circle.id()
// + " " + sentido
// + ", " + circle.state()
// + "\n progress: " + circle.progress()
// + ", radius: " + circle.radius()
// + ", angle: " + Math.toDegrees(sweptAngle));
break;
case TYPE_SWIPE:
SwipeGesture swipe = new SwipeGesture(gesture);
// System.out.println(" Swipe id: " + swipe.id()
// + ", " + swipe.state()
// + ", position: " + swipe.position()
// + ", direction: " + swipe.direction()
// + ", speed: " + swipe.speed());
break;
case TYPE_SCREEN_TAP:
ScreenTapGesture screenTap = new ScreenTapGesture(gesture);
// System.out.println(" Screen Tap id: " + screenTap.id()
// + ", " + screenTap.state()
// + ", position: " + screenTap.position()
// + ", direction: " + screenTap.direction());
break;
case TYPE_KEY_TAP:
KeyTapGesture keyTap = new KeyTapGesture(gesture);
// System.out.println(" Key Tap id: " + keyTap.id()
// + ", " + keyTap.state()
// + ", position: " + keyTap.position()
// + ", direction: " + keyTap.direction());
break;
default:
System.out.println("Unknown gesture type.");
break;
}
}
}
}
<file_sep># BasicLeapMotionJava
Basic Leap Motion Java
Eclipse project
| 95aad56ca03a52d7fe30aae000e47c0e63ea6abb | [
"Markdown",
"Java"
] | 3 | Java | rodrigaun/BasicLeapMotionJava | 26fd1b452f8c974c7b1a0e62c5a5de1514ff5897 | 2db895bb6e6877d6446634b2358acc52ac308aad | |
refs/heads/master | <file_sep>/* eslint-disable */
export const regPhone = /^1\d{10}$/ // mobile phone number
export const regEmail = /^[A-Za-z0-9\u4e00-\u9fa5]+@[a-zA-Z0-9_-]+(\.[a-zA-Z0-9_-]+)+$/
// export const regEmail = /^\w+([-+.]\w+)*@\w+([-.]\w+)*\.\w+([-.]\w+)*$/ // email
export const regpassword = /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)[^]{8,20}$/ // password: 8-20 characters, containing upper and lower case letters and digits
export const regIDCard = /(^\d{15}$)|(^\d{18}$)|(^\d{17}(\d|X|x)$)/ // national ID card number
export const regCode = /^\d{6}$/ // verification code
export const regSignPassword = /^\d{6}$/ // signing password
export const regName = /^[\u4e00-\u9fa5\.]{2,15}$/ // name: 2-15 Chinese characters, may contain "·"
// export const regComponyName = /^[\u4e00-\u9fa5]{2,50}$/ // company/organization name: 2-50 Chinese characters
export const regComponyName = null
export const regCreditCode = /^[0-9a-zA-Z]{18}$/ // credit code: 18 digits, may contain letters
export const regOrganizationcCode = /^[0-9a-zA-Z]{1,18}$/ // organization code: up to 18 digits, may contain letters
export const regBankCardNumber = /^([0-9]\d{10,24})$/ // bank card number / payment account number
export const regBankName = /.{2,50}/ // bank branch name
export const regDepartmentName = /^[\u4e00-\u9fa5]{2,10}$/ // department name
export const regApplicationName = /^[A-Za-z0-9\u4e00-\u9fa5]{1,20}$/ // application name
export const regSealName = /^[\u4e00-\u9fa5]{1,20}$/ // seal name: up to 20 Chinese characters
export const regPostalCode = /^[0-9]{6}$/ // postal code
export const regPostalChinese = /^[\u4e00-\u9fa5]{1,50}$/ // Chinese characters
/* eslint-disable */
<file_sep>import axios from 'axios'
import config from '../config/index'
import { getToken, setToken } from '@/utils/common'
const { WEB_API } = config
export const TOKEN_KEY = 'CLIENT_TOKEN'
const baseUrl = WEB_API || ''
// Create a standalone axios instance
const service = axios.create({
baseURL: baseUrl,
// Define the common request headers
headers: {
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8'
},
// Configure the request timeout
timeout: 60000
// If using JSONP, this option can be set to send cookie credentials; not needed with a proxy or CORS
// withCredentials: true
})
service.interceptors.request.use(config => {
const token = getToken()
if (token) {
config.headers['exchange-token'] = token
}
config.headers.Accept = 'application/json'
config.headers['Content-Type'] = 'application/json; charset=utf-8'
if (config.type === 'download') {
config.responseType = 'arraybuffer'
}
return config
}, err => {
return Promise.reject(err)
})
service.interceptors.response.use(
(response) => {
// Handling of file downloads
if (response.config.type === 'download') {
const contentType = response.headers['content-type']
const disposition = response.headers['content-disposition']
let fileName = response.config.downloadName
if (disposition) {
if (disposition.split('fileName=').length > 1) {
const name = disposition.split('fileName=')[1].replace(/"/g, '')
if (name) {
fileName = decodeURI(name)
}
}
}
const blob = new Blob([response.data], { type: contentType })
const link = document.createElement('a')
link.href = window.URL.createObjectURL(blob)
link.download = fileName
link.click()
window.URL.revokeObjectURL(link.href)
return response
} else {
const { data } = response
if (data.code === 0) {
return data
} else {
return Promise.reject(data)
}
}
}, (error) => {
if (error.response && error.response.status === 401) {
setToken('')
}
return Promise.reject(error.response)
}
)
export default service
<file_sep>export default {
/**
* @description Title shown in the browser tab
*/
title: '北京国脉信安科技业务演示管理系统',
/**
* @description Route name of the default home page, defaults to home
*/
homeName: 'homeIndex',
/**
* @description Base URL for API requests
*/
WEB_API: process.env.VUE_APP_WEB_API
}
<file_sep>const path = require('path')
const CompressionPlugin = require('compression-webpack-plugin')
const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin
const resolve = dir => {
return path.join(__dirname, dir)
}
const theme = require('./src/config/theme')
const webTheme = theme[process.env.VUE_APP_WEB_THEME]
let str = ''
for (const key in webTheme) {
str += `$${key}: ${webTheme[key]}; `
}
module.exports = {
publicPath: process.env.VUE_APP_BASE_URL,
lintOnSave: false,
configureWebpack: config => {
const newPlugins = [
new CompressionPlugin({
algorithm: 'gzip',
test: /\.(js|css)$/, // file name pattern to match
threshold: 10240, // only compress assets larger than 10 KB
deleteOriginalAssets: false, // do not delete the original files
minRatio: 0.8 // minimum compression ratio
})
]
config.plugins = [...config.plugins, ...newPlugins]
},
chainWebpack: config => {
if (process.env.NODE_ENV === 'production') {
config.plugin('webpack-report').use(BundleAnalyzerPlugin, [{ analyzerMode: 'static' }])
}
config.resolve.alias.set('@', resolve('src'))
},
css: {
loaderOptions: {
sass: {
prependData: str
},
less: {
globalVars: webTheme
}
}
},
devServer: {
// proxy: {
// '^/api': {
// target: 'http://127.0.0.1:7001', // target API address for the proxy
// secure: false,
// changeOrigin: true // enable the proxy and create a virtual server locally
// }
// }
},
productionSourceMap: false
}
<file_sep># spa-base-web (Vue scaffold base architecture)
#### 1. Built on the official Vue CLI scaffold, currently @vue/cli 4.5.14 (requires Node.js v10 or later)
#### 2. Selected presets: Vue 2.x, Babel, Vuex, Router, ESLint-Standard, CSS Pre-processors
#### 3. Included configuration
- environment variables
- ESLint checks
- proxy for cross-origin requests
- Gzip compression
- build compression
- CSS pre-processor presets
- mock data
- sass and less styles providing shared global styles and global variables
#### 4. src directory structure
- api [web front-end API calls]
- assets
- - iconfont [icon font library]
- - images [image resources]
- components
- - common [shared components]
- config
- - index.js [system configuration]
- - theme.js [color theme variables]
- directive [custom directives]
- layout
- - main.vue [page layout]
- mock [mock data]
- plugin [plugin configuration]
- router
- - index.js [route guards, navigation handling]
- - routers.js [route definitions]
- store [Vuex configuration]
- style [global and custom styles]
- utils [utility helpers]
- views [pages]
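#### 5. Environment variables (example)
A minimal `.env.development` sketch using only the variables referenced in this repo's code; the values are placeholders, and the theme value must be a key exported by `src/config/theme.js`:
```
VUE_APP_BASE_URL=/
VUE_APP_WEB_API=http://127.0.0.1:7001
VUE_APP_WEB_THEME=<a key exported by src/config/theme.js>
```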
<file_sep>import Mock from 'mockjs'
import {
login,
logout,
currentUserInfo
} from './login'
// Configure an Ajax response delay; useful for testing how the app behaves under high network latency
Mock.setup({
timeout: 300
})
const BaseURL = process.env.VUE_APP_WEB_API
Mock.mock(BaseURL + '/api/user/login', 'post', login)
Mock.mock(BaseURL + '/api/user/logout', 'post', logout)
Mock.mock(BaseURL + '/api/user/current', 'get', currentUserInfo)
export default Mock
| f986f37b6d66c68da5a4f3cf7a54063b50a70176 | [
"JavaScript",
"Markdown"
] | 6 | JavaScript | 412826tao/spa-base-web | bd9ea7974fd67aa800916babf257e72f95c1c208 | 12131017ae18cdf64334a6cb79f2993fc8d02bb1 | |
refs/heads/master | <repo_name>leofusco89/AndroidAvanzadoClase02<file_sep>/settings.gradle
include ':app'
rootProject.name='AndroidAvanzadoClase02'
<file_sep>/app/src/main/java/com/example/androidavanzadoclase02/utils/UsersFactory.kt
package com.example.androidavanzadoclase02.utils
import com.example.androidavanzadoclase02.models.User
class UsersFactory private constructor(){
companion object{
private var sInstance: UsersFactory? = null
fun getInstance(): UsersFactory{
//sInstance si no es null, llama al constuctor
sInstance = sInstance ?: UsersFactory()
return sInstance!!
}
}
private var userList: MutableList<User>? = null
init {
userList = ArrayList()
addUsers()
}
private fun addUsers(){
val user1 = User(0, "Juan", "juan.com", "4143-1243")
val user2 = User(1, "Martina", "martina.com", "4555-1555")
val user3 = User(2, "José", "jose.com", "4666-1666")
userList!!.add(user1)
userList!!.add(user2)
userList!!.add(user3)
}
fun getAllUsers(): List<User>{
return userList!!
}
fun getUserById(id: Int): User{
return userList!![id]
}
}<file_sep>/app/src/main/java/com/example/androidavanzadoclase02/UsersAdapter.kt
package com.example.androidavanzadoclase02
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.BaseAdapter
import android.widget.TextView
import com.example.androidavanzadoclase02.models.User
class UsersAdapter( private var users: List<User>): BaseAdapter() {
override fun getView(p0: Int, p1: View?, p2: ViewGroup?): View {
val vista = p1 ?: LayoutInflater.from(p2!!.context).inflate(R.layout.item_user, p2, false)
var tvUserName = vista.findViewById<TextView>(R.id.tv_user_name)
var tvUserWebsite = vista.findViewById<TextView>(R.id.tv_user_website)
tvUserName.text = getItem(p0).name
tvUserWebsite.text = getItem(p0).website
return vista
}
override fun getItem(p0: Int): User {
return users[p0]
}
override fun getItemId(p0: Int): Long {
return users[p0].id.toLong()
}
override fun getCount(): Int {
return users.size
}
}<file_sep>/app/src/main/java/com/example/androidavanzadoclase02/MainActivity.kt
package com.example.androidavanzadoclase02
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.widget.Toast
class MainActivity : AppCompatActivity(), UsersListFragment.OnUserSelectedListener {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
val fragmentManager = supportFragmentManager
val fragmentTransaction = fragmentManager.beginTransaction()
var userListFragment = UsersListFragment()
fragmentTransaction.add(R.id.fragment_container, userListFragment)
fragmentTransaction.commit()
}
override fun onUserSelected(userId: Int) {
val userDetailFragment = UserDetailFragment.newInstance(userId)
supportFragmentManager
.beginTransaction()
.replace(R.id.fragment_container, userDetailFragment)
.addToBackStack(null) //Agregamos el fragment al stack, así al volver, no sale de la app,
//sino que vuelve a cargar el anterior, ejecuta su ciclo post onCreate, no ejecuta el onCreate
//pero sí el resto del ciclo. Recibe un texto que identifica la transacción, pero no lo vamos a usar
.commit()
}
}
<file_sep>/app/src/main/java/com/example/androidavanzadoclase02/UsersListFragment.kt
package com.example.androidavanzadoclase02
import android.content.Context
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import com.example.androidavanzadoclase02.models.User
import com.example.androidavanzadoclase02.utils.UsersFactory
import kotlinx.android.synthetic.main.fragment_users_list.*
import java.lang.ClassCastException
class UsersListFragment : Fragment() {
private lateinit var users: List<User>
private lateinit var adapter: UsersAdapter
private lateinit var listener: OnUserSelectedListener
//Para que las activities implementen el método
interface OnUserSelectedListener{
fun onUserSelected(userId: Int)
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
val factory = UsersFactory.getInstance()
users = factory.getAllUsers()
adapter = UsersAdapter(users)
}
override fun onCreateView( inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View? {
return inflater.inflate(R.layout.fragment_users_list, container, false)
}
override fun onActivityCreated(savedInstanceState: Bundle?) {
super.onActivityCreated(savedInstanceState)
lv_users_list.adapter = adapter
lv_users_list.setOnItemClickListener { _, _, _, id ->
listener.onUserSelected(id.toInt())
}
}
override fun onAttach(context: Context?) {
super.onAttach(context)
//Acá guardamos la activity que llama a este fragment, para poder usarla después y saber
//en que fragment hay que hacer cambios, sino no sabemos, basicamente guardamos el
//contexto de la instancia que se está cargando
try {
listener = context as OnUserSelectedListener
}catch (ex: ClassCastException){
throw ClassCastException("Tenés que implementar la interfaz OnUserSelectedListener desde la activity")
}
}
}<file_sep>/app/src/main/java/com/example/androidavanzadoclase02/UserDetailFragment.kt
package com.example.androidavanzadoclase02
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import com.example.androidavanzadoclase02.utils.UsersFactory
import kotlinx.android.synthetic.main.fragment_user_detail.*
class UserDetailFragment: Fragment() {
companion object{
private const val USER_ID = "USER_ID" //const obliga a dar valor hardcodeado, solo puede usarse en companion o por fuera de la clase
fun newInstance(userId: Int): UserDetailFragment { //Llamado newInstance por convención
//Recibe por parámetro el número de usuario y vamos a devolver una instancia
//con el dato de usuario
val userDetailFragment = UserDetailFragment()
//Creamos un argumento/bundle, que es un paquete de datos y que sirve para que no se
//pierdan datos cuando se destruya el fragment, por ejemplo, para cuando vemos otra
//app y volvemos a la nuestra, debemos volver a cargarla y necesitamos saber que
//usuario cargar:
val args = Bundle()
args.putInt(USER_ID, userId)
//Le pasamos el argumento a la instancia del fragment para que no se pierda ese dato
userDetailFragment.arguments = args
return userDetailFragment
}
}
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle? ): View? {
return inflater.inflate(R.layout.fragment_user_detail, container, false)
}
override fun onActivityCreated(savedInstanceState: Bundle?) {
super.onActivityCreated(savedInstanceState)
//Acá vamos a recuperar el valor del usuario para asignarlo en el TextView
val factory = UsersFactory.getInstance()
val userId = arguments!!.getInt(USER_ID)
val user = factory.getUserById(userId)
tv_user_name_detail.text = user.name
}
} | 1cff546c3d0a0384ef5d7ba2fd27d62ad1b56b5d | [
"Kotlin",
"Gradle"
] | 6 | Gradle | leofusco89/AndroidAvanzadoClase02 | 9a8a9411c8b898dbd7027b12eee7be169f554c2f | e0c96ce6a3d0418901078afa9d6f2a226b127452 | |
refs/heads/main | <file_sep>#Display the output
print("First Python File")
<file_sep># Data-science-practise
This is a repository for data science and data analysis and to practise some procedures, tools and applications of Data Science.
Lets begin! :)
<file_sep>#Creating a test source code file for the child branch
print("This was created within the child branch")
| 0a79dba65134ac294ec43a3cf35ef5f215155f8c | [
"Markdown",
"Python"
] | 3 | Python | nit-b-s/Data-science-practise | c42dfa54ff468ab1b3e4b967b7b3860349896737 | c0c9558e57356673c4f03c19c2530e7387024d8e | |
refs/heads/master | <repo_name>mordehaigerman/Glove-SQL-Tables<file_sep>/schema-pgsql.sql
/*
* Glove SQL Tables
*
* Copyright (c) 2010-2013 <NAME> (http://mordehaigerman.com)
*
* This source file is subject to the New BSD License that is bundled
* with this package in the file LICENSE.txt. It is also available at
* http://www.opensource.org/licenses/bsd-license.php
*
* GitHub: https://github.com/mordehaigerman/Glove-SQL-Tables
* Version: 1.0.0
* Package: PostgreSQL
*/
/*
* Users Table
*/
create table users (
`user_id` serial not null,
`username` varchar(255) not null,
`password` varchar(255) not null,
`role` varchar(255) not null,
`status` varchar(255) not null,
`date` timestamptz not null,
primary key (`user_id`),
unique (`username`)
);
/*
* Users Data Table
*/
create table `users_data` (
`user_id` int not null,
`name` varchar(255) not null,
`content` text not null,
primary key (`user_id`, `name`),
foreign key (`user_id`) references `users` (`user_id`)
);
/*
* Posts Table
*/
create table `posts` (
`post_id` serial not null,
`user_id` int not null,
`url` varchar(255) not null,
`status` varchar(255) not null,
`date` timestamptz not null,
primary key (`post_id`),
foreign key (`user_id`) references `users` (`user_id`)
);
/*
* Posts URL Index
*/
create index `posts_url` on `posts` (`url`);
/*
* Posts Data Table
*/
create table `posts_data` (
`post_id` int not null,
`name` varchar(255) not null,
`content` text not null,
primary key (`post_id`, `name`),
foreign key (`post_id`) references `posts` (`post_id`)
);
/*
* Posts Tags Table
*/
create table `posts_tags` (
`post_id` int not null,
`tag` varchar(255) not null,
primary key (`post_id`, `tag`),
foreign key (`post_id`) references `posts` (`post_id`)
);
/*
* Pages Table
*/
create table `pages` (
`page_id` serial not null,
`user_id` int not null,
`parent_id` int null,
`namespace` varchar(255) not null,
`url` varchar(255) not null,
`status` varchar(255) not null,
`date` timestamptz not null,
primary key (`page_id`),
foreign key (`user_id`) references `users` (`user_id`)
);
/*
* Pages URL Index
*/
create index `pages_url` on `pages` (`url`);
/*
* Pages Data Table
*/
create table `pages_data` (
`page_id` int not null,
`name` varchar(255) not null,
`content` text not null,
primary key (`page_id`, `name`),
foreign key (`page_id`) references `pages` (`page_id`)
);
| 32b640c163d044bf54599e817bb40aff9faacecd | [
"SQL"
] | 1 | SQL | mordehaigerman/Glove-SQL-Tables | 5088eb158b154682071d413525649ef1a7d482fc | 6e6ac0725e929e61979c3faf2d1bff90f2fa1ec4 | |
refs/heads/master | <repo_name>HussainAther/drmario<file_sep>/src/colors.py
black = (0, 0, 0)
blue = (43, 85, 117)
brightred = (255,0,0)
brightgreen = (0,255,0)
green = (0, 200, 0)
darkblue = (76, 48, 124)
darkgray = (15, 15, 15)
red = (182, 70, 58)
white = (255, 255, 255)
yellow = (255, 248, 68)
<file_sep>/src/dublock.py
class dublock(object):
"""
A dublock is a block made of 2 square parts, blocka and blockb.
This is the megavitamin of Dr. Mario that the player controls.
"""
def __init__(self, blocks):
self._blocks = blocks
self._images = None
def getorient(self):
"""
Get the orientation (from the list o) of each block in the dublock (megavitamin).
This is used for sprite purposes.
"""
a, b = self.blocks # Get the two individuals blocks of the
# dublock.
if a.y < b.y:
o = ["bottom", "top"]
elif a.y > b.y:
o = ["top", "bottom"]
elif a.x < b.x:
o = ["left", "right"]
elif a.x > b.x:
o = ["right", "left"]
return o
def setblocks(self, blocka, blockb):
self._blocks = (blocka, blockb)
@property
def blocks(self):
return self._blocks
def ishorizontal(self):
return self.blocks[0].y == self.blocks[1].y
<file_sep>/README.md
# Dr. Mario

This is a Python-based Dr. Mario game. You don't even have to go through any pre-medical requirements to play.
## Instructions
1. Install `pygame` and download the repository (using the green "Clone or download" button).
2. Unzip `music.zip`.
3. Run `python src/drmario.py` in the directory to play.
<file_sep>/src/utils.py
class Pos(object):
def __init__(self, x, y):
self.x = x
self.y = y
def add(self, other):
if isinstance(other, self):
return Pos(self.x + other.x, self.y + other.y)
elif isinstance(other, (tuple, list)):
return Pos(self.x + other[0], self.y + other[1])
else:
raise TypeError("Unknown object in Pos add")
radd = add
iadd = add
def eq(self, other):
if isinstance(other, self):
return self.x == other.x and self.y == other.y
elif isinstance(other, (tuple, list)):
return self.x == other[0] and self.y == other[1]
else:
raise TypeError("Unknown object in Pos eq")
def ne(self, other):
"""
When the two aren't equal.
"""
return not self.eq(other)
<file_sep>/src/ex.py
class InvalidOperation(Exception):
pass
class InvalidParameter(Exception):
pass
class OutOfBoard(Exception):
pass
class BottomReached(Exception):
pass
class PositionOccupied(Exception):
pass
<file_sep>/src/block.py
import ex
import os
import pygame
import utils
"""
Coding the blocks is straightforward with a position and color
as well as status for cleared and falling for each one. Here we define
properties of the blocks themselves that can be used later for creating
vitamins.
"""
width = 20
height = 14
class Color(object):
"""
Give the object a color depending on how it spawns
and how it touches another block.
"""
clear = 0
blue = 1
red = 2
yellow = 3
class block(object):
"""
Give properties to the block itself. It needs to fall
at a certain speed and keep track of its color.
"""
def __init__(self, x, y, color=None):
"""
Initialize a cleared empty block.
"""
self._x = x
self._y = y
self._color = Color.clear
self._falling = False
if color:
self.setcolor(color)
def loadimage(self, name):
"""
Load the sprite from the img folder.
"""
fullname = os.path.join("img", "megavitamin", name)
try:
image = pygame.image.load(fullname)
except pygame.error as message:
print("Cannot load image:", name)
raise SystemExit(message)
image = image.convert()
return image
def setcolor(self, color):
"""
Set the color of the block to another color.
"""
if self._color != Color.clear:
raise ex.InvalidOperation("Trying to set color on a colored block.")
if color != Color.blue and color != Color.red and color != Color.yellow:
raise ex.InvalidParameter("Invalid color value: {}".format(color))
self._color = color
def setimage(self, color, orient):
"""
For a color and orientation, set and load the image.
"""
return loadimage(str(orient) + str(color) + str.bmp))
def clear(self):
"""
Clear the color.
"""
self._color = Color.clear
def setfalling(self, falling):
"""
Set a piece as falling.
"""
self._falling = falling
def __repr__(self):
"""
Get the information about a block.
"""
return "Block ({}, {}) color: {} falling: {}".format(self.x, self.y, self.color, self._falling)
def isclear(self):
"""
Is the block clear?
"""
return self.color == Color.clear
def isfalling(self):
"""
Is the block falling?
"""
return self._falling
@property
def x(self):
"""
Return x-coordinate of the block.
"""
return self._x
@property
def y(self):
"""
Return y-coordinate of the block.
"""
return self._y
@property
def xpixels(self):
"""
Get the number of pixels on the screen in the
x-direction.
"""
return self.x*width
@property
def ypixels(self):
"""
In the y-direction
"""
return self.y*height
@property
def pos(self):
"""
Get the current block position.
"""
return utils.Pos(self.x, self.y)
@property
def color(self):
"""
Get the block color.
"""
return self._color
<file_sep>/src/game.py
# Import the functionality of other files.
import board
import ex
# Import installed modules.
import pygame
import sys
# Import the Board class from the board.py file, the keyboard functions
# from pygame, the colors from colors.py, and the Pos (position) function from
# utils.py.
from board import Board
from pygame.constants import KEYUP, KEYDOWN, K_DOWN, K_LEFT, K_RIGHT, K_SPACE, K_UP, K_q, QUIT
from colors import black, blue, brightred, brightgreen, darkblue, darkgray, green, red, white, yellow
from utils import Pos
fps = 60 # frames per second
windowwidth = 400 # width of the overall window
windowheight = 400 # height of the window
# where the Dr. Mario game box will be
boardoffsetx = (windowwidth - 140) / 2
boardoffsety = (windowheight - 400) / 2
boardborder = 1 # border thickness
blockfallinterval = 300 # time before the block falls automatically
speedfallmultiplier = 100.0
def gameintro(self):
"""
Display the menu until the user starts the game.
"""
intro = True
pygame.mixer.music.load("music/birabuto.wav")
pygame.mixer.music.play(0)
while intro:
win = pygame.display.set_mode((400, 400))
win.fill((0,0,0))
titlefont = pygame.font.SysFont("Times New Roman", 36, bold = True)
instructionfont = pygame.font.SysFont("Times New Roman", 24)
startfont = pygame.font.SysFont("Times New Roman", 24)
title = titlefont.render("Dr. Mario", 36, (255, 255, 255))
instruction1 = instructionfont.render("Use arrow keys to move", 1, (255, 255, 255))
instruction2 = instructionfont.render("and space to rotate.", 1, (255, 255, 255))
starttext1 = startfont.render("Press space to start", 1, (255, 255, 255))
starttext2 = startfont.render("and q to quit.", 1, (255, 255, 255))
self._display.blit(title, (20, 100))
self._display.blit(instruction1, (20, 150))
self._display.blit(instruction2, (20, 170))
self._display.blit(starttext1, (20, 300))
self._display.blit(starttext2, (20, 320))
drmariomenu = pygame.image.load("img/drmariomenu.bmp")
self._display.blit(drmariomenu, (20, 50))
nursemenu = pygame.image.load("img/nurse.bmp")
self._display.blit(nursemenu, (50, 40))
pygame.display.update()
for event in pygame.event.get():
if event.type == KEYDOWN and event.key == K_SPACE:
intro = False
def text_objects(text, font):
"""
Create a rectangular object around which to put text.
"""
textSurface = font.render(text, True, black)
return textSurface, textSurface.get_rect()
class Game(object):
"""
This is the class for the game itself. It's the main class that controls the
other parts of the game. It's in charge of the basic, fundamental functions and
processes that the game needs.
"""
def __init__(self):
self._board = Board()
pygame.init()
pygame.mixer.init()
self.fpsClock = pygame.time.Clock()
self._display = pygame.display.set_mode((windowwidth, windowheight), 0, 32)
gameintro(self)
pygame.mixer.music.load("music/fever.wav")
pygame.mixer.music.play(-1)
self.blockfalltimer = blockfallinterval
self.speed = False
scorefont = pygame.font.Font(None, 36)
scoretext = scorefont.render(str(self._board.score), 14, white)
self._display.blit(scoretext, (10, 200))
pygame.display.update()
def run(self):
self.board.spawndublock()
while True:
for event in pygame.event.get():
self.processevent(event)
self.update(self.fpsclock.get_time())
self._display.fill(darkgray)
pygame.draw.rect(self._display, darkblue,
(boardoffsetx-boardborder, boardoffsety-boardborder,
140+boardborder*2,
400+boardborder*2))
boarddisplay = self.board.render()
self._display.blit(boarddisplay, (boardoffsetx, boardoffsety))
scorefont = pygame.font.Font(None, 36)
scoretext = scorefont.render(str(self._board.score), 14, white)
self._display.blit(scoretext, (10, 200))
drmariomenu = pygame.image.load("img/drmariomenu.bmp")
self._display.blit(drmariomenu, (20, 50))
pygame.display.update()
self.fpsclock.tick(fps)
def update(self, delta):
if self.blockfalltimer <= 0:
try:
self.board.movedublock("down")
except (ex.BottomReached, ex.PositionOccupied):
self.handlecollision()
self.blockfalltimer = blockfallinterval
else:
if self.speed:
delta *= speedfallmultiplier
self.blockfalltimer -= delta
def handlecollision(self):
self.board.handlecollision()
def processevent(self, event):
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == KEYDOWN:
if event.key == K_LEFT or event.key == K_RIGHT:
direction = "left" if event.key == K_LEFT else "right"
self.movedublock(direction)
elif event.key == K_DOWN:
self.speed = True
elif event.key == K_SPACE:
self.board.rotatedublock()
elif event.key == K_q:
pygame.quit()
sys.exit()
elif event.type == KEYUP:
if event.key == K_DOWN:
self.speed = False
def movedublock(self, direction):
try:
self.board.movedublock(direction)
except (ex.OutOfBoard, ex.PositionOccupied):
# If we can't move to a certain position,
# simply ignore those. The dublock will not move at all.
pass
@property
def board(self):
"""
Define the board property.
"""
return self._board
@property
def display(self):
"""
Define the display property for the actual pygame display.
"""
return self._display
@property
def fpsclock(self):
"""
This is a neat clock based on the fps.
"""
return self.fpsClock
<file_sep>/src/board.py
import block
import dublock
import ex
import random
from block import block, Color
from colors import black, blue, darkblue, darkgray, red, white, yellow
from dublock import dublock
from pygame import Surface
from utils import Pos
"""
Create the board. We want to use the blocks to fill up the board
with a starting setup and allow blocks to fall as they do.
"""
width = 10
height = 20
spawnpos = Pos(x=3, y=0)
class Board(object):
"""
Define the class for the boards to include the block functions for
spawning and falling.
"""
def __init__(self):
"""
Initialize the display and board for the game.
"""
self._display = Surface((140, 300))
self._board = []
for h in range(0, height):
self._board.append([])
for w in range(0, width-3):
self._board[h].append(block(w, h))
self.score = 0
self._dublock = None
def spawndublock(self):
"""
Spawn the blocks from the spawn position.
"""
blocks = (
self.block(spawnpos.x, spawnpos.y),
self.block(spawnpos.x + 1, spawnpos.y),
)
for block in blocks:
if not block.isclear(): # Block must be clear when they spawn.
# If not, then you lose the game because you've
# reached the top.
raise ex.PositionOccupied("Block is not clear at spawn point.")
colors = (random.randint(1, 3), # Choose the block colors.
random.randint(1, 3))
for i in (0, 1): # Set the block colors.
blocks[i].setcolor(colors[i])
self._dublock = dublock(blocks)
def movedublock(self, direction):
"""
Move the block in a direction.
"""
a, b = self.dublock.blocks # Get the two individuals blocks of the
# dublock.
directdict = {"down" : (0, 1), # Get the direction and how to move the block.
"left" : (-1, 0),
"right" : (1, 0)}
(x, y) = directdict[direction]
try: # Check if we've reached the bottom of the board.
newa = self.block(a.x+x, a.y+y)
newb = self.block(b.x+x, b.y+y)
except (ex.OutOfBoard, ex.BottomReached) as e:
raise e
if (newa != b and not newa.isclear()) or (newb != a and not newb.isclear()):
raise ex.PositionOccupied("Collision occured")
acolor = a.color
bcolor = b.color
a.clear()
b.clear()
newa.setcolor(acolor)
newb.setcolor(bcolor)
self.dublock.setblocks(newa, newb)
self.dublock.getsprite()
def rotatedublock(self):
"""
Rotate the dublock.
"""
transform = (
# vertical to horizontal
((Pos(1, 0), Pos(0, 1)),
(Pos(0, 0), Pos(-1, 1))),
# horizontal to vertical
((Pos(0, 0), Pos(-1, -1)),
(Pos(0, 1), Pos(-1, 0)))
)
section = int(self.dublock.ishorizontal())
origin = self.dublock.blocks
for offset in transform[section]:
try:
i = not section
j = not i
newblocks = (
self.block(origin[i].x + offset[i].x, origin[i].y + offset[i].y),
self.block(origin[j].x + offset[j].x, origin[j].y + offset[j].y),
)
# # Don't overwrite blocks.
# for b in newblocks:
# if not (b.isclear() and b.pos != origin[0].pos and b.pos != origin[1].pos):
# raise ex.InvalidOperation("New block is occupied.")
if self.dublock.ishorizontal():
# standard colors
colors = (origin[0].color, origin[1].color)
else:
# swap colors
colors = (origin[1].color, origin[0].color)
for k in range(0, 2):
origin[k].clear()
newblocks[k].setcolor(colors[k])
self.dublock.setblocks(*newblocks)
except (ex.OutOfBoard, ex.InvalidOperation) as e:
continue
else:
break
def checkmatch(self, blocks):
"""
If there's a match, make them disppear.
"""
match = False
for block in blocks:
if block.isclear():
continue # If the block is cleared, we need to see what
# it matched to.
# Check the horizontal and vertical directions to get the matches.
horizontal = set(self.getmatchesindirection(block, 1, 0) + self.getmatchesindirection(block, -1, 0))
vertical = set(self.getmatchesindirection(block, 0, 1) + self.getmatchesindirection(block, 0, 1))
for matches in (horizontal, vertical):
# For the matches, check if they're 3 or more blocks.
# If they are, they disappear.
if len(matches) >= 3:
match = True
for nextblock in matches:
self.score += 10
nextblock.clear()
block.clear()
return match
def getmatchesindirection(self, block, xdir, ydir):
"""
Get the matches in the x and y directions.
"""
matches = []
x = block.x + xdir
y = block.y + ydir
while True:
try:
nextblock = self.block(x, y)
except:
break
if nextblock.isclear():
break
matches.append(nextblock)
x += xdir
y += ydir
return matches
def checkblocksinair(self):
"""
If one part of a block is cleared, the second part should drop.
This function runs on all the blocks that have been changed. It
checks each block one by one.
"""
changed = True
while changed:
changed = False
for x in range(0, width-3):
for y in range(0, height):
block = self.block(x, y)
blockchanged = self.checkblockinair(block)
if blockchanged:
changed = True
def checkblockinair(self, block):
"""
Check a single block in the air.
"""
if block.isclear() or block.isfalling():
return False
try:
bottomblock = self.block(block.x, block.y+1)
except ex.BottomReached as e:
return False
rightblock = None
leftblock = None
try:
rightblock = self.block(block.x+1, block.y+1)
except ex.OutOfBoard as e:
pass
try:
leftblock = self.block(block.x-1, block.y-1)
except ex.OutOfBoard as e:
pass
if (bottomblock.isclear() or bottomblock.isfalling()) and \
(not rightblock or rightblock.isclear()) and \
(not leftblock or leftblock.isclear()):
block.setfalling(True)
return True
return False
def getfallingblocks(self):
"""
Get the blocks that are falling by checking their
.isfalling() status.
"""
return [block for rows in reversed(self._board)
for block in rows
if block.isfalling()]
def handlefallingblocks(self):
"""
Check the falling blocks.
"""
blocks = self.getfallingblocks()
if not blocks:
return []
blocksatbottom = []
for block in blocks:
try:
bottomblock = self.block(block.x, block.y+1)
except ex.BottomReached:
blocksatbottom.append(block)
block.setfalling(False)
continue
if bottomblock.isclear():
bottomblock.setcolor(block.color)
bottomblock.setfalling(True)
block.clear()
block.setfalling(False)
else:
blocksatbottom.append(block)
block.setfalling(False)
return blocksatbottom
def handlecollision(self):
"""
Check the collisions for matches.
"""
self.checkmatch(self._dublock.blocks)
while True:
self.checkblocksinair()
blocks = []
while self.getfallingblocks():
blocks.extend(self.handlefallingblocks())
if not self.checkmatch(blocks):
break
self.spawndublock()
def render(self):
self._display.fill(black)
for h in range(0, height):
for w in range(0, width-3):
self.renderblock(self._display, self.block(w, h))
return self._display
def renderblock(self, display, block):
if block.color == Color.clear:
return
elif block.color == Color.red:
color = red
elif block.color == Color.blue:
color = blue
elif block.color == Color.yellow:
color = yellow
else:
raise ex.InvalidParameter("Block has invalid color: {}".format(block.color))
display.fill(color,
(block.xpixels,
block.ypixels,
14,
20))
def block(self, x, y):
if x < 0 or y < 0:
raise ex.OutOfBoard("Trying to get block at negative position")
if y <= len(self._board)-1:
if x <= len(self._board[y])-1:
return self._board[y][x]
else:
raise ex.OutOfBoard("Position ({}, {}) not on board".format(x, y))
else:
raise ex.BottomReached("Bottom reached by block")
@property
def dublock(self):
return self._dublock
@property
def display(self):
return self._display
| d5161732d24d8e7a0edc845fcbccafaec02091af | [
"Markdown",
"Python"
] | 8 | Python | HussainAther/drmario | 9e3ea4a2658daa1d47f980c8ed8f691b5c0919ba | 53d8cb251714eed148e5eceb34f76811987c9c2e | |
refs/heads/master | <file_sep>var url = "スプレッドシートの公開URL"
var book = SpreadsheetApp.openByUrl(url);
var date = new Date;
var nengatsu = Utilities.formatDate( date, 'JST', 'yyyy年M月');
var nowTime = Utilities.formatDate( date, 'JST', 'HH:mm:ss');
var nowDay = Utilities.formatDate( date, 'JST', 'dd');
var sheet = book.getSheetByName(nengatsu);
var todayCell = parseInt(nowDay, 10) + 6;
function test(){
var e = { parameters:{ n : "gm", m : "gn" } }
doGet(e);
}
function doGet(e){
var message = e.parameters.m;
return message == "gm" ? helloEveryone() : goodByeEveryone();
}
function helloEveryone(){
var range = sheet.getRange("B"+ todayCell.toString());
range.setValue(nowTime);
return "GOOD MORNING FOR MY DREAM!!!";
}
function goodByeEveryone() {
var range = sheet.getRange("C"+ todayCell.toString());
range.setValue(nowTime);
return "GOODBYE AND GOOD NIGHT!!!";
}
<file_sep>'use strict';
var app = require('app');
var Tray = require('tray');
var Menu = require('menu');
var request = require('superagent');
require('crash-reporter').start();
var mainWindow = null;
app.dock.hide();
app.on('window-all-closed', function(){
if (process.platform != 'darwin'){
app.quit();
}
});
var url = "スプレッドシートのスクリプトURL";
app.on('ready', function(){
// Google Spreadsheetsへの操作
var gss = {
goodMorning : function(){
postData("gm");
},
goodNight : function(){
postData("gn");
}
}
// 通知領域への操作
var currentHighlight = false;
var appIcon = new Tray(__dirname + '/images/icon.png');
appIcon.setHighlightMode(currentHighlight);
var contextMenu = Menu.buildFromTemplate([
{ label: '出勤', type: "normal", click: gss.goodMorning },
{ label: '退勤', type: "normal", click: gss.goodNight },
{ label: 'おわり', type: "normal", click: app.quit }
]);
appIcon.setContextMenu(contextMenu);
var options = { timeout: 2000 };
function postData(word){
request.get(url).query({ m: word }).end(function(err,res){
if(!err && res.body){
console.log("OK");
appIcon.setImage(__dirname + '/images/icon2.png');
setTimeout(function(){
appIcon.setImage(__dirname + '/images/icon.png');
},2000)
} else {
console.log("fuck");
}
});
}
});
| f50036c4baea211300d37f401c3480d3fdb9c33a | [
"JavaScript"
] | 2 | JavaScript | takashifujii/writeSpreadsheets | be29068fe61e9046e2e94607f0d4dbd1eacaf5b0 | ec72921f558c3e3527e77edecdcf8f1557efcc89 | |
refs/heads/master | <repo_name>taxishare/api<file_sep>/_include/calculate_path.php
<?php
function calculatePath(){
if(date('H') < 6 || date('H') > 22)
{
if( date('N', time() ) > 6 )
$quota = 4.5;
else
$quota = 3.0;
}
else
{
$quota = 6.5;
}
$lat1 = $_GET['lat1'];
$long1 = $_GET['long1'];
$lat2 = $_GET['lat2'];
$long2 = $_GET['long2'];
$url = "https://maps.googleapis.com/maps/api/distancematrix/json?origins=".$lat1.",".$long1."&destinations=".$lat2.",".$long2."&mode=driving&language=it-IT";
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_PROXYPORT, 3128);
curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, 0);
$response = curl_exec($ch);
curl_close($ch);
$response_a = json_decode($response, true);
$dist = @$response_a['rows'][0]['elements'][0]['distance']['text'];
$time = @$response_a['rows'][0]['elements'][0]['duration']['text'];
if(distanceGeoPoints(41.794977, 12.252183, $lat1, $long1) < 0.1 && distanceGeoPoints(41.823252, 12.414329, $lat2, $long2) < 0.1)
{
// dall'aeroporto di fiumicino a castello della magliana
$price = 30;
}
elseif(distanceGeoPoints(41.794977, 12.252183, $lat2, $long2) < 0.1 && distanceGeoPoints(41.823252, 12.414329, $lat1, $long1) < 0.1)
{
// da castello della magliana all'aeroporto di fiumicino
$price = 30;
}
elseif(distanceGeoPoints(41.794977, 12.252183, $lat1, $long1) < 0.1 && distanceGeoPoints(41.806254, 12.326192, $lat2, $long2) < 0.1)
{
// dall'aeroporto di fiumicino a nuova fiera di Roma
$price = 25;
}
elseif(distanceGeoPoints(41.794977, 12.252183, $lat2, $long2) < 0.1 && distanceGeoPoints(41.806254, 12.326192, $lat1, $long1) < 0.1)
{
// da nuova fiera di Roma all'aeroporto di fiumicino
$price = 25;
}
elseif(distanceGeoPoints(41.794977, 12.252183, $lat1, $long1) < 0.1 && distanceGeoPoints(41.797220, 12.587744, $lat2, $long2) < 0.1)
{
// dall'aeroporto di fiumicino a ciampino
$price = 50.00;
}
elseif(distanceGeoPoints(41.794977, 12.252183, $lat2, $long2) < 0.1 && distanceGeoPoints(41.797220, 12.587744, $lat1, $long1) < 0.1)
{
// dall'aeroporto di ciampino a fiumicino
$price = 50.00;
}
elseif(distanceGeoPoints(41.794977, 12.252183, $lat1, $long1) < 0.1 && distanceGeoPoints(41.909828, 12.530372, $lat2, $long2) < 0.1)
{
// dall'aeroporto di fiumicino alla stazione tiburtina
$price = 55.00;
}
elseif(distanceGeoPoints(41.794977, 12.252183, $lat2, $long2) < 0.1 && distanceGeoPoints(41.909828, 12.530372, $lat1, $long1) < 0.1)
{
// dalla stazione tiburtina all'aeroporto di fiumicino
$price = 55.00;
}
elseif(distanceGeoPoints(41.797220, 12.587744, $lat1, $long1) < 0.1 && distanceGeoPoints(41.909828, 12.530372, $lat2, $long2) < 0.1)
{
// dall'aeroporto di ciampino a stazione tiburtina
$price = 35.00;
}
elseif(distanceGeoPoints(41.797220, 12.587744, $lat1, $long1) < 0.1 && distanceGeoPoints(41.909828, 12.530372, $lat2, $long2) < 0.1)
{
// dalla stazione tiburtina all'aeroporto di ciampino
$price = 35.00;
}
elseif(distanceGeoPoints(41.797220, 12.587744, $lat1, $long1) < 0.1 && distanceGeoPoints(41.874142, 12.484175, $lat2, $long2) < 0.1)
{
// dall'aeroporto di ciampino a stazione ostiense
$price = 35.00;
}
elseif(distanceGeoPoints(41.797220, 12.587744, $lat2, $long2) < 0.1 && distanceGeoPoints(41.874142, 12.484175, $lat1, $long1) < 0.1)
{
// dall'aeroporto di ciampino a stazione ostiense
$price = 35.00;
}
else
{
$price = $quota + @$dist*1.3;
}
$searchString = $_GET['userid'] . "|" . $lat1 . ";" . $long1 . ";" . $lat2 . ";" . $long2 ;
$compare = comparePath($searchString);
saveData($searchString);
if(!$compare){$compare=NULL;}
echo json_encode(
array(
'distance' => $dist,
'time' => $time,
'price' => number_format($price, '2', '.', ''),
'share' => $compare
));
}
function distanceGeoPoints($lat1, $lng1, $lat2, $lng2)
{
$earthRadius = 3958.75;
$dLat = deg2rad($lat2 - $lat1);
$dLng = deg2rad($lng2 - $lng1);
$a = sin($dLat / 2) * sin($dLat / 2) +
cos(deg2rad($lat1)) * cos(deg2rad($lat2)) *
sin($dLng / 2) * sin($dLng / 2);
$c = 2 * atan2(sqrt($a), sqrt(1 - $a));
$dist = $earthRadius * $c;
return $dist;
}
function saveData($content){
file_put_contents(LOG, $content);
}
function comparePath($pathString){
global $users;
$current = file_get_contents(LOG);
$current_arr = explode("|",$current);
$pathString = explode("|",$pathString);
if( $current_arr[0] != $pathString[0] AND $current_arr[1] == $pathString[1] ){
$user = setUserData($current_arr[0]);
return $user;
}
else{
return FALSE;
}
}
<file_sep>/taxi.php
<?php
include("users.php");
include("_include/calculate_path.php");
//error_reporting(E_ALL);
//ini_set('display_errors', 1);
date_default_timezone_set('Europe/Rome');
if( isset($_GET['delete_share']) ) {
echo json_encode( array('CANCELLO I DATI SULLO SHARE') );
saveData("x|x");
}
else if(
isset($_GET['lat1'])
AND isset($_GET['long1'])
AND isset($_GET['lat2'])
AND isset($_GET['long2'])
)
{
calculatePath();
}
<file_sep>/users.php
<?php
session_start();
ob_start();
define("LOG", "book/save");
define("SHARE", "book/share");
// users DB
$users = array(
'gabriele' => array(
"password" => "<PASSWORD>",
"nome" => "<NAME>"
),
'riccardo' => array(
"password" => "<PASSWORD>",
"nome" => "<NAME>"
)
);
function checkLogin(){
global $users;
if( isset($users[$_POST['userid']]) ){
if($users[$_POST['userid']]['password'] == $_POST['password']){
$_SESSION['user'] = $_POST['userid'];
$user = setUserData($_POST['userid']);
//echo json_encode( array('user'=>$user) );
echo json_encode($user);
return true;
}
}
header('HTTP/1.0 403 Forbidden');
echo json_encode( array('error'=>'login not correct') );
return false;
}
function setUserData($userid){
global $users;
$user = $users[$userid];
unset($user['password']);
$user['userid'] = $userid;
return $user;
}
//if(!isset($_SESSION['user']) ){
// echo json_encode( array('error'=>'user not found') );
// exit;
//}
if(isset($_GET['accept_share'])){
file_put_contents(SHARE, "1");
echo json_encode( array('ok') );
}else if(isset($_GET['check_share'])){
$current = @file_get_contents(SHARE);
if(empty($current)){
echo json_encode( array('result'=>'false') );
} else {
file_put_contents(SHARE, "");
echo json_encode( array('result'=>'true') );
}
}else if(isset($_GET['logout'])){
unset($_SESSION['user']);
session_destroy();
echo json_encode( array('logout!') );
}else if(isset($_POST['userid']) AND isset($_POST['password']) AND !isset($_GET['lat1'])){
return checkLogin();
}else if( !isset($_GET['lat1']) ){
if(!isset($_SESSION['user']) ){
header('HTTP/1.0 401 Not Authorized');
echo json_encode( array('error'=>'Not Authorized') );
}else {
$user = setUserData($_SESSION['user']);
echo json_encode( $user );
}
}
| cf8b9cc3629c579bab48e36e2b86872be816c9b1 | [
"PHP"
] | 3 | PHP | taxishare/api | 6379312eeb1f25c5369c7a8414acc025781bc150 | 4fa631b3df54f31b31abd181682f59314938f940 | |
refs/heads/master | <file_sep>#!/usr/bin/env bash
if [ ! -z "${VAULT_PASSWORD}" ]; then
echo "Using vault password from VAULT_PASSWORD environment variable" >&2
fi
echo "${VAULT_PASSWORD}"
<file_sep># Python images
This directory contains a Dockerfile for the uisautomation python image. This
image is simply the upstream Python Alpine image with updates applies, various
useful packages installed and a small set of pre-installed Python modules. These
modules are those which take a long time to install or which are common to all
of our products.
## Tags
* ``python`` is a base Python image. Tags: ``3.6-alpine``, ``alpine``, ``latest``
* ``django`` is an image with Django and common applications installed. Tags:
``2.0``, ``2.0-py3.6``, ``2.0-py3.6-alpine``, ``2.0-alpine``, ``alpine``,
``latest``
## Usage
Use like the upstream alpine image:
```Dockerfile
FROM uisautomation/python:3.6-alpine
# ...
```
<file_sep># Automation docker images
> **This repository is now hosted on the Cambridge
> [Developers Hub](https://gitlab.developers.cam.ac.uk/uis/devops/infra/dockerimages/).
[](https://circleci.com/gh/uisautomation/dockerimages)
This repository contains configuration to build the [UIS automation docker
images](https://hub.docker.com/r/uisautomation/). The images are built nightly
via CircleCI and pushed to Docker hub.
## Images
The following image types are built. See the README files in each image's
directory for more details.
* [python](python/README.md)
* [django](python/README.md)
* [ucam-webauth-proxy](ucam-webauth-proxy/README.md)
## Configuration
The [CircleCI configuration](.circleci/config.yml) iterates over the image types
and builds the docker images in each directly. Different image types may have
different versions built. If the job is running on ``master``, the images are
also pushed to Docker hub.
The CircleCI project must be set up with the Docker hub username and password in
the ``DOCKER_USER`` and ``DOCKER_PASS`` environment variables. For the
uisautomation Docker hub account see the [shared documentation
repo](https://github.com/uisautomation/docs) for these credentials.
<file_sep># The openstack SDK is an enabler for all the OpenStack modules at
# https://docs.ansible.com/ansible/latest/modules/list_of_cloud_modules.html#openstack
openstacksdk
# The netaddr module is neded to use the ipaddr jinja filter as used in the docker swarm
# playbook when creating custom networks
netaddr
<file_sep>FROM python:$PYTHON_VERSION-alpine
# Add our base requirements and install
ADD ./requirements/ /tmp/requirements/
# Update and upgrade system packages and install common Python modules.
#
# For the moment we pin pip at being less than 19.0 because of
# https://github.com/pypa/pip/issues/6158.
RUN apk --no-cache upgrade && \
apk --no-cache add git vim libffi libxml2 libxslt postgresql-client && \
apk --no-cache add \
postgresql-dev libffi-dev gcc g++ musl-dev libxml2-dev libxslt-dev libstdc++ && \
pip install --no-cache-dir --upgrade 'pip<19.0' && \
pip install --no-cache-dir --upgrade --no-cache -r /tmp/requirements/$REQUIREMENTS.txt && \
apk del postgresql-dev libffi-dev gcc g++ musl-dev libxml2-dev libxslt-dev libstdc++
<file_sep>FROM python:3.7
ADD requirements.txt /tmp/requirements.txt
RUN pip install -r /tmp/requirements.txt
RUN pip install 'ansible~=$ANSIBLE_VERSION'
ENV VAULT_PASSWORD=
ADD ansible.cfg /etc/ansible/ansible.cfg
ADD vault-password /usr/local/bin
WORKDIR /workspace
ENTRYPOINT ["ansible-playbook"]
<file_sep># Ansible playbook
Packaged Ansible playbook with vault support.
Run via:
```bash
$ export VAULT_PASSWORD="<PASSWORD>"
$ docker run \
-t \
-e VAULT_PASSWORD \
-v ${SSH_AUTH_SOCK}:/ssh-agent -e SSH_AUTH_SOCK=/ssh-agent \
-v $PWD:/workspace:ro \
uisautomation/ansible-playbook:2.6 \
<ansible-playbook options>...
```
| 06ed523e6d09d0dc1e9157d4b6e773d4cd6d3055 | [
"Markdown",
"Text",
"Dockerfile",
"Shell"
] | 7 | Shell | uisautomation/dockerimages | c99e277885fc94a69b859d62a79334b8d9912cf1 | 240fdb48dde984c8d02a534c68ce7818dc1e9307 | |
refs/heads/master | <file_sep>import "./estilo.css";
import "react";
export default props => (
<h1>blevers!!!</h1>
)
console.log("blevers");<file_sep># exercidios-webpack
| b1cfb87c49ddaf3424810c261d4eec2642b67cae | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | gfauth/exercidios-webpack | d6a85fd1e4be72e1b5bf8693344f02acd3f1ef67 | 7900b8e7926a189b8765136dbb15bc82eda3390d | |
refs/heads/main | <file_sep># EStarGmbH-Solver
A Tampermonkey / Greasemonkey Script which auto-solves the EStarGmbH Arbeitssicherheitsmodule Questionnaire.
Main Author: <NAME>.
Use at your own risk!
Instructions:
* Just install in Tampermonkey / Greasemonkey
* Open a Questionnaire in the EStarGmbH Platform
* Script automatically runs, checks the correct answers, and switches to the next pages until the Questionnaire is solved.
More Info: https://blog.rootdir.net/blog/2020/04/06/estargmbh-arbeitschutz-tampermonkey-script/
<file_sep>// ==UserScript==
// @name EstarGmbH Disable Wait Time
// @namespace http://tampermonkey.net/
// @version 0.4
// @description Disables the wait time on every page
// @author <NAME>.
// @match https://*.estargmbh.de/*
// @grant none
// @require http://code.jquery.com/jquery-3.4.1.min.js
// ==/UserScript==
$(document).ready(function() {
setTimeout(function() {
$('#btnFragebogenForm').click()
$('#btnNext').prop('disabled', false);
$('#btnNext').html('<i class="fa fa-chevron-right fa-fw"></i>');
}, 500);
setTimeout(function() {
if ($("#btn").text()=='speichern') {
$("#gelesen").click()
$("#nonPrivateUse").attr('checked', true); // Fuer Betriebliche Kommunikationsmittel v1.1, Eintragen des Wertes #nonPrivateUse = keine private Nutzung, für Private Nutzung auf #privateUse aendern.
setTimeout(function() {
$("#btn").click()
$('#btnNext').click()
}, 600);
}else{
$('#btnNext').click()
}
}, 600);
});
| d3bf81fdb2ffedcf2294b26edf651b7ea4ab5b07 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | rom-1/EStarGmbH-Solver | 4dabf5cca107d12ba2f4ae2281c0d9ed359dbf10 | bdce74d019c2030581044f69f8baf9e7a148e517 | |
refs/heads/master | <repo_name>bmikailenko/cs320finalproject<file_sep>/doc/README.md
# cs320milestone1
Milestone 1 assignment for CS 320 Fall 2019
PROJECT DESCRIPTION:
PDF Share and Schedule is an online web application that allows users to:
1. Share PDFs
2. Organise PDFs according to a schedule
PDFs are uploaded. The user uploading can select which date the PDFs are related to. The application then posts and organises the PDFs according to the date on the main webpage.
TEAM MEMBERS:
<NAME>
Roman
CURRENT STATUS: Testing Phase
HOW TO RUN:
git clone https://github.com/bmikailenko/cs320finalproject
navigate to the app folder
run npm install
run node index.js
go to localhost:3000
<file_sep>/app/index.js
var express = require("express");
var app = express();
var router = express.Router();
var fileUpload = require("express-fileupload");
var mv = require("mv");
var fs = require("fs");
const jsdom = require("jsdom");
var jquery = require("jquery");
var del = require("del");
var multer = require('multer');
var upload = multer({ dest: 'uploads/' });
// express file uploading framework
app.use(fileUpload());
app.use('/files', express.static('files'));
// get the main page
app.get('/', function(req,res){
res.sendFile(__dirname + "/views/index.html");
});
// when files are uploaded
app.post('/upload', function(req,res){
// get date input;
var date = req.body.addDate;
// if directory doesn't exist, make one
if (!fs.existsSync('./files/' + date)){
fs.mkdirSync('./files/' + date);
}
// if no uploaded files just redirect to home
if (!req.files || Object.keys(req.files).length === 0) {
return res.redirect('/');
}
// list of download links
var downloadLinkList = [];
// for each file
if (req.files.pdfInput.constructor === Array) {
req.files.pdfInput.forEach(function(item, index) {
// save the file to it's directory
item.mv('./files/' + date + '/' + item.name.replace(/\s+/g, '-'), function(err) { if (err) return res.status(500).send(err)});
// create a download link
var downloadLink = '<a href=' + ("./files/" + date + "/" + item.name.replace(/\s+/g, '-')) + ' download>' + item.name + '</a> <br>'
// and push it to the list of download links
downloadLinkList.push(downloadLink);
});
} else {
item = req.files.pdfInput;
item.mv('./files/' + date + '/' + item.name.replace(/\s+/g, '-'), function(err) { if (err) return res.status(500).send(err)});
// create a download link
var downloadLink = '<a href=' + ("./files/" + date + "/" + item.name.replace(/\s+/g, '-')) + ' download>' + item.name + '</a> <br>'
// and push it to the list of download links
downloadLinkList.push(downloadLink);
}
// entry to main page
var card = '<div class="card bg-light mb-3" id=\"' + date +'\" style="width: 18rem; text-align: center; margin: 0 auto;">' +
'<div class="card-header">' +
date +
'</div>' +
'<div class="card-body">' +
downloadLinkList.join("") +
'</div>' +
'</div>';
// write new entry to main page
fs.readFile(__dirname + '/views/index.html', 'utf8', (err, data) => {
const dom = new jsdom.JSDOM(data);
const $ = jquery(dom.window);
// write new date
var content = $('#mainPage').html();
$('#mainPage').html(content + card);
// organize
var mainPage = $('#mainPage');
var dates = mainPage.children('div').get();
dates.sort(function(a, b) {
var compA = $(a).attr('id').toUpperCase();
var compB = $(b).attr('id').toUpperCase();
return (compA < compB) ? -1 : (compA > compB) ? 1 : 0;
})
$.each(dates, function(idx, itm) {
mainPage.append(itm);
})
// write
fs.writeFile(__dirname + '/views/index.html', dom.serialize(), err => {
});
});
res.redirect('/');
});
// when files are deleted
app.post('/delete', function(req,res){
// get date input;
var date = req.body.deleteDate;
// delete folder from date input
if (!fs.existsSync('../files/' + date)){
del.sync('./files/' + date);
}
// remove entry from main page
fs.readFile(__dirname + '/views/index.html', 'utf8', (err, data) => {
const dom = new jsdom.JSDOM(data);
const $ = jquery(dom.window);
$('#' + date).remove();
fs.writeFile(__dirname + '/views/index.html', dom.serialize(), err => {
console.log(err);
});
});
res.redirect('/');
});
// when entries are edited
app.post('/edit', function(req,res){
// get date inputs;
var oldDate = req.body.oldDate;
var newDate = req.body.newDate;
// if directory doesn't exist, make one
if (!fs.existsSync('./files/' + newDate)){
fs.mkdirSync('./files/' + newDate);
}
// if no uploaded files just redirect to home
if (!req.files || Object.keys(req.files).length === 0) {
return res.redirect('/');
}
// list of download links
var downloadLinkList = [];
if (req.files.pdfInput.constructor === Array) {
// for each file
req.files.pdfInput.forEach(function(item, index) {
// save the file to it's directory
item.mv('./files/' + newDate + '/' + item.name.replace(/\s+/g, '-'), function(err) { if (err) return res.status(500).send(err)});
// create a download link
var downloadLink = '<a href=' + ("./files/" + newDate + "/" + item.name.replace(/\s+/g, '-')) + ' download>' + item.name + '</a> <br>'
// and push it to the list of download links
downloadLinkList.push(downloadLink);
});
} else {
item = req.files.pdfInput;
item.mv('./files/' + newDate + '/' + item.name.replace(/\s+/g, '-'), function(err) { if (err) return res.status(500).send(err)});
// create a download link
var downloadLink = '<a href=' + ("./files/" + newDate + "/" + item.name.replace(/\s+/g, '-')) + ' download>' + item.name + '</a> <br>'
// and push it to the list of download links
downloadLinkList.push(downloadLink);
}
// entry to main page
var card = '<div class="card bg-light mb-3" id=\"' + newDate +'\" style="width: 18rem; text-align: center; margin: 0 auto;">' +
'<div class="card-header">' +
newDate +
'</div>' +
'<div class="card-body">' +
downloadLinkList.join("") +
'</div>' +
'</div>';
// write new entry to main page
fs.readFile(__dirname + '/views/index.html', 'utf8', (err, data) => {
const dom = new jsdom.JSDOM(data);
const $ = jquery(dom.window);
// remove old date
$('#' + oldDate).remove();
// write new date
var content = $('#mainPage').html();
$('#mainPage').html(content + card);
// organize
var mainPage = $('#mainPage');
var dates = mainPage.children('div').get();
dates.sort(function(a, b) {
var compA = $(a).attr('id').toUpperCase();
var compB = $(b).attr('id').toUpperCase();
return (compA < compB) ? -1 : (compA > compB) ? 1 : 0;
})
$.each(dates, function(idx, itm) {
mainPage.append(itm);
})
// write
fs.writeFile(__dirname + '/views/index.html', dom.serialize(), err => {
});
});
res.redirect('/');
});
app.listen(3000,function(){
console.log("Live at Port 3000");
});
| 50ac6ec581e2f58f8407fe5f4a831e41e3a06c26 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | bmikailenko/cs320finalproject | b9f6c0fd68c2cbdf69f0b5b3acc39c41eaea2512 | 2167e90b0f5f6007f417ea4646c70e9ec9d02a9f | |
refs/heads/master | <repo_name>mfamis/WritingPromptsEbook<file_sep>/settings.py
import os
from dotenv import load_dotenv
dotenv_path = os.path.join(os.getcwd(), ".env")
load_dotenv(verbose=True, dotenv_path=dotenv_path)
CLIENT_ID = os.getenv('REDDIT_CLIENT_ID')
CLIENT_SECRET = os.getenv('REDDIT_CLIENT_SECRET')<file_sep>/writing_prompts_to_epub.py
from ebooklib import epub
import praw
import os
import argparse
import datetime
import logging
from settings import CLIENT_ID, CLIENT_SECRET
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
parser = argparse.ArgumentParser(description='Generate epub of /r/WritingPrompts.')
parser.add_argument('--num_posts', type=int, default=10)
parser.add_argument('--num_comments', type=int, default=1)
parser.add_argument('--top_criteria', type=str, default="week")
args = parser.parse_args()
# generate the date string for the default book file name
date = datetime.datetime.now()
date_string = '%d-%d-%d' % (date.month, date.day, date.year)
# get the subreddit post data
reddit = praw.Reddit(client_id=CLIENT_ID, client_secret=CLIENT_SECRET, user_agent='mozilla')
subreddit = reddit.subreddit('writingprompts')
posts = subreddit.top(time_filter=args.top_criteria, limit=args.num_posts)
# create ebook
book_file_name = 'WP-%s-%s' % (args.top_criteria, date_string)
book = epub.EpubBook()
book.set_identifier(book_file_name)
book.set_title('WritingPrompts - Best of %s (%s)' % (args.top_criteria, date_string))
book.set_language('en')
book.add_author('Reddit')
book.add_metadata('DC', 'description',
'An epub generated from the top stories on /r/WritingPrompts')
spine = ['nav']
chapters = []
toc = []
# iterate over each post comments to generate chapters
for pi, post in enumerate(posts):
logging.info('Processing post (%d/%d) titled: %s' % (pi+1, args.num_posts, post.title))
story_prompt_html = "<p><i>%s</i></p>" % post.title
comments = post.comments
for ci in range(1, args.num_comments+1):
logging.info('Processing comment (%d/%d) titled: %s' \
% (ci, args.num_comments, comments[ci].author))
# create a header to credit the comment author
author_html = \
"<p><i>(written by %s)</i></p>" \
% comments[ci].author
# get the comment data and change it to html
comment_text = comments[ci].body
paragraphs = comment_text.split('\n')
comment_html = "".join(["<p>%s</p>" % p for p in paragraphs])
# create the epub chapter
chapter_number = (pi * args.num_comments) + ci
chapter_title = '%s: %s' % (comments[ci].author, post.title)
file_name = 'ch%d.xhtml' % chapter_number
title = '%d: %s' % (chapter_number, chapter_title)
chapter = epub.EpubHtml(title=title, file_name=file_name)
chapter.set_content(story_prompt_html + author_html + comment_html)
# add the chapter to the book, spine and chapter list
toc.append(epub.Link(file_name, title, title))
book.add_item(chapter)
chapters.append(chapter)
spine.append(chapter)
logging.info('Finished processing chapters, creating epub: %s' \
% (book_file_name + ".epub"))
# create table of contents, spine and write the epub to disk
book.toc = toc
book.spine = spine
book.add_item(epub.EpubNcx())
book.add_item(epub.EpubNav())
epub.write_epub(book_file_name + ".epub", book)
logging.info('Done') | 2084c76b358dbc97d97329acf028467a6da030f8 | [
"Python"
] | 2 | Python | mfamis/WritingPromptsEbook | 5cc8199a72d6d5527d2235d3c68dacd6c9b01594 | 05c6dadd8711f8e0ceb8f32722231c4e568cc978 | |
refs/heads/master | <file_sep>//// Write any custom javascript functions here
$(document).ready(function() {
$("#owl-demo").owlCarousel({
autoPlay: 3000, //Set AutoPlay to 3 seconds
items : 4,
itemsDesktop : [1199,3],
itemsDesktopSmall : [979,3]
});
});
//----------------------NAV bar
//---------------- smooth scrolling---------------
//$(document).ready(function(){
//
// $(function() {
// $("ul a").click(function() {
// var target = $(this.hash);
// if (target.length) {
// $('html,body').animate({ scrollTop: target.offset().top-77 }, 1000);
// return false;
// }
// });
//});
//////position indicator------------------------
//
// var nav1pos = $("#nav1").offset().top;
// var nav2pos = $("#nav2").offset().top;
// var nav3pos = $("#nav3").offset().top;
// var nav4pos = $("#nav4").offset().top;
//
// var $nav1nav = $("#l1");
// var $nav2nav = $("#l2");
// var $nav3nav = $("#l3");
// var $nav4nav = $("#l4");
//
// var $activeClass = $nav1nav;
//
// $(window).scroll(function(){
// var scrollPos = $(document).scrollTop() + 70;
// var winHeight = $(window).height();
// var docHeight = $(document).height();
//
// if(scrollPos < nav2pos){
// console.log("1");
//
// $activeClass.removeClass("active-nav");
// $activeClass = $nav1nav;
// $activeClass.addClass("active-nav");
// }
// else if((scrollPos > nav2pos) && (scrollPos < nav3pos)){
// console.log("2");
//
// $activeClass.removeClass("active-nav");
// $activeClass = $nav2nav;
// $activeClass.addClass("active-nav");
// }
// else if((scrollPos > nav3pos) && (scrollPos < nav4pos)){
// console.log("3");
//
// $activeClass.removeClass("active-nav");
// $activeClass = $nav3nav;
// $activeClass.addClass("active-nav");
// }
//
// else if(scrollPos > nav4pos){
// console.log("4");
//
// $activeClass.removeClass("active-nav");
// $activeClass = $nav4nav;
// $activeClass.addClass("active-nav");
// }
//
// else {
// $activeClass.addClass("active-nav")
// }
//});
//
//});
//
//
//
//
//
////--------------------Header------------------------
///**
// * cbpAnimatedHeader.js v1.0.0
// * http://www.codrops.com
// *
// * Licensed under the MIT license.
// * http://www.opensource.org/licenses/mit-license.php
// *
// * Copyright 2013, Codrops
// * http://www.codrops.com
// */
//var AnimatedHeader = (function() {
//
// var docElem = document.documentElement,
// header = document.querySelector('.header'),
// didScroll = false,
// changeHeaderOn = 65;
//
// function init() {
// window.addEventListener('scroll', function(event) {
// if (!didScroll) {
// didScroll = true;
// setTimeout(scrollPage, 250);
// }
// }, false);
// }
//
// function scrollPage() {
// var sy = scrollY();
// if (sy >= changeHeaderOn) {
// classie.add(header, 'header-shrink');
// } else {
// classie.remove(header, 'header-shrink');
// }
// didScroll = false;
// }
//
// function scrollY() {
// return window.pageYOffset || docElem.scrollTop;
// }
//
// init();
//
//})();
//
//( function( window ) {
//
//'use strict';
//
//// classList support for class management
//
//var hasClass, addClass, removeClass;
//
//if ( 'classList' in document.documentElement ) {
// hasClass = function( elem, c ) {
// return elem.classList.contains( c );
// };
// addClass = function( elem, c ) {
// elem.classList.add( c );
// };
// removeClass = function( elem, c ) {
// elem.classList.remove( c );
// };
//}
//else {
// hasClass = function( elem, c ) {
// return classReg( c ).test( elem.className );
// };
// addClass = function( elem, c ) {
// if ( !hasClass( elem, c ) ) {
// elem.className = elem.className + ' ' + c;
// }
// };
// removeClass = function( elem, c ) {
// elem.className = elem.className.replace( classReg( c ), ' ' );
// };
//}
//
//function toggleClass( elem, c ) {
// var fn = hasClass( elem, c ) ? removeClass : addClass;
// fn( elem, c );
//}
//
//var classie = {
//
// hasClass: hasClass,
// addClass: addClass,
// removeClass: removeClass,
// toggleClass: toggleClass,
//
// has: hasClass,
// add: addClass,
// remove: removeClass,
// toggle: toggleClass
//};
//
//// transport
//if ( typeof define === 'function' && define.amd ) {
// // AMD
// define( classie );
//} else {
// // browser global
// window.classie = classie;
//}
//
//})( window );
//
| 1b3012f6e748982a04f349491ddebfa5daae7353 | [
"JavaScript"
] | 1 | JavaScript | DandiW/dandiw.github.io | e47802e5766928b87931965dbd9ddccda9d0f6b4 | 20c876a140ece4ab4229a9c5bcf486a76abc1235 | |
refs/heads/master | <file_sep>let debug = require('debug')('generateSymbols');
let generateSymbols = (number) => {
if (typeof number != 'number') {
throw new Error("You must specify the number of symbols to generate.");
}
let symbols = new Set();
for (let i = 0; i < number; i++) {
symbols.add("S" + i);
}
return symbols;
};
module.exports = generateSymbols;
<file_sep>.PHONY: test
install:
npm install
test:
@NODE_ENV=test ./node_modules/mocha/bin/mocha --compilers js:babel/register
<file_sep><table>
<tr>
<td><img width="120" src="https://cdnjs.cloudflare.com/ajax/libs/octicons/8.5.0/svg/rocket.svg" alt="onboarding" /></td>
<td><strong>Archived Repository</strong><br />
The code of this repository was written during a <a href="https://marmelab.com/blog/2018/09/05/agile-integration.html">Marmelab agile integration</a>. It illustrates the efforts of a new hiree, who had to implement a board game in several languages and platforms as part of his initial learning. Some of these efforts end up in failure, but failure is part of our learning process, so the code remains publicly visible.<br />
<strong>This code is not intended to be used in production, and is not maintained.</strong>
</td>
</tr>
</table>
# Dobble
Dobble is an experiment to implementing mathematics behind the
[board game](http://www.asmodee.com/ressources/jeux_versions/dobble.php) of the same name.
## Installation
```make install```
## Tests
```make test```
*You'll notice that tests for 5, 7 and 9 symbols per card do not pass.
Feel free to contribute.*
In order to see the generated cards in the console, run:
```
DEBUG=deckGeneratorTest make test
```
## Resources (french)
- [Les mathématiques du Dobble](http://www.bibmath.net/forums/viewtopic.php?id=5134)
- [Générer un jeu de dobble](http://www.quirysse.com/informatique/generer-un-jeu-de-dobble/)
- [Dobble et la géométrie finie](http://images.math.cnrs.fr/spip.php?page=forum&id_article=927&id_forum=4233)
- [Dobble et modéle mathématique](http://www.trictrac.net/forum/viewtopic.php?p=1197354)
- [Dobble et théorie mathématique](https://www.scribd.com/fullscreen/32602239?access_key=key-290ldts3eb15fhdyt1mf)
## License
Dobble is open-source, sponsored by [marmelab](marmelab.com). You can use it freely under the terms of the [MIT License](LICENSE).
<file_sep>let assert = require("assert");
let Card = require("../lib/Card");
let Pair = require("../lib/Pair");
let Deck = require("../lib/Deck");
let deckGenerator = require("../lib/deckGenerator");
let debug = require('debug')('deckGeneratorTest');
describe('deckGenerator()', () => {
it(`should return a function when called with no arguments`, () => {
let generateDeck = deckGenerator();
assert.equal('function', typeof generateDeck);
});
it(`should throw an error when called with anything other than a function`, () => {
assert.throws(() => deckGenerator(42));
assert.throws(() => deckGenerator("42"));
assert.throws(() => deckGenerator([42]));
});
describe('deckGenerator().generateSymbol', () => {
it(`should return a function when called without arguments`, () => {
let generateDeck = deckGenerator();
assert.equal('function', typeof generateDeck.generateSymbol);
});
});
describe('deckGenerator()()', () => {
let tests = [{
dimensions: 2,
expectedCards: 3,
expectedSymbols: 2
}, {
dimensions: 3,
expectedCards: 7,
expectedSymbols: 3
}, {
dimensions: 4,
expectedCards: 13,
expectedSymbols: 4
}, {
dimensions: 5,
expectedCards: 21,
expectedSymbols: 5
}, {
dimensions: 6,
expectedCards: 31,
expectedSymbols: 6
}, {
dimensions: 7,
expectedCards: 43,
expectedSymbols: 7
}, {
dimensions: 8,
expectedCards: 57,
expectedSymbols: 8
}, {
dimensions: 9,
expectedCards: 73,
expectedSymbols: 9
}];
tests.forEach(test => {
it(`should return a deck with cards containing ${test.expectedSymbols} symbols when called with ${test.dimensions}`, () => {
let generateDeck = deckGenerator();
let deck = generateDeck(test.dimensions);
debug(`${deck}`);
let cards = deck.cards;
let invalidCards = cards.filter(c => {
return c.symbols.length != test.expectedSymbols;
});
assert.equal(0, invalidCards.length, `Found ${invalidCards.length} invalid cards\n${invalidCards.join('\n')}`);
});
});
tests.forEach(test => {
it(`should return a deck with ${test.expectedCards} cards when called with ${test.dimensions}`, () => {
let generateDeck = deckGenerator();
let deck = generateDeck(test.dimensions);
debug(`${deck}`);
let cards = deck.cards;
assert.equal(test.expectedCards, cards.length, `Expected ${test.expectedCards} cards, found ${cards.length}`);
});
});
tests.forEach(test => {
it(`should return a deck with valid cards and pairs when called with ${test.dimensions}`, () => {
let generateDeck = deckGenerator();
let deck = generateDeck(test.dimensions);
debug(`${deck}`);
assert.ok(deck.isValid(), "Deck is invalid");
});
});
});
});
<file_sep>let assert = require("assert");
let Card = require("../lib/Card");
let Deck = require("../lib/Deck");
describe('Deck', () => {
describe('constructor()', () => {
it('should throw an error when called with no values', () => {
assert.throws(() => new Deck());
});
it('does not accept anything that is not an array', () => {
assert.throws(() => new Deck(42));
});
it('does not accept an array which contain something else than cards', () => {
assert.throws(() => new Deck([
41,
new Card([41, 42])
]));
});
it('accepts an array of cards', () => {
let deck = new Deck([
new Card(["A", "B"]),
new Card(["A", "A"])
]);
assert.equal(2, deck.cards.length);
});
});
describe('isValid()', () => {
it('should return false when deck is empty', () => {
let deck = new Deck([]);
assert.ok(!deck.isValid());
});
it('should return false when any card is invalid', () => {
let card1 = new Card(["A", "B", "C", "D", "E", "F", "G", "H", "I"]);
let card2 = new Card(["A", "A", "K", "L", "M", "N", "O", "P", "Q"]);
let deck = new Deck([card1, card2]);
assert.ok(!deck.isValid());
});
it('should return false when any possible pair is invalid', () => {
let card1 = new Card(["A", "B", "C", "D", "E", "F", "G", "H", "I"]);
let card2 = new Card(["A", "B", "L", "M", "N", "O", "P", "Q", "R"]);
let deck = new Deck([card1, card2]);
assert.ok(!deck.isValid());
});
it('should return true when cards and pairs are valid', () => {
let card1 = new Card(["A", "B" , "C", "D", "E", "F", "G", "H", "I"]);
let card2 = new Card(["A", "K", "L", "M", "N", "O", "P", "Q", "R"]);
let deck = new Deck([card1, card2]);
assert.ok(deck.isValid());
});
});
});
<file_sep>let assert = require("assert");
let generateSymbol = require("../lib/generateSymbol");
describe('generateSymbol', () => {
it('should throw an error when called anything but a number', () => {
assert.throws(() => generateSymbol());
assert.throws(() => generateSymbol('g'));
assert.throws(() => generateSymbol(null));
assert.throws(() => generateSymbol([]));
});
it('should returns a Set of 10 symbols when called with 10', () => {
let symbols = generateSymbol(10);
assert.ok(symbols instanceof Set);
});
it('should returns a Set of 20 symbols when called with 20', () => {
let symbols = generateSymbol(20);
assert.ok(symbols instanceof Set);
});
});
<file_sep>let debug = require('debug')('deckGenerator');
let Card = require("./Card");
let Deck = require("./Deck");
let generateSymbol = require("./generateSymbol");
let _computeMinfactor = (dimensions) => {
let minFactor = dimensions;
let poweredDimensions = 1 + Math.floor(Math.pow(dimensions, 0.5));
debug(`poweredDimensions = ${poweredDimensions}`)
for (let i = 2; i < poweredDimensions; ++i) {
if (dimensions % i == 0) {
minFactor = i;
break;
}
}
return minFactor;
}
let deckGenerator = (generateSymbolFunc = generateSymbol) => {
if (typeof generateSymbolFunc != 'function') {
throw new Error('generateSymbolFunc must be a function.');
}
let generator = (dimensions = 8) => {
let dimensionsForComputations = dimensions - 1;
debug(`dimensions: ${dimensions}`);
if (typeof dimensions != 'number') {
throw new Error('dimensions must be a number.');
}
let numberOfDifferentCards = (dimensions * dimensions) - dimensions + 1;
let numberOfDifferentSymbols = (((dimensions - 1) * (numberOfDifferentCards - 1)) / dimensions) + dimensions;
let symbols = Array.from(generateSymbolFunc(numberOfDifferentSymbols));
let cards = [];
let minFactor = _computeMinfactor(dimensionsForComputations);
debug(`minFactor: ${minFactor}`);
debug(`numberOfDifferentCards: ${numberOfDifferentCards}`);
debug(`symbols (${numberOfDifferentSymbols}): ${Array.from(symbols)}`);
for (let i = 0; i < dimensionsForComputations; ++i) {
let cardSymbols = [];
for (let j = 0; j < dimensionsForComputations; ++j) {
cardSymbols.push(symbols[i * dimensionsForComputations + j]);
}
cardSymbols.push(symbols[dimensionsForComputations * dimensionsForComputations]);
let card = new Card(cardSymbols);
debug(`card: ${card}`);
cards.push(card);
}
for (let i = 0; i < dimensionsForComputations; ++i) {
for (let j = 0; j < dimensionsForComputations; ++j) {
let cardSymbols = [];
for (let k = 0; k < dimensionsForComputations; ++k) {
cardSymbols.push(symbols[k * dimensionsForComputations + (j + i * k) % dimensionsForComputations]);
}
cardSymbols.push(symbols[dimensionsForComputations * dimensionsForComputations + 1 + i]);
let card = new Card(cardSymbols);
debug(`card: ${card}`);
cards.push(card);
}
}
let cardSymbols = [];
for (let i = 0; i < (minFactor + 1); ++i) {
cardSymbols.push(symbols[dimensionsForComputations * dimensionsForComputations + i]);
}
if (cardSymbols.length < dimensions) {
for (let i = cardSymbols.length; i < dimensions; ++i) {
cardSymbols.push(symbols[dimensionsForComputations * dimensionsForComputations + i]);
}
}
let card = new Card(cardSymbols);
debug(`card: ${card}`);
cards.push(card);
return new Deck(cards);
};
generator.generateSymbol = (newGenerateSymbolFunc) => {
// getter if no arguments
if (arguments.length == 0) return generateSymbolFunc;
generateSymbolFunc = newGenerateSymbolFunc
return generator;
}
return generator;
};
module.exports = deckGenerator;
<file_sep>let debug = require('debug')('Pair');
let Card = require("../lib/Card");
class Pair {
constructor(card1, card2) {
if(!(card1 instanceof Card) || !(card2 instanceof Card)){
throw new Error("Pair must be instantiated with two instances of Card");
}
this._card1 = card1;
this._card2 = card2;
}
toString() {
return `\tCard 1 = ${this._card1} \n\tCard 2 = ${this._card2}`;
}
isValid() {
if(this._card1 == null || !(this._card1 instanceof Card)){
return false;
}
if(this._card2 == null || !(this._card2 instanceof Card)){
return false;
}
if(!this._card1.isValid() || !this._card2.isValid()){
return false;
}
let total = this._card1.symbols.reduce((count, symbolFromCard1) => {
let found = this._card2.symbols.reduce((count, symbolFromCard2) => {
let result = symbolFromCard1 === symbolFromCard2;
debug(`${symbolFromCard1} === ${symbolFromCard2} => ${result}`);
return (result) ? ++count : count;
}, 0);
return count += found;
}, 0);
return total === 1;
}
}
module.exports = Pair;
| 17fcc0694a7793aaad078e83d2e9622b6a4ba8a1 | [
"JavaScript",
"Makefile",
"Markdown"
] | 8 | JavaScript | marmelab/dobble | 41332c760ba7b3698059f7759b7ed9da59e78ef4 | 0d5db34e604dd0f6d85fe92bc1693e64f3285d5c | |
refs/heads/master | <file_sep>package main
import (
"fmt"
"os"
"github.com/jinzhu/gorm"
_ "github.com/jinzhu/gorm/dialects/mysql"
)
// DBConfig DBConfig
type DBConfig struct {
User string
Password string
DBName string
}
func dbConfig() *DBConfig {
var user = os.Getenv("DB_USER")
if user == "" {
user = "root"
}
var password = os.Getenv("DB_PASSWORD")
if password == "" {
password = ""
}
var name = os.Getenv("DB_NAME")
if name == "" {
name = "mizuaoi-v2_development"
}
return &DBConfig{
User: user,
Password: <PASSWORD>,
DBName: name,
}
}
func main() {
var config = dbConfig()
var connectionConfig = fmt.Sprintf("%s:%s@/%s?charset=utf8&parseTime=True&loc=UTC", config.User, config.Password, config.DBName)
db, err := gorm.Open("mysql", connectionConfig)
if err != nil {
panic(err)
}
defer db.Close()
}
| d5089711b8dc17c6a8eae1d728dab05374b32e1f | [
"Go"
] | 1 | Go | tett23/mizuaoi-v2 | 27bb24487a5bf01e50b731d916b4d7821830dbf5 | 0f0e5c78e1d2d46b1bbbbe36c4cf6826e3bd1807 | |
refs/heads/master | <repo_name>tania15/Data-structures<file_sep>/List.h
#include <iostream>
using namespace std;
template <typename T>
struct Node
{
T data;
Node<T>* next;
Node(const T& dt, Node<T>* nxt = NULL)
{
data = dt;
next = nxt;
}
};
template <typename T>
class List
{
private:
Node<T>* first;
Node<T>* last;
size_t counter;
public:
List();
List(const List&);
List& operator=(const List&);
~List();
bool isEmpty() const;
int getNumberOfElements() const;
void push_front(const T& element);
void push_back(const T& element);
void pop_front();
void pop_back();
void insert(size_t, const T&);
void remove(size_t);
void changeValueAtIndex(size_t, const T&);
T getFirstElement() const;
T getLastElement() const;
//getters for the pointers not for the actual values
Node<T>* getFirst() const;
Node<T>* getLast() const;
//the operator with index
T operator[](size_t index);
//the private methods
private:
void init();
void copy(const List&);
void empty();
Node<T>* goToElement(size_t index);
};
template <typename T>
List<T>::List()
{
this->first = NULL;
this->last = NULL;
this->counter = 0;
}
template <typename T>
List<T>::List(const List<T>& lst)
{
this->first = NULL;
this->last = NULL;
this->counter = 0;
this->copy(lst);
}
template <typename T>
List<T>& List<T>::operator=(const List<T>& lst)
{
if (this != &lst)
{
this->empty();
this->init();
this->copy(lst);
}
return *this;
}
template <typename T>
bool List<T>::isEmpty() const
{
return (this->first == NULL) && (this->last == NULL);
//the other variant is to return this->counter == 0
}
template <typename T>
Node<T>* List<T>::getFirst() const
{
if (this->isEmpty()) return NULL;
return this->first;
}
template <typename T>
Node<T>* List<T>::getLast() const
{
if (this->isEmpty()) return NULL;
return this->last;
}
//the getters of the private variables
template <typename T>
T List<T>::getFirstElement() const
{
return this->first->data;
}
template <typename T>
T List<T>::getLastElement() const
{
return this->last->data;
}
template <typename T>
int List<T>::getNumberOfElements() const
{
return this->counter;
}
template <typename T>
T List<T>::operator[](size_t index)
{
if (index < 0 || index >= this->counter)
{
cout << "Index can't be smaller than 0 or bigger than list length" << endl;
return 0;
}
return this->goToElement(index)->data;
}
//the core methods of the list
template <typename T>
void List<T>::push_front(const T& element)
{
Node<T>* node = new Node<T>(element, this->first);
if (node)
{
if (this->isEmpty())
{
this->first = node;
this->last = node;
}
else
{
this->first = node;
}
this->counter++;
}
}
template <typename T>
void List<T>::push_back(const T& element)
{
Node<T>* node = new Node<T>(element, NULL);
if (node)
{
if (this->isEmpty())
{
this->first = node;
this->last = node;
}
else
{
this->last->next = node;
this->last = node;
}
this->counter++;
}
}
template <typename T>
void List<T>::pop_front()
{
if (!this->isEmpty())
{
Node<T>* node = this->first;
this->first = this->first->next;
if (this->counter == 1)
{
this->last = NULL;
this->first = NULL;
}
this->counter--;
delete node;
}
else cout << "Empty List[There is nothing to delete]" << endl;
}
template <typename T>
void List<T>::pop_back()
{
if (!this->isEmpty())
{
if (this->last)
{
this->remove(this->counter - 1);
//this->counter--;
}
}
else cout << "Empty List[There is nothing to delete]" << endl;
}
//--------------------------------
//private methods of the list
template <typename T>
Node<T>* List<T>::goToElement(size_t index)
{
if (index < 0 || index >= this->counter)
{
cout << "Index can't be smaller than 0 or bigger than list length" << endl;
return NULL;
}
Node<T>* current = this->first;
for (size_t i = 0; i < index; i++)
{
current = current->next;
}
return current;
}
template <typename T>
void List<T>::copy(const List<T>& lst)
{
Node<T>* node = lst.getFirst();
for (int i = 0; i < lst.getNumberOfElements(); i++)
{
this->push_back(node->data);
node = node->next;
}
}
template <typename T>
void List<T>::empty()
{
for (size_t i = 0; i < this->counter; i++)
{
this->pop_front();
}
this->init();
}
template <typename T>
void List<T>::init()
{
this->first = NULL;
this->last = NULL;
this->counter = 0;
}
//------------------------------
template <typename T>
void List<T>::insert(size_t index, const T& element)
{
if (!this->isEmpty())
{
if (index < 0 || index >= this->counter)
cout << "Index can't be smaller than 0 or bigger than list length" << endl;
else if (index == 0) push_front(element);
else if (index == this->counter - 1) push_back(element);
else
{
Node<T>* node = new Node<T>(element, this->goToElement(index)->next);
this->goToElement(index)->next = node;
}
this->counter++;
}
}
template <typename T>
void List<T>::changeValueAtIndex(size_t index, const T& value)
{
if (!this->isEmpty())
{
if (index < 0 || index >= this->counter)
cout << "Index can't be smaller than 0 or bigger than list length" << endl;
else
{
this->goToElement(index)->data = value->data;
}
}
else
{
cout << "Error empty List" << endl;
}
}
template <typename T>
void List<T>::remove(size_t index)
{
if (!this->isEmpty())
{
if (index == 0) this->pop_front();
else
{
Node<T>* removed = this->goToElement(index);
Node<T>* temp = this->goToElement(index - 1);
if (index == this->counter - 1) this->last = temp;
temp->next = removed->next;
delete removed;
this->counter--;
}
}
}
//the destructor
template <typename T>
List<T>::~List()
{
this->empty();
this->init();
}
| 0a184b0f99dc8fea7a80e119e107afa7e1afca01 | [
"C++"
] | 1 | C++ | tania15/Data-structures | 1f42ce05c6c5903876a3e051625cc8e252f2426e | 85739aaa076c4f46794e84e1481e40b55bd4d6a2 | |
refs/heads/master | <file_sep># Project_Euler_Solutions
Beginner Programmer show casing his way of solving these problems
<file_sep>#project_euler_question_1.py
#If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9.
#the sum of these multiples is 23.
#Find the sum of all the multiples of 3 or 5 below 1000.
import numpy as np
import scipy as sp
#getting the remainders of the 1000 divided by 5 and 3 respectively
five_remain=1000//5 # 200
three_remain= 1000//3 # 333
#using the arange fuction to create a list of the multiples of 3 and 5 w
list_five_remain=np.arange(five_remain)*5
#added plus one to the end step to go from 0 to 333, not 332 since the end step of arrange is not inclusive
list_three_remain= np.arange(three_remain+1)*3
#printed list to verify the numbers I was expecting
#print(list_five_remain)
#print(list_three_remain)
#combining the list into one list
total_list= np.concatenate((list_three_remain,list_five_remain))
unique_total_list= np.unique(total_list)
#print(unique_total_list)
print(np.sum(unique_total_list))
#other method
sum_five_remain=np.sum(list_five_remain)
sum_three_remain= np.sum(list_three_remain)
#print(np.arange((1000//15)+1)*15)
double_count_sum=np.sum(np.arange((1000//15)+1)*15)
#print(double_count_sum)
total_two= (sum_three_remain+ sum_five_remain)- double_count_sum
print(total_two)
| d38c8ad848fa3385456c5bad9b29778cf2de25a5 | [
"Markdown",
"Python"
] | 2 | Markdown | Luis-Macias/Project_Euler_Solutions | 6b77849e0a01cfab3b3e931b6fbbf428d8e858a1 | 08919d8e73d20f16420984683262da34de7f2830 | |
refs/heads/master | <repo_name>LudwigCRON/sphinx-pywave<file_sep>/setup.py
from setuptools import setup, find_packages
project_url = 'https://github.com/LudwigCRON/sphinx-pywave'
requires = ['Sphinx>=1.8']
setup(
name='sphinxcontrib-pywaveform',
use_scm_version={
"relative_to": __file__,
"write_to": "sphinxcontrib/version.py",
},
url=project_url,
license='MIT license',
author='<NAME>',
author_email='<EMAIL>',
description='A sphinx extension that allows generating pywave diagrams based on their textual representation',
long_description=open("README.rst").read(),
zip_safe=False,
classifiers=[],
platforms='any',
packages=find_packages(exclude=['example']),
include_package_data=True,
install_requires=requires,
py_modules=["pywaveform"],
python_requires='>=3.5',
setup_requires=[
'setuptools_scm',
],
namespace_packages=['sphinxcontrib'],
keywords = ['sphinx', 'pywaveform', 'documentation'],
)
<file_sep>/README.rst
Sphinx pywave extension
=========================
A sphinx extension that allows including pywave diagrams by using its text-based representation
.. image:: https://travis-ci.org/LudwigCRON/sphinx-pywave.svg?branch=master
:target: https://travis-ci.org/LudwigCRON/sphinx-pywave
Installation
------------
The pywave extension can be installed using pip:
::
pip install sphinxcontrib-pywave
and by adding **'sphinxcontrib.pywave'** to the extensions list in your conf.py file.
Directives
----------
The extension is useable in the form of an extra pywave directive, as shown below.
::
.. pywave::
{ "signal": [
{ "name": "clk", "wave": "P......" },
{ "name": "bus", "wave": "x.==.=x", "data": ["head", "body", "tail", "data"] },
{ "name": "wire", "wave": "0.1..0." }
]}
Alternatively, it can read the json from a file:
::
.. pywave:: mywave.json
When configured to generate images (see `Configuration`_) the directive will generate an image and include
it into the input. It allows for the same configuration as the image directive:
::
.. pywave:: mywave.json
:height: 100px
:width: 200 px
:scale: 50 %
:alt: alternate text
:align: right
The image can be turned into a figure by adding a caption:
::
.. pywave:: mywave.json
:caption: My wave figure
The extension can be configured (see `Configuration`_) to not generate an image out of the diagram description
itself, but to surround it with some html and js tags in the final html document that allow the images to be rendered
by the browser. This is the currently the default for HTML output.
Examples
--------
In the `example` folder, you can find a couple of examples (taken from the pywave tutorial), illustration the use of the extension.
<file_sep>/sphinxcontrib/pywave.py
# We need this for older python versions, otherwise it will not use the wavedrom module
from __future__ import absolute_import
import os
from os import path
from uuid import uuid4
import distutils.spawn
from docutils import nodes
from docutils.parsers.rst import directives
from docutils.parsers.rst.directives.images import Image
from sphinx.errors import SphinxError
from sphinx.ext.graphviz import figure_wrapper
from sphinx.util.fileutil import copy_asset_file
from sphinx.locale import __
from sphinx.util.docutils import SphinxDirective
from sphinx.util.i18n import search_image_for_language
import json
# This exception was not always available..
try:
from json.decoder import JSONDecodeError
except ImportError:
JSONDecodeError = ValueError
# pywave module
import pywave
class pywavenode(nodes.General, nodes.Inline, nodes.Element):
"""
Special node for pywave figures
"""
pass
class PywaveDirective(Image, SphinxDirective):
"""
Directive to insert a wavedrom image.
It derives from image, but is plain html when inline javascript is used.
"""
has_content = True
required_arguments = 0
optional_arguments = 1
final_argument_whitespace = False
option_spec = Image.option_spec.copy()
option_spec['caption'] = directives.unchanged
has_content = True
def run(self):
# Store code in a special docutils node and pick up at rendering
node = pywavenode()
node['is_reg'] = False
code_from_file, filename = False, None
# get code format or the filename
if self.arguments:
# is not a file
if not "." in self.arguments[0]:
print("-" in self.arguments[0], self.arguments)
if "-" in self.arguments[0]:
t = self.arguments[0].split("-", 2)
node['input_format'], node['is_reg'] = t[0], "reg" in t[1].lower()
else:
node['input_format'] = self.arguments[0]
else:
code_from_file = True
# Read code from file
document = self.state.document
if self.content:
return [document.reporter.warning(
__('wavedrom directive cannot have both content and '
'a filename argument'), line=self.lineno)]
argument = search_image_for_language(self.arguments[0], self.env)
rel_filename, filename = self.env.relfn2path(argument)
self.env.note_dependency(rel_filename)
if not os.path.exists(filename):
return [document.reporter.warning(
__('External wavedrom json file %r not found or reading '
'it failed') % filename, line=self.lineno)]
# store the code inlined into a file
if self.content and not code_from_file:
# store into a temporary file
filename = "/tmp/pywave-%s.%s" % (
uuid4(), node["input_format"] if node.has_key("input_format") else "jsonml")
with open(filename, "w+") as fp:
for line in self.content:
fp.write(line + '\n')
node['file'] = filename
wd_node = node # point to the actual pywave node
# A caption option turns this image into a Figure
caption = self.options.get('caption')
if caption:
node = figure_wrapper(self, wd_node, caption)
self.add_name(node)
# remove generated file
if not self.arguments:
if os.path.exists(filename):
os.remove(filename)
# Run image directive processing for the options, supply dummy argument, otherwise will fail.
# We don't actually replace this node by the image_node and will also not make it a child,
# because intermediate steps, like converters, depend on the file being in sources. We don't
# want to generate any files in the user sources. Store the image_node private to this node
# and not in the docutils tree and use it later. Revisit this when the situation changes.
self.arguments = ["dummy"]
(wd_node['image_node'],) = Image.run(self)
return [node]
def determine_format(supported):
"""
Determine the proper format to render
:param supported: list of formats that the builder supports
:return: Preferred format
"""
order = ['image/svg+xml', 'application/pdf', 'image/png']
for file_format in order:
if file_format in supported:
return file_format
return None
def render_pywave(self, node, outpath, bname, file_format):
"""
Render a pywave image
"""
# Try to convert node, raise error with code on failure
try:
if not os.path.exists(outpath):
os.makedirs(outpath)
# SVG can be directly written and is supported on all versions
if file_format == 'image/svg+xml':
fname = "{}.{}".format(bname, "svg")
fpath = os.path.join(outpath, fname)
# input_path: str, output_path: str, file_format: str, is_reg: bool = False, dpi: float = 150.0
pywave.waveform.cli_main(
node['file'], fpath, "svg", node['is_reg'])
return fname
except JSONDecodeError as e:
raise SphinxError("Cannot render the following json code: \n{} \n\nError: {}".format(node['code'], e))
# It gets a bit ugly, if the output does not support svg. We use cairosvg, because it is the easiest
# to use (no dependency on installed programs). But it only works for Python 3.
try:
import cairo
except:
raise SphinxError(__("Cannot import 'cairo'. In Python 2 wavedrom figures other than svg are "
"not supported, in Python 3 ensure 'cairo' is installed."))
if file_format == 'application/pdf':
fname = "{}.{}".format(bname, "pdf")
fpath = os.path.join(outpath, fname)
# input_path: str, output_path: str, file_format: str, is_reg: bool = False, dpi: float = 150.0
pywave.waveform.cli_main(
node['file'], fpath, "cairo-pdf", node['is_reg'])
return fname
if file_format == 'image/png':
fname = "{}.{}".format(bname, "png")
fpath = os.path.join(outpath, fname)
# input_path: str, output_path: str, file_format: str, is_reg: bool = False, dpi: float = 150.0
pywave.waveform.cli_main(
node['file'], fpath, "cairo-png", node['is_reg'])
return fname
raise SphinxError("No valid wavedrom conversion supplied")
def visit_pywave(self, node):
"""
Visit the wavedrom node
"""
file_format = determine_format(self.builder.supported_image_types)
if format is None:
raise SphinxError(__("Cannot determine a suitable output format"))
# Create random filename
bname = "pywave-{}".format(uuid4())
outpath = path.join(self.builder.outdir, self.builder.imagedir)
# Render the wavedrom image
imgname = render_pywave(self, node, outpath, bname, file_format)
# Now we unpack the image node again. The file was created at the build destination,
# and we can now use the standard visitor for the image node. We add the image node
# as a child and then raise a SkipDepature, which will trigger the builder to visit
# children.
image_node = node['image_node']
image_node['uri'] = os.path.join(self.builder.imgpath, imgname)
node.append(image_node)
raise nodes.SkipDeparture
def builder_inited(app):
"""
Sets wavedrom_html_jsinline to False for all non-html builders for
convenience (use ifconf etc.)
"""
pass
def build_finished(app, exception):
"""
When the build is finished, we copy the output image file
to the build directory (the static folder)
"""
# Skip for non-html
if app.config.pywave_html:
return
# for others move image into _static folder
#copy_asset_file(path.join(app.builder.srcdir, app.config.offline_wavedrom_js_path), path.join(app.builder.outdir, '_static'), app.builder)
def setup(app):
"""
Setup the extension
"""
app.add_config_value('pywave_html', True, 'html')
app.add_directive('pywave', PywaveDirective)
app.connect('build-finished', build_finished)
app.connect('builder-inited', builder_inited)
app.add_node(pywavenode,
html = (visit_pywave, None),
latex = (visit_pywave, None),
)
| c60cd26d72439ced0fde2e5304b1895f89f8432f | [
"Python",
"reStructuredText"
] | 3 | Python | LudwigCRON/sphinx-pywave | d5bd0d80c199d3f99dea8a1d984dd316e2654f76 | a492d6b16fb7d6fe189bf8175b82ae0dd48183e1 | |
refs/heads/master | <file_sep>import LazyRoute from "./lazyRoute";
export {
LazyRoute
}<file_sep># parcel-demo
尝试使用parcel打包react项目
* 2018-2-2 增加分页面打包功能
* 不灵活的配置:零配置的 Parcel 关闭了很多配置项,在一些需要的配置的场景下无法改变。例如:
```javascript
无法控制输出文件名的 Hash 值和名称;
无法控制构建输出目录结构;
无法映射路径以缩短导入语句;
HTTP 开发服务器不支持 HistoryApi ;
```
* 缺点
```javascript
不支持 SourceMap :在开发模式下,Parcel 也不会输出 SourceMap,目前只能去调试可读性极低的代码;
不支持剔除无效代码 ( TreeShaking ):很多时候我们只用到了库中的一个函数,结果 Parcel 把整个库都打包了进来;
一些依赖会 让Parcel 出错:当你的项目依赖了一些 Npm 上的模块时,有些 Npm 模块会让 Parcel 运行错误;
HTTP 开发服务器不支持 HistoryApi ;
```<file_sep>import React, { Component } from 'react';
import { inject, observer } from 'mobx-react';
import { Button } from "antd";
@inject('appStore')
@observer
export default class Home extends Component {
render() {
const { appStore } = this.props;
return <div>
<p>{appStore.current}</p>
<Button onClick={() => { appStore.currentPlus()}}>+1</Button>
</div>
}
}
<file_sep>import React from 'react';
import ReactDOM from 'react-dom';
import { Provider } from 'mobx-react';
import { HashRouter as Router, Route } from 'react-router-dom';
import "babel-polyfill";
import { LazyRoute } from "./components";
import AppStore from './stores/appStore';
import App from "./app";
const appStore = new AppStore();
const stores = {
appStore
}
const MOUNT_NODE = document.getElementById('app');
ReactDOM.render(
<Provider
{...stores}
>
<Router>
<App>
<Route exact path="/" render={props => (
<LazyRoute {...props} component={import("./pages/home")} />
)} />
<Route exact path="/page1" render={props => (
<LazyRoute {...props} component={import("./pages/page1")} />
)}/>
<Route exact path="/page2" render={props => (
<LazyRoute {...props} component={import("./pages/page2")} />
)} />
</App>
</Router>
</Provider>,
MOUNT_NODE
);
| 45f436f588702d5b57506681b6092881b68e3ecc | [
"JavaScript",
"Markdown"
] | 4 | JavaScript | wangxiaofeid/parcel-demo | 7e591adb889d2c287d88f645d37696f9a0d8804f | 245c1bb38930921f0e4e362407d42833862ce0e6 | |
refs/heads/master | <file_sep>var fmt_str = new Array();
fmt_str[0] = '(FLV, 320 x 240, Mono 22KHz MP3)'; // delete ?
fmt_str[5] = '(FLV, 400 x 240, Mono 44KHz MP3)';
fmt_str[6] = '(FLV, 480 x 360, Mono 44KHz MP3)'; // delete ?
fmt_str[34] = '(FLV, 640 x 360, Stereo 44KHz AAC)';
fmt_str[35] = '(FLV, 854 x 480, Stereo 44KHz AAC)';
fmt_str[13] = '(3GP, 176 x 144, Stereo 8KHz)'; // delete ?
fmt_str[17] = '(3GP, 176 x 144, Stereo 44KHz AAC)';
fmt_str[36] = '(3GP, 320 x 240, Stereo 44KHz AAC)';
fmt_str[18] = '(MP4(H.264), 640 x 360, Stereo 44KHz AAC)';
fmt_str[22] = '(MP4(H.264), 1280 x 720, Stereo 44KHz AAC)';
fmt_str[37] = '(MP4(H.264), 1920 x 1080, Stereo 44KHz AAC)';
fmt_str[38] = '(MP4(H.264), 4096 x 3072, Stereo 44KHz AAC)';
fmt_str[83] = '(MP4(H.264), 854 x 240, Stereo 44KHz AAC)';
fmt_str[82] = '(MP4(H.264), 640 x 360, Stereo 44KHz AAC)';
fmt_str[85] = '(MP4(H.264), 1920 x 520, Stereo 44KHz AAC)';
fmt_str[84] = '(MP4(H.264), 1280 x 720, Stereo 44KHz AAC)';
fmt_str[43] = '(WebM(VP8), 640 x 360, Stereo 44KHz Vorbis)';
fmt_str[44] = '(WebM(VP8), 854 x 480, Stereo 44KHz Vorbis)';
fmt_str[45] = '(WebM(VP8), 1280 x 720, Stereo 44KHz Vorbis)';
fmt_str[100] = '(WebM(VP8), 640 x 360, Stereo 44KHz Vorbis)';
fmt_str[101] = '(WebM(VP8), 854 x 480, Stereo 44KHz Vorbis)';
fmt_str[46] = '(WebM(VP8), 1920 x 540, Stereo 44KHz Vorbis)';
fmt_str[102] = '(WebM(VP8), 1280 x 720, Stereo 44KHz Vorbis)';
fmt_str[133] = '(MP4(H.264), 426 x 240, <span style="color:#f00;">no audio</span>)';
fmt_str[134] = '(MP4(H.264), 640 x 360, <span style="color:#f00;">no audio</span>)';
fmt_str[135] = '(MP4(H.264), 854 x 480, <span style="color:#f00;">no audio</span>)';
fmt_str[136] = '(MP4(H.264), 1280 x 720, <span style="color:#f00;">no audio</span>)';
fmt_str[137] = '(MP4(H.264), 1920 x 1080, <span style="color:#f00;">no audio</span>)';
fmt_str[138] = '(MP4(H.264), 4096 x 3072, <span style="color:#f00;">no audio</span>)';
fmt_str[139] = '(M4A, 48 kbit/s <span style="color:#f00;">audio only</span>)';
fmt_str[140] = '(M4A, 128 kbit/s <span style="color:#f00;">audio only</span>)';
fmt_str[141] = '(M4A, 256 kbit/s <span style="color:#f00;">audio only</span>)';
fmt_str[160] = '(MP4(H.264), 256 x 144, <span style="color:#f00;">no audio</span>)';
fmt_str[264] = '(MP4(H.264), 1920 x 1080, <span style="color:#f00;">no audio</span>)';
var fmt_ext = new Array();
fmt_ext[0] = '.flv'; // delete ?
fmt_ext[5] = '.flv';
fmt_ext[6] = '.flv'; // delete ?
fmt_ext[34] = '.flv';
fmt_ext[35] = '.flv';
fmt_ext[13] = '.3gp'; // delete ?
fmt_ext[17] = '.3gp';
fmt_ext[36] = '.3gp';
fmt_ext[18] = '.mp4';
fmt_ext[22] = '.mp4';
fmt_ext[37] = '.mp4';
fmt_ext[38] = '.mp4';
fmt_ext[83] = '.mp4';
fmt_ext[82] = '.mp4';
fmt_ext[85] = '.mp4';
fmt_ext[84] = '.mp4';
fmt_ext[43] = '.webm';
fmt_ext[44] = '.webm';
fmt_ext[45] = '.webm';
fmt_ext[100] = '.webm';
fmt_ext[101] = '.webm';
fmt_ext[46] = '.webm';
fmt_ext[102] = '.webm';
fmt_ext[133] = '.mp4';
fmt_ext[134] = '.mp4';
fmt_ext[135] = '.mp4';
fmt_ext[136] = '.mp4';
fmt_ext[137] = '.mp4';
fmt_ext[137] = '.mp4';
fmt_ext[139] = '.m4a';
fmt_ext[140] = '.m4a';
fmt_ext[141] = '.m4a';
fmt_ext[160] = '.mp4';
fmt_ext[264] = '.mp4';
function retrieverYouTubeUrl(){
//http://kej.tw/flvretriever
var inputUrl = $('#url_input').val();
var urlArray = inputUrl.split('=');
//http://www.youtube.com/get_video_info?eurl=http%3A%2F%2Fkej.tw%2F&sts=1586&video_id=LXZoSQDj0pA
var link = 'http://www.youtube.com/get_video_info?eurl=http%3A%2F%2Fkej.tw%2F&sts=1586&video_id='+urlArray[1];
$("a#down_link").attr('href', link);
window.location.href = link;
}
/*
function getYouTubeUrl(){
var rdata = $('#videoInfo').val();
var rdataArray = rdata.split('&');
var succ = 0;
for(i = 0; i < rdataArray.length; i++){
r0 = rdataArray[i].substr(0, 11);
if(r0 == 'fmt_url_map'){
r1 = unescape(rdataArray[i].substr(12));
var temp1 = r1.split(',');
var fmt = new Array;
var fmt_url = new Array;
for(j = 0; j < temp1.length; j++){
var temp2 = temp1[j].split('|');
fmt.push(parseInt(temp2[0], 10));
fmt_url.push(temp2[1]);
}
var dllinks = '';
for(var k in fmt){
if(dllinks.length > 0){
dllinks += '<br />';
}
dllinks += '<a href="' + fmt_url[k] + '"><b>Download' + fmt_str[fmt[k]] + '</b></a>';
}
$('#result_div').remove();
var div_dl = document.createElement('div');
$(div_dl).html(dllinks).css('padding', '7 0 7px 0');
$(div_dl).attr('id', 'result_div');
$('#videoInfo').after(div_dl);
succ = 1;
}
}
if(succ == 0){
var result;
var rdata_status;
var rdata_reason;
var rdata_temp;
for(i = 0; i < rdataArray.length; i++){
rdata_temp = rdataArray[i].split('=');
if(rdata_temp[0] == 'status'){
rdata_status = rdata_temp[1];
}
if(rdata_temp[0] == 'reason'){
rdata_reason = urldecode(rdata_temp[1]);
}
}
result = '<b>無法取得影片 URL</b><br />status : <span style="color:#f00;">' + rdata_status + '</span><br />' + 'reason : <span style="color:#f00;">' + rdata_reason + '</span>';
$('#result_div').remove();
var div_dl = document.createElement('div');
$(div_dl).html(result).css('padding', '7 0 7px 0');
$(div_dl).attr('id', 'result_div');
$('#videoInfo').after(div_dl);
}
}
*/
function getYouTubeUrl(){
var rdata = $('#videoInfo').val();
var rdataArray = rdata.split('&');
var succ = 0;
var url_classic = parseUrlsClassic(rdataArray);
var url_adaptive = parseUrlsAdaptive(rdataArray);
var url_alter = parseUrlsAlter(rdataArray, url_classic, url_adaptive);
var title = parseTitle(rdataArray);
var dllinks = '';
var webmlinks = '';
var dllinksAdaptive = '';
var dllinksAlter = '';
for(var i in url_classic){
if(url_classic[i].fmt == 43 || url_classic[i].fmt == 44 || url_classic[i].fmt == 45 || url_classic[i].fmt == 46 || url_classic[i].fmt == 100 || url_classic[i].fmt == 101 || url_classic[i].fmt == 102){
if(webmlinks.length > 0){
webmlinks += '<br />';
}
webmlinks += '<a href="' + unescape(url_classic[i].fmt_url) + "&signature=" + url_classic[i].fmt_sig + "&title=" + title + '" target="_blank"><b>Watch online ' + fmt_str[url_classic[i].fmt] + '</b></a>';
}else{
if(dllinks.length > 0){
dllinks += '<br />';
}
dllinks += '<a href="' + unescape(url_classic[i].fmt_url) + "&signature=" + url_classic[i].fmt_sig + "&title=" + title + '" target="_blank"><b>Download ' + fmt_str[url_classic[i].fmt] + '</b></a>';
}
}
if(webmlinks.length > 0){
if(dllinks.length > 0){
dllinks += '<br />';
}
dllinks += webmlinks;
}
if(url_alter.length > 0){
for(var i in url_alter){
if(dllinksAlter.length > 0){
dllinksAlter += '<br />';
}
dllinksAlter += '<a href="' + unescape(url_alter[i].fmt_url) + "&title=" + escape(title) + '" target="_blank"><b>Download ' + fmt_str[url_alter[i].fmt] + '</b></a>';
}
}
if(dllinksAlter.length > 0){
if(dllinks.length > 0){
dllinks += '<br /><br /><span style="color:#f00; font-weight:bold;">sadly 1080p\'s dead again...</span><br /><del>1080p & some other formats redirect download are back online and <span style="color:#f00;font-weight:bold;">testing</span>:<br />';
}
dllinks += dllinksAlter + '</del>';
}
for(var i in url_adaptive){
if(dllinksAdaptive.length > 0){
dllinksAdaptive += '<br />';
}
//dllinksAdaptive += '<a href="' + unescape(url_adaptive[i].fmt_url) + "&signature=" + url_adaptive[i].fmt_sig + "&title=" + escape(title) + '" target="_blank"><b>Download ' + fmt_str[url_adaptive[i].fmt] + '</b></a>';
dllinksAdaptive += '<a href="' + unescape(url_adaptive[i].fmt_url) + "&title=" + escape(title) + '" target="_blank"><b>Download ' + fmt_str[url_adaptive[i].fmt] + '</b></a>';
}
if(dllinksAdaptive.length > 0){
if(dllinks.length > 0){
dllinks += '<br /><br />special files (separated audio and video):<br />';
}
dllinks += dllinksAdaptive;
}
if(dllinks.length > 0){
$('#result_div').remove();
var div_dl = document.createElement('div');
$(div_dl).html(dllinks).css('padding', '7px 0 0 0');
$(div_dl).attr('id', 'result_div');
$('#videoInfo').after(div_dl);
$('#downloadInfo').css('display', 'block');
succ = 1;
}
if(succ == 0){
var result;
var rdata_status;
var rdata_reason;
var rdata_temp;
for(i = 0; i < rdataArray.length; i++){
rdata_temp = rdataArray[i].split('=');
if(rdata_temp[0] == 'status'){
rdata_status = rdata_temp[1];
}
if(rdata_temp[0] == 'reason'){
rdata_reason = urldecode(rdata_temp[1]);
}
}
result = '<b>無法取得影片 URL</b><br />status : <span style="color:#f00;">' + rdata_status + '</span><br />' + 'reason : <span style="color:#f00;">' + rdata_reason + '</span>';
$('#result_div').remove();
var div_dl = document.createElement('div');
$(div_dl).html(result).css('padding', '7 0 7px 0');
$(div_dl).attr('id', 'result_div');
$('#videoInfo').after(div_dl);
}
}
function parseUrlsClassic(rdataArray){
for(i = 0; i < rdataArray.length; i++){
r0 = rdataArray[i].substr(0, 26);
if(r0 == 'url_encoded_fmt_stream_map'){
r1 = unescape(rdataArray[i].substr(27));
var temp1 = r1.split(',');
var fmt = new Array;
var fmt_url = new Array;
var fmt_sig = new Array;
var items = [];
for(j = 0; j < temp1.length; j++){
var temp2 = temp1[j].split('&');
var item = {};
var temp_itag = -1;
var temp_type = '';
for(jj = 0; jj < temp2.length; jj++){
if(temp2[jj].substr(0, 5) == 'itag='){
temp_itag = parseInt(temp2[jj].substr(5), 10);
item.fmt = temp_itag;
}else if(temp2[jj].substr(0, 4) == 'url='){
item.fmt_url = temp2[jj].substr(4);
}else if(temp2[jj].substr(0, 4) == 'sig='){
item.fmt_sig = temp2[jj].substr(4);
}else if(temp2[jj].substr(0, 2) == 's='){
item.fmt_sig = SigHandlerAlternative(temp2[jj].substr(2));
}else if(temp2[jj].substr(0, 5) == 'type='){
temp_type = '(' + unescape(temp2[jj].substr(5)) + ')';
}
}
if(fmt_str[temp_itag] == 'undefined'){
fmt_str[temp_itag] = temp_type;
}
items.push(item);
}
return items;
}
}
}
function parseUrlsAdaptive(rdataArray){
for(i = 0; i < rdataArray.length; i++){
r0 = rdataArray[i].substr(0, 13);
if(r0 == 'adaptive_fmts'){
r1 = unescape(rdataArray[i].substr(14));
var temp1 = r1.split(',');
var fmt = new Array;
var fmt_url = new Array;
var fmt_sig = new Array;
var items = [];
for(j = 0; j < temp1.length; j++){
var temp2 = temp1[j].split('&');
var item = {};
var temp_itag = -1;
var temp_type = '';
for(jj = 0; jj < temp2.length; jj++){
if(temp2[jj].substr(0, 5) == 'itag='){
temp_itag = parseInt(temp2[jj].substr(5), 10);
item.fmt = temp_itag;
}else if(temp2[jj].substr(0, 4) == 'url='){
item.fmt_url = temp2[jj].substr(4);
}else if(temp2[jj].substr(0, 4) == 'sig='){
item.fmt_sig = temp2[jj].substr(4);
}else if(temp2[jj].substr(0, 2) == 's='){
item.fmt_sig = SigHandlerAlternative(temp2[jj].substr(2));
}else if(temp2[jj].substr(0, 5) == 'type='){
temp_type = '(' + unescape(temp2[jj].substr(5)) + ')';
}
}
if(fmt_str[temp_itag] == 'undefined'){
fmt_str[temp_itag] = temp_type;
}
items.push(item);
}
return items;
}
}
}
function parseUrlsAlter(rdataArray, url_classic, url_adaptive){
for(i = 0; i < rdataArray.length; i++){
r0 = rdataArray[i].substr(0, 7);
if(r0 == 'dashmpd'){
r1 = unescape(rdataArray[i].substr(8)).replace('http://www.youtube.com/api/manifest/dash/', '');
var temp1 = r1.split('/');
for(var j = 0; j < temp1.length; j ++){
if(temp1[j] == 'sig'){
temp1[j] = 'signature';
}
if(temp1[j] == 's'){
temp1[j] = 'signature';
temp1[j+1] = SigHandlerAlternative(temp1[j+1]);
}
}
var qstemp = [];
for(var j = 0; j < temp1.length; j += 2){
qstemp.push(temp1[j] + '=' + temp1[j+1]);
}
var qs = qstemp.join('&');
if (qs.toLowerCase().indexOf('ratebypass') == -1) {
qs = qs + '&ratebypass=yes';
}
var base_url = '';
for(var j in url_classic){
var tempurl = unescape(url_classic[j].fmt_url).split('?');
if(tempurl[0] !== '' && tempurl[0] !== undefined && tempurl[0].length > 0){
base_url = tempurl[0];
break;
}
}
var fmt_classic = [];
for(var j in url_classic){
fmt_classic[url_classic[j].fmt] = true;
}
var fmt_adaptive = [];
for(var j in url_adaptive){
fmt_adaptive[url_adaptive[j].fmt] = true;
}
var items = [];
var item35 = {};
var item37 = {};
var item38 = {};
if(fmt_adaptive[135] && fmt_classic[35] == undefined){
item35.fmt = 35;
item35.fmt_url = base_url + '?' + qs + '&itag=35';
items.push(item35);
}
if((fmt_adaptive[137] || fmt_adaptive[264]) && fmt_classic[37] == undefined){
item37.fmt = 37;
item37.fmt_url = base_url + '?' + qs + '&itag=37';
items.push(item37);
}
if(fmt_adaptive[138] && fmt_classic[38] == undefined){
item38.fmt = 38;
item38.fmt_url = base_url + '?' + qs + '&itag=38';
items.push(item38);
}
return items;
}
}
return [];
}
function parseTitle(rdataArray){
for(i = 0; i < rdataArray.length; i++){
r0 = rdataArray[i].substr(0, 5);
if(r0 == 'title'){
//return urldecode(rdataArray[i].substr(6));
return rdataArray[i].substr(6).replace(/%22/g, '');
}
}
}
function urldecode(str){
// Decodes URL-encoded string
//
// version: 1004.2314
// discuss at: http://phpjs.org/functions/urldecode // + original by: <NAME>
// + improved by: <NAME> (http://kevin.vanzonneveld.net)
// + input by: AJ
// + improved by: <NAME> (http://kevin.vanzonneveld.net)
// + improved by: <NAME> (http://brett-zamir.me) // + input by: travc
// + input by: <NAME> (http://brett-zamir.me)
// + bugfixed by: <NAME> (http://kevin.vanzonneveld.net)
// + improved by: <NAME>
// + input by: Ratheous // + improved by: Orlando
// + reimplemented by: <NAME> (http://brett-zamir.me)
// + bugfixed by: Rob
// % note 1: info on what encoding functions to use from: http://xkr.us/articles/javascript/encode-compare/
// % note 2: Please be aware that this function expects to decode from UTF-8 encoded strings, as found on // % note 2: pages served as UTF-8
// * example 1: urldecode('Kevin+van+Zonneveld%21');
// * returns 1: '<NAME>!'
// * example 2: urldecode('http%3A%2F%2Fkevin.vanzonneveld.net%2F');
// * returns 2: 'http://kevin.vanzonneveld.net/'
// * example 3: urldecode('http%3A%2F%2Fwww.google.nl%2Fsearch%3Fq%3Dphp.js%26ie%3Dutf-8%26oe%3Dutf-8%26aq%3Dt%26rls%3Dcom.ubuntu%3Aen-US%3Aunofficial%26client%3Dfirefox-a');
// * returns 3: 'http://www.google.nl/search?q=php.js&ie=utf-8&oe=utf-8&aq=t&rls=com.ubuntu:en-US:unofficial&client=firefox-a'
return decodeURIComponent(str.replace(/\+/g, '%20'));
}
function SigHandlerAlternative(s){
var sArray = s.split("");
var tmpA, tmpB;
tmpA = sArray[0];
tmpB = sArray[52];
sArray[0] = tmpB;
sArray[52] = tmpA;
tmpA = sArray[83];
tmpB = sArray[62];
sArray[83] = tmpB;
sArray[62] = tmpA;
sArray = sArray.slice(3);
sArray = sArray.reverse();
sArray = sArray.slice(3);
return sArray.join("");
} | 423e490c0c7aaa9bde7910dc0da5d04384f7e99b | [
"JavaScript"
] | 1 | JavaScript | peterwillcn/flvretriever | 59073980ab2b8a95cd098a63ed4ffcd7ad07b824 | f957e618fb1c01174524d12396865302e9adbd9e | |
refs/heads/master | <file_sep>"""This file contains code for use with "Think Bayes",
by <NAME>, available from greenteapress.com
Copyright 2012 <NAME>
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
"""This file contains a partial solution to a problem from
MacKay, "Information Theory, Inference, and Learning Algorithms."
Exercise 3.15 (page 50): A statistical statement appeared in
"The Guardian" on Friday January 4, 2002:
When spun on edge 250 times, a Belgian one-euro coin came
up heads 140 times and tails 110. 'It looks very suspicious
to me,' said <NAME>, a statistics lecturer at the London
School of Economics. 'If the coin were unbiased, the chance of
getting a result as extreme as that would be less than 7%.'
MacKay asks, "But do these data give evidence that the coin is biased
rather than fair?"
"""
# standard library
import sys
# 3rd party
import numpy as np
# local
import thinkbayes
import thinkplot
# how discrete is the set of hypotheses
N_BINS = 10001
class PingPong(thinkbayes.Suite):
"""Represents hypotheses about the probability of Mike beating Dean at
ping pong.
"""
def Likelihood(self, data, hypo):
"""Computes the likelihood of the data under the hypothesis.
hypo: float value of x, the probability of mike beating dean
data: string 'M' or 'D'
"""
x = hypo
if data == 'M':
return x
else:
return 1-x
class Pool(thinkbayes.Suite):
"""Represents hypotheses about the probability of Dean beating Mike at
pool.
"""
def Likelihood(self, data, hypo):
"""Computes the likelihood of the data under the hypothesis.
hypo: float value of x, the probability of dean beating mike
data: string 'H' or 'T'
"""
x = hypo
if data == 'D':
return x
else:
return 1-x
def main(mike_ping_pong_wins, mike_ping_pong_losses,
dean_pool_wins, dean_pool_losses):
ping_pong = PingPong(np.linspace(0, 1, N_BINS))
ping_pong_data = 'M' * mike_ping_pong_wins + 'D' * mike_ping_pong_losses
ping_pong.UpdateSet(ping_pong_data)
ping_pong_cdf = ping_pong.MakeCdf(name='ping_pong')
print 'MAP estimate of Mike beating Dean in ping pong: %.2f' % \
ping_pong.Mean()
pool = Pool(np.linspace(0, 1, N_BINS))
pool_wins, pool_losses = 3, 1
pool_data = 'D' * dean_pool_wins + 'M' * dean_pool_losses
pool.UpdateSet(pool_data)
pool_cdf = pool.MakeCdf(name='pool')
print 'MAP estimate of Dean beating Mike in pool: %.2f' % pool.Mean()
mike_is_more_better = 0
n_tries = 100000
mike_values = ping_pong_cdf.Sample(n_tries)
dean_values = pool_cdf.Sample(n_tries)
for mike_value, dean_value in zip(mike_values, dean_values):
# if mike_value == dean_value, give the tie to dean --- he
# needs it.
if mike_value > dean_value:
mike_is_more_better += 1
p_mike_is_more_better = mike_is_more_better / float(n_tries)
print 'Probability that Mike is more better: %.2f' % p_mike_is_more_better
thinkplot.Pmf(ping_pong, label='Mike beating Dean at Ping Pong')
thinkplot.Pmf(pool, label='Dean beating Mike at Pool')
thinkplot.Show()
def parseargs():
pingpong_record = sys.argv[1]
pool_record = sys.argv[2]
pingpong_mike, pingpong_dean = [int(i) for i in pingpong_record.split('-')]
pool_dean, pool_mike = [int(i) for i in pool_record.split('-')]
return pingpong_mike, pingpong_dean, pool_dean, pool_mike
if __name__ == '__main__':
main(*parseargs())
<file_sep>import random
import morebetter
def main(pingpong_mike, pingpong_dean, pool_dean, pool_mike):
# trick: pseudocount of 1 is a uniform beta prior
pseudocount = 1.0
total = 0
n_tries = 100000
for i in range(n_tries):
pingpong_bias = random.betavariate(pingpong_mike + pseudocount,
pingpong_dean + pseudocount)
pool_bias = random.betavariate(pool_dean + pseudocount,
pool_mike + pseudocount)
if pingpong_bias > pool_bias:
total += 1
p = float(total) / n_tries
print 'Probability that Mike is more better: %.2f' % p
if __name__ == '__main__':
main(*morebetter.parseargs())
| e663ed89347c81c7122adcfdb00afe63d3a5b052 | [
"Python"
] | 2 | Python | stringertheory/ThinkBayes | b7193dc76bd87c2766192bb663ddec0cc31329c5 | f870fc5d0e13ade9d78347122fa7f41098e7ba04 | |
refs/heads/master | <repo_name>PrayagPanta/Grade-Automation-System<file_sep>/blog/templates/blog/complaint_teacher.html
{% extends "blog/base.html" %}
{% block content %}
<h2 class="Info2 content-section">You have received following Complaints</h2>
<br>
<p class="Info content-section" style="height: 400px; overflow-y: scroll;">
No Complaints at this moment
</p>
{% endblock content %}
<file_sep>/course/views.py
from django.shortcuts import render,redirect
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.contrib.auth.models import User
from django.views.generic import ListView,UpdateView
from .models import Marks,Subject,Teaches
from users.models import StudentInfo,TeacherInfo
from django.contrib.auth.decorators import login_required
from .forms import NameForm,AddForm,NameForm2,UpdateForm
import os
#from mlmodel import analyze
# Create your views here.
files = []
@login_required
def newRecord(request):
if request.method == 'POST':
form = NameForm(request.POST)
if form.is_valid():
SID = form.cleaned_data.get('SID')
try:
users = User.objects.get(id=SID)
if( users.profile.role == 'T'):
return render(request,'course/courseDoesNotExist.html')
except:
return render(request,'course/courseDoesNotExist.html')
return redirect(addMarks,SID)
else:
return render(request,'course/Error.html')
else:
if( request.user.profile.role == 'T'):
form = NameForm()
return render(request, 'course/newRecord.html', {'form': form})
else:
return render(request,'course/Error.html')
@login_required
def searchRecord(request):
if request.method == 'POST':
form = NameForm2(request.POST)
if form.is_valid():
sid = form.cleaned_data.get('SID')
qno = form.cleaned_data.get('Qno')
try:
users = User.objects.get(id=sid)
except User.DoesNotExist:
return render(request,'course/courseDoesNotExist.html')
if( users.profile.role == 'T'):
return render(request,'course/Error.html')
return redirect(UpdateMarks,sid,qno,users)
else:
if( request.user.profile.role == 'T'):
form = NameForm2()
return render(request, 'course/searchRecord.html', {'form': form})
else:
return render(request,'course/Error.html')
@login_required
def addMarks(request,SID):
if request.method == 'POST':
form = AddForm(request.POST)
if form.is_valid():
obj = form.save(commit=False)
obj.Name = StudentInfo.objects.get(Sid=SID)
obj.Checker = TeacherInfo.objects.get(Tid=request.user.id)
obj.Subject = Teaches.objects.get(Name=obj.Checker).Subject
obj.Question_No = form.cleaned_data.get('Qno')
obj.save()
# if else condition to check if there are unmarked questions present
return render(request,'course/done.html')
else:
return render(request,'course/Error.html')
elif request.method == 'GET':
form = AddForm()
Subject = Teaches.objects.get(Name=TeacherInfo.objects.get(Tid=request.user.id) ).Subject
id = SID
# if files is empty put each and every content of the directory in the file
# else
url = 'media/answersheet/'+str(id)+'/'+str(Subject)
files = os.listdir(url)
imgurl="/media/answersheet/"+str(id)+"/"+str(Subject)+"/1.jpg"
# summary , marks, totalmarks = analyze(imgurl)
return render(request, 'course/addMarks.html', {'form': form,'imgurl':imgurl,'files':files})
else:
pass
@login_required
def UpdateMarks(request,sid,qno,users):
if request.method == 'GET':
form = UpdateForm()
marks = Marks.objects.filter(Question_No=qno,Subject=Teaches.objects.get(Name=TeacherInfo.objects.get(Tid=request.user.id) ).Subject,Name=sid).first()
return render(request,'course/update.html',{'form': form ,'marks':marks.Marks})
else:
form = UpdateForm(request.POST)
if form.is_valid():
NewMarks = form.cleaned_data.get('NewMarks')
Marks.objects.filter(Question_No=qno,Subject=Teaches.objects.get(Name=TeacherInfo.objects.get(Tid=request.user.id) ).Subject,Name=sid).update(Marks=NewMarks)
return render(request,'course/done2.html',{'NewMarks': NewMarks})
else:
return render(request,'course/Error.html')
#class PostCreateView(LoginRequiredMixin, CreateView):
# model = Post
# fields = ['title', 'content']
# def form_valid(self, form):
# form.instance.author = self.request.user
# return super().form_valid(form)
<file_sep>/course/migrations/0004_auto_20200229_1754.py
# Generated by Django 3.0.1 on 2020-02-29 12:09
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('course', '0003_auto_20200229_1736'),
]
operations = [
migrations.RenameField(
model_name='teaches',
old_name='Subect',
new_name='Subject',
),
]
<file_sep>/README.md
# Grade-Automation-System
To run project :
1) python manage.py runserver
2) Delete sqlite database
3) python manage.py makemigrations
4) python manage.py migrate
5) python manage.py createsuperuser ( also the teacher)
6) login as superuser to add students,subjects and other relevant details
<file_sep>/users/migrations/0002_auto_20200211_1915.py
# Generated by Django 3.0.1 on 2020-02-11 13:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('users', '0001_initial'),
('course', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='complaint',
name='Subject',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='course.Subject'),
),
migrations.AddField(
model_name='complaint',
name='Tid',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='users.TeacherInfo'),
),
]
<file_sep>/blog/forms.py
from django import forms
class ComplaintForm(forms.Form):
Subject = forms.CharField(max_length=100)
Complaint = forms.CharField(label="Complaint", max_length=100)
class Meta:
fields = ['Subject','Complaint']
<file_sep>/users/models.py
from django.db import models
from django.contrib.auth.models import User
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
image = models.ImageField(default='default.jpg', upload_to='profile_pics')
roles = [
('S', 'Student'),
('T', 'Teacher'),
]
role = models.CharField(
max_length=1,
choices=roles,
default='S'
)
def __str__(self):
return self.user.username
def __unicode__(self):
return self.user.username
class StudentInfo(models.Model):
Sid = models.OneToOneField(Profile, primary_key=True,on_delete=models.CASCADE)
Name = models.CharField(max_length=30)
School = models.CharField(max_length=80)
Exam_Center = models.CharField(max_length=80)
def __str__(self):
return self.Name
class TeacherInfo(models.Model):
Tid = models.OneToOneField(Profile, primary_key=True,on_delete=models.CASCADE)
Name = models.CharField(max_length=30)
School = models.CharField(max_length=80)
def __str__(self):
return self.Name
class Complaint(models.Model):
Sid = models.OneToOneField(StudentInfo, primary_key=True,on_delete=models.CASCADE)
Tid = models.OneToOneField(TeacherInfo,on_delete=models.CASCADE)
Subject = models.OneToOneField('course.Subject',on_delete=models.CASCADE)
Complaint = models.CharField(max_length=500)
<file_sep>/course/admin.py
from django.contrib import admin
from .models import Subject,Enrolled,Teaches,Marks
# Register your models here.
admin.site.register(Subject)
admin.site.register(Enrolled)
admin.site.register(Teaches)
admin.site.register(Marks)
<file_sep>/course/templates/course/addMarks.html
{% extends "blog/base2.html" %}
{% load static %}
{% load crispy_forms_tags %}
{% block extra_head %}
<link rel="stylesheet" type="text/css" href="{% static 'course/style.css' %}">
{% endblock %}
{% block side %}
<div class="cont">
<div class="point"><h3>Estimated Marks <br> 19/20 </h3> </div>
<div class="keyword"><h4>Answer Keywords</h4> <p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p> </div>
</div>
<form method="POST">
{% csrf_token %}
<fieldset class="form-group conta">
<center><legend class="keyword"><h4>Enter the marks for student</h4></legend></center>
{{ form|crispy }}
</fieldset>
<div class="form-group">
<button class="btn" type="submit">Next question</button>
</div>
</form>
{% endblock side %}
{% block content %}
<div class="content-section">
<form method="POST">
{% csrf_token %}
<fieldset class="form-group conta">
<h3 class="border-bottom mb-4" > Answer 1</h3>
<div class="summary"><h4>Summary of the Answer</h4><p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.{{ files }}</p></div>
<img src="{{ imgurl }}">
</fieldset>
</form>
</div>
{% endblock content %}
<file_sep>/course/models.py
from django.db import models
class Subject(models.Model):
Name = models.CharField(max_length=100)
Question_Paper = models.ImageField(default='default.jpg', upload_to='profile_pics')
Number_Of_Questions = models.IntegerField(default=0)
def __str__(self):
return self.Name
class Enrolled(models.Model):
Name = models.OneToOneField('users.StudentInfo',primary_key = True,on_delete=models.CASCADE)
Subect =models.OneToOneField(Subject,on_delete=models.CASCADE)
def __str__(self):
return self.Name.Name
class Teaches(models.Model):
Name = models.OneToOneField('users.TeacherInfo',primary_key = True,on_delete=models.CASCADE)
Subject =models.OneToOneField(Subject,on_delete=models.CASCADE)
def __str__(self):
return self.Name.Name
class Marks(models.Model):
Name = models.ForeignKey('users.StudentInfo',on_delete=models.CASCADE)
Checker = models.ForeignKey('users.TeacherInfo',on_delete=models.CASCADE)
Subject = models.ForeignKey(Subject,on_delete=models.CASCADE)
Question_No = models.CharField(max_length=4)
Marks = models.IntegerField()
def __str__(self):
return self.Name.Name
<file_sep>/course/urls.py
from django.urls import path
from . import views
urlpatterns = [
path('newRecord/', views.newRecord, name='new-Record'),
path('searchRecord/', views.searchRecord, name='search-Record'),
path('addMarks/<int:SID>/',views.addMarks,name='add-Marks'),
path('searchRecord/UpdateMarks/<str:sid>/<str:qno>/<str:users>/',views.UpdateMarks,name='update-Marks'),
]
<file_sep>/users/admin.py
from django.contrib import admin
from .models import Profile,StudentInfo,TeacherInfo,Complaint
from django.contrib.auth.models import User
admin.site.register(Profile)
admin.site.register(StudentInfo)
admin.site.register(TeacherInfo)
admin.site.register(Complaint)
#admin.site.register(CorrectsAnsof)
<file_sep>/users/forms.py
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class UserRegisterForm(UserCreationForm):
email = forms.EmailField()
role = forms.ChoiceField( choices=(('S','S') , ('T','T') ) )
class Meta:
model = User
fields = ['username','email','role','<PASSWORD>1', '<PASSWORD>']
class PasswordForm(forms.Form):
OldPassword = forms.CharField(label="Old Password", max_length=100)
NewPassword = forms.CharField(label="New Password",max_length=100)
NewPassword2 = forms.CharField(label="Re-type New Password",max_length=100)
class Meta:
fields = ['OldPassword','NewPassword','NewPassword2']
<file_sep>/blog/views.py
from django.shortcuts import render
from django.views.generic import ListView,DetailView
from django.contrib.auth.mixins import LoginRequiredMixin
from .models import Post
from course.models import Enrolled
from django.contrib.auth.decorators import login_required
from .forms import ComplaintForm
@login_required
def home(request):
if( request.user.profile.role == 'S'):
return render(request, 'blog/student.html',{'title':'Home Page'})
else:
return render(request, 'blog/teacher.html',{'title':'Home Page'})
@login_required
def about(request):
return render(request, 'blog/about.html', {'title': 'About'})
@login_required
def comp(request):
return render(request,'blog/complaint_teacher.html')
@login_required
def compS(request):
if request.method == 'GET':
form = ComplaintForm()
return render(request,'blog/complaint_student.html',{'form': form})
else:
return render(request,'blog/Noresults.html')
#class ViewEnrollment(ListView,LoginRequiredMixin):
#model = Enrolled
<file_sep>/course/migrations/0003_auto_20200229_1736.py
# Generated by Django 3.0.1 on 2020-02-29 11:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('users', '0002_auto_20200211_1915'),
('course', '0002_subject_number_of_questions'),
]
operations = [
migrations.AddField(
model_name='marks',
name='id',
field=models.AutoField(auto_created=True, default=1, primary_key=True, serialize=False, verbose_name='ID'),
preserve_default=False,
),
migrations.AlterField(
model_name='marks',
name='Name',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.StudentInfo'),
),
]
<file_sep>/users/migrations/0001_initial.py
# Generated by Django 3.0.1 on 2020-02-11 13:30
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(default='default.jpg', upload_to='profile_pics')),
('role', models.CharField(choices=[('S', 'Student'), ('T', 'Teacher')], default='S', max_length=1)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='StudentInfo',
fields=[
('Sid', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='users.Profile')),
('Name', models.CharField(max_length=30)),
('School', models.CharField(max_length=80)),
('Exam_Center', models.CharField(max_length=80)),
],
),
migrations.CreateModel(
name='TeacherInfo',
fields=[
('Tid', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='users.Profile')),
('Name', models.CharField(max_length=30)),
('School', models.CharField(max_length=80)),
],
),
migrations.CreateModel(
name='Complaint',
fields=[
('Sid', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='users.StudentInfo')),
('Complaint', models.CharField(max_length=500)),
],
),
]
<file_sep>/course/templates/course/update.html
{% extends "blog/base2.html" %}
{% load static %}
{% load crispy_forms_tags %}
{% block extra_head %}
<link rel="stylesheet" type="text/css" href="{% static 'course/style.css' %}">
{% endblock %}
{% block side %}
<div class="cont">
<div class="point"><h3>Previous Marks <br> {{ marks }} /20 </h3> </div>
<div class="keyword"><h4>Answer Keywords</h4> <p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p> </div>
</div>
<form method="POST">
{% csrf_token %}
<fieldset class="form-group conta">
<center><legend class="keyword"><h4>Enter the marks for student</h4></legend></center>
{{ form|crispy }}
</fieldset>
<div class="form-group">
<button class="btn" type="submit">Update</button><!--https://stackoverflow.com/questions/25345392/how-to-add-url-parameters-to-django-template-url-tag-->
<a class="btn" href="{% url 'search-Record' %}">Retain Previous</a>
</div>
</form>
{% endblock side %}
{% block content %}
<div class="content-section">
<form method="POST">
{% csrf_token %}
<fieldset class="form-group conta">
<h3 class="border-bottom mb-4" > Answer 1</h3>
<div class="summary"><h4>Summary of the Answer</h4><p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p></div>
<img src="/media/answersheet/answer.jpg">
</fieldset>
</form>
</div>
{% endblock content %}
<file_sep>/course/forms.py
from django import forms
from .models import Marks
class NameForm(forms.Form):
SID = forms.CharField(label="Student's ID", max_length=100)
class NameForm2(forms.Form):
SID = forms.CharField(label="Student's ID", max_length=100)
Qno = forms.CharField(label="Question Number",max_length=4)
class UpdateForm(forms.Form):
NewMarks = forms.IntegerField(label="Enter the Updated Mark")
class AddForm(forms.ModelForm):
Qno = forms.CharField(label="Enter the question number for the answer",max_length=4)
Marks = forms.IntegerField(label="Enter the Marks")
class Meta:
model = Marks
fields = ['Qno', 'Marks']
<file_sep>/blog/urls.py
from django.urls import path
from . import views
urlpatterns = [
path('', views.home, name='blog-home'),
path('about/', views.about, name='blog-about'),
path('complaints/',views.comp,name='comp'),
path('compS/',views.compS,name='compS')
]
<file_sep>/course/migrations/0001_initial.py
# Generated by Django 3.0.1 on 2020-02-11 13:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Subject',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Name', models.CharField(max_length=100)),
('Question_Paper', models.ImageField(default='default.jpg', upload_to='profile_pics')),
],
),
migrations.CreateModel(
name='Teaches',
fields=[
('Name', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='users.TeacherInfo')),
('Subect', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='course.Subject')),
],
),
migrations.CreateModel(
name='Marks',
fields=[
('Name', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='users.StudentInfo')),
('Question_No', models.CharField(max_length=4)),
('Marks', models.IntegerField()),
('Checker', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.TeacherInfo')),
('Subject', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='course.Subject')),
],
),
migrations.CreateModel(
name='Enrolled',
fields=[
('Name', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='users.StudentInfo')),
('Subect', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='course.Subject')),
],
),
]
| c96459d0a77ae8476e43d2533e1aba92d430de80 | [
"Markdown",
"Python",
"HTML"
] | 20 | HTML | PrayagPanta/Grade-Automation-System | 4ae54666c55ff3bf3ed5b70c8c6c703e3769ae35 | cc38592e50593ec641f2e8a44ffae536ae018ec6 | |
refs/heads/master | <repo_name>puyangsky/DockerHubCrawler<file_sep>/dockerhub/items.py
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class OfficialImageItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
name = scrapy.Field()
link = scrapy.Field()
linkmd5 = scrapy.Field()
class ClassifiedImageItem(scrapy.Item):
repo_name = scrapy.Field()
star_count = scrapy.Field()
pull_count = scrapy.Field()
short_description = scrapy.Field()
is_automated = scrapy.Field()
is_official = scrapy.Field()
link = scrapy.Field()
category = scrapy.Field()
<file_sep>/README.md
# DockerHubCrawler
Crawler of DockerHub using scrapy.
<file_sep>/dockerhub/pipelines.py
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy import signals
import json
import codecs
from twisted.enterprise import adbapi
from datetime import datetime
from hashlib import md5
import MySQLdb
import MySQLdb.cursors
import logging
logger = logging.getLogger()
handler = logging.FileHandler("/home/pyt/log/scrapy.log")
handler.setLevel(logging.WARN)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
class BasePipeline(object):
def __init__(self, dbpool):
self.dbpool = dbpool
@classmethod
def from_settings(cls, settings):
dbargs = dict(
host=settings['MYSQL_HOST'],
db=settings['MYSQL_DBNAME'],
user=settings['MYSQL_USER'],
passwd=settings['<PASSWORD>'],
charset='utf8',
cursorclass=MySQLdb.cursors.DictCursor,
use_unicode=True,
)
dbpool = adbapi.ConnectionPool('MySQLdb', **dbargs)
return cls(dbpool)
# 获取url的md5编码
def _get_linkmd5id(self, item):
# url进行md5处理,为避免重复采集设计
return md5(item['link']).hexdigest()
# 异常处理
def _handle_error(self, failue, item, spider):
logger.err("err %s" % failue)
class OfficialImagePipeline(BasePipeline):
# pipeline默认调用
def process_item(self, item, spider):
d = self.dbpool.runInteraction(self._do_upinsert, item, spider)
d.addErrback(self._handle_error, item, spider)
d.addBoth(lambda _: item)
return d
# 将每行更新或写入数据库中
def _do_upinsert(self, conn, item, spider):
linkmd5id = self._get_linkmd5id(item)
conn.execute("select 1 from officialimage WHERE urlmd5 = %s", linkmd5id)
ret = conn.fetchone()
if ret:
conn.execute("update officialimage set name = %s, url = %s, urlmd5 = %s",
(item['name'], item['link'], linkmd5id))
else:
conn.execute("insert into officialimage(name, url, urlmd5) values (%s, %s, %s)",
(item['name'], item['link'], linkmd5id))
print "insert into officialimage(name, url, urlmd5) values (%s, %s, %s)", \
(item['name'], item['link'], linkmd5id)
class ClassifiedImagePipeline(object):
def __init__(self, dbpool):
self.dbpool = dbpool
@classmethod
def from_settings(cls, settings):
dbargs = dict(
host=settings['MYSQL_HOST'],
db=settings['MYSQL_DBNAME'],
user=settings['MYSQL_USER'],
passwd=settings['<PASSWORD>'],
charset='utf8',
cursorclass=MySQLdb.cursors.DictCursor,
use_unicode=True,
)
dbpool = adbapi.ConnectionPool('MySQLdb', **dbargs)
return cls(dbpool)
def process_item(self, item, spider):
d = self.dbpool.runInteraction(self._do_upinsert, item, spider)
d.addErrback(self._handle_error, item, spider)
d.addBoth(lambda _: item)
return d
def _do_upinsert(self, conn, item, spider):
linkmd5id = self._get_linkmd5id(item)
print linkmd5id
conn.execute("select 1 from classifiedimage WHERE linkmd5 = %r", linkmd5id)
ret = conn.fetchone()
logger.warn("select 1 from classifiedimage WHERE linkmd5 = %r and ret is: %r" % (linkmd5id, ret))
if not ret:
# conn.execute("""
# update classifiedimage set name = %s, star = %s, pull = %s, description = %s,
# automated = %s, official = %s, link = %s, linkmd5 = %s, category = %s
# """, (item['repo_name'], item['star_count'], item['pull_count'], item['short_description'],
# item['is_automated'], item['is_official'], item['link'], linkmd5id, item['category']))
# else:
logger.warn("inserting one entry")
try:
conn.execute("""
insert into classifiedimage(name, star, pull, description, automated,
official, link, linkmd5, category) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
""", (item['repo_name'], item['star_count'], item['pull_count'], item['short_description'],
item['is_automated'], item['is_official'], item['link'], linkmd5id, item['category']))
logger.warn("insert successfully")
except Exception, e:
logger.error("insert into mysql error: %r", e)
# 获取url的md5编码
def _get_linkmd5id(self, item):
# url进行md5处理,为避免重复采集设计
return md5(item['link']).hexdigest()
# 异常处理
def _handle_error(self, failue, item, spider):
logger.error(failue)
<file_sep>/dockerhub/spiders/dockerhub_spider.py
import scrapy
from dockerhub.items import *
import json
from scrapy.http import Request
import time
class DockerhubSpider(scrapy.Spider):
name = "dockerhub"
allowed_domains = ["hub.docker.com"]
base_url = "https://hub.docker.com/explore/?page="
# base_query_url = "https://hub.docker.com/v2/search/repositories/?page=1&query=nginx&page_size=100"
# https: // docker - index - static.s3.amazonaws.com / sitemap / sitemap.xml
start_urls = []
for i in range(1, 10):
start_url = base_url + str(i)
start_urls.append(start_url)
def parse(self, response):
items = []
for sel in response.xpath('//ul/li'):
item = OfficialImageItem()
link = sel.xpath('a/@href').re("/_/.*")
if link:
# f.write("%s\n" % (link[0].split("/")[-2]))
item['name'] = link[0].split("/")[-2]
item['link'] = "https://hub.docker.com" + link[0]
with open("/home/pyt/dockerhub/images", "a+") as f:
f.write("%s\n" % item['name'])
# print item
items.append(item)
# return items
class SingleImageSpider(scrapy.Spider):
name = "singleimage"
allowed_domains = ["hub.docker.com"]
# base_url = "https://hub.docker.com/explore/?page="
base_query_url = "https://hub.docker.com/v2/search/repositories/?page=1&page_size=100&query="
download_delay = 1
start_urls = []
for line in open("/home/pyt/dockerhub/images", "r"):
start_url = base_query_url + line.strip("\n")
start_urls.append(start_url)
def parse(self, response):
url = response.url
# items = []
json_str = response.body
json_ob = json.loads(json_str)
count = json_ob['count']
print count
results = json_ob['results']
for result in results:
# time.sleep(1)
item = ClassifiedImageItem()
item['star_count'] = result['star_count']
item['pull_count'] = result['pull_count']
item['short_description'] = result['short_description'].encode('utf-8')
item['is_automated'] = result['is_automated']
item['is_official'] = result['is_official']
item['repo_name'] = result['repo_name'].encode('utf-8')
if item['is_official']:
item['link'] = "https://hub.docker.com/_/" + item['repo_name'].encode('utf-8')
else:
item['link'] = "https://hub.docker.com/r/" + item['repo_name'].encode('utf-8')
item['category'] = url.split("query=")[1].split("&")[0].replace('%2', '/').encode('utf-8')
yield item
next_page_link = json_ob['next']
# print next_page_link
if next_page_link:
yield Request(next_page_link, callback=self.parse)
def parse_item(self, item):
return item
| 0f832f1d7cd62ffd739bb09e2636cb4728284665 | [
"Markdown",
"Python"
] | 4 | Python | puyangsky/DockerHubCrawler | 898e0f5566c207c5047361b17bd6bbf2c71308c6 | c21d188d658eedfc5ce21b015aeb8c7b86a0bb12 | |
refs/heads/master | <repo_name>gsavickas/ReactApp<file_sep>/my-app/src/components/Todos.js
import React, { Component } from 'react';
import TodoItem from './TodoItem';
<<<<<<< HEAD
import PropTyes from 'prop-types';
=======
import PropTypes from 'prop-types';
>>>>>>> 4e468b374c322b0f8038fe54d37038832de4ecd8
class Todos extends Component {
render(){
return this.props.todos.map((todos) => (
<TodoItem key={todos.id} todos={todos}/>
));
}
}
<<<<<<< HEAD
// PropTypes
Todos.propTypes = {
todos: PropTyes.array.isRequired
}
=======
Todos.propsTypes = {
todos: PropTypes.array.isRequired
}
>>>>>>> 4e468b374c322b0f8038fe54d37038832de4ecd8
export default Todos;
<file_sep>/README.md
This is my first react app. React is a Java Script library build by facebook It allows developers the flexability to build responsive single page web applications.
| ff6ba1eaa93c9ec694412cf00eade3cde199ca41 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | gsavickas/ReactApp | 7a8cba48c5441e10172e658f0f54e5100fd48fd8 | c13dfe983bc128341362d26a19fba2773f9ac5e7 | |
refs/heads/master | <repo_name>henan715/scrapyShangHaiBook<file_sep>/txt2excel.py
# coding:utf-8
import xlrd
import xlwt
def saveToExcel():
"""
save the content from createBsObj(content) into excel.
:param content:
:return:
"""
wbk = xlwt.Workbook(encoding='utf-8', style_compression=0)
sheet = wbk.add_sheet("sheet1",cell_overwrite_ok=True)
wbk.save('data.xls')
xlsfile=r'./data.xls'
book = xlrd.open_workbook(xlsfile)
sheet_name = book.sheet_names()[0]
sheet1 = book.sheet_by_name(sheet_name)
nrows = sheet1.nrows
ncols = sheet1.ncols
file = open('./test.txt','r')
i=0
for line in file.readlines():
str=line.split(', ')
sheet.write(nrows+i,ncols,str[0].replace('(','').replace(',',''))
sheet.write(nrows+i,ncols+1,str[1].replace(',',''))
sheet.write(nrows+i,ncols+2,str[2].replace(',',''))
sheet.write(nrows+i,ncols+3,str[3].replace(',',''))
sheet.write(nrows+i,ncols+4,str[4].replace(',',''))
sheet.write(nrows+i,ncols+5,str[5].replace(',',''))
sheet.write(nrows+i,ncols+6,str[6].replace(',','').replace(')',''))
i=i+1
# sheet.write(nrows+i,ncols,bookTitle)
wbk.save('data.xls')
saveToExcel()<file_sep>/README.md
# scrapyShangHaiBook
2016年8月上海图书馆,上海书展爬虫,用于爬取所有展出的4万余本书的书名、作者、ISBN编号、出版社、价格、展览区域信息,保存为Excel。
## 主要库
- BeautifulSoup
- Urllib2
- xlrd
- xlwt
## 代码结构
代码分为两块:MainSpider用于下载数据,通过BeautifulSoup解析网页内容,然后存储为Excel,刚开始打算存数据库,后来发现数据量太小,直接存储Excel吧,text2excel用于将txt数据转换为Excel数据(后来废弃不用了)。

<file_sep>/MainSpider.py
# coding:utf-8
from bs4 import BeautifulSoup
from urllib2 import urlopen
import xlrd
import xlwt
import sys
def getHtml(pageNo):
"""
get html content and return the raw html page.
:param pageNo: the page number,from 0~2212.
:return:
"""
startUrl = 'http://search.shbookfair.cn/2014.htm?_gat=1&_ga=GA1.2.442062171.1470810485&page='+str(pageNo)
try:
html = urlopen(startUrl)
return html.read()
except Exception,e:
print "===Wrong===", str(e)
return None
def createBsObj(content):
"""
get html string from getHtml(pageNo), then transform it into a BeautifulSoup object, then get the table content
of the page.
:param content:
:return:
"""
if(len(content)==0):
print "===Your Content Is None!==="
return
bsObj = BeautifulSoup(content, 'html.parser',from_encoding="gb18030")
table = bsObj.find("table",{"class":"display"}).find("tbody").findAll("tr")
table_list=[[]]*len(table)
i=0
for trItem in table:
bookTitle = trItem.find("td",{"class":"bookTitle"}).get_text().strip().replace(',','-')
bookAuthor = trItem.find("td",{"class":"bookAuthor"}).get_text().strip().replace(' ','.')
bookEAN = trItem.find("td",{"class":"bookEAN"}).get_text().strip()
bookAlias = trItem.find("td",{"class":"pubAlias"}).get_text().strip()
bookPrice = trItem.find("td",{"class":"bookPrice"}).get_text().strip()
bookYear = trItem.find("td",{"class":"bookYear"}).get_text().strip()
bookZone = trItem.find("td",{"class":"zones"}).get_text().strip()
item_dict=(bookTitle, bookAuthor, bookEAN, bookAlias, bookPrice, bookYear, bookZone)
#print bookTitle
table_list[i]=item_dict
i=i+1
return table_list
def saveToExcel(content):
"""
save the content from createBsObj(content) into excel.
:param content:
:return:
"""
wbk = xlwt.Workbook(encoding='utf-8', style_compression=0)
sheet = wbk.add_sheet("sheet1",cell_overwrite_ok=True)
wbk.save('data.xls')
xlsfile=r'./data.xls'
book = xlrd.open_workbook(xlsfile)
sheet_name = book.sheet_names()[0]
sheet1 = book.sheet_by_name(sheet_name)
nrows = sheet1.nrows
ncols = sheet1.ncols
# sheet.write(nrows+i,ncols,bookTitle)
wbk.save('data.xls')
# main call function
if __name__=="__main__":
file = open('./test.txt','w+')
for i in range(1,2212):#2212
print 'No '+str(i)+' is running...'
try:
lst = createBsObj(getHtml(i))
for item in lst:
file.write(str(item).decode("unicode-escape").encode("utf-8").replace("u\'","").replace("\'","").replace("\"","")+'\n')
except Exception,e:
print e
continue
file.close() | 4c318e814840d83561cc2e868a2bdef99c659c3f | [
"Markdown",
"Python"
] | 3 | Python | henan715/scrapyShangHaiBook | 79236ee9f6880515ab030df8b38bccf14099dea1 | 35319d00728d6cda87133e89213b83fc79140e5f | |
refs/heads/master | <repo_name>prakashio/spring-cloud-cache<file_sep>/src/main/java/com/programmingarea/springcloudcache/repositories/UserRepository.java
package com.programmingarea.springcloudcache.repositories;
import com.programmingarea.springcloudcache.model.User;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.List;
public interface UserRepository extends JpaRepository<User, Long> {
List<User> findByName(String name);
}
<file_sep>/src/main/java/com/programmingarea/springcloudcache/resource/UserResource.java
package com.programmingarea.springcloudcache.resource;
import com.programmingarea.springcloudcache.cache.UserCache;
import com.programmingarea.springcloudcache.model.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@RestController
@RequestMapping("/api")
public class UserResource {
@Autowired
UserCache userCache;
@GetMapping("users/{name}")
public List<User> findUsersByName(@PathVariable String name) {
return userCache.findByName(name);
}
}
<file_sep>/src/main/resources/application.properties
spring.cache.jcache.config=encache.xml
<file_sep>/src/main/java/com/programmingarea/springcloudcache/cache/UserCache.java
package com.programmingarea.springcloudcache.cache;
import com.programmingarea.springcloudcache.model.User;
import com.programmingarea.springcloudcache.repositories.UserRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Component;
import java.util.List;
@Component
public class UserCache {
@Autowired
UserRepository userRepository;
@Cacheable(value = "usersCache", key="#name")
public List<User> findByName(String name) {
System.out.println("Retriving from the database for name:"+ name);
return userRepository.findByName(name);
}
}
<file_sep>/src/main/java/com/programmingarea/springcloudcache/loader/loader.java
package com.programmingarea.springcloudcache.loader;
import com.programmingarea.springcloudcache.model.User;
import com.programmingarea.springcloudcache.repositories.UserRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.List;
@Component
public class loader {
@Autowired
UserRepository usersRepository;
@PostConstruct
public void load() {
List<User> usersList = getList();
usersRepository.saveAll(usersList);
}
public List<User> getList() {
List<User> usersList = new ArrayList<>();
usersList.add(new User("Paras", "Developer", "1"));
usersList.add(new User("Arun", "Deployment", "2"));
usersList.add(new User("Surya", "UI", "2"));
return usersList;
}
} | e35f0dce292d0128b64b1e6701f961f68266a0cc | [
"Java",
"INI"
] | 5 | Java | prakashio/spring-cloud-cache | b1615150966761d65cde2e1df09f100f2bb3379b | e02af58079bbad5703936cc0fbb9961859dc87f4 | |
refs/heads/master | <file_sep><?php
require("new_slice.php");
require("unary_minus.php");
require("tokenizer.php");
require("shunting_yard.php");
require("compare.php");
// always run this function first before doing anything else
// relied upon by tokenizer and shunting_yard
declare_globals();
?>
<file_sep><?php
function declare_globals() {
// operators
// presence of OR as operator for answers with multiple answers (i.e., roots of a quadratic)
// - is used as a function, not as an operator, to deal with unary minuses (i.e "-3+12PI")
$GLOBALS["operators"] = [
'+' => ['precedence' => 0, 'associativity' => 'left', 'commutativity' => true],
'*' => ['precedence' => 1, 'associativity' => 'left', 'commutativity' => true],
'/' => ['precedence' => 1, 'associativity' => 'left', 'commutativity' => false],
'%' => ['precedence' => 1, 'associativity' => 'left', 'commutativity' => false],
'^' => ['precedence' => 2, 'associativity' => 'right', 'commutativity' => false],
'OR' => ['precedence' => -1, 'associativity' => 'left', 'commutativity' => true]
];
// specials
$GLOBALS["specials"] = ['PI', 'E', 'I'];
// functions
$GLOBALS["functions"] = [
'SQRT',
'ABS',
'ACOS',
'COS',
'TAN',
'ATAN',
'ASIN',
'SIN',
'CEIL',
'FLOOR',
'LOG',
'LN'
];
// regexes for matching each type of token
$GLOBALS["format_regexes"] = [
"number" => "(\d+\.?\d*)",
"special" => "(PI|E|I)",
"lparen" => "(\()",
"rparen" => "(\))",
"function" => "(SQRT|ABS|ACOS|COS|ATAN|TAN|ASIN|SIN|CEIL|FLOOR|LOG|LN)",
"operator" => "(\+|\-|\*|\/|\^|OR)",
"suffix" => "(%|$)"
];
// replace patterns for preg_replace() for formatting tokens
$GLOBALS["format_replaces"] = [
"generic_implicit" => "$1 * $2",
"number_lparen_implicit" => "$1 * (",
"operator_space" => " $1 $2",
"paren_space" => " $1 "
];
}
function format($expr) {
// implicit multiplication
$regexes = [
// implicit multiplication
"/" . $GLOBALS["format_regexes"]["number"] . $GLOBALS["format_regexes"]["special"] . "/", // number, then special
"/" . $GLOBALS["format_regexes"]["special"] . $GLOBALS["format_regexes"]["number"] . "/", // special, then number
"/" . $GLOBALS["format_regexes"]["special"] . $GLOBALS["format_regexes"]["special"] . "/", // special, then special
"/(" . $GLOBALS["format_regexes"]["number"] . "|" . $GLOBALS["format_regexes"]["special"] . ")" . $GLOBALS["format_regexes"]["lparen"] . "/", // number or special, then lparen
"/" . $GLOBALS["format_regexes"]["rparen"] . "(" . $GLOBALS["format_regexes"]["number"] . "|" . $GLOBALS["format_regexes"]["special"] . ")/", // rparen, then number or special
"/" . $GLOBALS["format_regexes"]["number"] . $GLOBALS["format_regexes"]["function"] . "/", // number, then unary function
"/" . $GLOBALS["format_regexes"]["special"] . $GLOBALS["format_regexes"]["function"] . "/", // special, then unary function
// operator spacing
"/" . $GLOBALS["format_regexes"]["operator"] . "/", // add spaces before and after each operator
// parenthesis spacing
"/(" . $GLOBALS["format_regexes"]["lparen"] . "|" . $GLOBALS["format_regexes"]["rparen"] . ")/" // add spaces before and after lparen and rparen
];
$replaces = [
// implicit multiplication
$GLOBALS["format_replaces"]["generic_implicit"], // number, then special
$GLOBALS["format_replaces"]["generic_implicit"], // special, then number
$GLOBALS["format_replaces"]["generic_implicit"], // special, then special
$GLOBALS["format_replaces"]["number_lparen_implicit"], // number or special, then lparen
$GLOBALS["format_replaces"]["generic_implicit"], // rparen, then number or special
$GLOBALS["format_replaces"]["generic_implicit"], // number, then unary function
$GLOBALS["format_replaces"]["generic_implicit"], // special, then unary function
// operator spacing
$GLOBALS["format_replaces"]["operator_space"], //add spaces before and after each operator
// parenthesis spacing
$GLOBALS["format_replaces"]["paren_space"], // add spaces before and after lparen and rparen
];
return strtoupper(preg_replace($regexes, $replaces, $expr));
}
function tokenize($expr) {
// remove leading and trailing spaces
$expr = preg_replace('/\G\s|\s(?=\s*$)/', '', format($expr));
// replace >=2 spaces with only one space and explode to array
// then make all minuses unary
$tokens = fix_minus(array_map('trim', explode(' ', preg_replace('/[ ]+/', ' ', $expr))));
// get count removing parentheses to reduce steps in shunting_yard()
$count = 0;
foreach ($tokens as $token) {
$count = (!preg_match("/(" . $GLOBALS["format_regexes"]["lparen"] . "|" . $GLOBALS["format_regexes"]["rparen"] . ")/", $token)) ? $count + 1 : $count;
}
// return array with tokenized array and count
return [$tokens, $count];
}
?>
<file_sep><?php
// PHP's default array_slice doesn't support arrays without zero index.
// Therefore, this is a new function, written in PHP, to add this functionality.
// Assume keys are all numerical, consecutive, and in increasing order.
function new_slice($input_arr, $offset, $length = NULL, $preserve_keys = false) {
// error checking for $offset
if (!isset($input_arr[$offset]))
throw new \InvalidArgumentException(sprintf("Offset %d doesn't exist in array.", $offset));
else {
$output_arr = [];
// if length is set, we will iterate through ($offset + $length)
if (isset($length)) {
// if $preserve_keys is set to true, $output_arr is treated like
// an associative array with numerical keys
if ($preserve_keys) {
for ($cur = $offset, $end = $offset + $length; $cur < $end; $cur++) {
$output_arr[$cur] = $input_arr[$cur];
}
}
// otherwise the keys are ignored and elements are simply added
// to $output_arr
else {
for ($cur = $offset, $end = $offset + $length; $cur < $end; $cur++) {
$output_arr[] = $input_arr[$cur];
}
}
}
// same as above, but length is not set so we iterate through the
// end of the array
else {
if ($preserve_keys) {
for ($cur = $offset, $end = max(array_keys($input_arr)) + 1; $cur < $end; $cur++) {
$output_arr[$cur] = $input_arr[$cur];
}
}
else {
for ($cur = $offset, $end = max(array_keys($input_arr)) + 1; $cur < $end; $cur++) {
$output_arr[] = $input_arr[$cur];
}
}
}
return $output_arr;
}
}
?>
<file_sep><?php
function compare($arr1, $arr2) {
// immediately return true if arrays are the same
if ($arr1 == $arr2)
return true;
elseif (count($arr1) != count($arr2))
return false;
else {
// otherwise run usual block_compare
$compares = [];
$compares = block_compare($arr1, $arr2, $compares);
return !in_array(0, $compares);
}
}
// compare simplest order operation
// $e1, $e2 each have one unary operator and two special constant/numerical operands
function simple_compare($e1, $e2) {
// get first item in array
$e1_min = min(array_keys($e1));
$e2_min = min(array_keys($e2));
if ($e1[$e1_min]["symbol"] == $e2[$e2_min]["symbol"]) {
// compare linearly (first op1 and second op1, then same with op2)
if ($e1[$e1_min + 1]["value"] == $e2[$e2_min + 1]["value"] &&
$e1[$e1_min + 2]["value"] == $e2[$e2_min + 2]["value"]) {
return true;
}
// cross compare (first op2 and second op1 and converse) if commutative operator
elseif (($GLOBALS["operators"][$e1[$e1_min]["symbol"]]['commutativity'] === true) &&
$e1[$e1_min + 1] == $e2[$e2_min + 2] &&
$e1[$e1_min + 2] == $e2[$e2_min + 1]) {
return true;
}
else {
return false;
}
}
else {
return false;
}
}
// compare blocks
function block_compare($e1, $e2, $compares) {
// minimum key in array
$e1_min = min(array_keys($e1));
$e2_min = min(array_keys($e2));
// if lengths are different they are not the same
if (count($e1) != count($e2)) {
$compares[] = 0;
return $compares;
}
// if each block is a unary function block only, go to unary_func_compare
elseif ($e1[$e1_min]["type"] == "function" && $e2[$e2_min]["type"] == "function") {
$compares = unary_func_compare($e1, $e2, $compares);
return $compares;
}
else {
// expr operators
$e1_operator = $e1[$e1_min];
$e2_operator = $e2[$e2_min];
// find expr operand blocks
$e1_op1 = new_slice($e1, $e1[$e1_min]["op1_ndx"],
($e1[$e1_min]["op2_ndx"] - $e1[$e1_min]["op1_ndx"]), true);
$e1_op2 = new_slice($e1, $e1[$e1_min]["op2_ndx"], NULL, true);
$e2_op1 = new_slice($e2, $e2[$e2_min]["op1_ndx"],
($e2[$e2_min]["op2_ndx"] - $e2[$e2_min]["op1_ndx"]), true);
$e2_op2 = new_slice($e2, $e2[$e2_min]["op2_ndx"], NULL, true);
$e1_op1_min = min(array_keys($e1_op1));
$e1_op2_min = min(array_keys($e1_op2));
$e2_op1_min = min(array_keys($e2_op1));
$e2_op2_min = min(array_keys($e2_op2));
// simple block, for comparison at simple_compare
$e1_op1_issblock = (count($e1_op1) == 3) ? 1 : 0;
$e1_op2_issblock = (count($e1_op2) == 3) ? 1 : 0;
$e2_op1_issblock = (count($e2_op1) == 3) ? 1 : 0;
$e2_op2_issblock = (count($e2_op2) == 3) ? 1 : 0;
// complex block, should be returned to block_compare
$e1_op1_iscblock = (count($e1_op1) > 3) ? 1 : 0;
$e1_op2_iscblock = (count($e1_op2) > 3) ? 1 : 0;
$e2_op1_iscblock = (count($e2_op1) > 3) ? 1 : 0;
$e2_op2_iscblock = (count($e2_op2) > 3) ? 1 : 0;
// numerical/special value only, can be compared straightaway
$e1_op1_isval = (count($e1_op1) == 1) ? 1 : 0;
$e1_op2_isval = (count($e1_op2) == 1) ? 1 : 0;
$e2_op1_isval = (count($e2_op1) == 1) ? 1 : 0;
$e2_op2_isval = (count($e2_op2) == 1) ? 1 : 0;
// unary function only, for unary_func_compare
$e1_op1_isublock = ($e1_op1[$e1_op1_min]["type"] == "function") ? 1 : 0;
$e1_op2_isublock = ($e1_op2[$e1_op2_min]["type"] == "function") ? 1 : 0;
$e2_op1_isublock = ($e2_op1[$e2_op1_min]["type"] == "function") ? 1 : 0;
$e2_op2_isublock = ($e2_op2[$e2_op2_min]["type"] == "function") ? 1 : 0;
if ($e1_operator["symbol"] != $e2_operator["symbol"])
$compares[] = 0;
else {
// if we need to compare intermediately for purposes of
// mutual exclusivity, we have an array without tainting $compares
$interm_compares = [];
if ($e1_op1_issblock && $e2_op1_issblock && simple_compare($e1_op1, $e2_op1)) {
$compares[] = 1;
$compares = template_compare($e1_op2, $e2_op2, $compares);
}
elseif (($GLOBALS["operators"][$e1_operator["symbol"]]['commutativity'] === true)
&& $e1_op1_issblock && $e2_op2_issblock && simple_compare($e1_op1, $e2_op2)) {
$compares[] = 1;
$compares = template_compare($e1_op2, $e2_op1, $compares);
}
elseif ($e1_op1_iscblock && $e2_op1_iscblock &&
!in_array(0, block_compare($e1_op1, $e2_op1, $interm_compares))) {
$compares[] = 1;
$interm_compares = [];
$compares = template_compare($e1_op2, $e2_op2, $compares);
}
elseif (($GLOBALS["operators"][$e1_operator["symbol"]]['commutativity'] === true) &&
$e1_op1_iscblock && $e2_op2_iscblock &&
!in_array(0, block_compare($e1_op1, $e2_op2, $interm_compares))) {
$compares[] = 1;
$interm_compares = [];
$compares = template_compare($e1_op2, $e2_op1, $compares);
}
elseif ($e1_op1_isublock && $e2_op1_isublock &&
!in_array(0, unary_func_compare($e1_op1, $e2_op1, $interm_compares))) {
$compares[] = 1;
$interm_compares = [];
$compares = template_compare($e1_op2, $e2_op2, $compares);
}
elseif (($GLOBALS["operators"][$e1_operator["symbol"]]['commutativity'] === true) &&
$e1_op1_isublock && $e2_op2_isublock &&
!in_array(0, unary_func_compare($e1_op1, $e2_op2, $interm_compares))) {
$compares[] = 1;
$interm_compares = [];
$compares = template_compare($e1_op2, $e2_op1, $compares);
}
elseif ($e1_op1_isval && $e2_op1_isval &&
$e1_op1[$e1_op1_min]["value"] == $e2_op1[$e2_op1_min]["value"]) {
$compares[] = 1;
$compares = template_compare($e1_op2, $e2_op2, $compares);
}
elseif (($GLOBALS["operators"][$e1_operator["symbol"]]['commutativity'] === true)
&& $e1_op1_isval && $e2_op2_isval &&
$e1_op1[$e1_op1_min]["value"] == $e2_op2[$e2_op2_min]["value"]) {
$compares[] = 1;
$compares = template_compare($e1_op2, $e2_op1, $compares);
}
else {
$compares[] = 0;
}
return $compares;
}
}
}
// compare unary functions
function unary_func_compare($e1, $e2, $compares) {
// find minimum value in numerical array keys
$e1_min = min(array_keys($e1));
$e2_min = min(array_keys($e2));
// get function name
$e1_function = $e1[$e1_min];
$e2_function = $e2[$e2_min];
// get operands
$e1_op = new_slice($e1, $e1[$e1_min]["op_ndx"], NULL, true);
$e2_op = new_slice($e2, $e2[$e2_min]["op_ndx"], NULL, true);
$e1_op_min = min(array_keys($e1_op));
$e2_op_min = min(array_keys($e2_op));
if ($e1_function["name"] != $e2_function["name"]) {
$compares[] = 0;
}
else
$compares = template_compare($e1_op, $e2_op, $compares);
return $compares;
}
function template_compare($b1, $b2, $compares) {
$b1_min = min(array_keys($b1));
$b2_min = min(array_keys($b2));
// simple block
$b1_issblock = (count($b1) == 3) ? 1 : 0;
$b2_issblock = (count($b2) == 3) ? 1 : 0;
// complex block with unary functions
$b1_iscblock = (count($b1) > 3) ? 1 : 0;
$b2_iscblock = (count($b2) > 3) ? 1 : 0;
// numerical/special value only
$b1_isval = (count($b1) == 1) ? 1 : 0;
$b2_isval = (count($b2) == 1) ? 1 : 0;
// unary function only
$b1_isublock = ($b1[$b1_min]["type"] == "function") ? 1 : 0;
$b2_isublock = ($b2[$b2_min]["type"] == "function") ? 1 : 0;
// if simple block return to simple_compare
if ($b1_issblock && $b2_issblock)
$compares[] = simple_compare($b1, $b2) ? 1 : 0;
// if complex block feed to block_compare
elseif ($b1_iscblock && $b2_iscblock)
$compares = block_compare($b1, $b2, $compares);
// if unary function only block go to unary_func_compare
elseif ($b1_isublock && $b2_isublock)
$compares = unary_func_compare($b1, $b2, $compares);
// if single value compare it right here
elseif($b1_isval && $b2_isval)
$compares[] = ($b1[$b1_min]["value"] == $b2[$b2_min]["value"]) ? 1 : 0;
else
$compares[] = 0;
return $compares;
}
?>
<file_sep># MCGExpression
This is a PHP expression parser and comparison tool, written to grade MathCounts answers for http://github.com/saligrama/mathcountsgrading
Setup
-----
Clone the repository.
```bash
$ git clone http://github.com/saligrama/MCGExpression
```
Usage
-----
In `MCGExpression/main.php`, you'll find these contents.
```php
#!/usr/bin/env php
<?php
require("new_slice.php");
require("unary_minus.php");
require("tokenizer.php");
require("shunting_yard.php");
require("compare.php");
// always run this function first before doing anything else
// relied upon by tokenizer and shunting_yard
declare_globals();
?>
```
In this state, the system does nothing. You can add something like this to the end.
```php
$expr1 = "-3-7SQRT(8PI-1)";
$expr2 = "-7SQRT(-1+8PI)-3";
$e1_tok = tokenize($expr1);
$e2_tok = tokenize($expr2);
echo compare(
shunting_yard($e1_tok[0], $e1_tok[1]),
shunting_yard($e2_tok[0], $e2_tok[1])
);
```
Run the program:
```bash
$ ./main.php
```
The result from the code above:
1
You can change the values for `$expr1` and `$expr2` and play around with the system.
Constants and functions can be entered as below:
| Constant/function | Enter as |
| ----------------- | -------- |
| π | `PI` |
| √ | `SQRT` |
| e | `E` |
| i | `I` |
| sin, cos, tan, and other functions | Uppercased text representation |
For more details, please view the `declare_globals()` function in `MCGExpression/tokenizer.php`.
<file_sep><?php
function fix_minus($input) {
// get total minus count
$total_minus_count = get_total_minus_count($input);
// if there are no minuses return input without modifications
if ($total_minus_count == 0)
return $input;
// get count of fixed minuses
$fixed_minus_count = get_fixed_minus_count($input);
// while there are unfixed minus go through and correct them
while ($fixed_minus_count < $total_minus_count) {
// get first unfixed minus
$pos = get_unfixed_minus($input);
if ($pos !== null) {
// add plus before the minus if unary minus is not at beginning
if (add_plus($input, $pos))
array_splice($input, $pos, 0, '+');
// add lparen after minus
array_splice($input, $pos + (!(add_plus($input, $pos)) ? 1 : 2), 0, '(');
// complete the rparen
$input = complete_minus_parens($input, $pos + (!add_plus($input, $pos) ? 2 : 3));
}
$fixed_minus_count = get_fixed_minus_count($input);
}
return $input;
}
// minus is fixed once there is an lparen after it
// matching rparen is always added before function is run
function is_fixed($input, $pos) { return ($input[$pos] == '-' && $input[$pos + 1] == '('); }
// get first unfixed minus in array for correction
function get_unfixed_minus($input) {
for ($i = 0, $c = count($input); $i < $c; $i++) {
if ($input[$i] == '-' && !is_fixed($input, $i)) {
return $i;
}
}
return null;
}
// get number of fixed minuses
function get_fixed_minus_count($input) {
$fixed_minus_count = 0;
for ($i = 0, $c = count($input); $i < $c; $i++) {
$fixed_minus_count += is_fixed($input, $i) ? 1 : 0;
}
return $fixed_minus_count;
}
// get total number of minuses
// PHP builtin array_count_values throws errors if there are no minuses,
// so we build this simple function to do it
function get_total_minus_count($input) {
$total_minus_count = 0;
for ($i = 0, $c = count($input); $i < $c; $i++) {
$total_minus_count += ($input[$i] == '-') ? 1 : 0;
}
return $total_minus_count;
}
// check if a plus should be added before a minus
// do not add a plus if the minus is the first thing in the array
// or if it is the first thing after a left parenthesis
// or if it is the first thing after a lower precedence operator
function add_plus($input, $pos) {
return !($pos == 0 ||
(isset($input[$pos-1]) && ($input[$pos-1] == '(' ||
isset($GLOBALS['operators'][$input[$pos-1]]) &&
$GLOBALS['operators'][$input[$pos-1]]['precedence'] < 0)));
}
// inject rparen to complete minus operand wrapping
function complete_minus_parens($input, $op_start) {
$pos = $op_start;
// track open parens
$open_paren_count = 0;
// track if parens were ever open (for case of minuses in parenthesis)
$ever_open_paren = false;
for ($i = $pos, $c = count($input); $i < $c; $i++) {
// if no parens are open and we hit something with <= precedence as minus
// complete the rparen
if ($open_paren_count == 0 && isset($GLOBALS['operators'][$input[$i]]) &&
$GLOBALS['operators'][$input[$i]]['precedence'] <= 1) {
array_splice($input, $i, 0, ')');
break;
}
// if we hit the end of the array, complete the rparen
elseif ($i == max(array_keys($input))) {
array_splice($input, $i+1, 0, ')');
break;
}
// note if any parens were open
elseif ($input[$i] == '(') {
$open_paren_count++;
$ever_open_paren = true;
}
// if we hit an rparen there are two ways to insert the rparen
elseif($input[$i] == ')') {
// if there was never a paren open insert the minus rparen
// before rparen at $i
if (!$ever_open_paren) {
array_splice($input, $i, 0, ')');
break;
}
// mark that any open paren has been closed
$open_paren_count--;
// if an rparen has been closed, insert the minus rparen
// after rparen at $i
if ($open_paren_count == 0) {
array_splice($input, $i+1, 0, ')');
break;
}
}
}
return $input;
}
?>
<file_sep><?php
// modified Shunting-Yard algorithm with support for prefix notation,
// storing of operand locations, and unary functions
// see https://en.wikipedia.org/wiki/Shunting-yard_algorithm
function shunting_yard(array $tokens, $length) {
$stack = new \SplStack();
$output = new \SplQueue();
// iterate through tokens in reverse order to convert to prefix (Polish) notation
foreach (array_reverse($tokens) as $token) {
// if token is an operand (number or special constant) add to output queue
if (is_numeric($token) ||
preg_match("/" . $GLOBALS["format_regexes"]["special"] . "/", $token)) {
$output->enqueue([
'type' => 'operand',
'value' => $token
]);
$last_popped_lparen = 0;
}
// if we have an operator push it to the stack while moving existing
// operators on the stack to output queue
elseif (isset($GLOBALS["operators"][$token])) {
$o1 = $token;
while (has_operator($stack) && ($o2 = $stack->top()) && has_lower_precedence($o1, $o2)) {
$c = count($output);
$output->enqueue([
'type' => 'operator',
'symbol' => $stack->pop(),
'op1_ndx' => $length - $c, // store metadata for easy comparison
'op2_ndx' => getoperand_base($output, $c)
]);
$last_popped_lparen = 0;
}
$stack->push($o1);
}
elseif (')' === $token) {
$stack->push($token); // wait untill we see an lparen
}
elseif ('(' === $token) {
// if something is on the stack it is either an rparen or an operator
// we now move the operators to the output queue, again storing metadata
while (count($stack) > 0 && ')' !== $stack->top()) {
$c = count($output);
$output->enqueue([
'type' => 'operator',
'symbol' => $stack->pop(),
'op1_ndx' => $length - $c,
'op2_ndx' => getoperand_base($output, $c)
]);
}
if (count($stack) === 0) {
throw new \InvalidArgumentException(sprintf('Mismatched parenthesis in input: %s', json_encode($tokens)));
}
// pop off lparen
$stack->pop();
$last_popped_lparen = 1;
}
// if it's a unary function (including minus) and the last argument
// found was an lparen we directly move it to the stack while storing metadata
elseif (preg_match("/" . preg_replace("/(\))/", "|-$1", $GLOBALS["format_regexes"]["function"]) . "/", $token)) {
if ($last_popped_lparen || $stack->top() == ')') {
$c = count($output);
$output->enqueue([
'type' => 'function',
'name' => $token,
'op_ndx' => $c - 1
]);
}
else {
throw new \InvalidArgumentException(sprintf('Function %s does not have parenthesis', $token));
}
}
else {
throw new \InvalidArgumentException(sprintf('Invalid token: %s', $token));
}
}
// add to queue any last operators remaining in the stack
while (has_operator($stack)) {
$c = count($output);
$output->enqueue([
'type' => 'operator',
'symbol' => $stack->pop(),
'op1_ndx' => $length - $c,
'op2_ndx' => getoperand_base($output, $c)
]);
}
if (count($stack) > 0) {
throw new \InvalidArgumentException(sprintf('Mismatched parenthesis or misplaced number in input: %s', json_encode($tokens)));
}
// reverse the queue to make it normal prefix notation
$pn = array_reverse(iterator_to_array($output));
// fix all operand locations afer reverse
// we couldn't do this above since finding operands relies on locations
// set in the existing queue
$i = 0;
while(isset($pn[$i])) {
if ($pn[$i]["type"] == "operator")
$pn[$i]["op2_ndx"] = $length - $pn[$i]["op2_ndx"] - 1;
elseif($pn[$i]["type"] == "function")
$pn[$i]["op_ndx"] = $length - $pn[$i]["op_ndx"] - 1;
$i++;
}
return $pn;
}
// check if there's an operator on the top of the stack
function has_operator(\SplStack $stack) {
return count($stack) > 0 && ($top = $stack->top()) && isset($GLOBALS["operators"][$top]);
}
// check if precedence of $o1 is less than that of $o2
function has_lower_precedence($o1, $o2) {
$op1 = $GLOBALS["operators"][$o1];
$op2 = $GLOBALS["operators"][$o2];
return ($op1['associativity'] === 'left'
&& $op1['precedence'] === $op2['precedence'])
|| $op1['precedence'] < $op2['precedence'];
}
// finds operands for an operator
function getoperand_base(\SplQueue $output, $ndx) {
// if the next item in queue is an operator, we find the end of its second operand
if ($output[$ndx - 1]["type"] == "operator")
return end_of_opchain($output, $output[$ndx - 1]["op2_ndx"]) - 1;
// if it is a unary function, find the end of its other operand
elseif ($output[$ndx - 1]["type"] == "function")
return end_of_opchain($output, $output[$ndx - 1]["op_ndx"]) - 1;
// otherwise if it is a value, return the next item after that value
else
return $ndx - 2;
}
// iterate until we find a number or special as an operator index (unary function)
// or the second index of operand (for an operator)
function end_of_opchain(\SplQueue $output, $ndx) {
if ($output[$ndx]["type"] == "operator")
return end_of_opchain($output, $output[$ndx]["op2_ndx"]);
elseif($output[$ndx]["type"] == "function")
return end_of_opchain($output, $output[$ndx]["op_ndx"]);
else
return $ndx;
}
?>
| 6f1c7c0c22025308cfd9cbcc12e8ddc3aa4c90d9 | [
"Markdown",
"PHP"
] | 7 | PHP | saligrama/MCGExpression | 77a30dd4b24800a70e6fb1bdc5c17806a93cb4f5 | f7220fec49af6399aab3be6010b9425a4555c273 | |
refs/heads/master | <file_sep>##PROGRAMA ##
def romeu_julieta(val: int) -> str:
if val == 3:
return 'queijo'
if val == 5:
return 'goiabada'
<file_sep>#### Unittest - Introdução #####
'''
Introdução
PROBLEMA
RODAR
python -m unittest <file_unit.py>
'''
import unittest
from programa import romeu_julieta
class Test(unittest.TestCase):
def setUp(self):
pass
def test_romeujuliete_3(self):
self.assertEqual( romeu_julieta(3), 'queijo')
def test_romeujuliete_5(self):
self.assertEqual( romeu_julieta(5),'goiabada')
if __name__ == '__main__':
unittest.main() | f4e6be54d43ffab67b5a3524953dde66a994e3c4 | [
"Python"
] | 2 | Python | MarLops/PYTHON_ESTUDO | d642db048d8f75c13e66a2deb923e760bec0fbcb | 7de54bb5f2b0cba005de32bb98a4148a1c31b2b4 | |
refs/heads/dev | <repo_name>GabrielBourdette/symfony<file_sep>/src/Controller/ProductController.php
<?php
namespace App\Controller;
use App\Entity\Product;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\Routing\Annotation\Route;
use Symfony\Component\HttpFoundation\Session\SessionInterface;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
class ProductController extends AbstractController
{
protected $NB_PER_PAGE = 6;
public function __construct(SessionInterface $session)
{
$this->session = $session;
}
/**
* @Route("/product/{page}", name="product_index", requirements={"page" = "\d+"}, defaults={"page" = 1})
*/
public function list_products($page) {
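        // fetch one page of products and work out how many pages exist in total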
$repository = $this->getDoctrine()
->getManager()
->getRepository('App:Product');
$listProducts = $repository->getProducts($page, $this->NB_PER_PAGE);
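        // count() on the Doctrine Paginator returns the total number of matching products, not just the current page, so this gives the full page count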
$nbPages = ceil(count($listProducts) / $this->NB_PER_PAGE);
return $this->render('Product/index.html.twig', array(
'listProducts' => $listProducts,
'nbPages' => $nbPages,
'page' => $page,
'cardQuantity' => ($this->session->get('card_quantity') > 0) ? $this->session->get('card_quantity') : 0
));
}
/**
* @Route("/product/view/{slug}", name="product_view")
*/
public function view_product($slug)
{
        if (empty($slug)) {
            // no slug provided: go back to the paginated product list
            return $this->redirectToRoute('product_index');
        }
$repository = $this->getDoctrine()
->getManager()
->getRepository('App:Product');
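        // look the product up by its unique slug; findOneBy() returns null when nothing matches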
$product = $repository->findOneBy(array('slug' => $slug));
if (null === $product) {
throw new NotFoundHttpException("Le produit n'existe pas.");
}
return $this->render('Product/view.html.twig', array(
'product' => $product,
'cardQuantity' => ($this->session->get('card_quantity') > 0) ? $this->session->get('card_quantity') : 0
));
}
}
<file_sep>/tests/Controller/CardContollerTest.php
<?php
namespace App\Tests\Controller;
use App\Controller\CardController;
use Symfony\Bundle\FrameworkBundle\Test\WebTestCase;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\HttpFoundation\Session\Session;
use Symfony\Component\HttpFoundation\Session\Storage\MockArraySessionStorage;
class MainControllerTest extends WebTestCase
{
public function test_home() {
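        // functional test: boot the test client, load the product list page and check both the status code and the number of rendered products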
$client = static::createClient();
$crawler = $client->request('GET', '/product');
        $this->assertEquals(Response::HTTP_OK, $client->getResponse()->getStatusCode());
echo "\n Code réponse de la page Accueil : ".$client->getResponse()->getStatusCode();
$this->assertCount(6, $crawler->filter('.product_bloc'));
echo "\n Nombre de produits sur la page Accueil : ".$crawler->filter('.product_bloc')->count();
}
    // run the tests with: php bin/phpunit
}<file_sep>/src/Repository/ProductRepository.php
<?php
namespace App\Repository;
use Doctrine\ORM\EntityRepository;
use Doctrine\ORM\Tools\Pagination\Paginator;
class ProductRepository extends EntityRepository
{
public function getProducts($page, $nbPerPage)
{
$query = $this->createQueryBuilder('p')
->orderBy('p.title', 'ASC')
->getQuery();
$query
->setFirstResult(($page-1) * $nbPerPage)
->setMaxResults($nbPerPage);
return new Paginator($query, true);
}
}<file_sep>/src/Controller/CardController.php
<?php
namespace App\Controller;
use App\Entity\Product;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\Routing\Annotation\Route;
use Symfony\Component\HttpFoundation\Session\SessionInterface;
class CardController extends AbstractController
{
protected $total;
protected $listProducts;
protected $listProducts_qt;
public function __construct(SessionInterface $session) {
$this->total = 0;
$this->listProducts = array();
$this->cardQuantity = 0;
$this->session = $session;
}
/**
* @Route("/card", name="card_index")
*/
public function card_view() {
$session_list = is_array($this->session->get('products_list')) ? $this->session->get('products_list') : array() ;
$products_quantities = array();
if(count($session_list) >= 1) {
foreach ($session_list as $product_id => $product_qt) {
$new_product = $repository = $this->getDoctrine()
->getManager()
->getRepository('App:Product')
->find($product_id);
$this->listProducts[] = $new_product;
$products_quantities[$product_id] = $product_qt;
$this->total += $new_product->getPrice() * $product_qt;
$this->cardQuantity += $product_qt;
}
}
$this->session->set('card_quantity',$this->cardQuantity);
return $this->render('Card/card.html.twig', array(
'listProducts' => $this->listProducts,
'productsQuantities' => $products_quantities,
'cardTotal' => $this->total,
'cardQuantity' => $this->session->get('card_quantity')
));
}
/**
* @Route("/card/add/{id_product}", name="add_to_card")
*/
public function card_add($id_product) {
$session_list = $this->session->get('products_list');
$product_qt = isset($_GET['product_qt']) ? $_GET['product_qt'] : 1;
$session_list[$id_product] = (isset($session_list[$id_product]) ? $session_list[$id_product] : 0) + $product_qt;
$this->session->set('products_list', $session_list);
$this->session->set('card_quantity', $this->session->get('card_quantity') + $product_qt);
$this->session->getFlashBag()->add('infos', 'Produit ajouté au panier !');
return $this->redirectToRoute('card_index', array(
'listProducts' => $this->listProducts
));
}
/**
* @Route("/card/one_more/{id_product}", name="card_one_more")
*/
public function card_one_more($id_product) {
$session_list = $this->session->get('products_list');
$session_list[$id_product] = $session_list[$id_product] + 1;
$this->session->set('products_list', $session_list);
$this->session->set('card_quantity', $this->session->get('card_quantity') + 1);
$this->session->getFlashBag()->add('infos', 'Produit ajouté au panier !');
return $this->redirectToRoute('card_index', array(
'listProducts' => $this->listProducts
));
}
/**
* @Route("/card/one_less/{id_product}", name="card_one_less")
*/
public function card_one_less($id_product) {
$session_list = $this->session->get('products_list');
$session_list[$id_product] = $session_list[$id_product] - 1;
$this->session->set('products_list', $session_list);
$this->session->set('card_quantity', $this->session->get('card_quantity') - 1);
$this->session->getFlashBag()->add('infos', 'Produit retiré du panier !');
return $this->redirectToRoute('card_index', array(
'listProducts' => $this->listProducts
));
}
/**
* @Route("/card/remove/{id_product}", name="card_remove_one")
*/
public function card_remove_one($id_product) {
$session_list = $this->session->get('products_list');
$previous_qt = $session_list[$id_product];
unset($session_list[$id_product]);
$this->session->set('products_list', $session_list);
$this->session->set('card_quantity', $this->session->get('card_quantity') - $previous_qt);
$this->session->getFlashBag()->add('infos', 'Produit retiré du panier !');
return $this->redirectToRoute('card_index', array(
'listProducts' => $this->listProducts
));
}
/**
* @Route("/card/remove_all", name="empty_card")
*/
public function card_delete() {
$this->session->set('products_list', array());
$this->session->set('card_quantity', 0);
$this->session->getFlashBag()->add('infos', 'Panier vide !');
return $this->redirectToRoute('card_index', array(
'listProducts' => $this->listProducts,
'cardQuantity' => count($this->session->get('products_list'))
));
}
}<file_sep>/src/DataFixtures/AppFixtures.php
<?php
namespace App\DataFixtures;
use App\Entity\Product;
use Doctrine\Bundle\FixturesBundle\Fixture;
use Doctrine\Common\Persistence\ObjectManager;
class AppFixtures extends Fixture
{
public function load(ObjectManager $manager)
{
$product = new Product('espadrilles_multicolores', 'espadrilles multicolores', 'bleu jaune vert rouge', 25.0, 'multicolore.jpg');
$manager->persist($product);
$product = new Product('espadrilles_bleues', 'espadrilles bleues', 'bleu bleu beu', 15.50, 'bleu.jpg');
$manager->persist($product);
$product = new Product('espadrilles_jaunes', 'espadrilles jaunes', 'jaune jaune jaune', 19.50, 'jaune.jpg');
$manager->persist($product);
$product = new Product('espadrilles_rouges', 'espadrilles rouges', 'rouge rouge rouge', 18.50, 'rouge.jpg');
$manager->persist($product);
$product = new Product('espadrilles_vertes', 'espadrilles vertes', 'vert vert vert', 19.50, 'vert.jpg');
$manager->persist($product);
$product = new Product('espadrilles_noires', 'espadrilles noires', 'noir noir noir', 19.0, 'noir.jpg');
$manager->persist($product);
$product = new Product('espadrilles_blanches', 'espadrilles blanches', 'blanc blanc blanc', 21.50, 'blanc.jpg');
$manager->persist($product);
$product = new Product('espadrilles_blanches_bleues', 'espadrilles blanches et bleues', 'blanc bleu blanc', 14.50, 'bleu_blanc.jpg');
$manager->persist($product);
$product = new Product('espadrilles_blanches_rouges', 'espadrilles blanches et rouges', 'blanc rouge blanc', 20.50, 'rouge_blanc.jpg');
$manager->persist($product);
$product = new Product('espadrilles_beiges', 'espadrilles beiges', 'beige beige beige', 16.50, 'beige.jpg');
$manager->persist($product);
$product = new Product('espadrilles_violettes', 'espadrilles violettes', 'violet violet violet', 19.50, 'violet.jpg');
$manager->persist($product);
$product = new Product('espadrilles_oranges', 'espadrilles oranges', 'orange orange orange', 19.0, 'orange.jpg');
$manager->persist($product);
$manager->flush();
// commande : php bin/console doctrine:fixtures:load
}
}
<file_sep>/src/Entity/Product.php
<?php
namespace App\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
* @ORM\Entity(repositoryClass="App\Repository\ProductRepository")
* @ORM\HasLifecycleCallbacks()
*/
class Product
{
/**
* @ORM\Column(name="id", type="integer")
* @ORM\Id
* @ORM\GeneratedValue(strategy="AUTO")
*/
protected $id;
/**
* @ORM\Column(name="slug", type="string", length=150)
*/
protected $slug;
/**
* @ORM\Column(name="title", type="string", length=255)
*/
protected $title;
/**
* @ORM\Column(name="price", type="float")
*/
protected $price;
/**
* @ORM\Column(name="description", type="text")
*/
protected $description;
/**
* @ORM\Column(name="img", type="text")
*/
protected $img;
public function setSlug($slug) { $this->slug = $slug; }
public function setTitle($title) { $this->title = $title; }
public function setPrice($price) { $this->price = $price; }
public function setDescription($description) { $this->description = $description; }
public function setImg($img) { $this->img = $img; }
public function getId() { return $this->id; }
public function getSlug() { return $this->slug; }
public function getTitle() { return $this->title; }
public function getPrice() { return $this->price; }
public function getDescription() { return $this->description; }
public function getImg() { return $this->img; }
public function __construct($slug = "", $title = "", $desc = "", $price = 0, $img = "") {
$this->setSlug($slug);
$this->setTitle($title);
$this->setDescription($desc);
$this->setPrice($price);
$this->setImg($img);
}
} | 7a53f4a53fdff4710198c84ad126e39005fbb3d1 | [
"PHP"
] | 6 | PHP | GabrielBourdette/symfony | 130cbacf90cec9ae078f3b5cf057ade81dfa0965 | 442492b97e7a0cbdbf929ba8c4969d9fa46e144f | |
refs/heads/main | <repo_name>insignificantGuy/Exam-Result-Analysis-System<file_sep>/app.js
const express = require('express')
const app = express();
const path = require('path')
const data = require('./data/23.json')
const bodyParser = require('body-parser');
const fs = require("fs");
var http = require('http');
const { ChartJS, ChartConfiguration, ChartComponentLike } = require('chart.js');
app.set('view engine', 'hbs')
app.use(bodyParser.urlencoded({
extended: false
}))
// const pdf2excel = require('pdf-to-excel');
// try {
// const options = {
// // when current pdf page number changes call this function(optional)
// onProcess: (e) => console.warn(`${e.numPage} / ${e.numPages}`),
// // pdf start page number you want to convert (optional, default 1)
// start: 96,
// // pdf end page number you want to convert (optional, default )
// end: 101,
// }
// pdf2excel.genXlsx('./data/exam.pdf', 'bar.xlsx', options);
// } catch (err) {
// console.log(err);
// }
app.use(bodyParser.json())
app.use(express.static(path.join(__dirname, 'public')))
app.get('/', (req, res) => res.render("index"))
app.get('/ResultGenerator', (req, res) => res.render('ResultGenerator'))
app.get('/ResultAnalysis', (req, res) => res.render('ResultAnalysis'))
var gpa = 0;
app.post('/generateResult', (req, res) => {
const regNo = (req.body.regNo);
const result = data[regNo];
var subs = 0;
let resultCard = ''
if (result !== undefined) {
const keys = Object.keys(result)
let internal = 0
let external = 0
let total = 0
let grade = ''
let gpa = 0
let subjectName = ''
let credit = 0
let subs = 0;
keys.forEach((key) => {
subjectName = result[key].sub
internal = result[key].int
external = result[key].ext
total = result[key].tot
grade = result[key].grade
if (result[key].grade === 'S')
gpa += 10;
else if (result[key].grade === 'A')
gpa += 9;
else if (result[key].grade === 'B')
gpa += 8;
else if (result[key].grade === 'C')
gpa += 7;
else if (result[key].grade === 'D')
gpa += 6;
else if (result[key].grade === 'E')
gpa += 5;
else
gpa += 0;
subs++;
credit = result[key].credit
resultCard += `
<div class="offset-md-3 col-md-6 col-sm-12 offset-sm-0">
<div class="card shadow-sm">
<div class="card-body">
<table id="subject-table" class="table table-striped">
<tbody>
<tr>
<td>
<div class="subject-name">
${subjectName}
<span
class="badge badge-secondary marks-badge int"
>INT: ${internal}</span>
<span
class="badge badge-secondary marks-badge ext"
>EXT: ${external}</span>
<span
class="badge badge-secondary marks-badge tot"
>TOT: ${total}</span>
</div>
</td>
<td>
<div class="subject-grade">
${grade}
</div>
</td>
</tr>
</tbody>
</table>
</div>
</div>
</div>`
})
gpa = gpa / subs.toFixed(3);
res.render('ResultGenerator', {
resultCard, gpa
})
} else
res.render('ResultGenerator', {
resultCard: "Not found"
})
});
app.post("/analyzeResult",(req,res)=>{
var yourData = data[req.body.yourRegNo];
var friends = data[req.body.friendsRegNo];
var subs = Object.keys(yourData);
var yourSub = [];
var friendsSub = [];
for(var i=0;i<subs.length;i++){
yourSub.push(yourData[subs[i]]);
friendsSub.push(friends[subs[i]]);
}
var yourResult = [];
var friendsResult = [];
for(var i=0;i<yourSub.length;i++){
var your = yourSub[i];
var friends = friendsSub[i];
if(your!=null){
yourResult.push(yourSub[i].tot);
}
else{
yourResult.push(0);
}
if(friends!=null){
friendsResult.push(friendsSub[i].tot);
}
else{
friendsResult.push(0);
}
}
res.render('ResultAnalysis', {
yourResult, friendsResult
})
})
app.listen(3000, console.log("Server running on 3000"))<file_sep>/README.md
# Exam-Result-System | 6a1da4d54d48bfb2474001a382be8226b14fa6db | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | insignificantGuy/Exam-Result-Analysis-System | 049ed2b5dce6228cdac7a04339a3ca96cba86e3f | 7e521ca2a300e4e5f3d17e7b9a963674e84e9e01 | |
refs/heads/master | <file_sep>#!/usr/bin/env bash
#
# Bootstrap Script
#
# bin/bootstrap: Resolve all dependencies the application requires to run.
set -e
MISSING=0
RED='\033[0;31m'
#GREEN='\033[0;32m'
NC='\033[0m' # No Color
cd "$(dirname "$0")/.."
# Ensure Homebrew dependencies are installed if using a Brewfile
if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ]; then
brew bundle check >/dev/null 2>&1 || {
echo "==> Installing Homebrew dependencies..."
brew bundle
}
fi
# Ensure Elixir is installed
command -v elixir >/dev/null 2>&1 || {
echo -e "${RED}This app requires Elixir, but it was not found on your system.${NC}"
echo -e "${RED}Install it using the instructions at http://elixir-lang.org${NC}"
echo ""
MISSING=$((MISSING + 1))
}
# Ensure Node.js is installed if this is a phoenix application
if [ -f "package.json" ]; then
# Ensure Node.js is installed
command -v npm >/dev/null 2>&1 || {
echo -e "${RED}This app requires Node.js to build assets, but it was not found on your system.${NC}"
echo -e "${RED}Install it using the instructions at http://nodejs.org${NC}"
echo ""
MISSING=$((MISSING + 1))
}
fi
if [ $MISSING -gt 0 ]
then
exit 1
fi
if [ -f "mix.exs" ]; then
# Ensure Hex is installed
command -v mix hex >/dev/null 2>&1 || {
mix local.hex --force
}
# Ensure Rebar is installed
if [ ! -f "$HOME/.mix/rebar3" ]; then
mix local.rebar --force
fi
# Install Mix dependencies
mix deps.get || { echo -e "${RED}Mix dependencies could not be installed!${NC}"; exit 1; }
fi
if [ -f "package.json" ]; then
npm install || { echo -e "${RED}Node dependencies could not be installed!${NC}"; exit 1; }
fi
<file_sep>#!/usr/bin/env bash
#
# Build Script
#
# bin/build: Build the escript file for this application.
set -e
cd "$(dirname "$0")/.."
# Ensure everything in the app is up to date.
bin/update
echo "Buiding CLI application..."
cd apps/fallout_hacker_cli
# Run the main function
mix escript.build
cd ../..
<file_sep>#!/usr/bin/env bash
#
# Update Script
#
# bin/update: Update application to run for its current checkout.
set -e
cd "$(dirname "$0")/.."
bin/bootstrap
# Update the DB
#echo "==> Updating the DB..."
# Update DB.
#mix ecto.migrate
<file_sep># FalloutHacker
This application attempts to help you with the hacking mini-game that is featured
in the Fallout game series.
This mini-game essentially gives you a list of possible passwords and four tries
to guess the password. An incorrect guess will give likeness score which is the
number of characters that match. If all four tries get used, the terminal becomes
locked permanently. I highly recommend you quick save before hacking.
This application allows you to enter all the passwords separated by spaces.
It will show an error if the words are not all the same length. If they are the
same length, then it will give a "best" guess for you to try. You are then
prompted for the likeness score for your guess. This repeats until the password
is found.
## Example
```
$ bin/run
Enter possible passwords: foo bar baz
Your best guess is "BAR".
Enter likeness: 2
The password is "BAZ".
```
## The Algorithm
The algorithm for finding a password is pretty basic. Each word is given a
score by calculating the likeness with all words in the list. For example, `FOO`
would get scores of `[3, 0, 0]`. The final score for a word is the number of
unique likenesses. `FOO` has the unique likenesses of `[3, 0]` or a score of `2`.
The final scores for each word would be: `FOO=2, BAR=3, BAZ=3`. The "best" guess
returned is the result with the highest score.
Once a likeness is provided, the list of possible passwords gets filtered and the
password or a new "best" guess is returned.
I put "best" in quotes because there are better algorithms to use but this one is
simple and generally works. Upgrade your hacking perk and remember to quick save
and this should work just fine.
<file_sep>#!/usr/bin/env bash
#
# tmux Script
#
# bin/tmux: Open tmux with windows for a full dev environment.
set -e
MISSING=0
RED='\033[0;31m'
#GREEN='\033[0;32m'
NC='\033[0m' # No Color
TMUX_SESS=fallhack-dev
cd "$(dirname "$0")/.."
command -v tmux >/dev/null 2>&1 || {
echo -e "${RED}This app requires tmux, but it was not found on your system.${NC}"
MISSING=$((MISSING + 1))
}
command -v vim >/dev/null 2>&1 || {
echo -e "${RED}This app requires vim, but it was not found on your system.${NC}"
MISSING=$((MISSING + 1))
}
command -v mix >/dev/null 2>&1 || {
echo -e "${RED}This app requires mix, but it was not found on your system.${NC}"
MISSING=$((MISSING + 1))
}
if [ $MISSING -gt 0 ]
then
exit 1
fi
# Create tmux session
tmux new -s $TMUX_SESS -n $(pwd) -d
# Rename bash window
tmux rename-window -t "$TMUX_SESS:1" "bash"
# Open a vim window
tmux new-window -t "$TMUX_SESS:2" -n "vim" "vim $(pwd)"
# Open a test window
tmux new-window -t "$TMUX_SESS:3" -n "test" "mix test.watch"
# Select vim window
tmux select-window -t:2
# Attach to tmux session
tmux attach -t $TMUX_SESS
<file_sep>#!/usr/bin/env bash
#
# CiBuild Script
#
# bin/cibuild: Setup environment for CI to run tests.
set -e
cd "$(dirname "$0")/.."
echo "Tests started at…"
date "+%H:%M:%S"
export MIX_ENV="test"
# run tests
echo "Running tests …"
date "+%H:%M:%S"
# run tests.
bin/test
<file_sep>#!/usr/bin/env bash
#
# Setup Script
#
# bin/setup: Setup application for the first time after cloning, or set it
# back to the initial first unused state.
set -e
cd "$(dirname "$0")/.."
bin/bootstrap
# Setup the DB
#echo "==> Setting up the DB..."
# Reset DB to a fresh state.
#mix ecto.reset
echo "==> App is now ready to go!"
<file_sep>#!/usr/bin/env bash
#
# Test Watch Script
#
# bin/watch: Run tests continuously on file change.
set -e
cd "$(dirname "$0")/.."
[ -z "$DEBUG" ] || set -x
# Ensure everything in the app is up to date.
bin/update
echo "==> Running tests…"
mix test.watch
<file_sep>#!/usr/bin/env bash
#
# Run Script
#
# bin/run: Launch the application and any extra required processes locally.
set -e
RED='\033[0;31m'
#GREEN='\033[0;32m'
NC='\033[0m' # No Color
cd "$(dirname "$0")/.."
# Ensure everything in the app is up to date.
bin/update
echo "Running application..."
echo ""
# Run the main function
case "$1" in
CLI|cli|"")
mix run -e "FalloutHacker.CLI.main(true)"
;;
*)
echo -e "${RED}Unrecognized command: $1${NC}"
esac
<file_sep>#!/usr/bin/env bash
#
# Console Script
#
# bin/console: Launch a console for the application.
set -e
cd "$(dirname "$0")/.."
# Ensure everything in the app is up to date.
bin/update
# Run IEx console
iex -S mix
| e93a5302315a28389ad393abe6bfb6ea487b73c9 | [
"Markdown",
"Shell"
] | 10 | Shell | unclesnottie/fallout_hacker | b60365bc3accd42e4eead49ffc345ba82c08e7a3 | d0c773dcc40a69a04b78e73fd914f04a4666a48a | |
refs/heads/master | <repo_name>sss22213/ds4_driver<file_sep>/CMakeLists.txt
cmake_minimum_required(VERSION 2.8.3)
project(ds4_driver)
find_package(catkin REQUIRED COMPONENTS
message_generation
std_msgs
sensor_msgs
)
add_message_files(
DIRECTORY msg
FILES Feedback.msg Report.msg Status.msg Trackpad.msg
)
catkin_python_setup()
generate_messages(
DEPENDENCIES
std_msgs
sensor_msgs
)
catkin_package()
# catkin_install_python(PROGRAMS
# nodes/ds4_driver_node.py
# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
# )
<file_sep>/nodes/ds4_driver_node.py
#!/usr/bin/env python
from ds4_driver.logger import Logger
from ds4_driver.controller_ros import ControllerRos
from ds4drv.backends import BluetoothBackend, HidrawBackend
from ds4drv.exceptions import BackendError
import rospy
import signal
import sys
class SignalHandler(object):
def __init__(self, controller):
self.controller = controller
def __call__(self, signum, frame):
rospy.loginfo('Shutting down...')
self.controller.exit()
sys.exit(0)
def main():
rospy.init_node('ds4_driver_node')
device_addr = rospy.get_param('~device_addr', None)
backend_type = rospy.get_param('~backend', 'bluetooth')
controller = ControllerRos()
sigint_handler = SignalHandler(controller)
# Since backend.devices is a non-ROS iterator that doesn't consider
# rospy.is_shutdown(), the program freezes upon receiving SIGINT when
# using rospy.on_shutdown. Thus, we need to define our shutdown sequence
# using signal.signal as is done in the original ds4drv script.
signal.signal(signal.SIGINT, sigint_handler)
if backend_type == 'bluetooth':
backend = BluetoothBackend(Logger('backend'))
else:
backend = HidrawBackend(Logger('backend'))
try:
backend.setup()
except BackendError as err:
rospy.logerr(err)
rospy.signal_shutdown(str(err))
sys.exit(1)
# rospy.loginfo(device_addr)
if len(device_addr) < 8:
# scan exist device address
for device in backend.devices:
rospy.loginfo('Connected to {0}'.format(device.name))
if device_addr in (None, '', device.device_addr):
controller.setup_device(device)
if not controller.is_alive():
controller.start()
controller.loop.register_event('device-report', controller.cb_report)
else:
rospy.loginfo("...but it's not the one we're looking for :(")
else:
# connect exist device
device = None
while device==None:
device = backend.specify_addr_connect(device_addr)
controller.setup_device(device)
controller.start()
controller.loop.register_event('device-report', controller.cb_report)
if __name__ == '__main__':
main()
<file_sep>/nodes/demo.py
#!/usr/bin/env python
import rospy
from ds4_driver.msg import Feedback, Status
class Handler(object):
def __init__(self, status_topic='status', feedback_topic='set_feedback'):
self._min_interval = 0.1
self._last_pub_time = rospy.Time()
self._prev = Status()
self._led = {
'r': 0,
'g': 0,
'b': 0,
'flashing': False,
}
self._pub_feedback = rospy.Publisher(feedback_topic, Feedback, queue_size=1)
rospy.Subscriber(status_topic, Status, self.cb_status, queue_size=1)
def cb_status(self, msg):
"""
:type msg: Status
"""
now = rospy.Time.now()
if (now - self._last_pub_time).to_sec() < self._min_interval:
return
feedback = Feedback()
feedback.set_rumble = True
feedback.rumble_small = abs(msg.axis_left_y)
feedback.rumble_big = abs(msg.axis_right_y)
# Set LED using the touchpad
touch = msg.touch0
if touch.active and msg.button_circle:
feedback.set_led = True
self._led['r'] = touch.x
if touch.active and msg.button_triangle:
feedback.set_led = True
self._led['g'] = touch.x
if touch.active and msg.button_cross:
feedback.set_led = True
self._led['b'] = touch.x
feedback.led_r = self._led['r']
feedback.led_g = self._led['g']
feedback.led_b = self._led['b']
# Turn on/off flash with PS button
if not self._prev.button_ps and msg.button_ps:
feedback.set_led_flash = True
if self._led['flashing']:
feedback.led_flash_off = 0
else:
feedback.led_flash_on = 0.2
feedback.led_flash_off = 0.2
self._led['flashing'] = not self._led['flashing']
self._pub_feedback.publish(feedback)
self._prev = msg
self._last_pub_time = now
def main():
rospy.init_node('sample')
Handler()
rospy.spin()
if __name__ == '__main__':
main()
<file_sep>/nodes/ds4_twist_node.py
#!/usr/bin/env python
import rospy
from geometry_msgs.msg import Twist, TwistStamped
from ds4_driver.msg import Status
class StatusToTwist(object):
def __init__(self):
self._stamped = rospy.get_param('~stamped', False)
if self._stamped:
self._cls = TwistStamped
self._frame_id = rospy.get_param('~frame_id', 'base_link')
else:
self._cls = Twist
self._inputs = rospy.get_param('~inputs')
self._scales = rospy.get_param('~scales')
self._attrs = []
for attr in Status.__slots__:
if attr.startswith('axis_') or attr.startswith('button_'):
self._attrs.append(attr)
self._pub = rospy.Publisher('cmd_vel', self._cls, queue_size=1)
rospy.Subscriber('status', Status, self.cb_status, queue_size=1)
def cb_status(self, msg):
"""
:param msg:
:type msg: Status
:return:
"""
input_vals = {}
for attr in self._attrs:
input_vals[attr] = getattr(msg, attr)
to_pub = self._cls()
if self._stamped:
to_pub.header.stamp = rospy.Time.now()
to_pub.header.frame_id = self._frame_id
twist = to_pub.twist
else:
twist = to_pub
for vel_type in self._inputs:
vel_vec = getattr(twist, vel_type)
for k, expr in self._inputs[vel_type].items():
scale = self._scales[vel_type].get(k, 1.0)
val = eval(expr, {}, input_vals)
setattr(vel_vec, k, scale * val)
self._pub.publish(to_pub)
def main():
rospy.init_node('ds4_twist')
StatusToTwist()
rospy.spin()
if __name__ == '__main__':
main()
| c679ae7bb0fa64daf1bd07d91c4e705545a1607a | [
"Python",
"CMake"
] | 4 | CMake | sss22213/ds4_driver | f2b61b9acd08221b375d299ec5edec91e6312d53 | e2318c5463f883e6322564954909b515f61f54bc | |
refs/heads/master | <file_sep>import { API_BASE_URL } from './Constants';
import { authHeader } from '../Utilities';
export const employeeService = {
getAllEmployees
};
function getAllEmployees() {
const requestOptions = {
method: 'GET',
headers: authHeader()
};
return fetch(`${API_BASE_URL}/employees`, requestOptions).then(handleResponse);
}
function handleResponse(response) {
return response.text().then(text => {
const data = text && JSON.parse(text);
if (!response.ok) {
const error = (data && data.message) || response.statusText;
return Promise.reject(error);
}
return data;
});
}<file_sep>import React from 'react';
import { Link } from 'react-router-dom';
export const Header = () => {
return (
<div className="row pt-4 pb-2 d-flex align-items-center header">
<h1 className="col-6"><Link to="/">RS Solutions</Link></h1>
<div className="col-6 text-right">
<Link to="/login" className="logout">Logout</Link>
</div>
</div>
)
}<file_sep>import { employeeConstants } from '../Constants';
export function employees(state = {}, action) {
switch (action.type) {
case employeeConstants.GET_ALL_EMPLOYEES_REQUEST:
return { loading: true };
case employeeConstants.GET_ALL_EMPLOYEES_SUCCESS:
return {
list: action.employees
};
case employeeConstants.GET_ALL_EMPLOYEES_FAILURE:
return {
error: action.error
};
default:
return state
}
}<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import { Header } from '../../Components';
import { employeeActions } from '../../State/Actions';
class DashboardPage extends React.Component {
componentDidMount() {
this.props.getAllEmployees();
}
render() {
const { employee, employees } = this.props;
return (
<>
<Header/>
<div className="mt-3">
<h3>Employee List</h3>
{employees.loading && <em>Loading employees...</em>}
{employees.error && <span className="text-danger">ERROR: {employees.error}</span>}
{employees.list &&
<div className="table-responsive mt-3">
<table className="table w-100">
<thead>
<tr>
<th>Full Name</th>
<th>Age</th>
<th>Gender</th>
<th>Email Id</th>
<th>Phone Number</th>
</tr>
</thead>
<tbody>
{employees.list.map((employee, index) =>
<tr key={employee.id}>
<td>{employee.name}</td>
<td>{employee.age}</td>
<td>{employee.gender}</td>
<td>{employee.email}</td>
<td>{employee.phoneNo}</td>
</tr>
)}
</tbody>
</table>
</div>
}
</div>
</>
);
}
}
function mapState(state) {
const { employees } = state;
return { employees };
}
const actionCreators = {
getAllEmployees: employeeActions.getAllEmployees
}
const connectedDashboardPage = connect(mapState, actionCreators)(DashboardPage);
export { connectedDashboardPage as DashboardPage };<file_sep>import { combineReducers } from 'redux';
import { registration } from './RegistrationReducer';
import { login } from './LoginReducer';
import { employees } from './EmployeeReducer';
const rootReducer = combineReducers({
registration,
login,
employees
});
export default rootReducer;<file_sep>import React from 'react';
import { BrowserRouter as Router, Route, Switch, Redirect } from 'react-router-dom';
import { connect } from 'react-redux';
import './App.css';
import { PrivateRoute } from './Components';
import { RegisterPage } from './Pages/Register';
import { LoginPage } from './Pages/Login';
import { DashboardPage } from './Pages/Dashboard';
import { history } from './Utilities';
class App extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<div className="container-fluid">
<div className="container">
<div className="col-sm-12 col-sm-offset-2">
<Router history={history}>
<Switch>
<PrivateRoute exact path="/" component={DashboardPage} />
<Route path="/login" component={LoginPage} />
<Route path="/register" component={RegisterPage} />
<Redirect from="*" to="/" />
</Switch>
</Router>
</div>
</div>
</div>
);
}
}
const connectedApp = connect()(App);
export { connectedApp as App };
<file_sep>export const employeeConstants = {
GET_ALL_EMPLOYEES_REQUEST: 'GET_ALL_EMPLOYEES',
GET_ALL_EMPLOYEES_SUCCESS: 'GET_ALL_EMPLOYEES_SUCCESS',
GET_ALL_EMPLOYEES_FAILURE: 'GET_ALL_EMPLOYEES_FAILURE',
};
| cc98b0b40d5030c135cef1076c51140a3525b5ff | [
"JavaScript"
] | 7 | JavaScript | RichaSharma-UI/rs-solutions | 3793571cb609957a294e837dbc8e2a677c59fcc1 | dcf292cd86ee7c5980e9d9b9c69f295aab908e25 | |
refs/heads/master | <file_sep><?php
if(!isset($_POST['formulario']))
{
$destinatario = "<EMAIL>";
$asunto = $_POST['contacto'];
$carta = "De: ".$_POST['nombre']."<br>";
$carta .= "Correo: ".$_POST['correo']."<br>";
$carta .= "Teléfono: ".$_POST['telefono']."<br>";
$carta .= "Mensaje: ".$_POST['situacion'];
mail($destinatario, $asunto, $carta);
header('location:index.php?enviado=1');
}else{
header('location:index.php?enviado=0');
}
?><file_sep> <div class="sidebar-widget-area">
<h5 class="title">INFORMACIÓN</h5>
<div class="widget-content ">
<!-- Single Blog Post -->
<?php
$apiUrl = 'https://www.mindicador.cl/api';
//Es necesario tener habilitada la directiva allow_url_fopen para usar file_get_contents
if ( ini_get('allow_url_fopen') ) {
$json = file_get_contents($apiUrl);
} else {
//De otra forma utilizamos cURL
$curl = curl_init($apiUrl);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
$json = curl_exec($curl);
curl_close($curl);
}
$dailyIndicators = json_decode($json);
?>
<div class="single-blog-post post-style-2 d-flex justify-content-center widget-post">
<!-- Post Content -->
<div class="post-content">
<a href="#" class="headline">
<h5 class="mb-0"> UF:<?php echo $dailyIndicators->uf->valor; ?> </h5>
</a>
</div>
</div>
<!-- Single Blog Post -->
<div class="single-blog-post post-style-2 d-flex justify-content-center widget-post">
<!-- Post Content -->
<div class="post-content">
<a href="#" class="headline">
<h5 class="mb-0"><?php echo 'Dólar: ' . $dailyIndicators->dolar->valor; ?></h5>
</a>
</div>
</div>
<!-- Single Blog Post -->
<div class="single-blog-post post-style-2 d-flex justify-content-center widget-post">
<!-- Post Content -->
<div class="post-content">
<a href="#" class="headline">
<h5 class="mb-0"><?php echo 'Euro: $' . $dailyIndicators->euro->valor; ?></h5>
</a>
</div>
</div>
<!-- Single Blog Post -->
<div class="single-blog-post post-style-2 d-flex justify-content-center widget-post">
<!-- Post Content -->
<div class="post-content">
<a href="#" class="headline">
<h5 class="mb-0"><?php echo 'IPC:' . $dailyIndicators->ipc->valor; ?></h5>
</a>
</div>
</div>
<!-- Single Blog Post -->
<div class="single-blog-post post-style-2 d-flex justify-content-center widget-post">
<!-- Post Content -->
<div class="post-content">
<a href="#" class="headline">
<h5 class="mb-0"><?php echo 'UTM:' . $dailyIndicators->utm->valor; ?></h5>
</a>
</div>
</div>
<div class="single-blog-post post-style-2 d-flex justify-content-center widget-post">
<!-- Post Content -->
<div class="post-content">
<a href="#" class="headline">
<h5 class="mb-0"><?php echo 'IVP:' . $dailyIndicators->ivp->valor; ?></h5>
</a>
</div>
</div>
<div class="single-blog-post post-style-2 d-flex justify-content-center widget-post">
<!-- Post Content -->
<div class="post-content">
<a href="#" class="headline">
<h5 class="mb-0"><?php echo 'Imacec:' . $dailyIndicators->imacec->valor; ?></h5>
</a>
</div>
</div>
</div>
</div>
<file_sep><!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="description" content="">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<title>Abogados B&M</title>
<link rel="icon" href="img/core-img/favicon.ico">
<link rel="stylesheet" href="style.css">
</head>
<body>
<!-- Preloader Start -->
<div id="preloader">
<div class="preload-content">
<div id="world-load"></div>
</div>
</div>
<?php include('barra.php') ?>
<div class="hero-area height-400 bg-img background-overlay" style="background-image: url(img/blog-img/bg5.jpg);"></div>
<div class="regular-page-wrap section-padding-100">
<div class="container">
<div class="row justify-content-center">
<div class="col-12 col-md-10 col-lg-8">
<div class="post-meta">
<h3>Sobre Nosotros</h3>
</div>
<div class="page-content">
<h6>
Somos Abogados especialistas, titulados de la Universidad de Valparaíso. Contamos con abogados con estudios de Post Grado, expertos en Derecho Civil. Junto a nosotros encontrará un equipo de trabajo completo, comprometido, brindamos una asesoría integral desde su inicio hasta el final, personalizada y pronta.
</h6>
<div class="mt-30">
<img src="img/core-img/logo.png">
</div>
</div>
</div>
</div>
</div>
</div>
<!-- ***** Footer Area Start ***** -->
<?php include('footer.php') ?>
<!-- ***** Footer Area End ***** -->
<!-- jQuery (Necessary for All JavaScript Plugins) -->
<script src="js/jquery/jquery-2.2.4.min.js"></script>
<!-- Popper js -->
<script src="js/popper.min.js"></script>
<!-- Bootstrap js -->
<script src="js/bootstrap.min.js"></script>
<!-- Plugins js -->
<script src="js/plugins.js"></script>
<!-- Active js -->
<script src="js/active.js"></script>
</body>
</html><file_sep> <div class="hero-area height-600 bg-img background-overlay" style="background-image: url(img/blog-img/bg1.jpg);">
<div class="container h-100">
<div class="row h-100 align-items-center justify-content-center">
<div class="col-12 col-md-8 col-lg-6">
<div class="single-blog-title text-center">
<!-- Catagory -->
<img src="img/core-img/logo.png">
<h3 class="mt-30">Visítenos en Viña del Mar.</h3>
<h3>!Llame Ahora!</h3>
<h3 class="mt-15">+56955278325</h3>
</div>
</div>
<div class="col-12 col-md-8 col-lg-6 formulario">
<div class="single-blog-title text-center">
<form method="post" action="/abogadosplus/correo.php" name="formulario">
<div class="form-group">
<select class="form-control" id="sel1" name="contacto">
<option value="contactar">Contactar como:</option>
<option value="Reunion con">Agendar Reunión</option>
<option value="Llamar a">Llámenme</option>
</select>
</div>
<div class="form-group">
<input type="text" name="nombre" class="form-control" placeholder="Nombre" required>
</div>
<div class="form-group">
<input type="email" name="correo" class="form-control" placeholder="Correo Electrónico" required>
</div>
<div class="form-group">
<input type="number" name="telefono" class="form-control" placeholder="Teléfono/Celular" required>
</div>
<div class="form-group">
<textarea type="text" name="situacion" class="form-control" placeholder="Situación" rows="3" required></textarea>
</div>
<button type="submit" class="btn btn-warning"><b>Enviar Consulta</b></button>
</form>
</div>
<?php
if(isset($_GET['enviado'])==1)
{
echo '<div class="container aviso mt-30" >';
echo'<div class="row text-center">';
echo'<div class="col-12"><b>ENVIADO CON ÉXITO</b></div>';
echo'</div>';
echo'</div>';
}else{
}
?>
</div>
</div>
</div>
<file_sep><!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="description" content="">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- The above 4 meta tags *must* come first in the head; any other head content must come *after* these tags -->
<!-- Title -->
<title>Abogados B&M</title>
<!-- Favicon -->
<link rel="icon" href="img/core-img/favicon.ico">
<!-- Style CSS -->
<link rel="stylesheet" href="style.css">
</head>
<body>
<!-- Preloader Start -->
<div id="preloader">
<div class="preload-content">
<div id="world-load"></div>
</div>
</div>
<!-- Preloader End -->
<?php include('barra.php') ?>
<?php include('formulario-general.php') ?>
<div class="hero-post-area">
<div class="container">
<div class="row">
<div class="col-12">
<div class="hero-post-slide">
<!-- Single Slide -->
<div class="single-slide d-flex align-items-center">
<div class="post-number">
<p>I</p>
</div>
<div class="post-title">
<a href="#">I N I C I O</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="main-content-wrapper section-padding-100">
<div class="container">
<div class="row justify-content-center">
<?php include('servicios.php'); ?>
<div class="col-12 col-md-8 col-lg-4">
<div class="post-sidebar-area wow fadeInUpBig" data-wow-delay="0.2s">
<?php include('informacion.php') ?>
<div class="sidebar-widget-area">
<h5 class="title">CONTÁCTATE CON NOSOTROS</h5>
<div class="widget-content">
<div class="social-area d-flex justify-content-between">
<a href="#"><i class="fa fa-facebook"></i></a>
<a href="#"><i class="fa fa-instagram"></i></a>
<a href="#"><i class="fa fa-google"></i></a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- ***** Footer Area Start ***** -->
<?php include('footer.php') ?>
<!-- ***** Footer Area End ***** -->
<!-- jQuery (Necessary for All JavaScript Plugins) -->
<script src="js/jquery/jquery-2.2.4.min.js"></script>
<!-- Popper js -->
<script src="js/popper.min.js"></script>
<!-- Bootstrap js -->
<script src="js/bootstrap.min.js"></script>
<!-- Plugins js -->
<script src="js/plugins.js"></script>
<!-- Active js -->
<script src="js/active.js"></script>
</body>
</html><file_sep><?php header('location:index.php') ?>
<file_sep><!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="description" content="">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- The above 4 meta tags *must* come first in the head; any other head content must come *after* these tags -->
<!-- Title -->
<title>Abogados B&M</title>
<!-- Favicon -->
<link rel="icon" href="img/core-img/favicon.ico">
<!-- Style CSS -->
<link rel="stylesheet" href="style.css">
</head>
<body>
<!-- Preloader Start -->
<div id="preloader">
<div class="preload-content">
<div id="world-load"></div>
</div>
</div>
<!-- Preloader End -->
<!-- ***** Header Area Start ***** -->
<?php include('barra.php') ?>
<!-- ***** Header Area End ***** -->
<!-- ********** Hero Area Start ********** -->
<?php include('formulario-general.php') ?>
<div class="hero-post-area">
<div class="container">
<div class="row">
<div class="col-12">
<div class="hero-post-slide">
<!-- Single Slide -->
<div class="single-slide d-flex align-items-center">
<div class="post-number">
<p>D</p>
</div>
<div class="post-title">
<a href="#">D E R E C H O - F A M I L I A R</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- ********** Hero Area End ********** -->
<div class="main-content-wrapper section-padding-100">
<div class="container">
<div class="row justify-content-center">
<!-- ============= Post Content Area ============= -->
<div class="col-12 col-lg-8">
<div class="single-blog-content mb-100">
<!-- Post Meta -->
<div class="post-meta">
<h3>Derecho de Familia</h3>
</div>
<!-- Post Content -->
<div class="post-content">
<blockquote class="mb-30">
<h6>El Derecho de familia es considerado como el conjunto de normas e instituciones jurídicas que intentan regular las relaciones personales y patrimoniales de los integrantes de una familia, entre sí y respecto a terceros. Tales relaciones se originan a partir del matrimonio y del parentesco.
<ul class="mt-15 sub">
<i class="fa fa-play"></i> Divorcios<br>
<i class="fa fa-play"></i> Alimentos<br>
<i class="fa fa-play"></i> Cuidado Personal <br>
<i class="fa fa-play"></i> Medidas de Protección a menores<br>
<i class="fa fa-play"></i> Salida del país
</ul>
</h6>
</blockquote>
<!-- Post Tags -->
<ul class="post-tags">
<li><a href="contact.php">Contactanos</a></li>
</ul>
</div>
</div>
</div>
</div>
<div class="row">
<div class="col-12 col-md-6 col-lg-4">
<!-- Single Blog Post -->
<div class="single-blog-post">
<!-- Post Thumbnail -->
<img src="img/blog-img/bg13.jpg" alt="">
<!-- Catagory -->
</div>
</div>
<div class="col-12 col-md-6 col-lg-4">
<!-- Single Blog Post -->
<div class="single-blog-post">
<!-- Post Thumbnail -->
<img src="img/blog-img/bg14.jpg" alt="">
<!-- Catagory -->
</div>
</div>
<div class="col-12 col-md-6 col-lg-4">
<!-- Single Blog Post -->
<div class="single-blog-post">
<!-- Post Thumbnail -->
<img src="img/blog-img/bg15.jpg" alt="">
<!-- Catagory -->
</div>
</div>
</div>
</div>
</div>
<!-- ***** Footer Area Start ***** -->
<?php include('footer.php') ?>
<!-- ***** Footer Area End ***** -->
<!-- jQuery (Necessary for All JavaScript Plugins) -->
<script src="js/jquery/jquery-2.2.4.min.js"></script>
<!-- Popper js -->
<script src="js/popper.min.js"></script>
<!-- Bootstrap js -->
<script src="js/bootstrap.min.js"></script>
<!-- Plugins js -->
<script src="js/plugins.js"></script>
<!-- Active js -->
<script src="js/active.js"></script>
</body>
</html><file_sep><!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="description" content="">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- The above 4 meta tags *must* come first in the head; any other head content must come *after* these tags -->
<!-- Title -->
<title>Abogados B&M</title>
<!-- Favicon -->
<link rel="icon" href="img/core-img/favicon.ico">
<!-- Style CSS -->
<link rel="stylesheet" href="style.css">
</head>
<body>
<!-- Preloader Start -->
<div id="preloader">
<div class="preload-content">
<div id="world-load"></div>
</div>
</div>
<!-- Preloader End -->
<!-- ***** Header Area Start ***** -->
<?php include('barra.php') ?>
<!-- ***** Header Area End ***** -->
<!-- ********** Hero Area Start ********** -->
<div class="hero-area height-600 bg-img background-overlay" style="background-image: url(img/blog-img/bg4.jpg);">
<div class="container h-100">
<div class="row h-100 align-items-center justify-content-center">
<div class="col-12 col-md-8 col-lg-6 formulario">
<div class="single-blog-title text-center">
<!-- Catagory -->
<form method="post" action="/abogadosplus/correo.php" >
<div class="form-group">
<select class="form-control" id="sel1" name="contacto">
<option value="contactar">Contactar como:</option>
<option value="Reunion con">Agendar Reunión</option>
<option value="Llamar a">Llámenme</option>
</select>
</div>
<div class="form-group">
<input type="text" name="nombre" class="form-control" placeholder="Nombre" required>
</div>
<div class="form-group">
<input type="email" name="correo" class="form-control" placeholder="Correo Electrónico" required>
</div>
<div class="form-group">
<input type="number" name="telefono" class="form-control" placeholder="Teléfono" required>
</div>
<div class="form-group">
<textarea type="text" name="situacion" class="form-control" placeholder="Situación" rows="3" required></textarea>
</div>
<button type="submit" class="btn btn-warning"><b>Enviar Consulta</b></button>
</form>
</div>
</div>
</div>
</div>
</div>
<!-- Google Maps: If you want to google map, just uncomment below codes -->
<!--
<div class="map-area">
<div id="googleMap" class="googleMap"></div>
</div>
-->
<!-- ***** Footer Area Start ***** -->
<?php include('footer.php') ?>
<!-- ***** Footer Area End ***** -->
<!-- jQuery (Necessary for All JavaScript Plugins) -->
<script src="js/jquery/jquery-2.2.4.min.js"></script>
<!-- Popper js -->
<script src="js/popper.min.js"></script>
<!-- Bootstrap js -->
<script src="js/bootstrap.min.js"></script>
<!-- Plugins js -->
<script src="js/plugins.js"></script>
<!-- Active js -->
<script src="js/active.js"></script>
<!-- Google Maps: If you want to google map, just uncomment below codes -->
<!--
<script src="https://maps.googleapis.com/maps/api/js?key=<KEY>"></script>
<script src="js/map-active.js"></script>
-->
</body>
</html> | 1db2a00223400aff3c787dd54207d9b07065c92b | [
"Markdown",
"PHP"
] | 8 | PHP | krizthyan/Abogados_PageLanding | 881dad6fccc2f6a5dcd0c39df78894b18016985f | 74c979e89db81db558ff3efa4ad5c783dec1ab83 | |
refs/heads/master | <file_sep>package firstWeek;
import java.io.FileInputStream;
import java.security.KeyStore;
import java.security.KeyStore.PrivateKeyEntry;
import java.security.KeyStore.TrustedCertificateEntry;
import java.security.KeyStoreException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.cert.Certificate;
import java.util.Base64;
import java.util.Base64.Encoder;
public class KeyStoreTest {
/**
* 下面为生成keyStore命令 需要在jdk的bin目录下执行
* keytool -genkey -alias test -keyalg RSA -validity 20000 -keystore test.keystore
* @throws KeyStoreException
* @throws Exception
*/
public static void testTrustKeyStore() throws KeyStoreException, Exception {
KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
FileInputStream fis = new FileInputStream("D:\\test.keystore");
String storePassword = "<PASSWORD>";
"".getBytes();
ks.load(fis, storePassword.toCharArray());
fis.close();
String alais = "test";
PrivateKey prk = (PrivateKey) ks.getKey(alais, storePassword.toCharArray());
Certificate c = ks.getCertificate(alais);
PublicKey pk = c.getPublicKey();
encode(pk.getEncoded());
//内部类接口
TrustedCertificateEntry tf = new KeyStore.TrustedCertificateEntry(c);
Certificate c1 = tf.getTrustedCertificate();
encode(c1.getPublicKey().getEncoded());
PrivateKeyEntry pke = new KeyStore.PrivateKeyEntry(prk, ks.getCertificateChain(alais));
encode(prk.getEncoded());
}
public static void encode(byte[] data) {
Encoder encoder = Base64.getEncoder();
byte[] b = encoder.encode(data);
System.out.println(new String(b));
}
public static void main(String[] args) {
try {
testTrustKeyStore();
} catch (KeyStoreException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
}
| 86665b2fbfeaf79874c18ac1c64823d5a7e1fe61 | [
"Java"
] | 1 | Java | ps271828/Cipher | c633c5de2ad51631dd69e331f6627325b4c9ef7b | 311ef96ec25d3b9cfdd14cbeb528427dabd53907 | |
refs/heads/master | <repo_name>godblessforhimself/ThuSearch<file_sep>/readme.md
# 校园搜索引擎
## 资源抓取
### Heritrix3.2.0配置:
#### Seed:使用尽可能多的种子
```
<prop key="seeds.textSource.value">
# URLS HERE
http://news.tsinghua.edu.cn
http://www.cs.tsinghua.edu.cn/
http://graduate.tsinghua.edu.cn
http://member.artmuseum.tsinghua.edu.cn
http://search.tsinghua.edu.cn
http://ticket.artmuseum.tsinghua.edu.cn
http://tv.tsinghua.edu.cn
http://www.artmuseum.tsinghua.edu.cn
http://www.tsinghua.edu.cn
http://www.tuef.tsinghua.edu.cn
http://yz.tsinghua.edu.cn
http://zhaopin.rsc.tsinghua.edu.cn
http://student.tsinghua.edu.cn
http://www.dangjian.tsinghua.edu.cn
http://academic.tsinghua.edu.cn
http://myhome.tsinghua.edu.cn
http://career.tsinghua.edu.cn
http://pt.tsinghua.edu.cn
http://kyybgxx.cic.tsinghua.edu.cn/kybg/index.jsp
http://guofang.tsinghua.edu.cn
http://info.itc.tsinghua.edu.cn
http://thdag.cic.tsinghua.edu.cn
http://xsg.tsinghua.edu.cn
http://info.tsinghua.edu.cn
</prop>
```
#### SurtsSource
```
<property name="surtsSource">
<bean class="org.archive.spring.ConfigString">
<property name="value">
<value>
# example.com
# http://www.example.edu/path1/
+http://(cn,edu,tsinghua,
</value>
</property>
</bean>
</property>
</bean>
```
#### MatchesListRegexDecideRule:过滤掉非pdf,doc,html和图书馆的链接
```
<bean class="org.archive.modules.deciderules.MatchesListRegexDecideRule">
<property name="decision" value="REJECT"/>
<property name="listLogicalOr" value="true" />
<property name="regexList">
<list>
<value>^(?:(?!\.html)(?!\.htm)(?!\.pdf)(?!\.PDF)(?!\.doc)(?!\.docx)(?!\.DOC)(?!\.DOCX)(?!\.htmlx)(?!\.HTML)(?!\.HTM)(?!\.HTMLX).)+</value>
<value>[\S]*lib.tsinghua.edu.cn[\S]*</value>
<value>[\S]*166.111.120.[\S]*</value>
</list>
</property>
</bean>
```
#### 将默认的warcWriter改为MirorWritor
```
<bean id="warcWriter" class="org.archive.modules.writer.MirrorWriterProcessor"> </bean>
```
#### 资源抓取遇到的困难
- 按照作业说明提供的1.x版本heritrix的配置抓取不到文档。
- heritrix 3.x 配置只能通过修改crawl-beans.cxml文档。在建立heritrix 3.x源码版本,建立自己的抓取项目时遇到了一些依赖和maven配置的问题,暂未解决。
- heritrix 3.x 抓取文档需要2-3天时间。
### 参考 https://github.com/ChenGuangbinTHU/THUSearchEngine/blob/master/WebPreprocess/crawler-beans.cxml
## 资源预处理
### 总体流程:抓取的资源在mirror目录中。遍历mirror目录,根据文件后缀(doc,pdf,html) ,调用不同的解析方法。解析出有效文本,进行格式处理后保存到mysql中。
通过访问处理后的文本,生成三个xml文件,用于indexing步骤。
#### fileParser.java 进行目录遍历,解析文本,存入mysql的java代码
- main:Files.walkFileTree进行遍历目录,对于每个文件,调用fileParser p.parse(filename)
- fileParser.parse(String path): 根据path的后缀,调用runPDFParser(path)\runDocParser(path)\runHtmlParser(path)或直接返回;
- fileParser.runPDFParser(String path): 使用apache PDFBox解析PDF格式文件,将该pdf对应的url,content,type存入mysql ;
- fileParser.runDocParser(String path): 根据doc文件的版本,分别调用POI库中不同的解析方法获得word文档的文本内容。将word文档的url,content,type存入mysql。
- fileParser.runHtmlParser(String path): 使用Jsoup提取html中的title,h1,h2,a href内容。打开文档前,用codec(path)判断编码。提取前,用正则表达式过滤掉html中的注释内容。content:getChinese获取html中的中文部分。将url,title,h1,h2,content,outlinks,type存入mysql。title,h1,h2作了长度限制。
#### MySql.java 提供了mysql的接口,main负责读取数据库,pagerank的图和映射文件的生成,pagerank的计算,indexing需要的xml文件的生成。
- MySql(): 初始化时与SchoolSearch数据库建立连接,然后初始化statement。
- createTables(): 创建pdf,doc,html三个表
- DropAllTables(): 删除pdf,doc,html三个表
- update(String sql):用语句sql更新数据库。
- buildMap_Graph(): 遍历mysql schoolsearch.html,根据每项的url,outlinks建立mapping.txt和graph.txt文件。
- calculate_pagerank(): 根据mapping.txt graph.txt,进行pagerank计算,写入pagerank.txt文件。
- write2xml(): 读取schoolsearch html,pdf,doc三个表,分别生成三个xml文件
#### 资源预处理遇到的问题
- Jsoup存在bug,解析html时:解析了注释的代码;返回正文时,把正文和javascript代码一起返回了。
- 所以预处理时,先过滤了注释的内容,content不直接通过Jsoup获取,而是手动解析中文部分。
- 编码问题:一部分网页的编码和<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />中设置的不同。如academic.tsinghua.edu.cn/index.html。实际上编码是gbk
通过使用fileParser.codec(String filename)->fileParser.isUtf(String filename)先判断编码,再用文件流指定编码读入文件。
- 抓取的一些pdf\doc文档损坏。暂时不记录这些损坏文件的数据。未解决。
- 抓取的一些doc文件缺失一些css,无法通过POI解析。暂未解决。
- 遍历一次mirror/下的文件,约3小时。可以并行提速。
<file_sep>/search_engine/fileParser/MySql.java
import org.apache.commons.lang3.StringEscapeUtils;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.UnknownHostException;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class MySql {
static final String JDBC_DRIVER = "com.mysql.jdbc.Driver";
static final String DB_URL = "jdbc:mysql://localhost:3306/SchoolSearch?characterEncoding=UTF-8&useSSL=false";
static final String USER = "root";
static final String PASS = "";
Connection conn = null;
Statement stmt = null;
public PreparedStatement p_insertpdf = null;
public PreparedStatement p_insertdoc = null;
public PreparedStatement p_insert8 = null;
public PreparedStatement p_selectpdf = null;
public PreparedStatement p_selectdoc = null;
public PreparedStatement p_selecthtml = null;
public static String[] tableName = {"pdf", "doc", "html"};
MySql()
{
getConnection();
getStatement();
System.out.println("Database initalize finish!");
}
public void getConnection() {
try {
Class.forName("com.mysql.jdbc.Driver");
System.out.println("连接数据库...");
conn = DriverManager.getConnection(DB_URL, USER, PASS);
} catch(ClassNotFoundException e){
System.out.println("加载数据库驱动失败:\n"+e.toString());
return;
}
catch (SQLException e) {
System.out.println("获取连接失败" + e.toString());
return;
}
}
public void getStatement()
{
try {
stmt = conn.createStatement();
p_insertpdf = conn.prepareStatement("INSERT INTO pdf VALUES (?,?,?);");
p_insertdoc = conn.prepareStatement("INSERT INTO doc VALUES (?,?,?);");
p_insert8 = conn.prepareStatement("INSERT INTO html VALUES (?,?,?,?,?,?,?,?);");
p_selectpdf = conn.prepareStatement("SELECT * FROM pdf;");
p_selecthtml = conn.prepareStatement("SELECT * FROM html;",ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
p_selecthtml.setFetchDirection(ResultSet.FETCH_REVERSE);
p_selecthtml.setFetchSize(Integer.MIN_VALUE);
p_selectdoc = conn.prepareStatement("SELECT * FROM doc;");
}catch (SQLException e) {
System.out.println("getStatement failed " + e.toString());
}
}
public void createTables()
{
try{
String sql = "";
stmt.executeUpdate("CREATE DATABASE IF NOT EXISTS SchoolSearch character set utf8mb4;");
sql = "CREATE TABLE IF NOT EXISTS pdf " +
"(url VARCHAR(255) not null primary key, " +
"type VARCHAR(4), " +
"content MEDIUMTEXT) DEFAULT CHARSET=utf8mb4";
stmt.executeUpdate(sql);
sql = "CREATE TABLE IF NOT EXISTS doc " +
"(url VARCHAR(255) not null primary key, " +
"type VARCHAR(4), " +
"content MEDIUMTEXT) DEFAULT CHARSET=utf8mb4";
stmt.executeUpdate(sql);
sql = "CREATE TABLE IF NOT EXISTS html " +
"(url VARCHAR(255) not null primary key, " +
"type VARCHAR(4), " +
"title VARCHAR(255), " +
"h1 VARCHAR(255), " +
"h2 VARCHAR(255), " +
"content MEDIUMTEXT, " +
"outlinks TEXT, " +
"pagerank FLOAT) DEFAULT CHARSET=utf8mb4";
stmt.executeUpdate(sql);
}
catch (SQLException se)
{
se.printStackTrace();
}
}
public void DropAllTables()
{
try {
String sql = "DROP TABLE pdf;";
stmt.executeUpdate(sql);
sql = "DROP TABLE html;";
stmt.executeUpdate(sql);
sql = "DROP TABLE doc;";
stmt.executeUpdate(sql);
} catch (SQLException e) {
e.printStackTrace();
}
}
public ResultSet read(String sqlSelect){
ResultSet rst = null;
try{
rst = stmt.executeQuery(sqlSelect);
System.out.println("查询成功");
}
catch(SQLException e){
System.out.println(e.toString());
}
return rst;
}
public int update(String sqlUpdate){
int nRecord = 0;
try{
nRecord = stmt.executeUpdate(sqlUpdate);
}
catch(SQLException e){
System.out.println(e.toString());
}
return nRecord;
}
public Map<String,Double> getPR()
{
Map<String,Double> m = new HashMap<String,Double>();
long t0 = System.currentTimeMillis();
try{
BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream("pagerank.txt"),"utf-8"));
String line="";
while ((line = in.readLine()) != null)
{
//System.out.println(line);
//Thread.sleep(10000);
int p1 = line.indexOf(" ");
int p2 = line.indexOf(" ", p1 + 1);
String url = line.substring(p2 + 1);
Double pagerank = Double.valueOf(line.substring(p1 + 1, p2));
//System.out.println("url="+url + " pagerank = " +pagerank);
m.put(url, pagerank);
}
in.close();
System.out.println("get pagerank: time " + (System.currentTimeMillis() - t0)/1000 + " seconds");
}catch (Exception e){
e.printStackTrace();
}
return m;
}
public void closeAll(){
try{
stmt.close();
conn.close();
}
catch(SQLException e){
System.out.println(e.toString());
return;
}
}
public static String processString(String s)
{
s = s.replace( "&", "&" );
s = s.replace( "<", "<" );
s = s.replace( ">", ">" );
s = s.replace( "\"", """ );
s = s.replace( "\'", "'" );
s = s.replaceAll("[\\000]+", "");
s = s.replaceAll("\\s{2,}","\n");
StringBuffer out = new StringBuffer(); // Used to hold the output.
char current; // Used to reference the current character.
if (s == null || ("".equals(s)))
return ""; // vacancy test.
for (int i = 0; i < s.length(); i++)
{
current = s.charAt(i); // NOTE: No IndexOutOfBoundsException caught here; it should not happen.
if ((current == 0x9) ||
(current == 0xA) ||
(current == 0xD) ||
((current >= 0x20) && (current <= 0xD7FF)) ||
((current >= 0xE000) && (current <= 0xFFFD)) ||
((current >= 0x10000) && (current <= 0x10FFFF)))
out.append(current);
}
return out.toString();
}
public BufferedWriter write2file(String filename)
{
File file = new File(filename);
try{
if (!file.exists())
file.createNewFile();
OutputStream fw=new FileOutputStream(file);
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fw,"utf-8"));
bw.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n");
bw.write("<pics>\n" +
"\t<category name=\""+filename.replace(".xml", "")+"\">\n");
return bw;
}catch (Exception e)
{
e.printStackTrace();
}
return null;
}
public void end(BufferedWriter bw)
{
try {
bw.write("</category>\n" +
"</pics>");
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
bw.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public void write2xml()
{
String f1 = "SchoolSearch_pdf.xml";
String f2 = "SchoolSearch_doc.xml";
String f3 = "SchoolSearch_html.xml";
try{
long t0 = System.currentTimeMillis();
int cnt = 0;
ResultSet rst = p_selectpdf.executeQuery();
BufferedWriter bw = write2file(f1);
while (rst.next())
{
String url = processString(rst.getString("url"));
String content = processString(rst.getString("content"));
bw.write("<pic url=\""+url+"\" type=\"pdf\" content=\""+content+ "\" />\n");
cnt += 1;
}
end(bw);
bw = write2file(f2);
rst = p_selectdoc.executeQuery();
while (rst.next())
{
String url = processString(rst.getString("url"));
String content = processString(rst.getString("content"));
bw.write("<pic url=\"" + url + "\" type=\"doc\" content=\"" + content + "\" />\n");
cnt += 1;
}
end(bw);
rst = p_selecthtml.executeQuery();
Map<String,Double> pagerank = getPR();
t0 = System.currentTimeMillis();
bw = write2file(f3);
while (rst.next())
{
String url = rst.getString("url");
String content = processString(rst.getString("content"));
String title = processString(rst.getString("title"));
String h1 = processString(rst.getString("h1"));
String h2 = processString(rst.getString("h2"));
String anchor = processString(rst.getString("outlinks"));
Double pr = pagerank.get(url);
url = processString(url);
if (pr == null)
{
System.out.println(url + ": pagerank not found");
pr = 1E-7;
}
bw.write("<pic url=\""+url+"\" type=\"html\" title=\""+title+"\" h1=\""+h1+"\" h2=\""+h2+"\" content=\""+content+ "\" anchor=\"" + anchor + "\" pagerank=\""+String.valueOf(pr)+"\" />\n");
cnt += 1;
if (cnt % 10000 == 0)
{
System.out.println(cnt+":time " + (System.currentTimeMillis() - t0) / 1000 + " seconds");
}
}
end(bw);
}catch (Exception e)
{
e.printStackTrace();
}
}
public static Map<Integer,String> loadMap()
{
Map<Integer,String> m = new HashMap<Integer,String>();
try{
BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream("mapping.txt"),"utf-8"));
String line="";
while ((line = in.readLine()) != null)
{
				String part[] = line.split(":", 2); // limit 2: URLs contain ':' and must not be split further
Integer src = Integer.valueOf(part[0]);
m.put(src,part[1]);
}
in.close();
}catch (Exception e){
e.printStackTrace();
}
return m;
}
public static ArrayList<ArrayList> loadEdge()
{
ArrayList<ArrayList> m = new ArrayList<ArrayList>();
try{
BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream("graph.txt"),"utf-8"));
String line="";
while ((line = in.readLine()) != null)
{
ArrayList<Integer> a = new ArrayList<Integer>();
String part[] = line.split("->");
Integer src = Integer.valueOf(part[0]);
a.add(src);
if (part.length==1)
continue;
String[] dsts = part[1].split(",");
for (String dst: dsts)
{
a.add(Integer.valueOf(dst));
}
if (a.size() > 1)
{
m.add(a);
//System.out.println(a.toString());
}
}
in.close();
}catch (Exception e){
e.printStackTrace();
}
return m;
}
public void buildMap_Graph()
{
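		// Assign each URL an integer id, then write the link graph as "src->dst1,dst2,..."
		// lines to graph.txt and the "id:url" pairs to mapping.txt for the PageRank step.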
try
{
long t0 = System.currentTimeMillis();
PreparedStatement p = conn.prepareStatement("SELECT url,outlinks FROM html;");
ResultSet rst = p.executeQuery();
System.out.println("Query use " + (System.currentTimeMillis() - t0)/1000 + " seconds");
t0=System.currentTimeMillis();
Map<String,Integer> map = new HashMap<String,Integer>();
ArrayList<String> indexing = new ArrayList<String>();
File graph = new File("graph.txt");
if (!graph.exists())
graph.createNewFile();
OutputStream fw=new FileOutputStream(graph);
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fw,"utf-8"));
int count = 0;
int loop = 0;
while (rst.next())
{
String forGraph = "";
String url = rst.getString("url");
Integer mapindex;
if ((mapindex = map.get(url)) != null)
{
forGraph += Integer.toString(mapindex) + "->";
}
else
{
map.put(url, count);
indexing.add(url);
forGraph += Integer.toString(count) + "->";
count += 1;
}
String outlinks = rst.getString("outlinks");
if (outlinks.equals(""))
continue;
String links[] = outlinks.split("\n");
for (String link: links)
{
if (link.equals(""))
continue;
int towrite = 0;
Integer i = map.get(link);
if (i == null)
{
map.put(link, count);
indexing.add(link);
towrite = count;
count += 1;
}
else
{
towrite = i;
}
if (forGraph.endsWith("->"))
{
forGraph += Integer.toString(towrite);
}
else
{
forGraph += "," + Integer.toString(towrite);
}
}
bw.write(forGraph + "\n");
loop += 1;
/*if (loop % 5000 == 0)
{
System.out.println(loop + ":time passed " + (System.currentTimeMillis() - t0) / 1000 + " seconds");
}*/
}
p.close();
rst.close();
bw.close();
fw.close();
File mapping = new File("mapping.txt");
if (!mapping.exists())
mapping.createNewFile();
fw=new FileOutputStream(mapping);
bw = new BufferedWriter(new OutputStreamWriter(fw,"utf-8"));
for (int i = 0; i < indexing.size(); i ++)
{
bw.write(Integer.toString(i) + ":" + indexing.get(i) + "\n");
}
bw.close();
fw.close();
System.out.println("build graph and map use " + (System.currentTimeMillis() - t0) / 1000 + " seconds");
}catch (Exception e)
{
e.printStackTrace();
}
}
public void calculate_pagerank()
{
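		// Power iteration with damping factor alpha over TN rounds, reading graph.txt/mapping.txt
		// and writing "<id> <pagerank> <url>" lines to pagerank.txt.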
try{
double alpha = 0.15;
int TN = 30;
long t0 = System.currentTimeMillis();
Map<Integer,String> mapping = loadMap();
int N = mapping.size();
int[] outdegrees = new int[N];
for (int i = 0; i < N; i ++)
{
outdegrees[i] = 0;
}
double[] PRs = new double[N];
double[] I = new double[N];
double S = 0;
ArrayList<ArrayList> edges = loadEdge();
for (int i = 0, n = edges.size(); i < n; i ++)
{
ArrayList<Integer> edge = edges.get(i);
Integer src = edge.get(0);
int outdegree = edge.size() - 1;
outdegrees[src] = outdegree;
}
for (int i = 0; i < N; i ++)
{
PRs[i] = 1 / (double)N;
I[i] = alpha / (double) N;
if (outdegrees[i] == 0)
{
S += PRs[i];
}
}
System.out.println("time " + (System.currentTimeMillis() - t0) / 1000 + " seconds");
t0 = System.currentTimeMillis();
for (int k = 1; k <= TN; k ++)
{
for (int i = 0, n = edges.size(); i < n; i ++)
{
ArrayList<Integer> edge = edges.get(i);
Integer src = edge.get(0);
int outdegree = edge.size() - 1;
for (int j = 0; j < outdegree; j ++)
{
Integer dst = edge.get(j + 1);
I[dst] += (1 - alpha) * PRs[src] / (double) outdegrees[src];
}
}
				// Use the dangling-node mass accumulated from the previous iteration's PRs;
				// re-accumulate it below for the next iteration.
				double danglingMass = S;
				S = 0.0;
				for (int i = 0; i < N; i ++)
				{
					PRs[i] = I[i] + (1 - alpha) * danglingMass / N;
I[i] = alpha / (double) N;
if (outdegrees[i] == 0)
{
S += PRs[i];
}
}
//System.out.println("k = " + k + " time " + (System.currentTimeMillis() - t0) / 1000 + " seconds");
}
File pagerank = new File("pagerank.txt");
if (!pagerank.exists())
pagerank.createNewFile();
OutputStream fw=new FileOutputStream(pagerank);
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fw,"utf-8"));
for (int i = 0; i < N; i ++)
{
bw.write(Integer.toString(i) + " " + Double.toString(PRs[i]) + " " + mapping.get(i) + "\n");
}
bw.close();
fw.close();
System.out.println("calculating pagerank use " + (System.currentTimeMillis() - t0) / 1000 + " seconds");
}catch (Exception e){
e.printStackTrace();
}
}
public void updatePR2DB()
{
try{
BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream("pagerank.txt"),"utf-8"));
String str = "";
PreparedStatement p = conn.prepareStatement("UPDATE html SET pagerank = ? WHERE url = ?;");
long t0 = System.currentTimeMillis();
int cnt = 0;
while ((str = in.readLine()) != null)
{
String[] part = str.split(" ");
String url = part[2];
double d = Double.valueOf(part[1]);
float pagerank = (float)d;
p.setFloat(1, pagerank);
p.setString(2, url);
p.executeUpdate();
cnt += 1;
if (cnt % 500 == 0)
{
System.out.println(cnt + ":use " + (System.currentTimeMillis() - t0)/1000 + " seconds");
}
}
System.out.println("updatePR2DB use " + (System.currentTimeMillis() - t0)/1000 + " seconds");
in.close();
}catch (Exception e)
{
e.printStackTrace();
}
}
public static void main(String[] args) throws SQLException {
MySql ms = new MySql();
//ms.buildMap_Graph();
//ms.calculate_pagerank();
ms.write2xml();
}
} | a7a1aa12cbd64d41ef0bbef9c5844549360ffbad | [
"Markdown",
"Java"
] | 2 | Markdown | godblessforhimself/ThuSearch | a290631feb7e4f0690f05ae7f74e2fa2ebe66295 | 00539551b3cba584b4eae3cedcf7d3a3ce65c329 | |
refs/heads/master | <file_sep>Introduction to Machine Learning with Python
===
Following along with the book by <NAME> & <NAME>.
Setup
---
Requires `python3`, `pip3`, `virtualenv`, and `graphviz`.
Install `graphviz` with Homebrew:
```sh
$ brew install graphviz
```
Create and activate the virtual environment:
```sh
$ virtualenv venv
$ source venv/bin/activate
```
Then, install the required dependencies.
```sh
$ pip3 install -r requirements.txt
```
Start with:
```sh
$ jupyter notebook
```
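To quickly check that the main libraries import cleanly (an optional sanity check; assumes the packages from `requirements.txt`, including `mglearn`, installed without errors):
```sh
$ python3 -c "import numpy, scipy, pandas, sklearn, mglearn; print('ok')"
```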
<file_sep>%matplotlib inline
import numpy as np
import scipy as sp
import pandas as pd
import matplotlib.pyplot as plt
import sklearn
import mglearn
# 1. load the dataset
from sklearn.datasets import load_iris
iris_dataset = load_iris()
# iris dataset
# {
# target_names: ['setosa', 'versicolor', 'virginica'],
# feature_names: [
# 'sepal length (cm)',
# 'sepal width (cm)',
# 'petal length (cm)',
# 'petal width (cm)'
# ],
# data: [
# [5.1, 3.5, 1.4, 0.2], ...
# ],
# target: [0, 0, 0, ..., 1, 1, 1, ..., 2, 2, 2]
# }
# 2. shuffle and separate test set from training set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(
iris_dataset['data'], iris_dataset['target'], random_state=0)
# X_train.shape (112, 4) 75% data
# X_test.shape (38, 4) 25% data
# y_train.shape (112,) 75% target
# y_train.shape (38,) 25% target
# 3. Look at the data. Making a pair-plot by converting numpy
# arrays into pandas data frames (data, labels)
iris_dataframe = pd.DataFrame(X_train, columns=iris_dataset.feature_names)
# scatter_matrix lives under pandas.plotting in current pandas releases
grr = pd.plotting.scatter_matrix(iris_dataframe, c=y_train, figsize=(15, 15), marker='o',
                                 hist_kwds={'bins': 20}, s=60, alpha=0.8, cmap=mglearn.cm3)
# 4. Run k-nearest neighbors
from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier(n_neighbors=1)
knn.fit(X_train, y_train)
# 5. Make a prediction
X_new = np.array([[5, 2.9, 1, 0.2]])
prediction = knn.predict(X_new)
print("Predicted target name: {}".format(iris_dataset['target_names'][prediction]))
# 6. Determine accuracy
y_pred = knn.predict(X_test)
accuracy = np.mean(y_pred == y_test)
print("Test set score: {:.2f}".format(accuracy))
<file_sep>%matplotlib inline
import numpy as np
import scipy as sp
import pandas as pd
import matplotlib.pyplot as plt
import sklearn
import mglearn
# 1. load the dataset
from sklearn.datasets import load_breast_cancer
cancer = load_breast_cancer()
# breast cancer dataset
# {
# target_names: ['malignant', 'benign'],
# feature_names: [
# 'mean radius',
# 'mean texture',
# 'mean perimeter',
# 'mean area',
# ...
# ],
# data: [
# [1.79900000e+01, 1.03800000e+01, ...],
# ...
# ],
# target: [0, 1, ...]
# }
# 2. See benign vs. malignant frequencies in the data
frequencies = dict(zip(cancer.target_names, np.bincount(cancer.target)))  # dict() so the counts print readably (a bare zip object would not)
print("Sample counts per class:\n{}".format(frequencies))
| d4664da169f07d0f16030194581451f313a0dd79 | [
"Markdown",
"Python"
] | 3 | Markdown | sts-sadr/introduction-to-machine-learning-with-python-book | 42984a190e4796b3106ec3238ce22244d29486a9 | 8763aa0efa20ec04343337637e3eacf012a15101 | |
refs/heads/master | <file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step8;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Exchanger;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* @Title ExchangerUse.java
* @Description Exchanger简单使用
* @Date 2015/11/25 14:48
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class ExchangerUse {
static Exchanger<String> exchanger = new Exchanger<String>();
static ThreadPoolExecutor executor = new ThreadPoolExecutor(2,2,0, TimeUnit.SECONDS,new ArrayBlockingQueue<Runnable>(10));
public static void main(String[] args) {
// System.out.println(Runtime.getRuntime().availableProcessors());
executor.execute(new Runnable() {
public void run() {
String A = "银行流水A";
try {
exchanger.exchange(A);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
});
executor.execute(new Runnable() {
public void run() {
String B = "银行流水B";
try {
String tmp = exchanger.exchange(B);
System.out.println("A和B数据是否一致:"+B.equals(tmp)+"\nA数据:"+tmp+"\nB数据:"+B);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
});
executor.shutdown();
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step8;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
/**
* @Title CyclicBarrierUse.java
* @Description CyclicBarrier简单使用
* @Date 2015/11/25 11:12
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class CyclicBarrierUse {
static CyclicBarrier cyclicBarrier = new CyclicBarrier(2);
// static CyclicBarrier cyclicBarrier = new CyclicBarrier(3);
public static void main(String[] args) {
new Thread(new Runnable() {
public void run() {
try {
cyclicBarrier.await();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (BrokenBarrierException e) {
e.printStackTrace();
}
System.out.println(Thread.currentThread().getName()+"打印1");
}
},"子线程1").start();
try {
cyclicBarrier.await();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (BrokenBarrierException e) {
e.printStackTrace();
}
System.out.println(Thread.currentThread().getName()+"打印2");
}
}
<file_sep>/**
* 版权申明
*/
package step6;
import com.mj.currencyprogramming.step6.ForkJoinCounter;
import org.junit.Test;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.Future;
/**
* @Title ForkJoinCounterTest.java
* @Description ForkJoinCounter单元测试类
* @Date 2015/11/24 16:56
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class ForkJoinCounterTest{
private ForkJoinPool forkJoinPool = new ForkJoinPool();
private ForkJoinCounter forkJoinCounter = new ForkJoinCounter(1, 1000000000);
@Test
public void testForkJoinCounter(){
long startTime = System.currentTimeMillis();
Future<Long> result = forkJoinPool.submit(forkJoinCounter);
try {
System.out.println(result.get());
System.out.println("多线程耗时:"+(System.currentTimeMillis()-startTime));
if (forkJoinCounter.isCompletedAbnormally()){
System.out.println(forkJoinCounter.getException());
}
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
@Test
public void testSiganlThreadCounter(){
long startTime = System.currentTimeMillis();
long sum = 0;
for (long i=0; i<=1000000000; i++){
sum += i;
}
System.out.println(sum);
System.out.println("单线程耗时:"+(System.currentTimeMillis()-startTime));
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step4;
/**
* @Title Interrupted
* @Description Interrupted线程中断
* @Date 2015/11/17 9:19
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class Interrupted {
/**
* ==============================================================
 * Thread interruption: if a thread throws InterruptedException, the
 * interrupt status flag is cleared as a side effect.
* ==============================================================
*/
static class SleepRunnable implements Runnable{
public void run() {
while (true){
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
static class BusyRunnable implements Runnable{
public void run() {
while (true){
}
}
}
public static void main(String[] args) throws InterruptedException {
Thread sleepThread = new Thread(new SleepRunnable(), "SleepThread");
sleepThread.setDaemon(true);
Thread busyThread = new Thread(new BusyRunnable(), "BusyThread");
busyThread.setDaemon(true);
sleepThread.start();
busyThread.start();
Thread.sleep(5000);
sleepThread.interrupt();
busyThread.interrupt();
System.out.println("sleepThread.isInterrupted:"+sleepThread.isInterrupted());
System.out.println("busyThread.isInterrupted:"+busyThread.isInterrupted());
Thread.sleep(1000);
}
}
<file_sep>/**
* 版权申明
*/
package step5;
import com.mj.currencyprogramming.step5.MonopolizeLock;
import org.junit.Test;
/**
* @Title MonopolizeLockTest.java
* @Description MonopolizeLock测试类
* @Date 2015/11/24 14:16
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class MonopolizeLockTest {
/**
     * The difference between reentrant and non-reentrant locks:
     * both synchronized and ReentrantLock are reentrant.
*/
private MonopolizeLock monopolizeLock = new MonopolizeLock();
@Test
public void testMonopolizeLock(){
new Thread(new Runnable() {
public void run() {
method1();
}
}, "T0").start();
}
private void method0(){
monopolizeLock.lock();
try{
System.out.println(Thread.currentThread().getName()+"获得了锁");
}finally {
monopolizeLock.unlock();
}
}
private void method1(){
monopolizeLock.lock();
try{
System.out.println(Thread.currentThread().getName()+"获得了锁");
method0();
}finally {
monopolizeLock.unlock();
}
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step5;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* @Title ConditionUse.java
* @Description ConditionUse,Condition对象的使用
* @Date 2015/11/24 10:19
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class ConditionUse {
/**
* ==================================================================
     * Condition plays the role of the implicit monitor methods on Object (wait, notify, etc.);
     * it is bound to a Lock implementation, i.e. it depends on that Lock instance.
* ==================================================================
*/
private Lock lock = new ReentrantLock();
private Condition condition = lock.newCondition();
public void conditionWait(){
lock.lock();
try{
System.out.println(Thread.currentThread().getName()+"获得锁");
System.out.println(">>>>>执行任务1");
System.out.println("被挂起...");
condition.await();
System.out.println(">>>>>执行任务2");
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
lock.unlock();
}
}
public void conditionNotify(){
lock.lock();
try{
condition.signal();
System.out.println(Thread.currentThread().getName()+"唤醒被挂起对象");
}finally {
lock.unlock();
}
}
public static void main(String[] args) throws InterruptedException {
final ConditionUse conditionUse = new ConditionUse();
new Thread(new Runnable() {
public void run() {
conditionUse.conditionWait();
}
}, "Thread0").start();
Thread.sleep(3000);
new Thread(new Runnable() {
public void run() {
conditionUse.conditionNotify();
}
}, "Thread1").start();
Thread.sleep(3000);
}
}
<file_sep>/**
* 版权申明
*/
package step8;
import com.mj.currencyprogramming.step8.UseJoinCompareCountDownLatch;
import org.junit.Test;
/**
* @Title UseJoinCompareCountDownLatchTest.java
* @Description UseJoinCompareCountDownLatch测试类
* @Date 2015/11/25 11:01
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class UseJoinCompareCountDownLatchTest {
@Test
public void useJoin() throws InterruptedException {
UseJoinCompareCountDownLatch.useJoin();
}
@Test
public void useCountDownLatch() throws InterruptedException {
UseJoinCompareCountDownLatch.useCountDownLatch();
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step7;
import java.util.concurrent.atomic.AtomicIntegerArray;
/**
* @Title AtomicIntegerArrayUse.java
* @Description AtomicIntegerArray简单使用
* @Date 2015/11/25 10:07
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class AtomicIntegerArrayUse {
/**
* ========================================================================
     * When AtomicIntegerArray operates on an array, the original input array is not modified
     * (the class works on its own copy of the data).
     * AtomicLongArray: atomically updates elements of a long array.
     * AtomicReferenceArray: atomically updates elements of a reference array.
     * All of them use Unsafe to perform the CAS under the hood.
* ========================================================================
*/
static int[] data = new int[]{1,2,3,4};
static AtomicIntegerArray atomicIntegerArray = new AtomicIntegerArray(data);
public static void main(String[] args) throws InterruptedException {
for (int i=0; i<2; i++){
new Thread(new Runnable() {
public void run() {
System.out.println(Thread.currentThread().getName()+">>>>>>>>>>>>AtomicIntegerArray.getAndAdd(2,10):"
+atomicIntegerArray.getAndAdd(2,10)+",AtomicIntegerArray.get(2):"+atomicIntegerArray.get(2));
System.out.println("data[2]:"+data[2]);
}
},"线程"+i).start();
}
Thread.currentThread().join();
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step1;
/**
* @Title DeadLock.java
* @Description 死锁
* @Date 2015/11/13 11:20
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class DeadLock {
private static Object a = new Object();
private static Object b = new Object();
public static void main(String[] args) throws InterruptedException {
Thread t1 = new Thread(new Runnable() {
public void run() {
synchronized (a){
System.out.println(Thread.currentThread().getName()+"获得a资源");
try {
Thread.sleep(10*1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println(Thread.currentThread().getName()+"请求获得b资源...");
synchronized (b){
System.out.println(Thread.currentThread().getName()+"获得b资源");
System.out.println(Thread.currentThread().getName()+"执行任务");
}
}
}
});
Thread t2 = new Thread(new Runnable() {
public void run() {
synchronized (b){
System.out.println(Thread.currentThread().getName()+"获得b资源");
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println(Thread.currentThread().getName()+"请求获得a资源...");
synchronized (a){
System.out.println(Thread.currentThread().getName()+"获得a资源");
System.out.println(Thread.currentThread().getName()+"执行任务");
}
}
}
});
t1.start();
t2.start();
Thread.sleep(600*1000);
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step8;
import java.util.Map;
import java.util.concurrent.*;
/**
* @Title BankWaterService.java
* @Description 银行流水计算
* @Date 2015/11/25 11:26
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class BankWaterService implements Runnable {
private CyclicBarrier cyclicBarrier = new CyclicBarrier(4, this);
private Executor executor = Executors.newFixedThreadPool(4);
private ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(4,4,0, TimeUnit.SECONDS,new ArrayBlockingQueue<Runnable>(10));
private ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<String, Integer>();
public void count(){
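        // Each of the 4 worker threads records its sheet value in the map and then waits at the
        // barrier; once all 4 have arrived, the barrier action (this.run()) sums the results.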
for (int i=0; i<4; i++){
final int finalI = i;
executor.execute(new Runnable() {
public void run() {
map.put(Thread.currentThread().getName(), finalI);
try {
cyclicBarrier.await();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (BrokenBarrierException e) {
e.printStackTrace();
}
}
});
}
}
public void run() {
int sum = 0;
for (Map.Entry<String,Integer> entry:map.entrySet()){
sum += entry.getValue();
}
map.put("sum",sum);
System.out.println("sum:"+sum);
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step4;
import java.util.concurrent.*;
/**
* @Title CountDownLatchTest2.java
* @Description CountDownLatchTest2,业务场景2
* @Date 2015/11/18 15:46
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class CountDownLatchTest2 {
/**
* =========================================================================
     * CountDownLatch use case 2:
     * split a large task into several smaller tasks that can run concurrently on a thread pool;
     * once all the small tasks have finished, run the completion handling.
* =========================================================================
*/
public static void main(String[] args) throws InterruptedException {
int taskNum = 10;
CountDownLatch doneSignal = new CountDownLatch(taskNum);
ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(3, 3, 0, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(10),
Executors.defaultThreadFactory());
for (int i=0; i<taskNum; i++){
threadPoolExecutor.execute(new TaskRunnable(doneSignal, i));
}
doneSignal.await();
allTaskExecuteComplete();
threadPoolExecutor.shutdown();
}
private static void allTaskExecuteComplete(){
System.out.println("所有任务执行完毕.");
}
static class TaskRunnable implements Runnable{
private final CountDownLatch doneSignal;
private final int taskId;
public TaskRunnable(CountDownLatch doneSignal, int taskId){
this.doneSignal = doneSignal;
this.taskId = taskId;
}
public void run() {
try {
doWork(taskId);
doneSignal.countDown();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void doWork(int taskId) throws InterruptedException {
System.out.println("线程"+taskId+" 执行了任务[id="+taskId+"]");
Thread.sleep(3000);
}
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step5;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* @Title BoundedQueue.java
* @Description BoundedQueue,使用Lock、Condition实现一个有界队列
* @Date 2015/11/24 10:49
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class BoundedQueue {
private Lock lock = new ReentrantLock();
private Condition notFull = lock.newCondition();
private Condition notEmpty = lock.newCondition();
private Object[] bqueue;
private int addIndex, removeIndex, count;
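    // Fixed-size circular buffer: addIndex/removeIndex wrap around, count tracks occupancy,
    // and the notFull/notEmpty conditions coordinate producers and consumers.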
public BoundedQueue(int size){
bqueue = new Object[size];
}
public void add(Object obj){
lock.lock();
try{
System.out.println(Thread.currentThread().getName()+"获得了资源锁");
while (count == bqueue.length){
System.out.println(Thread.currentThread().getName()+":队列已满,不能添加资源");
notFull.await();
}
bqueue[addIndex] = obj;
System.out.println(Thread.currentThread().getName()+":队列未满,添加资源["+obj+"]");
if (++addIndex == bqueue.length){
addIndex = 0;
}
++count;
notEmpty.signal();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
lock.unlock();
}
}
public Object remove(){
lock.lock();
try{
System.out.println(Thread.currentThread().getName()+"获得了资源锁");
while (count == 0){
System.out.println(Thread.currentThread().getName()+":队列为空,不能消费资源");
notEmpty.await();
}
Object obj = bqueue[removeIndex];
System.out.println(Thread.currentThread().getName()+":队列不为空,消费资源["+obj+"]");
if (++removeIndex == bqueue.length){
removeIndex = 0;
}
--count;
notFull.signal();
return obj;
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
lock.unlock();
}
return null;
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step4;
/**
* @Title ShutdownThread.java
* @Description ShutdownThread介绍两种终止线程的方法
* @Date 2015/11/17 9:45
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class ShutdownThread {
/**
* ============================================================
     * Stop a thread via interruption or a volatile flag instead of calling the deprecated stop() method.
* ============================================================
*/
static class DemoRunnable implements Runnable{
private volatile boolean on = true;
private long i;
public void run() {
while (on && !Thread.currentThread().isInterrupted()){
i++;
}
System.out.println(Thread.currentThread().getName()+" count i:"+i);
}
public void cancel(){
on = false;
}
}
public static void main(String[] args) throws InterruptedException {
DemoRunnable demoRunnable1 = new DemoRunnable();
DemoRunnable demoRunnable2 = new DemoRunnable();
Thread t1 = new Thread(demoRunnable1, "threadShutdownByInterrupt");
Thread t2 = new Thread(demoRunnable2, "threadShutdownByFlag");
t1.start();
t2.start();
Thread.sleep(3000);
t1.interrupt();
demoRunnable2.cancel();
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step8;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
/**
* @Title CyclicBarrierUse2
* @Description CyclicBarrier简单使用
* @Date 2015/11/25 11:15
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class CyclicBarrierUse2 {
static CyclicBarrier cyclicBarrier = new CyclicBarrier(2,new RunnableA());
public static void main(String[] args) {
new Thread(new Runnable() {
public void run() {
try {
cyclicBarrier.await();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (BrokenBarrierException e) {
e.printStackTrace();
}
System.out.println(Thread.currentThread().getName()+"执行了任务1");
}
},"子线程1").start();
try {
cyclicBarrier.await();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (BrokenBarrierException e) {
e.printStackTrace();
}
System.out.println(Thread.currentThread().getName()+"执行了任务2");
}
static class RunnableA implements Runnable{
public void run() {
System.out.println(Thread.currentThread().getName()+",这是优先线程,先执行.");
}
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step7;
import java.util.concurrent.atomic.AtomicReference;
/**
* @Title AtomicReferenceUse.java
* @Description AtomicReference简单使用
* @Date 2015/11/25 10:17
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class AtomicReferenceUse {
/**
* ======================================================================
     * AtomicReference: atomically updates a reference type.
     * AtomicReferenceFieldUpdater: atomically updates a field of a reference type.
     * AtomicMarkableReference: atomically updates a reference type together with a mark bit.
* ======================================================================
*/
static AtomicReference<User> atomicReference = new AtomicReference<User>();
public static void main(String[] args) throws InterruptedException {
final User initUser = new User("张三", 22);
atomicReference.set(initUser);
final User updateUser = new User("李四", 30);
for (int i=0; i<2; i++){
new Thread(new Runnable() {
public void run() {
atomicReference.compareAndSet(initUser, updateUser);
System.out.println(Thread.currentThread().getName()+":"+atomicReference.get());
}
},"线程"+i).start();
}
Thread.currentThread().join();
}
static class User{
private String name;
private int age;
public User(String name, int age) {
this.name = name;
this.age = age;
}
@Override
public String toString() {
return "User{" +
"name='" + name + '\'' +
", age=" + age +
'}';
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step1;
/**
* @Title CurrencyCompareSerial.java
* @Description 并发和串行效率比较
* @Date 2015/11/13 9:09
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class CurrencyCompareSerial {
public static int a = 0;
/**
* 并发,多线程
* @param counter
* @throws InterruptedException
*/
private void currency(final int counter) throws InterruptedException {
long startTime = System.currentTimeMillis();
Thread thread = new Thread(new Runnable() {
public void run() {
for (int i=0; i<counter; i++){
a += 3;
}
}
});
thread.start();
int b = 0;
for (int i=0; i<counter; i++){
b--;
}
thread.join();
System.out.println("并发\t\t"+counter+"\t\t"+(System.currentTimeMillis()-startTime)+"\t\t"+a+"\t\t"+b);
}
/**
* 串行,单线程
* @param counter
*/
private void serial(int counter){
long startTime = System.currentTimeMillis();
int a = 0;
for (int i=0; i<counter; i++){
a += 3;
}
int b = 0;
for (int i=0; i<counter; i++){
b--;
}
System.out.println("串行\t\t"+counter+"\t\t"+(System.currentTimeMillis()-startTime)+"\t\t"+a+"\t\t"+b);
}
public static void main(String[] args) throws InterruptedException {
System.out.println("模式\t\t数量\t\t耗时\t\ta值\t\tb值");
CurrencyCompareSerial currencyCompareSerial = new CurrencyCompareSerial();
currencyCompareSerial.currency(10000);
currencyCompareSerial.serial(10000);
System.out.println("===================================================================");
currencyCompareSerial.currency(100000);
currencyCompareSerial.serial(100000);
System.out.println("===================================================================");
currencyCompareSerial.currency(1000000);
currencyCompareSerial.serial(1000000);
System.out.println("===================================================================");
currencyCompareSerial.currency(10000000);
currencyCompareSerial.serial(10000000);
System.out.println("===================================================================");
currencyCompareSerial.currency(1000000000);
currencyCompareSerial.serial(1000000000);
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step8;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
/**
* @Title CyclicBarrierUse3.java
* @Description CyclicBarrier与CountDownLatch区别使用
* @Date 2015/11/25 14:07
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class CyclicBarrierUse3 {
/**
* ===========================================================
     * A CountDownLatch counter can only be used once, while a CyclicBarrier counter can be
     * reused by calling reset(). CyclicBarrier also provides extra methods such as
     * getNumberWaiting and isBroken.
* ===========================================================
*/
static CyclicBarrier cyclicBarrier = new CyclicBarrier(2);
public static void main(String[] args) {
Thread thread = new Thread(new Runnable() {
public void run() {
try {
cyclicBarrier.await();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (BrokenBarrierException e) {
e.printStackTrace();
}
}
});
thread.start();
thread.interrupt();
System.out.println("numberWaiting:"+cyclicBarrier.getNumberWaiting());
try {
cyclicBarrier.await();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (BrokenBarrierException e) {
e.printStackTrace();
System.out.println(cyclicBarrier.isBroken());
}
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step4;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* @Title Deprecated.java
* @Description 线程的暂停、恢复和停止
* @Date 2015/11/17 9:30
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class Deprecated {
/**
* ==========================================================
     * Thread suspend, resume and stop are deprecated and should not be used.
     * Reason: while suspended or stopped the thread does not release the locks it holds,
     * which easily leads to deadlock.
* ==========================================================
*/
static class PrintRunnable implements Runnable{
public void run() {
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd hh:mm:ss");
while (true){
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println(Thread.currentThread().getName()+" run at "+sdf.format(new Date()));
}
}
}
public static void main(String[] args) throws InterruptedException {
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd hh:mm:ss");
Thread printThread = new Thread(new PrintRunnable(), "printThread");
printThread.setDaemon(true);
printThread.start();
Thread.sleep(3000);
printThread.suspend();
System.out.println("printThread suspend at "+sdf.format(new Date()));
Thread.sleep(3000);
printThread.resume();
System.out.println("printThread resume at "+sdf.format(new Date()));
Thread.sleep(3000);
printThread.stop();
System.out.println("printThread stop at "+sdf.format(new Date()));
Thread.sleep(3000);
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step4.simplehttpserver;
/**
* @Title ThreadPool.java
* @Description ThreadPool,模拟的线程池
* @Date 2015/11/18 17:26
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public interface ThreadPool<Job extends Runnable> {
/**
     * Execute a job; the job must implement the Runnable interface
     * @param job the task to run
*/
void execute(Job job);
/**
     * Shut down the thread pool
*/
void shutdown();
/**
     * Add worker threads; jobs are processed by the worker threads
     * @param num the number of workers to add
*/
void addWorkers(int num);
/**
     * Remove worker threads
     * @param num the number of workers to remove
*/
void removeWorkers(int num);
/**
     * Get the number of jobs waiting to be executed
     * @return the number of jobs waiting to be executed
*/
int getJobSize();
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step10;
import java.util.Map;
import java.util.concurrent.*;
/**
* @Title FutrueTaskUse.java
* @Description FutrueTaskUse
* @Date 2015/11/25 17:36
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class FutrueTaskUse {
static final Map<String,Future<String>> map = new ConcurrentHashMap<String, Future<String>>();
public static void main(String[] args) {
for (int i=0; i<2; i++){
new Thread(new Runnable() {
public void run() {
System.out.println(Thread.currentThread().getName()+":"+FutrueTaskUse.executeTask("Task001"));
}
},"线程"+i).start();
}
}
public static String executeTask(final String taskName){
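        // Ensure a task with a given name is created and run only once across threads;
        // later callers block on and reuse the cached Future for that name.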
while (true){
Future<String> future = map.get(taskName);
if (future == null){
Callable<String> task = new Callable<String>() {
public String call() throws Exception {
return taskName;
}
};
FutureTask<String> futureTask = new FutureTask<String>(task);
                    future = map.putIfAbsent(taskName, futureTask); // putIfAbsent: only the first thread registers and runs the task
if (future == null){
future = futureTask;
futureTask.run();
}
}
try{
Thread.sleep(1000);
return future.get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step8;
import java.util.concurrent.CountDownLatch;
/**
* @Title UseJoinCompareCountDownLatch.java
* @Description UseJoinCompareCountDownLatch,使用join和CountDownLatch完成同样的功能
* @Date 2015/11/25 10:55
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class UseJoinCompareCountDownLatch {
public static void useJoin() throws InterruptedException {
Thread t1 = new Thread(new Runnable() {
public void run() {
System.out.println(Thread.currentThread().getName()+"执行步骤1");
}
}, "t1");
Thread t2 = new Thread(new Runnable() {
public void run() {
System.out.println(Thread.currentThread().getName()+"执行步骤2");
}
}, "t2");
t1.start();
t2.start();
t1.join();
t2.join();
System.out.println("任务执行完毕.");
}
public static void useCountDownLatch() throws InterruptedException {
final CountDownLatch countDownLatch = new CountDownLatch(2);
Thread t1 = new Thread(new Runnable() {
public void run() {
System.out.println(Thread.currentThread().getName()+"执行步骤1");
countDownLatch.countDown();
}
}, "t1");
Thread t2 = new Thread(new Runnable() {
public void run() {
System.out.println(Thread.currentThread().getName()+"执行步骤2");
countDownLatch.countDown();
}
}, "t2");
t1.start();
t2.start();
countDownLatch.await();
System.out.println("任务执行完毕.");
}
}
<file_sep>/**
* 版权申明
*/
package com.mj.currencyprogramming.step6;
import java.util.concurrent.RecursiveTask;
/**
* @Title ForkJoinCounter.java
* @Description ForkJoinCounter,使用Fork/Join框架
* @Date 2015/11/24 16:48
* @Author jiayi <EMAIL>
* @Version 2.0
*/
public class ForkJoinCounter extends RecursiveTask<Long>{
private long taskNeedCount = 1000000;
private long start;
private long end;
public ForkJoinCounter(long start, long end){
this.start = start;
this.end = end;
}
@Override
protected Long compute() {
long sum = 0;
boolean isCanCompute = (end-start) <= taskNeedCount;
if (isCanCompute){
for (long i=start; i<=end; i++){
sum += i;
}
}else{
long middle = (start+end)/2;
ForkJoinCounter leftCounter = new ForkJoinCounter(start, middle);
ForkJoinCounter rightCounter = new ForkJoinCounter(middle+1, end);
leftCounter.fork();
rightCounter.fork();
long leftCounterResult = leftCounter.join();
long rightCounterResult = rightCounter.join();
sum = leftCounterResult + rightCounterResult;
}
return sum;
}
}
| 0011fedb9060a3d1ab3b621ac6a757d8c4d07693 | [
"Java"
] | 22 | Java | MichaelJY91/CurrencyProgramming | 7a2d9084436eb20beb2ff57cd2fc88a736e3745b | 565079db3d3e38e2a58bf2c375db7788292e19c7 | |
refs/heads/master | <file_sep>TechBerry Upload
===============
This is the framework for the TechBerry upload platform, which will eventually be licensed and open-sourced.
Credits,
Int
Iyop45
<file_sep><?php
ob_start('sanitize_output');
include 'db_connect.php';
include 'functions.php';
sec_session_start();
include 'content.php';
$globalInitialize = new globalInitialize($mysqli,$_SERVER['PHP_SELF']);
$domain = $_SERVER['HTTP_HOST'];
$path = $_SERVER['SCRIPT_NAME'];
$preurl = "http://" . $domain . $path;
$url = preg_replace('/index.php/','',$preurl);
docs();
?>
<head>
<?php
$globalInitialize->head();
head(0,0);
?>
<title>TechBerry</title>
<link rel="icon" href="http://techberry.org/favicon.png" type="image/x-icon"/>
</head>
<body>
<?php
$globalInitialize->preBody();
alert_bar();
?>
<div id="<?=$prefixMain?>bg"></div>
<div id="<?=$prefixGlobal . $subPrefixHeader?>">
<div id="<?=$prefixGlobal . $subPrefixHeader . '_' . $subPrefixWrap?>">
<?php
notifications($mysqli, $url);
banner(1, 'home');
?>
</div>
<div id="<?=$prefixMain?>overlay2" onclick="close_bar(this);" style="display:none"></div>
<div class="<?=$prefixGlobal?>wrap">
<div id="<?=$prefixGlobal?>page" style="height:100%">
<div id="<?=$prefixGlobal?>bar" style="overflow:auto;margin-bottom:40px; !important">
<!-- THIS IS WHERE CONTENT IS PLACED -->
</div>
</div>
</div>
<?php
$globalInitialize->preFooter();
echo $footer;
$globalInitialize->postBody();
?>
</body>
<?php
ob_end_flush();
function sanitize_output($buffer) {
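    // Collapse whitespace around tags and runs of whitespace to minify the buffered HTML output.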
$search = array(
'/\>[^\S ]+/s',
'/[^\S ]+\</s',
'/(\s)+/s'
);
$replace = array(
'>',
'<',
'\\1'
);
$buffer = preg_replace($search,$replace,$buffer);
return $buffer;
}
?>
| ea9f7098211dda0d71ef2627d375bded818401c4 | [
"Markdown",
"PHP"
] | 2 | Markdown | iyop45/techberryupload | 9c92a3c73fec82419fb6946e7c38c6c02cd118a1 | fa86fb397f974ab287eba2eb1f32ef6e9ed122a4 | |
refs/heads/master | <repo_name>Baumdiggity/bootleggers-moonshine<file_sep>/README.md
# bootleggers-moonshine
<file_sep>/Assets/Scripts/PlayerMovement.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class PlayerMovement : MonoBehaviour
{
[SerializeField] float turnForce = 1.5f;
[SerializeField] float forwardThrust = 3f;
[SerializeField] float maxSpeed = 7f;
Rigidbody2D _body;
// Start is called before the first frame update
void Start()
{
_body = gameObject.GetComponent<Rigidbody2D>();
}
// Update is called once per frame
void FixedUpdate()
{
//Applies inverse Torque to rotate player
if (Input.GetButton("Horizontal"))
{
_body.AddTorque(-Input.GetAxis("Horizontal") * turnForce);
}
//Applies input relative directional force
if (Input.GetButton("Vertical"))
{
_body.AddRelativeForce(Vector2.right * forwardThrust * Input.GetAxis("Vertical"));
}
//Enforces Max Speed
_body.velocity = Vector2.ClampMagnitude(_body.velocity, maxSpeed);
}
}
| e69aa0e4c9207fa2bc480cd523916888287e8b03 | [
"Markdown",
"C#"
] | 2 | Markdown | Baumdiggity/bootleggers-moonshine | 993eb33f101ad1b9e24f06fffb00f9608ee48030 | 8d7d8e2a19241f06f9ded0b35708f791335a9d1e | |
refs/heads/master | <repo_name>yazidlouda/DrofSnar<file_sep>/Drofsnar/Program.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
namespace Drofsnar
{
class Program
{
public static void Main(string[] args)
{
int points = 5000;
int lives = 3;
int vulnerableBirdHunterCounter = 0;
bool hasRun = true;
string[] gameSequence = File.ReadAllText(@"C:\Users\yalou\OneDrive\Bureau\Drofsnar.txt").Split(',');
Dictionary<string, int> birds = new Dictionary<string, int>()
{
{ "Bird", 10 },
{ "CrestedIbis", 100 },
{ "GreatKiskudee", 300 },
{ "RedCrossbill", 500 },
{ "Red-neckedPhalarope", 700 },
{ "EveningGrosbeak", 1000 },
{ "GreaterPrairieChicken", 2000 },
{ "IcelandGull", 3000 },
{ "Orange-belliedParrot", 5000 },
{ "VulnerableBirdHunter", 200 },
};
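            // Replays the sequence until all lives are lost (assumes the input file contains enough
            // "InvincibleBirdHunter" entries to end the game). Consecutive "VulnerableBirdHunter"
            // hits since the last life lost are worth 200, 400, 800, ... points.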
while (lives>0)
{
foreach (string nextBird in gameSequence)
{
foreach(var bird in birds)
{
if (nextBird == bird.Key)
{
if (nextBird == "VulnerableBirdHunter")
{
int tempPoint = birds["VulnerableBirdHunter"] * (int)Math.Pow(2, vulnerableBirdHunterCounter);
points += tempPoint;
vulnerableBirdHunterCounter++;
}
else
{
points += bird.Value;
}
}
}
if (nextBird == "InvincibleBirdHunter")
{
lives--;
vulnerableBirdHunterCounter = 0;
}
if (points >= 10000 && hasRun == true)
{
lives++;
hasRun = false;
}
Console.WriteLine($"{nextBird,-30} {points,-10} {lives}");
if (lives == 0)
{
break;
}
}
}
Console.WriteLine($"\nThe total points: {points}");
}
}
}
| 7fba5e6f3bd2008f14a0c6dc1b710784c365f1b0 | [
"C#"
] | 1 | C# | yazidlouda/DrofSnar | 99ecd78f4c86fe9f122c8ed6655999f9204295c2 | 531a29f7dce7a2358fdfd524883cff2e77fb15dd | |
refs/heads/master | <repo_name>jp-tran/dsa<file_sep>/design/rate_limiter.py
"""
Design a rate limiter that limits the number of times clients can hit an API within
a given time window.
Time complexity: amortized O(1)
Each item is only added to and removed from the queue once.
Space complexity: O(n)
where n is the number of valid hits within any given time window
"""
from collections import deque, defaultdict
class RateLimiter:
def __init__(self, max_hits, window_seconds):
self.queue = deque() # queue item is a tuple (client id, hit time)
self.clients = defaultdict(int)
self.max_hits = max_hits
self.window = window_seconds
def hit(self, client_id, current_time):
""" If the client `client_id` has called hit() fewer than max_hit
times in the last window_seconds, record the hit and return True
else return False
"""
# pop from left end of queue
while self.queue and self.queue[0][1] < current_time - self.window:
earliest = self.queue.popleft()
if self.clients[earliest[0]] == 1:
del self.clients[earliest[0]]
else:
self.clients[earliest[0]] -= 1
# append new hit to queue if it's a valid hit
if self.clients[client_id] < self.max_hits:
            self.queue.append((client_id, current_time))
self.clients[client_id] += 1
return True
return False<file_sep>/problems/cyclic_sort/find_first_positive_num.py
"""
Given an unsorted array containing numbers, find the smallest missing positive number in it.
Time complexity: O(n)
Space complexity: O(1)
"""
def solution(A):
i = 0
# cyclic sort all numbers A[i] in which 0 < A[i] <= len(A)
# to their corresponding index
while i < len(A):
j = A[i] - 1 # index the item should be at
if A[i] > 0 and A[i] <= len(A) and A[i] != A[j]:
A[i], A[j] = A[j], A[i] # swap
else:
i += 1
# find the first number that is not at its correct index
for idx, num in enumerate(A):
if num != idx + 1:
return idx + 1
return len(A) + 1
<file_sep>/problems/top_k_elements/schedule_tasks.py
from collections import Counter, deque
import heapq
"""
You are given a list of tasks that need to be run, in any order, on a server.
Each task will take one CPU interval to execute but once a task has finished,
it has a cooling period during which it can’t be run again. If the cooling period
for all tasks is ‘K’ intervals, find the minimum number of CPU intervals that the
server needs to finish all tasks.
Time complexity: O(N*log(N) + N*k)
Space complexity: O(N)
"""
def schedule_tasks(tasks, k):
num_tasks_remaining = len(tasks)
# count the number of times each task occurs
task_counts = Counter(tasks)
# put all tasks and their associated counts into a max heap
max_heap = []
for task, freq in task_counts.items():
heapq.heappush(max_heap, (-freq, task))
# process tasks
num_intervals = 0
queue = deque()
while num_tasks_remaining > 0:
num_intervals += 1
if max_heap:
freq, task = heapq.heappop(max_heap) # O(log(N)) operation
freq += 1
num_tasks_remaining -= 1
else:
freq, task = 1, 'idle'
queue.append((freq, task))
if len(queue) == k + 1:
next_task = queue.popleft()
if next_task[0] < 0:
heapq.heappush(max_heap, next_task)
return num_intervals
def main():
print("Minimum intervals needed to execute all tasks: " +
str(schedule_tasks(['a', 'a', 'a', 'b', 'c', 'c'], 2)))
print("Minimum intervals needed to execute all tasks: " +
str(schedule_tasks(['a', 'b', 'a'], 3)))
main()
<file_sep>/graphs/dijkstra/dijkstra_shortest_reach.py
# implementation of dijkstra's algorithm
import math
import os
import random
import re
import sys
import heapq
def shortestReach(n: int, edges: list, s: int) -> list:
'''
n: number of nodes
edges: list of lists [[beginning node, ending node, edge length], ...]
s: starting node
returns a list of shortest distance from s to each node, excluding s
'''
#create adjacency list for undirected graph
adj_list = {i: set() for i in range(1, n+1)}
for begin, end, length in edges:
adj_list[begin].add((end, length))
adj_list[end].add((begin, length))
queue = [(0, s)] #list of tuples that will be used as a min-heap for Dijkstra's greedy score
dist = {i: float('inf') for i in range(1, n+1)} #dict of shortest distance to each node
dist[s] = 0
explored = {i: False for i in range(1, n+1)} #dict of explored state for each node
#loop through priority queue until it's empty
while queue:
cur_dist, cur_node = heapq.heappop(queue)
if explored[cur_node]: continue
explored[cur_node] = True
for adj_node, length in adj_list[cur_node]:
if not explored[adj_node]:
dijkstra_gs = cur_dist + length #dijkstra's greedy score
if dijkstra_gs < dist[adj_node]:
dist[adj_node] = dijkstra_gs
heapq.heappush(queue, (dijkstra_gs, adj_node))
#give all unexplored nodes a distance of -1
for node_num, state in explored.items():
if state == False:
dist[node_num] = -1
#delete starting node distance from dictionary
del dist[s]
return list(dist.values())
if __name__ == '__main__':
with open('dijkstra_input.txt', 'r') as input_f:
t = int(input_f.readline())
for t_itr in range(t):
nm = input_f.readline().split()
n = int(nm[0])
m = int(nm[1])
edges = []
for _ in range(m):
edges.append(list(map(int, input_f.readline().rstrip().split())))
s = int(input_f.readline())
result = shortestReach(n, edges, s)
print(' '.join(map(str, result)))
# print('\n')
<file_sep>/problems/monotonic_stack_and_queue/sliding_window_maximum.py
"""
You are given an array of integers nums, there is a sliding window of
size k which is moving from the very left of the array to the very right.
Find the maximum element in the window each time it slides.
Time complexity: O(n)
Space complexity: O(k) - not counting "ans" array
"""
from collections import deque
from typing import List  # needed for the List[int] type hints below
class MonotonicQueue:
def __init__(self):
self.queue = deque() # monotonic decreasing queue
def push(self, n):
while self.queue and self.queue[-1] < n:
self.queue.pop()
self.queue.append(n)
def get_max(self):
return self.queue[0]
def popleft(self, n):
if self.queue[0] == n:
self.queue.popleft()
class Solution:
def maxSlidingWindow(self, nums: List[int], k: int) -> List[int]:
ans = []
queue = MonotonicQueue() # maintain monotonic decreasing queue
for i, num in enumerate(nums):
if i < k - 1:
queue.push(num)
else:
queue.push(num)
ans.append(queue.get_max())
queue.popleft(nums[i-k+1])
return ans
<file_sep>/sorting/mergesort/merge_sort_lilnked_list.py
# Top-down implementation of merge sort for a singly-linked list
# O(n log(n)) time, where n is the number of nodes
# O(log(n)) space for the recursive call stack
# Definition for singly-linked list.
class ListNode:
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class Solution:
def sortList(self, head: ListNode) -> ListNode:
if head is None or head.next is None:
return head
mid = self.getMid(head) # find the middle node
left = self.sortList(head) # sort list on left side
right = self.sortList(mid) # sort list on right side
return self.mergeSortedLists(left, right)
def getMid(self, node):
# assume input list has at least 2 nodes
# "fast" pointer advances twice as fast as "slow" pointer
slow = None
fast = node
while fast and fast.next:
slow = node if slow is None else slow.next
fast = fast.next.next
mid = slow.next
slow.next = None # break the pointer pointing to the middle node
return mid
def mergeSortedLists(self, node1, node2):
head = ListNode()
node = head
# iteratively redirect pointers to create one sorted list
# until one list is completely traversed
while node1 and node2:
if node1.val <= node2.val:
node.next = node1
node1 = node1.next
elif node1.val > node2.val:
node.next = node2
node2 = node2.next
node = node.next
# point to the nodes of the remaining list
if node1:
node.next = node1
elif node2:
node.next = node2
return head.next<file_sep>/datastructures/disjoint_sets.py
from collections import defaultdict
from typing import List  # needed for the List[List[int]] type hint below
### for m operations and n nodes, time complexity is O(m alpha(n))
### for practical purposes, O(alpha(n)) is constant, so final complexity is O(m)
class Node:
def __init__(self, value, parent = None, rank = 0):
self.val = value
self.parent = parent
self.rank = rank
class DisjointSet:
def __init__(self):
self.hashtable = {}
def makeSet(self, val):
"""Create a set with only one element."""
node = Node(val)
node.parent = node
self.hashtable[val] = node
def find(self, node):
"""Find uppermost ancestor of node using path compression."""
if node.parent == node:
return node
node.parent = self.find(node.parent)
return node.parent
def union(self, val1, val2):
"""Union of two sets by rank.
Returns true if val1 and val2 are in different sets before the union
"""
if val1 not in self.hashtable:
self.makeSet(val1)
if val2 not in self.hashtable:
self.makeSet(val2)
node1 = self.hashtable[val1]
node2 = self.hashtable[val2]
parent1 = self.find(node1)
parent2 = self.find(node2)
# if they are part of the same set
if parent1.val == parent2.val:
return False
# else the node with the higher rank becomes the parent of the other node
elif parent1.rank < parent2.rank:
parent1.parent = parent2
elif parent1.rank > parent2.rank:
parent2.parent = parent1
else:
parent1.parent = parent2
parent2.rank += 1
return True
class Solution:
def findRedundantConnection(self, edges: List[List[int]]) -> List[int]:
ds = DisjointSet()
for u, v in edges:
are_in_same_set = not ds.union(u, v)
# if nodes of an edge are in the same set before union,
# the edge is redundant
if are_in_same_set:
return [u, v]
<file_sep>/problems/k-way-merge/merge_k_sorted_lists.py
from __future__ import print_function
import heapq
"""
Given an array of 'K' sorted LinkedLists, merge them into one sorted list.
Time complexity: O(N * log(K)), where N is the total number of elements
and K is the number of lists
Space complexity: O(K) for min-heap
"""
class ListNode:
def __init__(self, value):
self.value = value
self.next = None
# used by heapq to compare two nodes
def __lt__(self, other):
return self.value < other.value
def merge_lists(lists):
resultHead = ListNode(0)
min_heap = [head for head in lists] # assume all head nodes are valid nodes
heapq.heapify(min_heap)
curr = resultHead
while min_heap:
smallest = heapq.heappop(min_heap) # pop the smallest node
if smallest.next:
# heap push next node from the same list as the one that was popped
heapq.heappush(min_heap, smallest.next)
curr.next = smallest
curr = curr.next
return resultHead.next
def main():
l1 = ListNode(2)
l1.next = ListNode(6)
l1.next.next = ListNode(8)
l2 = ListNode(3)
l2.next = ListNode(6)
l2.next.next = ListNode(7)
l3 = ListNode(1)
l3.next = ListNode(3)
l3.next.next = ListNode(4)
result = merge_lists([l1, l2, l3])
print("Here are the elements form the merged list: ", end='')
while result != None:
print(str(result.value) + " ", end='')
result = result.next
main()<file_sep>/sorting/mergesort/merge_sort_custom_comparator.py
class Solution:
def mergeSort(self, nums):
if len(nums) <= 1:
return
mid = len(nums) // 2
left = nums[:mid]
right = nums[mid:]
self.mergeSort(left)
self.mergeSort(right)
self.merge(nums, left, right)
def merge(self, nums, left, right):
l = r = k = 0
while l < len(left) and r < len(right):
if self.compare(left[l], right[r]):
nums[k] = left[l]
l += 1
else:
nums[k] = right[r]
r += 1
k += 1
while l < len(left):
nums[k] = left[l]
l += 1
k += 1
while r < len(right):
nums[k] = right[r]
r += 1
k += 1
def compare(self, n1, n2):
# comparator function used in LC problem 179: Largest Number
# e.g. return true if n1 = '34' and n2 = '3'
return str(n1) + str(n2) > str(n2) + str(n1)<file_sep>/problems/merging_intervals/minimum_meeting_rooms.py
"""
Given a list of intervals representing the start and end time of 'n' meetings,
find the minimum number of rooms required to hold all the meetings.
Time complexity: O(n*log(n))
- sorting takes O(n*log(n))
- iterating through sorted meetings, we may do up to
two log(n) operations per meeting
Space complexity: O(n)
- sorting takes O(n)
- min heap may grow to size O(n)
"""
import heapq
class Meeting:
def __init__(self, start, end):
self.start = start
self.end = end
def __lt__(self, other):
return self.end < other.end
def min_meeting_rooms(meetings):
if not meetings:
return 0
# sort by meeting start time
meetings.sort(key=lambda x: x.start)
# the min number of rooms required is the max number of rooms used
# at any given time
num_rooms = 0
# maintain a min heap sorted by meeting end time
# size of min heap at any given time will be the number of meetings
# happening at that time
min_heap = []
for i in range(len(meetings)):
meeting = meetings[i]
while min_heap and min_heap[0].end <= meeting.start:
heapq.heappop(min_heap)
heapq.heappush(min_heap, meeting)
num_rooms = max(num_rooms, len(min_heap))
return num_rooms
def main():
print("Minimum meeting rooms required: " + str(min_meeting_rooms(
[Meeting(4, 5), Meeting(2, 3), Meeting(2, 4), Meeting(3, 5)]))) # prints 2
print("Minimum meeting rooms required: " +
str(min_meeting_rooms([Meeting(1, 4), Meeting(2, 5), Meeting(7, 9)]))) # prints 2
print("Minimum meeting rooms required: " +
str(min_meeting_rooms([Meeting(6, 7), Meeting(2, 4), Meeting(8, 12)]))) # prints 1
print("Minimum meeting rooms required: " +
str(min_meeting_rooms([Meeting(1, 4), Meeting(2, 3), Meeting(3, 6)]))) # prints 2
print("Minimum meeting rooms required: " + str(min_meeting_rooms(
[Meeting(4, 5), Meeting(2, 3), Meeting(2, 4), Meeting(3, 5)]))) # prints 2
main()
<file_sep>/bit-manipulation/three_number_bit_manip.py
"""
Given a, b, and c, return d such that
For each bit of c: if the bit is a 1, choose the corresponding bit from a.
If it's a 0, choose the corresponding bit from b.
"""
def select(a, b, c):
# Truth tables:
# c a a1
# 0 0 0
# 0 1 0
# 1 0 0
# 1 1 1
# c and a
# c b b1
# 0 0 0
# 0 1 1
# 1 0 0
# 1 1 0
#
    # (not c) and b
    # a1 = a0aaa0
    # b1 = 0b000b
    # a1|b1 = abaaab
return (c & a) | (~c & b) <file_sep>/problems/dynamic_programming/longest_common_subsequence.py
"""
Given two strings text1 and text2, return the length of their longest common subsequence.
If there is no common subsequence, return 0.
Time Complexity: O(m*n)
Space Complexity: O(m*n)
In general, we have 2 choices when comparing a letter from str1 and another letter from str2:
1. If the current letters match, the answer is 1 + lcs(rest of str1, rest of str2).
2. If the current letters don't match, the answer is
max(lcs(exclude current letter of str1), lcs(exclude current letter of str2))
"""
def lcs(word1, word2):
# top down approach
memo = [[-1]*len(word2) for _ in range(len(word1))]
ans = top_down(memo, word1, word2, 0, 0)
#bottom up approach
ans = bottom_up(word1, word2)
return ans
## Top-down approach
def top_down(dp, word1, word2, i, j):
# base case when either index is out of bounds
if (i == len(word1) or j == len(word2)):
return 0
# if the answer is already memoized
if dp[i][j] > -1:
return dp[i][j]
# if curr letters are the same,
# find the lcs given the rest of word 1 and the rest of word 2
    if (word1[i] == word2[j]):
        # use a local name that does not shadow the recursive function `top_down`
        result = 1 + top_down(dp, word1, word2, i+1, j+1)
    else:
        option1 = top_down(dp, word1, word2, i, j+1) # include curr letter of word1, exclude curr letter of word2
        option2 = top_down(dp, word1, word2, i+1, j) # exclude curr letter of word1, include curr letter of word2
        result = max(option1, option2)
    dp[i][j] = result
    return result
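
# Example (hypothetical strings): lcs("abcde", "ace") returns 3, since "ace" is the
# longest common subsequence; both the top-down and bottom-up versions give that length.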
def bottom_up(word1, word2):
# pad left column and top row with zeros
dp = [[0]*(len(word2)+1) for _ in range(len(word1)+1)]
for i in range(1, len(word1) + 1):
for j in range(1, len(word2) + 1):
if (word1[i-1] == word2[j-1]):
# dp[i-1][j-1] represents the lcs(word1[:i], word2[:j])
# (i.e. exclude curr letter of word1, exclude curr letter of word2)
# +1 because the current letters match
dp[i][j] = 1 + dp[i-1][j-1]
else:
# dp[i-1][j] represents lcs(word1[:i], word2[:j+1])
# (i.e. exclude curr letter of word1, include curr letter of word2)
# dp[i][j-1] represents lcs(word1[:i+1], word2[:j])
# (i.e. include curr letter of word1, exclude curr letter of word2)
dp[i][j] = max(dp[i-1][j], dp[i][j-1])
return dp[-1][-1]<file_sep>/problems/subsets/permutations.py
from collections import deque
"""
BFS-like approach to generating permutations
Generates each "node" by inserting at different positions
Time complexity: O(N ∗ N!)
There are N! permutations. At each iteration, we insert a new number in each of the
current permutations, which take O(N) time.
Space complexity: O(N ∗ N!)
There are N! permutations, each containing N elements.
"""
def find_permutations(nums):
numsLength = len(nums)
result = []
permutations = deque()
permutations.append([]) # initialize queue with root node
for currentNumber in nums:
# we will take all existing permutations and add the current number to create new permutations
n = len(permutations) # get size of current level
for _ in range(n): # iterate through all nodes at current level
oldPermutation = permutations.popleft()
# create a new permutation by adding the current number at every position
for j in range(len(oldPermutation)+1):
newPermutation = list(oldPermutation)
newPermutation.insert(j, currentNumber)
# decide if we should append current node's children to queue
if len(newPermutation) == numsLength:
result.append(newPermutation)
else:
permutations.append(newPermutation)
return result
def main():
print("Here are all the permutations: " + str(find_permutations([1, 3, 5])))
main()
<file_sep>/searching/binary_search.py
""" Standard Binary Search """
def binary_search(nums: List[int], target: int) -> int:
l = 0
r = len(nums) - 1
while l <= r:
mid = (l + r) // 2
if nums[mid] == target:
return mid
elif target < nums[mid]:
r = mid - 1
elif nums[mid] < target:
l = mid + 1
return -1
"""
Modified binary search
While loop collapses window to 1 element using while (lo < hi)
and either lo = mid or high = mid
Set lo = mid + 1 if the target can never be at index mid
when lo = mid + 1 is triggered.
In the example below, peak_idx can never be mid when the slope is increasing.
"""
def search_bitonic_array(arr, key):
# find the peak index
lo = 0
hi = len(arr) - 1
while (lo < hi):
mid = (hi + lo) // 2
is_increasing = arr[mid] < arr[mid+1]
if is_increasing:
# mid can never be the peak when is_increasing, so we can safely set low = mid +1
lo = mid + 1
else:
hi = mid
peak_idx = lo
return peak_idx
""" Binary Search for a Range of Numbers """
class Solution:
def searchRange(self, nums: List[int], target: int) -> List[int]:
# binary search for target
def bs(nums, target, searching_left):
l = 0
r = len(nums) # allows while loop to run even if array only has one entry
# we want to run loop to put r past its actual position
# outside the function we always subtract 1 from r
while l < r:
mid = (l + r) // 2
if nums[mid] > target or (searching_left and nums[mid] == target):
r = mid
else:
l = mid + 1 # need to add 1 because mid value is floored (without +1 we could be stuck in inf loop)
return l
if not nums:
return [-1, -1]
#search for leftmost/starting position
starting = bs(nums, target, True)
#check that leftmost index is our target
if starting == len(nums) or nums[starting] != target:
return [-1, -1]
#search for rightmost/ending position
ending = bs(nums, target, False) - 1
return [starting, ending]<file_sep>/problems/merging_intervals/free_time.py
"""
For 'K' employees, we are given a list of intervals representing each employee’s working hours.
Our goal is to determine if there is a free interval which is common to all employees.
You can assume that each list of employee working hours is sorted on the start time.
Time complexity: O(n*log(k))
where n is the total number of intervals, and k is the total number of employees
Space complexity: O(n)
note that this could be improved to O(k) by appending to results inside of the while loop
"""
from __future__ import print_function
import heapq
class Interval:
def __init__(self, start, end):
self.start = start
self.end = end
def print_interval(self):
print("[" + str(self.start) + ", " + str(self.end) + "]", end='')
def __lt__(self, other):
if self.start < other.start:
return True
else:
return False
class EmployeeInterval:
def __init__(self, interval, employee_idx, interval_idx):
self.interval = interval
self.employee_idx = employee_idx
self.interval_idx = interval_idx
def __lt__(self, other):
return self.interval.start < other.interval.start
def find_employee_free_time(schedule):
result = []
# k-way merge
intervals = []
heap = []
# initialize heap
for employee_idx, times in enumerate(schedule):
heap.append(EmployeeInterval(times[0], employee_idx, 0))
heapq.heapify(heap)
# add all heap items to the intervals list, sorted by start time
while heap:
employee_interval = heapq.heappop(heap)
intervals.append(employee_interval.interval)
employee_idx = employee_interval.employee_idx
interval_idx = employee_interval.interval_idx
work_times = schedule[employee_idx]
if interval_idx + 1 < len(work_times):
nxt = EmployeeInterval(work_times[interval_idx + 1], employee_idx, interval_idx + 1)
heapq.heappush(heap, nxt)
# determine free intervals (to save memory, this could be done inside the while loop above)
prev_end = intervals[0].end
for i in range(1, len(intervals)):
interval = intervals[i]
if interval.start <= prev_end:
prev_end = max(prev_end, interval.end)
else:
result.append(Interval(prev_end, interval.start))
prev_end = interval.end
return result
def main():
# result = [3, 5]
input = [[Interval(1, 3), Interval(5, 6)], [
Interval(2, 3), Interval(6, 8)]]
print("Free intervals: ", end='')
for interval in find_employee_free_time(input):
interval.print_interval()
print()
# result = [4,6], [8,9]
input = [[Interval(1, 3), Interval(9, 12)], [
Interval(2, 4)], [Interval(6, 8)]]
print("Free intervals: ", end='')
for interval in find_employee_free_time(input):
interval.print_interval()
print()
# result = [5,7]
input = [[Interval(1, 3)], [
Interval(2, 4)], [Interval(3, 5), Interval(7, 9)]]
print("Free intervals: ", end='')
for interval in find_employee_free_time(input):
interval.print_interval()
print()
main()
<file_sep>/problems/binary_search/bitonic_array.py
def search_bitonic_array(arr, key):
# find the peak index
lo = 0
hi = len(arr) - 1
while (lo < hi):
mid = (hi + lo) // 2
is_increasing = arr[mid] < arr[mid+1]
if is_increasing:
# mid can never be the peak when is_increasing, so we can safely set low = mid +1
lo = mid + 1
else:
hi = mid
peak_idx = lo
# binary search for key in left half
left_search_result = binary_search(arr, 0, peak_idx, key, True)
if left_search_result != -1:
return left_search_result
# binary search for key in right half
right_search_result = binary_search(arr, peak_idx+1, len(arr)-1, key, False)
if right_search_result != -1:
return right_search_result
return -1
def binary_search(arr, lo, hi, key, is_increasing):
while (lo <= hi):
mid = (hi + lo) // 2
if arr[mid] == key:
return mid
elif (is_increasing and key < arr[mid]) or (not is_increasing and key > arr[mid]):
hi = mid - 1
else:
lo = mid + 1
return -1
def main():
print(search_bitonic_array([1, 3, 8, 4, 3], 4))
print(search_bitonic_array([3, 8, 3, 1], 8))
print(search_bitonic_array([1, 3, 8, 12], 12))
print(search_bitonic_array([10, 9, 8], 10))
main()
<file_sep>/sorting/quicksort/quicksort.py
# quicksort algorithm
import random
import copy
# returns random index in the array segment
# left: left index of array segment
# right: right index of array segment
def choose_pivot(left, right):
return random.randint(left, right)
# partitions so that all values less than pivot are placed to the left of pivot
# and all values greater than pivot are placed to the right of pivot
# returns index of pivot once arr has been partitioned
# arr: original array to partition
# left: left index bounding segment to partition
# right: right index bounding segment to partition
# pivot: index of pivot
def partition(arr, left, right, pivot):
arr[left], arr[pivot] = arr[pivot], arr[left] #swap pivot with leftmost element
pivot_val = arr[left]
i = left + 1 #index of leftmost element greater than the pivot value
for j in range(left + 1, right + 1):
if arr[j] < pivot_val:
arr[j], arr[i] = arr[i], arr[j]
i+=1
arr[left], arr[i-1] = arr[i-1], arr[left] #swap pivot to correct position
return i-1
# implementation of quicksort algorithm
# arr: array to be sorted
# left: left index of array segment to be sorted
# right: right index of array segment to be sorted
def quicksort(arr, left, right):
global TOTAL_COMP
TOTAL_COMP += (right - left) #keep track of number of comparisons
if left >= right:
return
pivot = choose_pivot(left, right) #index of randomly chosen pivot
pivot = partition(arr, left, right, pivot) #index of chosen pivot after partition
quicksort(arr, left, pivot - 1)
quicksort(arr, pivot + 1, right)
def compare_sort_funcs(quicksort_arr, python_sort_arr):
# flag = True
# incorrect = []
# for index, num in enumerate(quicksort_arr):
# if index+1 != num:
# flag = False
# incorrect.append([index, num])
# print("Both arrays are the same: ", flag)
# print("Incorrect locations:")
# print(incorrect)
print("Both arrays are the same: " + str(quicksort_arr == python_sort_arr))
if __name__ == '__main__':
TOTAL_COMP = 0 #running total of comparisons for a way of choosing the pivot
with open('quick_sort_test_data.txt', 'r') as f:
orig_array = [int(num.strip()) for num in f]
print(orig_array[0])
arr1 = copy.deepcopy(orig_array)
n = len(arr1)
quicksort(arr1, 0, n-1)
with open('quick_sort_test_output.txt', 'w') as output_f:
for num in arr1:
output_f.write(f'{num}\n')
# compare quicksort and python's sort function
compare_sort_funcs(arr1, sorted(orig_array))
print('Total number of comparisons: ', TOTAL_COMP)<file_sep>/problems/top_k_elements/frequency_stack.py
import heapq
"""
Design a stack-like data structure with the following operations:
- push(int num): Pushes the number 'num' on the stack.
- pop(): Returns the most frequent number in the stack. If there is a tie,
return the number which was pushed later.
Time complexity: O(log(N)) for both push() and pop() methods
Space complexity: O(N)
"""
class Element:
def __init__(self, num, freq, time):
self.num = num
self.freq = freq
self.time = time
# methods in heapq will use the __lt__ method to sort the heap's elements
def __lt__(self, other):
# sort by higher frequency first
if self.freq != other.freq:
return self.freq > other.freq # true means current instance comes before "other"
# if both have the same frequency, sort by the element that was pushed later
return self.time > other.time
class FrequencyStack:
def __init__(self):
self.max_heap = [] # max heap sorted by frequency
self.freq_map = {} # maps number to frequency
self.time = 0
def push(self, num):
self.freq_map[num] = self.freq_map.get(num, 0) + 1
elem = Element(num, self.freq_map[num], self.time)
heapq.heappush(self.max_heap, elem)
self.time += 1
def pop(self):
num = heapq.heappop(self.max_heap).num
self.freq_map[num] -= 1
return num
def main():
frequencyStack = FrequencyStack()
frequencyStack.push(1)
frequencyStack.push(2)
frequencyStack.push(3)
frequencyStack.push(2)
frequencyStack.push(1)
frequencyStack.push(2)
frequencyStack.push(5)
print(frequencyStack.pop())
print(frequencyStack.pop())
print(frequencyStack.pop())
main()<file_sep>/problems/monotonic_stack_and_queue/largest_rectangle_in_histogram.py
"""
Given an array of integers heights representing the histogram's bar height where
the width of each bar is 1, return the area of the largest rectangle in the histogram.
Time complexity: O(n)
Space complexity: O(n)
"""
from typing import List
class Solution:
def largestRectangleArea(self, heights: List[int]) -> int:
# i is the index of the bar whose height is the height of a candidate rectangle
# l is the first index of a bar to the left with height h[l] < h[i]
# r is the first index of a bar to the right with height h[r] < h[i]
max_area = 0
indexes = [-1]
heights.append(0) # ensures "indexes" stack will never be empty
# also ensures that all heights > 0 will be popped from stack when you reach
# the end of the heights array
for r, height in enumerate(heights):
while heights[indexes[-1]] > height:
i = indexes.pop()
h = heights[i]
w = r - indexes[-1] - 1 # right - left - 1
max_area = max(max_area, h*w)
indexes.append(r)
heights.pop()
return max_area<file_sep>/problems/subsets/evaluate_expression.py
"""
Given an expression containing digits and operations (+, -, *),
find all possible ways in which the expression can be evaluated
by grouping the numbers and operators using parentheses.
Soln: If we know all of the ways to evaluate the left-hand
side (LHS) of an expression and all of the ways to evaluate
the right-hand side (RHS), we can determine all of the ways
to evaluate the entire expression.
Time complexity: O(N * 2^N)
Space complexity: O(2^N)
"""
def diff_ways_to_evaluate_expression(input):
# base case
if input.isdigit():
return [int(input)]
result = []
for i in range(len(input)):
char = input[i]
if char.isdigit():
continue
LHS = diff_ways_to_evaluate_expression(input[:i])
RHS = diff_ways_to_evaluate_expression(input[i+1:])
for left_part in LHS:
for right_part in RHS:
if char == '+':
result.append(left_part + right_part)
elif char == '-':
result.append(left_part - right_part)
else:
result.append(left_part * right_part)
return result
def main():
print("Expression evaluations: " +
str(diff_ways_to_evaluate_expression("1+2*3")))
print("Expression evaluations: " +
str(diff_ways_to_evaluate_expression("2*3-4-5")))
main()
<file_sep>/graphs/topological_sort.py
"""
Given a directed graph, return an array of nodes where
each node appears before all the nodes it points to.
Time complexity: O(|V| + |E|)
- |V| is the number of vertices
- |E| is the number of edges
Space complexity: O(|V|)
"""
### Kahn's Algorithm ###
from collections import deque
class SolutionKahn:
def findOrder(self, numCourses: int, prerequisites: List[List[int]]) -> List[int]:
# create adj list and track degree of each node (number of incoming edges)
degree = [0]*numCourses
adj = [set() for _ in range(numCourses)]
for course, prereq in prerequisites:
adj[prereq].add(course)
degree[course] += 1
# implement topological sort using Kahn's algorithm
queue = deque([i for i in range(numCourses) if degree[i] == 0])
topsort = []
while queue:
course = queue.popleft()
topsort.append(course)
# iterate through neighboring nodes and reduce degree by 1
for course_next in adj[course]:
degree[course_next] -= 1
# nodes in a cycle would never have degree 0,
# so they would never be added to the queue
if degree[course_next] == 0:
queue.append(course_next)
# if there is a cycle, not all nodes will have degree 0,
# which means not all nodes have been added to the list "topsort"
if len(topsort) != numCourses:
return []
return topsort
### DFS solution ###
class SolutionDfs:
def findOrder(self, numCourses: int, prerequisites: List[List[int]]) -> List[int]:
# create adj list
adj = [set() for _ in range(numCourses)]
for course, prereq in prerequisites:
adj[prereq].add(course)
### dfs method
# visited[i] == 0 means not visited, 1 means has visited, -1 means is visiting
visited = [0 for course in range(numCourses)]
self.ans = []
self.is_possible = True # true if topological order is possible
for i in range(numCourses):
self.dfs(i, visited, adj)
if not self.is_possible:
return []
return self.ans[::-1]
def dfs(self, node, visited, adj):
# going to a node that you are "visiting" means you have a cycle
# and a topological ordering is impossible
if visited[node] == -1:
self.is_possible = False
return
if visited[node] == 1:
return
visited[node] = -1
for neighbor in adj[node]:
self.dfs(neighbor, visited, adj)
self.ans.append(node) # append in reverse topological order
visited[node] = 1
<file_sep>/problems/subsets/balanced_parentheses.py
from collections import deque
"""
BFS-like approach
Generates each "node" part by part
Time complexity: O(N * 2^N) --> good enough for interview purposes
Each position can hold an open or close parenthesis, so there are
approx 2^N combinations to generate. For each combination, we have to
concatenate a new parenthesis to the current string, which takes O(N)
Space complexity: O(N * 2^N)
"""
def generate_valid_parentheses(num):
result = deque([''])
# count[0] is the number of open paren
# count[1] is the number of close paren
counts = deque([[0, 0]])
for i in range(1, 2*num + 1):
level_size = len(result)
for _ in range(level_size):
parens = result.popleft()
count = counts.popleft()
# add open paren
if count[0] < num:
result.append(parens + '(')
counts.append([count[0] + 1, count[1]])
# add close paren
if count[0] > count[1]:
result.append(parens + ')')
counts.append([count[0], count[1] + 1])
return list(result)
def main():
print("All combinations of balanced parentheses are: " +
str(generate_valid_parentheses(2)))
print("All combinations of balanced parentheses are: " +
str(generate_valid_parentheses(3)))
main()
<file_sep>/graphs/bfs/bfs_shortest_reach.py
# find distance to any node in an undirected graph using bfs
import math
import os
import random
import re
import sys
# Complete the bfs function below.
def bfs(n, m, edges, s):
#create an adjacency list
adj_list = [set() for _ in range(n)] #zero-indexed
for edge in edges:
adj_list[edge[0]-1].add(edge[1])
adj_list[edge[1]-1].add(edge[0])
explored = [False]*n #zero-indexed
explored[s-1] = True
dist = [-1]*n #zero-indexed
dist[s-1] = 0 #distance at starting node is zero
queue = [s] #queue to track order of nodes to explore
while queue:
v = queue.pop(0) #current vertex
for w in adj_list[v-1]:
if not explored[w-1]:
explored[w-1] = True
queue.append(w)
dist[w-1] = dist[v-1] + 6
# print(adj_list)
# print(explored)
return dist[:s-1] + dist[s:]
if __name__ == '__main__':
with open('input.txt') as input_f:
q = int(input_f.readline())
for q_itr in range(q):
nm = input_f.readline().split()
n = int(nm[0])
m = int(nm[1])
edges = []
for _ in range(m):
edges.append(list(map(int, input_f.readline().rstrip().split())))
s = int(input_f.readline())
result = bfs(n, m, edges, s)
print(' '.join(map(str, result)))
# fptr.close()
<file_sep>/problems/top_k_elements/rearange_string.py
import heapq
"""
Given a string, find if its letters can be rearranged
in such a way that no two same characters come next to each other.
Time complexity: O(N * log(N))
Space complexity: O(N)
"""
def rearrange_string(str):
charFrequencyMap = {}
for char in str:
charFrequencyMap[char] = charFrequencyMap.get(char, 0) + 1
maxHeap = []
# add all characters to the max heap
for char, frequency in charFrequencyMap.items():
heapq.heappush(maxHeap, (-frequency, char))
# alternate between appending different letters by using a previousChar
# variable to track which letter you just appended
previousChar, previousFrequency = None, 0
resultString = []
while maxHeap:
frequency, char = heapq.heappop(maxHeap)
# add the previous entry back in the heap if its frequency is greater than zero
if previousChar and -previousFrequency > 0:
heapq.heappush(maxHeap, (previousFrequency, previousChar))
# append the current character to the result string and decrement its count
resultString.append(char)
previousChar = char
previousFrequency = frequency+1 # decrement the frequency
# if we were successful in appending all the characters to the result string, return it
return ''.join(resultString) if len(resultString) == len(str) else ""
def main():
print("Rearranged string: " + rearrange_string("aappp"))
print("Rearranged string: " + rearrange_string("Programming"))
print("Rearranged string: " + rearrange_string("aapa"))
main()
| 0e04bafd16ddb6509f579b3c445317f0a957ceed | [
"Python"
] | 24 | Python | jp-tran/dsa | f7199394b606edfe3f497d0cb4ecd30ab31298be | 81bcfae7167db826acf2ad28874d78a00702aa1d | |
refs/heads/main | <file_sep>
This repository contains code to scrape UFC figthers datasets and stats from ESPN website.
There is also a boiler plate code to generate an UFC fight summary table with [gt](https://gt.rstudio.com/) inspred by a [Thomas Mock](https://gist.github.com/jthomasmock/4a8851d74d911ce9b90bf7a43c4cdf47) gist post.
<file_sep>source("ufc_scraping.R")
# function to combine player name + country
combine_word <- function(name, country){
glue::glue(
"<span style='line-height:14px'><span style='font-weight:bold;font-size:16px'>{name}</span></span>
<span style='line-height:16px'><span style ='font-weight:bold;color:grey;font-size:9px'>{country}</span></span>"
)
}
# Graphical Table ---------------------------------------------------------
figthers_tab <- function(event_num, weight_class,fighters_stats) {
fighters_stats %>%
mutate(
combo = combine_word(player_name_ordered,player_country),
combo = map(combo,gt::html)
) %>%
select(player_headshot, combo, everything(), -c(player_name_ordered, player_country)) %>%
gt() %>%
text_transform(
locations = cells_body(c(player_headshot)),
fn = function(x) {
web_image(url =x,
height = px(40))
}
) %>%
cols_label(
player_headshot = "",
combo = "",
odds = "Odds",
nb_wins = "W",
nb_loses = "L",
last_5_kds = "KD",
last_5_body_pct = "%BODY",
last_5_head_pct = "%HEAD",
last_5_leg_pct = "%LEG"
) %>%
tab_spanner(
label = "Career Record",
columns = c(nb_wins, nb_loses)
) %>%
tab_spanner(
label = "Last 5 fights",
columns = 6:9
) %>%
data_color(
c(odds),
colors = scales::col_numeric(
palette = c("#489bca","white","#ff2d21" ),
domain = NULL
),
autocolor_text = FALSE
) %>%
tab_style(
style = cell_borders(sides = "bottom", color = "transparent", weight = px(3)),
locations = cells_body(
columns = 1
)
) %>%
tab_style(
style = cell_borders(sides = "bottom", color = "transparent", weight = px(3)),
locations = cells_body(
rows = 2
)
) %>%
tab_style(
style = cell_text(font = "Inconsolata"),
locations = cells_body(columns = c(3:9))
) %>%
tab_style(
style = cell_text(weight = "bold"),
locations = cells_body(columns = c(nb_wins,nb_loses))
) %>%
fmt_percent(c(last_5_body_pct,last_5_head_pct,last_5_leg_pct), scale_values = FALSE, decimals = 1) %>%
tab_header(
title = html(glue('<b> <img width = 50, style = "vertical-align: middle;" src = "{mma_logo}"/> UFC {event_num} - {weight_class}</b>'))
) %>%
tab_source_note(
html("<span style='float:left';><b>Data</b>: ESPN & Draftkings</span><span style = 'float:right';> <span style= 'font-family:\"Font Awesome 5 Brands\"'></span>@issa_madjid")
) %>%
tab_footnote(footnote = "Knockdowns",
locations = cells_column_labels(
columns = c(last_5_kds)
)) %>%
tab_footnote(footnote = "Target Breakdown Body",
locations = cells_column_labels(
columns = c(last_5_body_pct)
)) %>%
tab_footnote(footnote = "Target Breakdown Head",
locations = cells_column_labels(
columns = c(last_5_head_pct)
)) %>%
tab_footnote(footnote = "Target Breakdown Leg",
locations = cells_column_labels(
columns = c(last_5_leg_pct)
)) %>%
opt_table_font(font = "Lato") %>%
tab_options(
heading.align = "center",
heading.border.bottom.width = px(3),
heading.border.bottom.color = "black",
table.border.top.width = px(3),
table.border.top.color = "transparent",
table.border.bottom.color = "transparent",
table.border.bottom.width = px(3),
table.background.color = "#f9fbfc",
column_labels.font.size = 14,
table.width = px(600),
heading.title.font.weight = "bold",
data_row.padding = px(1),
source_notes.padding = px(8),
footnotes.font.size = px(10)
)
}
fighters_stats <- function(fighter_1, fighter_2, fighter_1_odd, fighter_2_odd) {
fighters_dataset %>%
filter(player_name %in% c(fighter_1, fighter_2)) %>%
mutate(
odds = c(fighter_1_odd, fighter_2_odd),
player_name_ordered = str_replace(player_name,"(.+), (.+)","\\2 \\1"),
player_headshot = glue("https://a.espncdn.com/combiner/i?img=/i/headshots/mma/players/full/{id}.png"),
summarize = map(map(id, get_fighter_stats), summarise_player_stats)
) %>%
unnest_wider(summarize) %>%
select(player_headshot, player_name_ordered, player_country, odds,nb_wins, nb_loses,contains("5"))
}
# Gather all
event_tab <- function(event_num, weight_class, fighter_1, fighter_2, fighter_1_odd, fighter_2_odd) {
fighters_stats <- fighters_stats(fighter_1, fighter_2, fighter_1_odd, fighter_2_odd)
figthers_tab(event_num, weight_class,fighters_stats)
}
# Example
event_tab(261,"Welterweight","<NAME>","<NAME>",+320,-435)
# UFC 264
mcgregor_poirier <- event_tab(264,"Lightweight","<NAME>","<NAME>",+105,-134)
gtsave(mcgregor_poirier,"mcgregor_poirier.png",path = "Graphics")
burns_thompson <- event_tab(264,"Welterweight","<NAME>","<NAME>",+130,-162)
gtsave(burns_thompson,"burns_thompson.png",path = "Graphics")
makhachev_moises <- event_tab("Fight Night","Lightweight","<NAME>","<NAME>",-720,+500)
gtsave(makhachev_moises,"makhachev_moises.png",path = "Graphics")
reneau_tate <- event_tab("Fight Night","Women's Bantamweight","Reneau, Marion","<NAME>",+120,-140)
gtsave(reneau_tate,"reneau_tate.png",path = "Graphics")
<file_sep>
# Load Libraries ----------------------------------------------------------
library(xml2)
library(rvest)
library(tidyverse)
library(purrr)
library(magick)
library(glue)
library(gt)
# Scrape UFC Rankings from ESPN API ---------------------------------------
ufc_rankings_url <- "http://site.api.espn.com/apis/site/v2/sports/mma/ufc/rankings"
raw_ranking_json <- rjson::fromJSON(file = ufc_rankings_url)
mma_logo <- pluck(raw_ranking_json, "sports",1, "logos",1,"href")
ufc_league_full_name <- pluck(raw_ranking_json, "leagues", 1, "name")
ufc_league_abbr <- pluck(raw_ranking_json, "leagues", 1, "abbreviation")
ufc_league_short_name <- pluck(raw_ranking_json, "leagues", 1, "shortName")
# Rankings
rankings <- pluck(raw_ranking_json, "rankings") %>%
enframe()
rankings <- rankings %>%
unnest_wider(value, names_sep = "_") %>%
unnest_wider(value_weightClass, names_sep = "_")
# Clean it
rankings <- rankings%>%
unnest_longer(value_ranks) %>%
unnest_wider(value_ranks) %>%
unnest_wider(athlete, names_sep = "_") %>%
select(starts_with("athlete"),everything(),-where(is.list), -c(name, value_id))
# Example : HeavyWeight fighters standing
( hw_fighters <- rankings %>%
filter(value_weightClass_slug == "heavyweight"))
# Scrape All MMA fighters profil from ESPN Website ------------------------
(fighters_rows <- str_c("http://www.espn.com/mma/fighters?search=", letters) %>%
map( ~read_html(.x) %>%
html_nodes("tr.evenrow")
) %>%
append(
str_c("http://www.espn.com/mma/fighters?search=", letters) %>%
map( ~read_html(.x) %>%
html_nodes("tr.oddrow")
)
)
)
# https://stackoverflow.com/questions/43427726/concatenate-vectors-from-a-lists-of-lists
# concatenate vectors from a lists of lists
vecs <- unlist(fighters_rows, recursive = F)
(lapply(unique(names(vecs)), function(name) do.call(c, vecs[name == names(vecs)])))
# Retrieve Player Links
retrieve_player_infos <- function(player) {
(player_infos <- player %>%
html_nodes("td")
)
(player_name <- player_infos %>%
pluck(1) %>%
html_node("a") %>%
html_text()
)
(player_link <- player_infos %>%
pluck(1) %>%
html_node("a") %>%
html_attr("href")
)
(player_country <- player_infos %>%
pluck(2) %>%
html_text()
)
list("player_name" = player_name, "player_link" = player_link, "player_country" = player_country)
}
# Retrieve links for all fighters
fighters_dataset <- vecs %>%
map(retrieve_player_infos) %>%
bind_rows() %>%
arrange(player_name) %>%
mutate(
id = as.integer(str_extract(player_link, "(\\d+)" )), # Extract id
player_link = str_c("https://www.espn.com", player_link)
)
# Example : French fighters
(fighters_dataset %>%
filter(player_country == "FRA")
)
# Save the dataset
# write_csv(fighters_dataset, "Dataset/mma_fighters.csv")
# saveRDS(fighters_dataset,"Dataset/mma_fighters.rds")
# Get a specifical fighter info
get_fighter_infos <- function(fighter_name){
fighters_dataset %>%
filter(player_name == fighter_name)
}
# Get Fighter Individual Stats
get_fighter_stats <- function(fighter_id) {
fighter_url <- glue("https://www.espn.com/mma/fighter/stats/_/id/{fighter_id}")
tables <- fighter_url %>%
read_html() %>%
html_nodes("table") %>%
html_table(header = T) %>%
enframe()
stats <- tables %>%
pull(value) %>%
map_dfc(pluck) %>%
select(-c(17:20,33:36)) # Remove date
colnames(stats) <- names(stats) %>%
str_replace("\\.+\\d","")
stats
}
# Summarise KD BODY HEAD LEG
summarise_player_stats <- function(stats) {
stats %>%
summarise(
last_5_kds = sum(head(as.numeric(KD),5)),
last_5_body_pct = mean(head(parse_number(`%BODY`),5)),
last_5_head_pct = mean(head(parse_number(`%HEAD`),5)),
last_5_leg_pct = mean(head(parse_number(`%LEG`),5)),
nb_wins = sum(Res == "W"),
nb_loses = sum(Res == "L")
)
}
| 8d1d4fcd5302c38d601416fb43535b88f30557a6 | [
"Markdown",
"R"
] | 3 | Markdown | AbdoulMa/UFC-Analysis | d89057c92d6b83f0000e83189e050ddbe38df40c | f2f077d3ae93abd0906968bf5fbc144f4ab6c262 | |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using ApiPeople.Models;
using Microsoft.EntityFrameworkCore;
namespace ApiPeople.Data
{
public class AppDbContext : DbContext
{
public DbSet<Person> People { get; set; }
public AppDbContext(DbContextOptions<AppDbContext> options) : base(options)// enlaza nuestra base de datos con el EntityFramework
{
}
}
}
| ea96bb0eb576224d8362aee18083374e153ee43a | [
"C#"
] | 1 | C# | AndreaVillarroelP/People | 5836762be16eca42a546fa8d942b7aea2859a3dd | 1c9ac9e010e4b9fb20cd99a48146c9bd49ebf80d | |
refs/heads/master | <repo_name>timkellogg/data_structures<file_sep>/spec/data_structures/stack_spec.rb
RSpec.describe DataStructures::Stack do
subject { described_class.new }
describe "#pop" do
it "removes the first element from the stack" do
subject.push 1
subject.pop
expect(subject.size).to eq 0
end
end
describe "#push" do
it "adds a new element to the top of the stack" do
subject.push 1
expect(subject.size).to eq 1
subject.push 2
expect(subject.elements).to eq [2, 1]
end
end
describe "#reverse" do
it "returns a copy of the array that's in the opposite order" do
expect(subject.reverse).to eq subject.elements
subject.push 1
subject.push 2
expect(subject.reverse).to eq [1, 2]
end
end
describe "#size" do
it "returns the number of elements in the stack" do
expect(subject.size).to eq 0
subject.push 1
subject.push 2
expect(subject.size).to eq 2
end
end
describe "#empty?" do
it "returns a boolean for whether there are any elements in the stack" do
expect(subject.empty?).to eq true
subject.push 1
expect(subject.empty?).to eq false
end
end
end
<file_sep>/lib/data_structures/node.rb
module DataStructures
class Node
attr_accessor :value, :child
def initialize(value, child: nil)
@value = value
@child = child
end
end
end
<file_sep>/spec/data_structures/linked_list_spec.rb
RSpec.describe DataStructures::LinkedList do
subject { described_class.new }
describe "#initialize" do
it "creates initial head" do
expect(described_class.method_defined?(:head)).to eq true
end
end
describe "#prepend" do
it "sets the new head equal to the prepending node" do
subject.prepend 1
expect(subject.values).to eq [1]
subject.prepend 2
subject.prepend 3
expect(subject.values).to eq [3, 2, 1]
end
end
describe "#find" do
it "returns the node by value" do
expect(subject.find(1)).to eq nil
subject.prepend 2
expect(subject.find(5)).to eq nil
subject.prepend 3
expect(subject.find(3).class).to eq DataStructures::Node
expect(subject.find(3).value).to eq 3
end
end
describe "#append" do
it "adds a node on to the last of the linked list" do
subject.prepend 1
subject.prepend 2
subject.append 10
subject.append 19
expect(subject.values).to eq [2, 1, 10, 19]
end
end
describe "#delete_head" do
it "removes the current head and sets the new head as the child if it exists" do
subject.prepend 1
subject.prepend 2
subject.prepend 3
subject.delete_head
expect(subject.head.value).to eq 2
end
end
describe "#delete_tail" do
it "removes the last element" do
subject.prepend 1
subject.delete_tail
expect(subject.values).to eq []
subject.prepend 1
subject.prepend 2
subject.prepend 3
subject.delete_tail
expect(subject.values).to eq [3, 2]
end
end
describe "#append_after" do
it "adds the element as a child of the element by the same value" do
subject.prepend 1
subject.prepend 2
subject.prepend 4
subject.append_after 4, 3
expect(subject.values).to eq [4, 3, 2, 1]
end
end
describe "#append_before" do
it "adds the element as a parent of the element by the same value" do
subject.prepend 1
subject.append_before 1, 0
expect(subject.values).to eq [0, 1]
subject.prepend 2
subject.prepend 4
subject.append_before 2, 3
expect(subject.values).to eq [4, 3, 2, 0, 1]
end
end
end
<file_sep>/lib/data_structures/linked_list.rb
module DataStructures
class LinkedList
attr_reader :head
def initialize
@head = nil
end
def prepend(value)
unless head
@head = Node.new value
return
end
current_head = head
@head = Node.new value, child: current_head
end
def append(value)
return unless head
node = head
while true
unless node.child
node.child = Node.new value
return
end
node = node.child
end
end
def append_after(current_node_value, value)
return unless head
node = find current_node_value
current_child = node.child
node.child = Node.new value, child: current_child
end
def append_before(current_node_value, value)
return unless head
node = head
unless head.child
@head = Node.new value, child: node
return
end
while node.child
if node.child.value = current_node_value
current_child = node.child
node.child = Node.new value, child: current_child
return
end
node = node.child
end
end
def delete_head
return unless head
@head = head.child
end
def delete_tail
return unless head
unless head.child
@head = head.child = nil
return
end
node = head
while node.child.child
node = node.child
end
node.child = nil
end
def find(value)
return unless head
node = head
while node.child
return node if node.value == value
node = node.child
end
end
def values
return [] unless head
values = [head.value]
node = head
while node.child
values << node.child.value
node = node.child
end
values
end
end
end
<file_sep>/spec/data_structures_spec.rb
RSpec.describe DataStructures do
it "has a version number" do
expect(DataStructures::VERSION).not_to be nil
end
end
<file_sep>/lib/data_structures/queue.rb
module DataStructures
class Queue < List
def enqueue(element)
elements << element
end
def dequeue
elements.shift
end
end
end
<file_sep>/lib/data_structures/version.rb
module DataStructures
VERSION = "0.1.0"
end
<file_sep>/lib/data_structures/list.rb
module DataStructures
class List
attr_reader :elements
def initialize
@elements = []
end
def pop
elements.shift
end
def reverse
elements.reverse
end
def size
elements.count
end
def empty?
!elements.any?
end
end
end
<file_sep>/lib/data_structures/stack.rb
module DataStructures
class Stack < List
def push(element)
elements.unshift element
end
end
end
<file_sep>/lib/data_structures.rb
require "data_structures/version"
require "data_structures/list"
require "data_structures/queue"
require "data_structures/stack"
require "data_structures/node"
require "data_structures/linked_list"
module DataStructures
end
| 56345ef48fc3d42034b73c94c557ca73f2ec9925 | [
"Ruby"
] | 10 | Ruby | timkellogg/data_structures | 8aacb9c2215693faa3dd65bb0ccd3be86f83ef4d | 8f8cb6e97571eaf2b811b64c7a982a2dd09f4f8e | |
refs/heads/master | <file_sep>#ifndef NEIGHBORPOINTS_H
#define NEIGHBORPOINTS_H
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <vector>
class NeighbourPoints
{
private:
pcl::PointXYZ _searchedPoint;
std::vector<pcl::PointXYZ> _neighbourCloud;
public:
NeighbourPoints(pcl::PointXYZ searchedPoint, std::vector<pcl::PointXYZ> neighbourCloud);
std::vector<pcl::PointXYZ> GetNeighbourCloud();
};
#endif // NEIGHBORPOINTS_H
<file_sep>#include "neighbourpoints.h"
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <vector>
NeighbourPoints::NeighbourPoints(pcl::PointXYZ searchedPoint, std::vector<pcl::PointXYZ> neighbourCloud)
{
_searchedPoint = searchedPoint;
_neighbourCloud = neighbourCloud;
}
std::vector<pcl::PointXYZ> NeighbourPoints::GetNeighbourCloud()
{
return _neighbourCloud;
}
<file_sep>#include "neighboursearch.h"
#include <iostream>
#include <vector>
#include <string>
#include <ctime>
#include <math.h>
#include <pcl/point_types.h>
#include <pcl/filters/passthrough.h>
#include <pcl/visualization/cloud_viewer.h>
#include <pcl/point_cloud.h>
#include <pcl/octree/octree_search.h>
#include <pcl/io/pcd_io.h>
#include <pcl/visualization/cloud_viewer.h>
NeighbourSearch::NeighbourSearch()
{
}
NeighbourPoints* NeighbourSearch::GetRadiusSearchNeighbour(pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud, pcl::PointXYZ& searchPoint , float radius)
{
float resolution = 128.0f;
pcl::octree::OctreePointCloudSearch<pcl::PointXYZ> octree (resolution);
octree.setInputCloud (cloud);
octree.addPointsFromInputCloud ();
// Neighbors within radius search
std::vector<int> pointIdxRadiusSearch;
std::vector<float> pointRadiusSquaredDistance;
std::cout << 500 <<" : "<<" ---> "<< "Neighbors within radius search at (" << searchPoint.x
<< " " << searchPoint.y
<< " " << searchPoint.z
<< ") with radius=" << radius << std::endl;
if (isnan(searchPoint.x))
return NULL;
std::vector<pcl::PointXYZ> _neighbourCloud;
if (octree.radiusSearch (searchPoint, radius, pointIdxRadiusSearch, pointRadiusSquaredDistance) > 0)
{
for (size_t i = 0; i < pointIdxRadiusSearch.size (); ++i)
{
std::cout << " " <<searchPoint.x
<< " " <<searchPoint.y
<< " " <<searchPoint.z
<< "-->" << cloud->points[ pointIdxRadiusSearch[i] ].x
<< " " << cloud->points[ pointIdxRadiusSearch[i] ].y
<< " " << cloud->points[ pointIdxRadiusSearch[i] ].z
<< " (squared distance: " << pointRadiusSquaredDistance[i] << ")"
<< std::endl;
_neighbourCloud.push_back(cloud->points[ pointIdxRadiusSearch[i] ]);
}
}
NeighbourPoints* neighbour = new NeighbourPoints(searchPoint, _neighbourCloud);
return neighbour;
}
<file_sep>#ifndef PASSTHROUGHFILTERCPP_H
#define PASSTHROUGHFILTERCPP_H
#include <iostream>
#include <string>
class PassThroughFiltercpp
{
public:
PassThroughFiltercpp();
void ApplyFilter(std::string fileName);
void SearchCircle(std::string fileName, float radius);
void SearchVoxel(std::string fileNam);
void SearchKNearest(std::string fileName, int K);
};
#endif // PASSTHROUGHFILTERCPP_H
<file_sep>#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <iostream>
#include <QPushButton>
#include <QHBoxLayout>
#include <QVBoxLayout>
#include <QFileDialog>
#include <QTextEdit>
#include "passthroughfiltercpp.h"
MainWindow::MainWindow(QWidget *parent) :
QMainWindow(parent),
ui(new Ui::MainWindow)
{
ui->setupUi(this);
QPushButton *btnGetFile = new QPushButton(this);
txt = new QTextEdit();
btnGetFile->setText("Select File");
btnGetFile->setFixedWidth(100);
btnGetFile->setFixedHeight(30);
txt->setFixedHeight(30);
QObject::connect(btnGetFile, SIGNAL(clicked()),this, SLOT(clickedSlot()));
btnGetFile->setSizePolicy(QSizePolicy::Expanding,QSizePolicy::Expanding);
QWidget* centralWidget = new QWidget(this);
centralWidget->setSizePolicy(QSizePolicy::Expanding,QSizePolicy::Expanding);
QVBoxLayout* layout = new QVBoxLayout(centralWidget);
QHBoxLayout* hLayout = new QHBoxLayout(centralWidget);
hLayout->addWidget(txt);
hLayout->addWidget(btnGetFile);
layout->addLayout(hLayout);
setCentralWidget(centralWidget);
setWindowTitle("Spatial Partitioning");
}
void MainWindow::clickedSlot()
{
QString fileName = QFileDialog::getOpenFileName(this,
tr("Select File"), "/home/", tr("Files (*.*)"));
txt->setText(fileName);
PassThroughFiltercpp* obj = new PassThroughFiltercpp();
//obj->ApplyFilter(fileName.toUtf8().constData());
obj->SearchCircle(fileName.toUtf8().constData(), 0.5f);
//obj->SearchVoxel(fileName.toUtf8().constData(), 0.5f);
//obj->SearchKNearest(fileName.toUtf8().constData(), 0.5f, 2000);
}
MainWindow::~MainWindow()
{
delete ui;
}
<file_sep>#ifndef NEIGHBOURSEARCH_H
#define NEIGHBOURSEARCH_H
#include <string>
#include "neighbourpoints.h"
class NeighbourSearch
{
public:
NeighbourSearch();
NeighbourPoints* GetRadiusSearchNeighbour(pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud, pcl::PointXYZ& searchPoint, float radius);
};
#endif // NEIGHBOURSEARCH_H
<file_sep>#include <iostream>
#include <vector>
#include <string>
#include <ctime>
#include <math.h>
#include <pcl/point_types.h>
#include <pcl/filters/passthrough.h>
#include <pcl/visualization/cloud_viewer.h>
#include <pcl/point_cloud.h>
#include <pcl/octree/octree_search.h>
#include <pcl/io/pcd_io.h>
#include <pcl/visualization/cloud_viewer.h>
#include "passthroughfiltercpp.h"
#include "neighbourpoints.h"
#include "neighboursearch.h"
PassThroughFiltercpp::PassThroughFiltercpp()
{
}
void PassThroughFiltercpp::SearchCircle(std::string fileName, float radius )
{
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
if (pcl::io::loadPCDFile<pcl::PointXYZ> (fileName, *cloud) == -1) //* load the file
{
PCL_ERROR ("Couldn't read file some_pcd.pcd \n");
return;
}
pcl::PointXYZ searchPoint = cloud->points[232459];
NeighbourSearch* n = new NeighbourSearch();
NeighbourPoints* neighbour = n->GetRadiusSearchNeighbour(cloud, searchPoint, radius);
std::vector<pcl::PointXYZ> points = neighbour->GetNeighbourCloud();
for (size_t i = 0; i < points.size (); ++i)
{
std::cout << " " << points[i].x
<< " " << points[i].y
<< " " << points[i].z << std::endl;
}
/*std::cout << "Loaded "
<< cloud->width * cloud->height
<< " data points from some_pcd.pcd with the following fields: "
<< std::endl;
for (size_t i = 0; i < cloud->points.size (); ++i)
std::cout << " " << cloud->points[i].x
<< " " << cloud->points[i].y
<< " " << cloud->points[i].z << std::endl;
*/
/*
float resolution = 128.0f;
pcl::octree::OctreePointCloudSearch<pcl::PointXYZ> octree (resolution);
octree.setInputCloud (cloud);
octree.addPointsFromInputCloud ();
for (size_t i = 0; i < cloud->points.size (); ++i)
{
pcl::PointXYZ searchPoint = cloud->points[i];
// Neighbors within radius search
std::vector<int> pointIdxRadiusSearch;
std::vector<float> pointRadiusSquaredDistance;
std::cout << i <<" : "<<" ---> "<< "Neighbors within radius search at (" << searchPoint.x
<< " " << searchPoint.y
<< " " << searchPoint.z
<< ") with radius=" << radius << std::endl;
if (isnan(searchPoint.x))
continue;
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cluster (new pcl::PointCloud<pcl::PointXYZ>);
if (octree.radiusSearch (searchPoint, radius, pointIdxRadiusSearch, pointRadiusSquaredDistance) > 0)
{
for (size_t i = 0; i < pointIdxRadiusSearch.size (); ++i)
{
std::cout << " " <<searchPoint.x
<< " " <<searchPoint.y
<< " " <<searchPoint.z
<< "-->" << cloud->points[ pointIdxRadiusSearch[i] ].x
<< " " << cloud->points[ pointIdxRadiusSearch[i] ].y
<< " " << cloud->points[ pointIdxRadiusSearch[i] ].z
<< " (squared distance: " << pointRadiusSquaredDistance[i] << ")"
<< std::endl;
cloud_cluster->points.push_back(cloud->points[ pointIdxRadiusSearch[i] ]);
}
}
pcl::visualization::CloudViewer viewer ("Simple Cloud Viewer");
viewer.showCloud(cloud_cluster);
while (!viewer.wasStopped ())
{
}
}
*/
}
void PassThroughFiltercpp::SearchVoxel(std::string fileName)
{
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
if (pcl::io::loadPCDFile<pcl::PointXYZ> (fileName, *cloud) == -1) //* load the file
{
PCL_ERROR ("Couldn't read file some_pcd.pcd \n");
return;
}
/*std::cout << "Loaded "
<< cloud->width * cloud->height
<< " data points from some_pcd.pcd with the following fields: "
<< std::endl;
for (size_t i = 0; i < cloud->points.size (); ++i)
std::cout << " " << cloud->points[i].x
<< " " << cloud->points[i].y
<< " " << cloud->points[i].z << std::endl;
*/
float resolution = 128.0f;
pcl::octree::OctreePointCloudSearch<pcl::PointXYZ> octree (resolution);
octree.setInputCloud (cloud);
octree.addPointsFromInputCloud ();
for (size_t i = 0; i < cloud->points.size (); ++i)
{
pcl::PointXYZ searchPoint = cloud->points[i];
// Neighbors within voxel search
std::cout << i <<" : "<<" ---> "<< "voxel search (" << searchPoint.x
<< " " << searchPoint.y
<< " " << searchPoint.z
<< std::endl;
if (isnan(searchPoint.x))
continue;
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cluster (new pcl::PointCloud<pcl::PointXYZ>);
std::vector<int> pointIdxVec;
if (octree.voxelSearch (searchPoint, pointIdxVec))
{
std::cout << "Neighbors within voxel search at (" << searchPoint.x
<< " " << searchPoint.y
<< " " << searchPoint.z << ")"
<< std::endl;
for (size_t i = 0; i < pointIdxVec.size (); ++i)
{
std::cout << " " << cloud->points[pointIdxVec[i]].x
<< " " << cloud->points[pointIdxVec[i]].y
<< " " << cloud->points[pointIdxVec[i]].z << std::endl;
cloud_cluster->points.push_back(cloud->points[pointIdxVec[i]]);
}
}
pcl::visualization::CloudViewer viewer ("Simple Cloud Viewer");
viewer.showCloud(cloud_cluster);
while (!viewer.wasStopped ())
{
}
}
}
void PassThroughFiltercpp::SearchKNearest(std::string fileName, int k)
{
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
if (pcl::io::loadPCDFile<pcl::PointXYZ> (fileName, *cloud) == -1) //* load the file
{
PCL_ERROR ("Couldn't read file some_pcd.pcd \n");
return;
}
/*std::cout << "Loaded "
<< cloud->width * cloud->height
<< " data points from some_pcd.pcd with the following fields: "
<< std::endl;
for (size_t i = 0; i < cloud->points.size (); ++i)
std::cout << " " << cloud->points[i].x
<< " " << cloud->points[i].y
<< " " << cloud->points[i].z << std::endl;
*/
float resolution = 128.0f;
pcl::octree::OctreePointCloudSearch<pcl::PointXYZ> octree (resolution);
octree.setInputCloud (cloud);
octree.addPointsFromInputCloud ();
for (size_t i = 0; i < cloud->points.size (); ++i)
{
pcl::PointXYZ searchPoint = cloud->points[i];
// K nearest neighbor search
std::cout << "K nearest neighbor search at (" << searchPoint.x
<< " " << searchPoint.y
<< " " << searchPoint.z
<< ") with K=" << k << std::endl;
if (isnan(searchPoint.x))
continue;
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cluster (new pcl::PointCloud<pcl::PointXYZ>);
std::vector<int> pointIdxNKNSearch;
std::vector<float> pointNKNSquaredDistance;
if (octree.nearestKSearch (searchPoint, k, pointIdxNKNSearch, pointNKNSquaredDistance) > 0)
{
for (size_t i = 0; i < pointIdxNKNSearch.size (); ++i)
{
std::cout << " " << cloud->points[ pointIdxNKNSearch[i] ].x
<< " " << cloud->points[ pointIdxNKNSearch[i] ].y
<< " " << cloud->points[ pointIdxNKNSearch[i] ].z
<< " (squared distance: " << pointNKNSquaredDistance[i] << ")" << std::endl;
cloud_cluster->points.push_back(cloud->points[pointIdxNKNSearch[i]]);
}
}
pcl::visualization::CloudViewer viewer ("Simple Cloud Viewer");
viewer.showCloud(cloud_cluster);
while (!viewer.wasStopped ())
{
}
}
}
void PassThroughFiltercpp::ApplyFilter(std::string fileName)
{
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_filtered (new pcl::PointCloud<pcl::PointXYZ>);
if (pcl::io::loadPCDFile<pcl::PointXYZ> (fileName, *cloud) == -1) //* load the file
{
PCL_ERROR ("Couldn't read file some_pcd.pcd \n");
return;
}
// Create the filtering object
pcl::PassThrough<pcl::PointXYZ> pass;
pass.setInputCloud (cloud);
pass.setFilterFieldName ("z");
pass.setFilterLimits (0.0, 1.0);
//pass.setFilterLimitsNegative (true);
pass.filter (*cloud_filtered);
std::cerr << "Cloud after filtering: " << std::endl;
for (size_t i = 0; i < cloud_filtered->points.size (); ++i)
std::cerr << " " << cloud_filtered->points[i].x << " "
<< cloud_filtered->points[i].y << " "
<< cloud_filtered->points[i].z << std::endl;
pcl::visualization::CloudViewer viewer ("Simple Cloud Viewer");
viewer.showCloud(cloud_filtered);
while (!viewer.wasStopped ())
{
}
}
| d2f1f42feb5b6caae9bdd44b52722cb385a3022d | [
"C++"
] | 7 | C++ | satypro/PCLVizualizationTool | 28b3bc2f0bdc1a561f864caafee19342668099f7 | 1231c1a42e90875712f4cd6cba126dd35f7aebae | |
refs/heads/master | <repo_name>mina-amir1/BIT_MATH-LIB<file_sep>/BIT_MATH.h
/*BIT_MATH LIB CREATED BY <NAME>
0 1 0 1 0 1 0 1 YOUR BYTE
----------------------
7 6 5 4 3 2 1 0 YOUR BIT ORDER IN BIT_MATH LIB
┌ ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ ┐
| SYNTAX OF FUNCs |
|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| RTRN_TYPE | FUNC_NAME(ARGUMENTS TYPES_DESCREPTION);//FUNC DISCREPTION |
|--------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------|
|1. u8 (char) | set_bit(u8_byte needed to set, u8_no of bit needed CONCERNING THE ABOVE ORDER);//setting a bit from zero to one |
|2. u8 (char) | clr_bit(u8_byte needed to clear, u8_no of bit needed CONCERNING THE ABOVE ORDER);//clearing a bit from one to zero |
|3. u8 (char) | get_bit(u8_byte needed to get the bit within it, u8_no of bit needed CONCERNING THE ABOVE ORDER);//return the status of a bit zero or one |
|4. u8 (char) | togg_bit(u8_byte needed to toggle, u8_no of bit needed CONCERNING THE ABOVE ORDER);//toggle a bit from a status to a different one |
|5. u8 (char) | set_register(u8_byte needed register);//setting all bits of a byte to 1s |
|6. u8 (char) | set_low_nibble(u8_byte needed);//setting the right 4 bits of a byte to 1s |
|7. u8 (char) | clr_low_nibble(u8_byte needed);//clearing the right 4 bits of a byte |
|8. u8 (char) | set_high_nibble(u8_byte needed);//setting the left 4 bits of a byte to 1s |
|9. u8 (char) | clr_high_nibble(u8_byte needed);//clearing the left 4 bits of a byte |
|10.u8 (char) | togg_low_nibble(u8_byte needed);//toggle the right 4 bits of a byte from a status to a different one |
|11.u8 (char) | togg_high_nibble(u8_byte needed);//toggle the left 4 bits of a byte from a status to a different one |
|11.u8 (char) | assign_low_nibble(u8_byte needed , u8_number <=15 to assign);//put a number <=15 in the right 4 bits without changing the left ones |
|12.u8 (char) | assign_high_nibble(u8_byte needed , u8_number >15&&<255 to assign);//put a number >15 and <255 in the left 4 bits without changing the right ones |
└ ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ ┘ */
#include"std_types.h"
u8 set_bit(u8 byte, u8 n){
byte=byte |(1<<n);
return byte;
}
u8 clr_bit(u8 byte,u8 n){
u8 clr;
//check weather the bit is 1 or 0
clr=byte&(1<<n);
//if 0 return byte as it is
if (clr==0){return byte;}
//if not clear bit and return clr
else{
clr=byte^(1<<n);
return clr;}
}
u8 get_bit(u8 byte, u8 n){
byte=byte&(1<<n);
if(byte==0){return 0;}
else{return 1;}
}
u8 togg_bit(u8 byte,u8 n){
byte=byte^(1<<n);
return byte;
}
u8 set_register(u8 byte){
byte=byte | 255;
return byte;
}
u8 set_low_nibble(u8 byte){
byte = byte | 15;
return byte;
}
u8 clr_low_nibble(u8 byte){
byte=byte& 240;
return byte;
}
u8 set_high_nibble(u8 byte){
byte=byte | 240;
return byte;
}
u8 clr_high_nibble(u8 byte){
byte=byte & 15;
return byte;
}
u8 togg_low_nibble(u8 byte){
byte=byte^15;
return byte;
}
u8 togg_high_nibble(u8 byte){
byte=byte^240;
return byte;
}
u8 assign_low_nibble(u8 byte,u8 n){
byte=byte&240;
while(n>15){n=n-15;}
byte=byte|n;
return byte;
}
u8 assign_high_nibble(u8 byte,u8 n){
if(n<16){return byte;}
else{byte=byte&15;
while(n>255){n=n-255;}
u8 c=byte|n;
c=c&240;
c=c|byte;
return c;}
}
<file_sep>/README.md
# BIT_MATH-LIB
Make an easy bit math logic with this library that contains +10 functions that handle all the bit math level for you.
#BIT_MATH LIB CREATED BY <NAME>
0 1 0 1 0 1 0 1 YOUR BYTE
----------------------
7 6 5 4 3 2 1 0 YOUR BIT ORDER IN BIT_MATH LIB
SYNTAX OF FUNCs
RTRN_TYPE | FUNC_NAME(ARGUMENTS TYPES_DESCREPTION);//FUNC DISCREPTION
--------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------
1. u8 (char) | set_bit(u8_byte needed to set, u8_no of bit needed CONCERNING THE ABOVE ORDER);//setting a bit from zero to one
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------
2. u8 (char) | clr_bit(u8_byte needed to clear, u8_no of bit needed CONCERNING THE ABOVE ORDER);//clearing a bit from one to zero
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
3. u8 (char) | get_bit(u8_byte needed to get the bit within it, u8_no of bit needed CONCERNING THE ABOVE ORDER);//return the status of a bit zero or one
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
4. u8 (char) | togg_bit(u8_byte needed to toggle, u8_no of bit needed CONCERNING THE ABOVE ORDER);//toggle a bit from a status to a different one
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
5. u8 (char) | set_register(u8_byte needed register);//setting all bits of a byte to 1s
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
6. u8 (char) | set_low_nibble(u8_byte needed);//setting the right 4 bits of a byte to 1s
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
7. u8 (char) | clr_low_nibble(u8_byte needed);//clearing the right 4 bits of a byte
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
8. u8 (char) | set_high_nibble(u8_byte needed);//setting the left 4 bits of a byte to 1s
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
9. u8 (char) | clr_high_nibble(u8_byte needed);//clearing the left 4 bits of a byte
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
10.u8 (char) | togg_low_nibble(u8_byte needed);//toggle the right 4 bits of a byte from a status to a different one
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
11.u8 (char) | togg_high_nibble(u8_byte needed);//toggle the left 4 bits of a byte from a status to a different one
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
11.u8 (char) | assign_low_nibble(u8_byte needed , u8_number <=15 to assign);//put a number <=15 in the right 4 bits without changing the left ones
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
12.u8 (char) | assign_high_nibble(u8_byte needed , u8_number >15&&<255 to assign);//put a number >15 and <255 in the left 4 bits without changing the right ones
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
| 945ecdf54c82a8b6d30490addfce8977278e5440 | [
"Markdown",
"C"
] | 2 | C | mina-amir1/BIT_MATH-LIB | cad8b7425058288d62d937ebdfadbea04f358302 | d0bdc4963b498bc4e256c3ca68c0ccc1bcced66d | |
refs/heads/master | <file_sep>#include <Servo.h>
#include <Ultrasonic.h>
int sensor;
int angle;
Ultrasonic ultrasonic(9, 10);
Servo myservo;
void setup() {
// Begin the Serial at 9600 Baud
Serial.begin(9600);
myservo.attach(8);
}
void loop() {
sensor = ultrasonic.distanceRead();
if (sensor<100){
//Serial.println(sensor);
Serial.write(sensor);
}
// angle = Serial.read();
// Serial.println(angle, DEC);
// myservo.write(angle);
delay(300);
}
<file_sep>/*
This sketch was modified by the second easing code example
created by <NAME>
for Phsical Computing Week 5 class.
https://github.com/phillipdavidstearns/PGTE_5585_F2019/blob/master/notes/week5/arduino/easing/easing.ino
*/
#define LED_PIN 5
#define SENSOR_PIN A0
float progress;
int value;
void setup() {
Serial.begin(9600);
progress = 0.0;
value = 0;
pinMode(LED_PIN, OUTPUT);
}
void loop() {
int sensor = analogRead(SENSOR_PIN);
// int current_time = millis();
if (sensor > 500) {
value = int(255 * (0.5 * sin(progress) + 0.5));
progress+=0.2;
}
analogWrite(LED_PIN, value);
Serial.println(progress);
Serial.println(value);
delay(100);
}
<file_sep>/*
This sketch was modified by the first and the second easing code examples
created by <NAME>
for Phsical Computing Week 5 class.
https://github.com/phillipdavidstearns/PGTE_5585_F2019/blob/master/notes/week5/arduino/easing/easing.ino
*/
#define LED_PIN 5
#define BUTTON_PIN 4
unsigned long trigger_time; // millis() returns an unsigned long
float progress, ease_time;
int value1, value2;
int value;
void setup() {
Serial.begin(9600);
trigger_time = 0;
ease_time = 2500.0;
progress = 0.0;
value = 0;
pinMode(LED_PIN, OUTPUT);
pinMode(BUTTON_PIN, INPUT_PULLUP);
}
void loop() {
  unsigned long current_time = millis();
if (buttonIsPressed() || progress > (PI * 3 / 2)) {
trigger_time = current_time;
progress = 0;
}
progress = (current_time - trigger_time) / ease_time;
value1 = int(255 * (pow(progress, 2)));
value2 = int(255 * (0.5 * sin(progress) + 0.5));
// Serial.println(value1);
// Serial.println(value2);
if (progress <= 0.96) {
value = value1;
}
if (progress > 0.96) {
value = value2;
}
analogWrite(LED_PIN, value);
Serial.println(progress);
Serial.println(value);
}
bool buttonIsPressed() {
boolean button_state = digitalRead(BUTTON_PIN);
  boolean button = false;
if (button_state == LOW) {
button = true;
}
  if (button_state == HIGH) {
button = false;
}
return button;
}
<file_sep>#define PIN_TRIG 12
#define PIN_ECHO 11
float cm;
float temp;
void setup() {
Serial.begin(9600);
pinMode(PIN_TRIG, OUTPUT);
pinMode(PIN_ECHO, INPUT);
}
void loop() {
digitalWrite(PIN_TRIG, LOW);
delayMicroseconds(2);
digitalWrite(PIN_TRIG, HIGH);
delayMicroseconds(10);
digitalWrite(PIN_TRIG, LOW);
temp = float(pulseIn(PIN_ECHO, HIGH));
cm = (temp * 17 )/1000;
Serial.print("Echo = ");
Serial.print(temp);
Serial.print(", Distance = ");
Serial.print(cm);
Serial.println("cm");
delay(300);
Serial.println(cm);
delay(100);
}
<file_sep>#define PIN_TRIG 12
#define PIN_ECHO 11
#define LED_PIN 7
float cm;
float temp;
char LEDstate;
void setup() {
Serial.begin(9600);
pinMode(PIN_TRIG, OUTPUT);
pinMode(PIN_ECHO, INPUT);
pinMode(LED_PIN, OUTPUT);
}
void loop() {
digitalWrite(PIN_TRIG, LOW);
delayMicroseconds(2);
digitalWrite(PIN_TRIG, HIGH);
delayMicroseconds(10);
digitalWrite(PIN_TRIG, LOW);
temp = float(pulseIn(PIN_ECHO, HIGH));
cm = (temp * 17 )/1000;
// Serial.print("Echo = ");
// Serial.print(temp);
// Serial.print(", Distance = ");
// Serial.print(cm);
// Serial.println("cm");
// delay(300);
Serial.println(cm);
delay(100);
checkUART();
}
void checkUART(){
while(Serial.available()>0){
LEDstate = Serial.read();
if(LEDstate == 'H'){
digitalWrite(LED_PIN, HIGH);
}
if(LEDstate == 'L'){
digitalWrite(LED_PIN,LOW);
}
delay(50);
}
}
<file_sep>#define P1A 3
#define P2A 7
#define EN12 8
#define KnobPin 4
boolean ButtonState;
float Speed;
void setup() {
Serial.begin(9600);
pinMode(P1A, OUTPUT);
pinMode(P2A, OUTPUT);
pinMode(EN12, OUTPUT);
pinMode(KnobPin, INPUT);
}
void loop() {
  Speed = analogRead(KnobPin);
Speed = map(Speed, 0, 1024, 0, 255);
Serial.println("CW");
digitalWrite(EN12, HIGH);
analogWrite(P1A , Speed);
digitalWrite(P2A, LOW);
Serial.println(Speed);
delay(1000);
}
<file_sep>#define relayPin 7
#define trigPin 9
#define echoPin 10
#define buttonPin 4
long duration;
int distance;
boolean flag = false;
int flag1 = 0;
void setup() {
// put your setup code here, to run once:
pinMode(relayPin, OUTPUT);
pinMode(trigPin, OUTPUT);
pinMode(echoPin, INPUT);
pinMode(buttonPin, INPUT_PULLUP);
Serial.begin(9600);
}
void loop() {
// put your main code here, to run repeatedly:
if (digitalRead(buttonPin) == LOW) {
if (flag == false) {
flag = true;
}
}
if (flag == true) {
digitalWrite(trigPin, LOW);
delayMicroseconds(2);
digitalWrite(trigPin, HIGH);
delayMicroseconds(10);
digitalWrite(trigPin, LOW);
duration = pulseIn(echoPin, HIGH);
distance = duration * 0.034 / 2;
// Serial.print("Distance: ");
// Serial.println(distance);
if (distance < 100 && flag1 == 0) {
flag1 = 1;
Serial.println(flag1);
// delay(800);
      digitalWrite(relayPin, HIGH);
      delay(1000);
      digitalWrite(relayPin, LOW);
flag1 = 2;
}
if (distance > 150 && distance < 400) {
      digitalWrite(relayPin, LOW);
flag1 = 0;
Serial.println(flag1);
}
}
}
<file_sep>#define P1A 2
#define P2A 7
#define EN12 8
#define ButtonPin 4
boolean ButtonState;
void setup() {
Serial.begin(9600);
pinMode(P1A, OUTPUT);
pinMode(P2A, OUTPUT);
pinMode(EN12, OUTPUT);
pinMode(ButtonPin, INPUT_PULLUP);
}
void loop() {
ButtonState = digitalRead(ButtonPin);
if(ButtonState == HIGH){
Serial.println("CW");
digitalWrite(EN12 ,HIGH);
digitalWrite(P1A,HIGH);
digitalWrite(P2A,LOW);
}else{
Serial.println("CCW");
digitalWrite(EN12 ,HIGH);
digitalWrite(P1A,LOW);
digitalWrite(P2A,HIGH);
}
delay(100);
}
<file_sep>#define P1A 3
#define P2A 7
#define EN12 8
#define KnobPin 4
#define switch1Pin 12
#define switch2Pin 13
#define LEDPin 2
float knobValue;
int counter;
unsigned long delay_time = 50;
unsigned long last_time = 0;
boolean led_state = false;
unsigned long start_time = 0;
unsigned long duration = 5000;
boolean flag = false;
void setup() {
Serial.begin(9600);
pinMode(P1A, OUTPUT);
pinMode(P2A, OUTPUT);
pinMode(EN12, OUTPUT);
pinMode(KnobPin, INPUT);
pinMode(switch1Pin, INPUT_PULLUP);
pinMode(switch2Pin, INPUT_PULLUP);
counter = 0;
pinMode(LEDPin, OUTPUT);
}
void loop() {
float knobValue = analogRead(KnobPin);
if (knobValue < 500) {
digitalWrite(EN12, HIGH);
digitalWrite(P1A , HIGH);
digitalWrite(P2A, LOW);
}
if (knobValue >= 500 && knobValue <= 550) {
digitalWrite(EN12 , LOW);
Serial.println(" Motor Stopped");
}
if (knobValue > 550) {
digitalWrite(EN12 , HIGH);
digitalWrite(P1A, LOW);
digitalWrite(P2A, HIGH);
}
boolean switch1State = digitalRead(switch1Pin);
boolean switch2State = digitalRead(switch2Pin);
if (switch1State == LOW && switch2State == HIGH) {
counter--;
}
if (switch1State == HIGH && switch2State == LOW) {
counter++;
}
  unsigned long current_time = millis();
if (counter >= 5) {
if(flag == false){
start_time = current_time;
flag = true;
}
if (current_time - last_time > delay_time) {
digitalWrite(LEDPin, !led_state);
led_state = !led_state;
last_time = current_time;
}
if(current_time - start_time > duration){
counter = 0;
flag = false;
}
}else if(counter<5){
digitalWrite(LEDPin, LOW);
}
Serial.println(knobValue);
Serial.println(counter);
delay(1000);
}
<file_sep>#define knob 0
int knobValue;
void setup() {
// Begin the Serial at 9600 Baud
Serial.begin(9600);
pinMode(7,OUTPUT);
pinMode(8,OUTPUT);
pinMode(9,OUTPUT);
pinMode(10,OUTPUT);
}
void loop() {
knobValue = analogRead(knob);
int value = map(knobValue,0,1023,0,180);
Serial.write(value);
delay(300);
if(Serial.available()){
int receive = Serial.read();
Serial.println(receive,DEC);
if(receive>0&&receive<10){
digitalWrite(7,HIGH);
digitalWrite(8,HIGH);
digitalWrite(9,HIGH);
digitalWrite(10,HIGH);
}else if(receive>=10&&receive<20){
digitalWrite(7,HIGH);
digitalWrite(8,HIGH);
digitalWrite(9,HIGH);
digitalWrite(10,LOW);
} else if(receive>=20&&receive<30){
digitalWrite(7,HIGH);
digitalWrite(8,HIGH);
digitalWrite(9,LOW);
digitalWrite(10,LOW);
} else if(receive>=30&&receive<40){
digitalWrite(7,HIGH);
digitalWrite(8,LOW);
digitalWrite(9,LOW);
digitalWrite(10,LOW);
}
}
}
<file_sep>/*
This sketch was modified by the fisrt easing code example
created by <NAME>
for Phsical Computing Week 5 class.
https://github.com/phillipdavidstearns/PGTE_5585_F2019/blob/master/notes/week5/arduino/easing/easing.ino
*/
#define LED_PIN 3
#define PIN_TRIG 12
#define PIN_ECHO 11
float cm;
float temp;
float currentValue;
float targetValue;
float easingAmount;
void setup() {
currentValue = 0;
easingAmount = .225;
Serial.begin(9600);
pinMode(PIN_TRIG, OUTPUT);
pinMode(PIN_ECHO, INPUT);
}
void loop() {
digitalWrite(PIN_TRIG, LOW);
delayMicroseconds(2);
digitalWrite(PIN_TRIG, HIGH);
delayMicroseconds(10);
digitalWrite(PIN_TRIG, LOW);
temp = float(pulseIn(PIN_ECHO, HIGH));
cm = (temp * 17 ) / 1000;
Serial.println(cm);
if (cm > 30) {
easingOff();
} else {
easingOn();
}
delay(300);
}
void easingOn() {
targetValue = 255;
currentValue += easingAmount * ( targetValue - currentValue );
analogWrite(LED_PIN, (int) currentValue);
delay(250);
}
void easingOff() {
targetValue = 0;
currentValue -= easingAmount * ( currentValue - targetValue );
analogWrite(LED_PIN, (int) currentValue);
delay(250);
}
| 24fc69ae918fce11d2d71eae87ddefd8dc5a5147 | [
"C++"
] | 11 | C++ | Wennnjunnnn/physical-computing | 9591015bfdccba608a42f40add1fbb8b693f5bc5 | 12ce3e12b2b46b2e64dccf0a6fc0ed4065283084 | |
refs/heads/master | <repo_name>boczeratul/30<file_sep>/app/src/main/java/com/yahoo/apps/thirty/activities/TopicsActivity.java
package com.yahoo.apps.thirty.activities;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import com.astuetz.PagerSlidingTabStrip;
import com.loopj.android.http.JsonHttpResponseHandler;
import com.yahoo.apps.thirty.R;
import com.yahoo.apps.thirty.backbone.TumblrApplication;
import com.yahoo.apps.thirty.backbone.TumblrClient;
import com.yahoo.apps.thirty.fragments.topic.HotTopicsFragment;
import com.yahoo.apps.thirty.fragments.topic.NewTopicsFragment;
import org.apache.http.Header;
import org.json.JSONObject;
public class TopicsActivity extends ActionBarActivity {
private HotTopicsFragment fHotTopics;
private NewTopicsFragment fNewTopics;
private TumblrClient tumblrClient;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_topics);
getSupportActionBar().hide();
tumblrClient = TumblrApplication.getTumblrClient();
if (savedInstanceState == null) {
ViewPager viewPager = (ViewPager) findViewById(R.id.viewpager);
viewPager.setAdapter(new TopicsPagerAdapter(getSupportFragmentManager()));
PagerSlidingTabStrip tabStrip = (PagerSlidingTabStrip) findViewById(R.id.tabs);
tabStrip.setViewPager(viewPager);
}
}
private void postStatus(String targetId, String content) {
tumblrClient.postStatus(content, targetId, new JsonHttpResponseHandler() {
@Override
public void onSuccess(int statusCode, Header[] headers, JSONObject json) {
Log.i("ERROR", json.toString());
}
@Override
public void onFailure(int statusCode, Header[] headers, Throwable throwable, JSONObject response) {
Log.i("ERROR", response.toString());
}
@Override
public void onFailure(int statusCode, Header[] headers, String response, Throwable throwable) {
Log.i("ERROR", response);
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_topics, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_post) {
Intent i = new Intent(TopicsActivity.this, ComposeActivity.class);
i.putExtra("targetId", "");
startActivity(i);
return true;
// } else if (id == R.id.action_profile) {
// Intent i = new Intent(this, ProfileActivity.class);
// i.putExtra("user", user);
// startActivity(i);
// return true;
}
return super.onOptionsItemSelected(item);
}
public void showCompose(View view) {
Intent i = new Intent(TopicsActivity.this, ComposeActivity.class);
i.putExtra("targetId", "");
startActivity(i);
}
public class TopicsPagerAdapter extends FragmentPagerAdapter {
private String tabItems[] = {"Hot Topics", "New Topics"};
public TopicsPagerAdapter(android.support.v4.app.FragmentManager fm) {
super(fm);
fHotTopics = new HotTopicsFragment();
fNewTopics = new NewTopicsFragment();
}
@Override
public int getCount() {
return tabItems.length;
}
@Override
public Fragment getItem(int position) {
if (position == 0) {
return fHotTopics;
} else if (position == 1) {
return fNewTopics;
} else {
return null;
}
}
@Override
public CharSequence getPageTitle(int position) {
return tabItems[position];
}
}
}
| 8ebf392c0731951a8fc1ff15bffd723fdcca7b9b | [
"Java"
] | 1 | Java | boczeratul/30 | 66821a134a7bcb17c398c98fa2e4599bd75b5580 | f914ffaf82cc819b03fc0068a374e6bcc986d30b | |
refs/heads/master | <repo_name>Pyrrus/scrabble-scorer<file_sep>/src/main/java/scrabbleScorer.java
class scrabbleScorer {
private long total;
    public scrabbleScorer() {
total = 0;
}
public long calculateScore(String word) {
String lower = word.toLowerCase();
for (int i = 0; i < lower.length(); i++) {
char at = lower.charAt(i);
if (at == 'a' || at == 'e' || at == 'i' || at == 'o' || at == 'u' || at == 'l' || at == 'n' || at == 'r' || at == 's' || at == 't') {
total += 1;
} else if (at == 'd' || at == 'g') {
total += 2;
} else if (at == 'b' || at == 'c' || at == 'm' || at == 'p') {
total += 3;
} else if (at == 'f' || at == 'h' || at == 'v' || at == 'w' || at == 'y') {
total += 4;
} else if (at == 'k') {
total += 5;
} else if (at == 'j' || at == 'x') {
total += 8;
} else if (at == 'q' || at == 'z') {
total += 10;
}
}
return total;
}
}
| feeb29171752f99572e95dab64ffe075f81e264b | [
"Java"
] | 1 | Java | Pyrrus/scrabble-scorer | d898c3ebb561f8fd6544f19a456556a54f8d66ef | b9d4a7a1495aebe481ba1cad9f40ed12f437ceea | |
refs/heads/master | <file_sep>LiteSpeed Exporter golang code root library directory
----
This directory holds the Go libraries that belong to and are used by litespeed_exporter.
<file_sep>package collector
import (
"github.com/prometheus/client_golang/prometheus"
"github.com/myokoo/litespeed_exporter/pkg/rtreport"
)
var (
networkLabel = []string{"scheme", "stream"}
nName = "network"
)
type network struct{}
func (n network) scrape(ch chan<- prometheus.Metric, report *rtreport.LiteSpeedReport) {
for key, value := range report.NetworkReport {
switch key {
case rtreport.NetworkReportKeyBpsIn:
ch <- newMetric(
namespace, nName, "throughput",
"Current ingress network throughput (http).",
networkLabel, prometheus.GaugeValue, value, "http", "in",
)
case rtreport.NetworkReportKeyBpsOut:
ch <- newMetric(
namespace, nName, "throughput",
"Current egress network throughput (http).",
networkLabel, prometheus.GaugeValue, value, "http", "out",
)
case rtreport.NetworkReportKeySslBpsIn:
ch <- newMetric(
namespace, nName, "throughput",
"Current ingress network throughput (https).",
networkLabel, prometheus.GaugeValue, value, "https", "in",
)
case rtreport.NetworkReportKeySslBpsOut:
ch <- newMetric(
namespace, nName, "throughput",
"Current egress network throughput (https).",
networkLabel, prometheus.GaugeValue, value, "https", "out",
)
}
}
}
<file_sep>package collector
import (
"github.com/prometheus/client_golang/prometheus"
"github.com/myokoo/litespeed_exporter/pkg/rtreport"
)
// Scraper is a minimal interface that allows you to add new prometheus metrics to litespeed_exporter.
type Scraper interface {
scrape(ch chan<- prometheus.Metric, report *rtreport.LiteSpeedReport)
}
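
// Example (illustrative sketch, not part of the upstream source): a new metric
// source only needs to satisfy Scraper. Because scrape() is unexported, the
// implementation has to live in this collector package, and it must also be
// appended to the scrapers slice built in New() (see collector.go) before the
// exporter will call it. The versionInfo type and the "server_version_info"
// metric name below are hypothetical; newMetric and namespace are the helpers
// defined in collector.go, and report.Version is an existing field of
// rtreport.LiteSpeedReport.
type versionInfo struct{}

func (v versionInfo) scrape(ch chan<- prometheus.Metric, report *rtreport.LiteSpeedReport) {
	// Publish the reported server version as a constant gauge that carries the
	// version string in a label (a common "info"-style metric pattern).
	ch <- newMetric(
		namespace, "server", "version_info",
		"LiteSpeed server version taken from the real-time report.",
		[]string{"version"}, prometheus.GaugeValue, 1, report.Version,
	)
}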
<file_sep>package main
import (
"net/http"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/prometheus/common/log"
"github.com/prometheus/common/version"
"gopkg.in/alecthomas/kingpin.v2"
"github.com/myokoo/litespeed_exporter/collector"
"github.com/myokoo/litespeed_exporter/pkg/rtreport"
)
var (
listenAddress = kingpin.Flag(
"web.listen-address",
"Listen address for web interface and telemetry.",
).Default(":9104").String()
metricPath = kingpin.Flag(
"web.telemetry-path",
"URL path under which to expose metrics.",
).Default("/metrics").String()
reportPath = kingpin.Flag(
"lsws.report-path",
"Filesystem path under which exist lsws real-time statistics reports.",
).Default(rtreport.DefaultReportPath).String()
)
func main() {
// Parse flags.
log.AddFlags(kingpin.CommandLine)
kingpin.Version(version.Print("litespeed_exporter"))
kingpin.HelpFlag.Short('h')
kingpin.Parse()
// landingPage contains the HTML served at '/'.
// TODO: Make this nicer and more informative.
var landingPage = []byte(`<html>
<head><title>LiteSpeed exporter</title></head>
<body>
<h1>LiteSpeed exporter</h1>
<p><a href='` + *metricPath + `'>Metrics</a></p>
</body>
</html>
`)
log.Infoln("Starting litespeed_exporter", version.Info())
log.Infoln("Build context", version.BuildContext())
log.Infoln("Listening on", *listenAddress)
exporter := collector.New(reportPath)
prometheus.MustRegister(exporter)
prometheus.MustRegister(version.NewCollector("litespeed_exporter"))
http.Handle(*metricPath, promhttp.Handler())
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { w.Write(landingPage) })
log.Fatal(http.ListenAndServe(*listenAddress, nil))
}
<file_sep>package collector
import (
"github.com/prometheus/client_golang/prometheus"
"github.com/myokoo/litespeed_exporter/pkg/rtreport"
)
var (
extAppLabels = []string{"type", "vhost", "extapp_name"}
eName = "external_application"
)
type extApp struct{}
func (e extApp) scrape(ch chan<- prometheus.Metric, report *rtreport.LiteSpeedReport) {
for typeName, vhostMap := range report.ExtAppReports {
for vhost, extAppMap := range vhostMap {
for extAppName, valueMap := range extAppMap {
for key, value := range valueMap {
switch key {
case rtreport.ExtAppKeyMaxConn:
ch <- newMetric(
namespace, eName, "max_connections",
"The max possible connections value of external application.",
extAppLabels, prometheus.GaugeValue, value, typeName, vhost, extAppName,
)
case rtreport.ExtAppKeyEffectiveMaxConn:
ch <- newMetric(
namespace, eName, "effective_max_connections",
"The max possible effective connections value of external application.",
extAppLabels, prometheus.GaugeValue, value, typeName, vhost, extAppName,
)
case rtreport.ExtAppKeyPoolSize:
ch <- newMetric(
namespace, eName, "pool_size",
"The pool size by external application.",
extAppLabels, prometheus.GaugeValue, value, typeName, vhost, extAppName,
)
case rtreport.ExtAppKeyInUseConn:
ch <- newMetric(
namespace, eName, "connection_used",
"The number of used connections by external application.",
extAppLabels, prometheus.GaugeValue, value, typeName, vhost, extAppName,
)
case rtreport.ExtAppKeyIdleConn:
ch <- newMetric(
namespace, eName, "connection_idles",
"The number of idle connections by external application.",
extAppLabels, prometheus.GaugeValue, value, typeName, vhost, extAppName,
)
case rtreport.ExtAppKeyWaitQueue:
ch <- newMetric(
namespace, eName, "wait_queues",
"The number of wait queues by external application.",
extAppLabels, prometheus.GaugeValue, value, typeName, vhost, extAppName,
)
case rtreport.ExtAppKeyReqPerSec:
ch <- newMetric(
namespace, eName, "requests_per_sec",
"The total requests per second by external application.",
extAppLabels, prometheus.GaugeValue, value, typeName, vhost, extAppName,
)
case rtreport.ExtAppKeyReqTotal:
ch <- newMetric(
namespace, eName, "requests_total",
"The total requests by external application.",
extAppLabels, prometheus.GaugeValue, value, typeName, vhost, extAppName,
)
}
}
}
}
}
}
<file_sep>package rtreport
import (
"errors"
"fmt"
"strconv"
"strings"
)
type LineParser interface {
parse(report *LiteSpeedReport)
}
func NewLineParser(lineTxt string) LineParser {
var v LineParser
switch {
case strings.HasPrefix(lineTxt, "VERSION:"):
v = versionLine(lineTxt)
case strings.HasPrefix(lineTxt, "UPTIME:"):
v = uptimeLine(lineTxt)
case strings.HasPrefix(lineTxt, "BPS_IN:"):
v = networkLine(lineTxt)
case strings.HasPrefix(lineTxt, "MAXCONN:"):
v = connectionLine(lineTxt)
case strings.HasPrefix(lineTxt, "REQ_RATE"):
v = virtualHostLine(lineTxt)
case strings.HasPrefix(lineTxt, "EXTAPP"):
v = extAppLine(lineTxt)
default:
v = ignoreLine(lineTxt)
}
return v
}
type versionLine string
// VERSION: LiteSpeed Web Server/Enterprise/x.x.x to x.x.x
func (v versionLine) parse(report *LiteSpeedReport) {
lineText := string(v)
if len(lineText) < 10 {
report.error = newTooShortParseLineError(lineText)
return
}
s := strings.Split(lineText, "/")
report.Version = s[len(s)-1]
}
type uptimeLine string
// parse UPTIME: xx:xx:xx
func (u uptimeLine) parse(report *LiteSpeedReport) {
lineText := string(u)
if len(lineText) < 16 {
report.error = newTooShortParseLineError(lineText)
return
}
v := strings.Split(lineText[8:], ":")
if len(v) != 3 {
report.error = errors.New(fmt.Sprintf("%s: Expected 3 parts after split, got %d.", lineText[8:], len(v)))
return
}
h, _ := strconv.ParseUint(v[0], 10, 64)
m, _ := strconv.ParseUint(v[1], 10, 64)
s, _ := strconv.ParseUint(v[2], 10, 64)
report.Uptime = float64((h * 60 * 60) + (m * 60) + s)
return
}
type networkLine string
// parse BPS_IN: x, BPS_OUT: x, SSL_BPS_IN: x, SSL_BPS_OUT: x
func (n networkLine) parse(report *LiteSpeedReport) {
report.NetworkReport, report.error = convertStringToMap(string(n))
}
type connectionLine string
// parse MAXCONN: 10000, MAXSSL_CONN: 5000, PLAINCONN: 1, AVAILCONN: 9999, IDLECONN: 0, SSLCONN: 0, AVAILSSL: 5000
func (c connectionLine) parse(report *LiteSpeedReport) {
report.ConnectionReport, report.error = convertStringToMap(string(c))
}
type virtualHostLine string
// parse REQ_RATE [xxxx]: REQ_PROCESSING: 1, REQ_PER_SEC: 0.1, TOT_REQS: 152, PUB_CACHE_HITS_PER_SEC: 0.0, TOTAL_PUB_CACHE_HITS: 0, PRIVATE_CACHE_HITS_PER_SEC: 0.0, TOTAL_PRIVATE_CACHE_HITS: 0, STATIC_HITS_PER_SEC: 0.0, TOTAL_STATIC_HITS: 47
func (r virtualHostLine) parse(report *LiteSpeedReport) {
lineText := string(r)
// pick up vhostName
s := pickUpStringName(lineText)
if len(s) < 1 {
report.error = errors.New(fmt.Sprintf("%s: Unable to parse VirtualHostName.", lineText))
return
}
vhName := s[0]
if vhName == "" {
vhName = "Server"
}
i := strings.Index(lineText, "]:")
report.VirtualHostReport[vhName], report.error = convertStringToMap(lineText[i+2:])
}
type extAppLine string
// parse EXTAPP [xxxx] [xxxx] [xxxx]: CMAXCONN: 1000, EMAXCONN: 1000, POOL_SIZE: 1, INUSE_CONN: 1, IDLE_CONN: 0, WAITQUE_DEPTH: 0, REQ_PER_SEC: 0.0, TOT_REQS: 0
func (e extAppLine) parse(report *LiteSpeedReport) {
lineText := string(e)
// pick up ExtAppType, vhostName, ExtAppName
s := pickUpStringName(lineText)
if len(s) < 3 {
report.error = errors.New(fmt.Sprintf("%s: Unable to parse ExtAppType, VirtualHostName, ExtAppName.", lineText))
return
}
vhostName := s[1]
if vhostName == "" {
vhostName = "Server"
}
i := strings.Index(lineText, "]:")
var m map[string]float64
m, report.error = convertStringToMap(lineText[i+2:])
if _, exist := report.ExtAppReports[s[0]]; !exist {
report.ExtAppReports[s[0]] = map[string]map[string]map[string]float64{vhostName: {s[2]: m}}
} else if _, exit := report.ExtAppReports[s[0]][vhostName]; !exit {
report.ExtAppReports[s[0]][vhostName] = map[string]map[string]float64{s[2]: m}
} else {
report.ExtAppReports[s[0]][vhostName][s[2]] = m
}
}
type ignoreLine string
// does not parse.
func (i ignoreLine) parse(report *LiteSpeedReport) {
}
// convert "xxxx: 1234, oooo: 4321" strings to map[string]float64{"xxxx":1234, "oooo":4321}
func convertStringToMap(lineText string) (map[string]float64, error) {
m := make(map[string]float64)
keyValues := strings.Split(lineText, ",")
for _, keyValue := range keyValues {
s := strings.Split(keyValue, ":")
if len(s) < 2 {
return nil, errors.New(fmt.Sprintf("%s: Unable to split item to key/value.", keyValue))
}
var err error
if m[strings.TrimSpace(s[0])], err = strconv.ParseFloat(strings.TrimSpace(s[1]), 64); err != nil {
return nil, errors.New(fmt.Sprintf("%s: Unable to convert string to float64.", strings.TrimSpace(s[1])))
}
}
return m, nil
}
// pick up "[]string{"oooo", "oooo"}" from "XXXX [oooo] [oooo]: xxxxx"
func pickUpStringName(lineText string) []string {
var s []string
for i := strings.Count(lineText, "["); i > 0; i-- {
startIndex := strings.Index(lineText, "[")
endIndex := strings.Index(lineText, "]")
s = append(s, strings.TrimSpace(lineText[startIndex+1:endIndex]))
lineText = lineText[endIndex+1:]
}
return s
}
// create parse line too short error.
func newTooShortParseLineError(s string) error {
return errors.New(fmt.Sprintf("%s: Parsed line too short.", s))
}
<file_sep>package collector
import (
"github.com/prometheus/client_golang/prometheus"
"github.com/myokoo/litespeed_exporter/pkg/rtreport"
)
var (
connectionLabel = []string{"scheme"}
cName = "server_connection"
)
type connection struct{}
func (c connection) scrape(ch chan<- prometheus.Metric, report *rtreport.LiteSpeedReport) {
for key, value := range report.ConnectionReport {
switch key {
case rtreport.ConnectionReportKeyMaxConn:
ch <- newMetric(
namespace, cName, "max",
"The maximum http connections value of server.",
connectionLabel, prometheus.GaugeValue, value, "http",
)
case rtreport.ConnectionReportKeyMaxConnSsl:
ch <- newMetric(
namespace, cName, "max",
"The maximum https connections value of server.",
connectionLabel, prometheus.GaugeValue, value, "https",
)
case rtreport.ConnectionReportKeyIdleConn:
ch <- newMetric(
namespace, cName, "idle",
"The current idle connections value of server.",
nil, prometheus.GaugeValue, value,
)
case rtreport.ConnectionReportKeyUsedConn:
ch <- newMetric(
namespace, cName, "used",
"The current number of used http connections to server.",
connectionLabel, prometheus.GaugeValue, value, "http",
)
case rtreport.ConnectionReportKeyUsedConnSsl:
ch <- newMetric(
namespace, cName, "used",
"The current number of used https connections to server.",
connectionLabel, prometheus.GaugeValue, value, "https",
)
}
}
}
<file_sep>package rtreport
// mergeSingleMap adds every value in b into a under the same key (a is modified in place).
func mergeSingleMap(a, b map[string]float64) {
for key, value := range b {
if _, exist := a[key]; !exist {
a[key] = value
} else {
a[key] = a[key] + value
}
}
}
// mergeDoubleMap merges two-level nested maps, summing the leaf values into a.
func mergeDoubleMap(a, b map[string]map[string]float64) {
for key, value := range b {
if _, exist := a[key]; !exist {
a[key] = value
} else {
mergeSingleMap(a[key], b[key])
}
}
}
// mergeTripleMap merges three-level nested maps, summing the leaf values into a.
func mergeTripleMap(a, b map[string]map[string]map[string]float64) {
for key, value := range b {
if _, exist := a[key]; !exist {
a[key] = value
} else {
mergeDoubleMap(a[key], b[key])
}
}
}
// mergeQuadrupleMap merges four-level nested maps, summing the leaf values into a.
func mergeQuadrupleMap(a, b map[string]map[string]map[string]map[string]float64) {
for key, value := range b {
if _, exist := a[key]; !exist {
a[key] = value
} else {
mergeTripleMap(a[key], b[key])
}
}
}
<file_sep>module github.com/myokoo/litespeed_exporter
go 1.16
require (
github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/go-cmp v0.5.5
github.com/google/go-querystring v1.1.0 // indirect
github.com/prometheus/client_golang v1.11.0
github.com/prometheus/common v0.29.0
github.com/prometheus/promu v0.12.0 // indirect
go.uber.org/atomic v1.8.0 // indirect
golang.org/x/net v0.0.0-20210610132358-84b48f89b13b // indirect
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c // indirect
golang.org/x/sys v0.0.0-20210608053332-aa57babbf139 // indirect
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
google.golang.org/appengine v1.6.7 // indirect
gopkg.in/alecthomas/kingpin.v2 v2.2.6
gopkg.in/yaml.v2 v2.4.0 // indirect
)
replace github.com/prometheus/common v0.29.0 => github.com/prometheus/common v0.26.0
<file_sep>package rtreport
import (
"reflect"
"testing"
"github.com/google/go-cmp/cmp"
)
func TestNewLineParser(t *testing.T) {
tests := []struct {
name string
args string
want LineParser
}{
{
name: "ok_versionLine",
args: "VERSION: LiteSpeed Web Server/Enterprise/5.8",
want: versionLine("VERSION: LiteSpeed Web Server/Enterprise/5.8"),
},
{
name: "ok_uptimeLine",
args: "UPTIME: 03:02:01",
want: uptimeLine("UPTIME: 03:02:01"),
},
{
name: "ok_networkLine",
args: "BPS_IN: 1, BPS_OUT: 2, SSL_BPS_IN: 3, SSL_BPS_OUT: 4",
want: networkLine("BPS_IN: 1, BPS_OUT: 2, SSL_BPS_IN: 3, SSL_BPS_OUT: 4"),
},
{
name: "ok_connectionLine",
args: "MAXCONN: 10000, MAXSSL_CONN: 5000, PLAINCONN: 1, AVAILCONN: 9999, IDLECONN: 0, SSLCONN: 0, AVAILSSL: 5000",
want: connectionLine("MAXCONN: 10000, MAXSSL_CONN: 5000, PLAINCONN: 1, AVAILCONN: 9999, IDLECONN: 0, SSLCONN: 0, AVAILSSL: 5000"),
},
{
name: "ok_requestLine",
args: "REQ_RATE [hoge.com]: REQ_PROCESSING: 1, REQ_PER_SEC: 0.1, TOT_REQS: 152, PUB_CACHE_HITS_PER_SEC: 0.0, TOTAL_PUB_CACHE_HITS: 0, " +
"PRIVATE_CACHE_HITS_PER_SEC: 0.0, TOTAL_PRIVATE_CACHE_HITS: 0, STATIC_HITS_PER_SEC: 0.0, TOTAL_STATIC_HITS: 47",
want: virtualHostLine("REQ_RATE [hoge.com]: REQ_PROCESSING: 1, REQ_PER_SEC: 0.1, TOT_REQS: 152, PUB_CACHE_HITS_PER_SEC: 0.0, " +
"TOTAL_PUB_CACHE_HITS: 0, PRIVATE_CACHE_HITS_PER_SEC: 0.0, TOTAL_PRIVATE_CACHE_HITS: 0, STATIC_HITS_PER_SEC: 0.0, TOTAL_STATIC_HITS: 47"),
},
{
name: "ok_extAppLine",
args: "EXTAPP [LSAPI] [hoge.com] [hoge.com_php7.3]: CMAXCONN: 1000, EMAXCONN: 1000, POOL_SIZE: 1, " +
"INUSE_CONN: 1, IDLE_CONN: 0, WAITQUE_DEPTH: 0, REQ_PER_SEC: 0.0, TOT_REQS: 0",
want: extAppLine("EXTAPP [LSAPI] [hoge.com] [hoge.com_php7.3]: CMAXCONN: 1000, EMAXCONN: 1000, POOL_SIZE: 1, " +
"INUSE_CONN: 1, IDLE_CONN: 0, WAITQUE_DEPTH: 0, REQ_PER_SEC: 0.0, TOT_REQS: 0"),
},
{
name: "ok_ignoreLine",
args: "BLOCKED_IP:",
want: ignoreLine("BLOCKED_IP:"),
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := NewLineParser(tt.args); cmp.Equal(got, tt.want) != true {
t.Errorf("NewLineParser() = %v, want %v, type: %v, %v", got, tt.want, reflect.TypeOf(got), reflect.TypeOf(tt.want))
}
})
}
}
func Test_versionLine_parse(t *testing.T) {
tests := []struct {
name string
v versionLine
args LiteSpeedReport
want string
wantErr bool
}{
{
name: "ok",
v: versionLine("VERSION: LiteSpeed Web Server/Enterprise/5.8.1"),
args: LiteSpeedReport{},
want: "5.8.1",
wantErr: false,
},
{
name: "ng",
v: versionLine("5.8"),
args: LiteSpeedReport{},
want: "",
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tt.v.parse(&tt.args)
if (tt.args.error != nil) != tt.wantErr {
t.Errorf("(versionLine)parse() error = %v, wantErr %v", tt.args.error, tt.wantErr)
}
if !tt.wantErr && tt.args.Version != tt.want {
t.Errorf("(versionLine)parse() does not match. got = %v, want = %v", tt.args.Version, tt.want)
}
})
}
}
func Test_uptimeLine_parse(t *testing.T) {
tests := []struct {
name string
u uptimeLine
args LiteSpeedReport
want float64
wantErr bool
}{
{
name: "ok",
u: uptimeLine("UPTIME: 03:02:01"),
args: LiteSpeedReport{},
want: 10921,
wantErr: false,
},
{
name: "ng",
u: uptimeLine("03:02:01"),
args: LiteSpeedReport{},
want: 0,
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tt.u.parse(&tt.args)
if (tt.args.error != nil) != tt.wantErr {
t.Errorf("(uptimeLine)parse() error = %v, wantErr %v", tt.args.error, tt.wantErr)
}
if !tt.wantErr && tt.args.Uptime != tt.want {
t.Errorf("(uptimeLine)parse() does not match. got = %v, want = %v", tt.args.Uptime, tt.want)
}
})
}
}
func Test_networkLine_parse(t *testing.T) {
tests := []struct {
name string
n networkLine
args LiteSpeedReport
want LiteSpeedReport
wantErr bool
}{
{
name: "ok",
n: networkLine("BPS_IN: 2, BPS_OUT: 1954, SSL_BPS_IN: 5, SSL_BPS_OUT: 3332"),
args: LiteSpeedReport{},
want: LiteSpeedReport{
NetworkReport: map[string]float64{
"BPS_IN": 2,
"BPS_OUT": 1954,
"SSL_BPS_IN": 5,
"SSL_BPS_OUT": 3332,
},
},
wantErr: false,
},
{
name: "ng",
n: networkLine("BPS_IN: 2 BPS_OUT: 1954"),
args: LiteSpeedReport{},
want: LiteSpeedReport{},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tt.n.parse(&tt.args)
if (tt.args.error != nil) != tt.wantErr {
t.Errorf("(networkLine)parse() error = %v, wantErr %v", tt.args.error, tt.wantErr)
}
if !tt.wantErr && !cmp.Equal(tt.args.NetworkReport, tt.want.NetworkReport) {
t.Errorf("(networkLine)parse() does not match. got = %v, want = %v", tt.args.NetworkReport, tt.want.NetworkReport)
}
})
}
}
func Test_connectionLine_parse(t *testing.T) {
tests := []struct {
name string
c connectionLine
args LiteSpeedReport
want LiteSpeedReport
wantErr bool
}{
{
name: "ok",
c: connectionLine("MAXCONN: 10000, MAXSSL_CONN: 5000, PLAINCONN: 100, AVAILCONN: 200, IDLECONN: 1, SSLCONN: 2, AVAILSSL: 3"),
args: LiteSpeedReport{},
want: LiteSpeedReport{
ConnectionReport: map[string]float64{
"MAXCONN": 10000,
"MAXSSL_CONN": 5000,
"PLAINCONN": 100,
"AVAILCONN": 200,
"IDLECONN": 1,
"SSLCONN": 2,
"AVAILSSL": 3,
},
},
wantErr: false,
},
{
name: "ng",
c: connectionLine("MAXCONN: 10000 MAXSSL_CONN: 5000, PLAINCONN: 100, AVAILCONN: 200, IDLECONN: 1, SSLCONN: 2"),
args: LiteSpeedReport{},
want: LiteSpeedReport{},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tt.c.parse(&tt.args)
if (tt.args.error != nil) != tt.wantErr {
t.Errorf("(connectionLine)parse() error = %v, wantErr %v", tt.args.error, tt.wantErr)
}
if !tt.wantErr && !cmp.Equal(tt.args.ConnectionReport, tt.want.ConnectionReport) {
t.Errorf("(connectionLine)parse() does not match. got = %v, want = %v", tt.args.ConnectionReport, tt.want.ConnectionReport)
}
})
}
}
func Test_ignoreLine_parse(t *testing.T) {
tests := []struct {
name string
i ignoreLine
args LiteSpeedReport
want LiteSpeedReport
wantErr bool
}{
{
name: "ok. not store",
i: "BLOCKED_IP:",
args: LiteSpeedReport{},
want: LiteSpeedReport{},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tt.i.parse(&tt.args)
if (tt.args.error != nil) != tt.wantErr {
t.Errorf("(ignoreLine)parse() error = %v, wantErr %v", tt.args.error, tt.wantErr)
}
if !tt.wantErr && !cmp.Equal(tt.args.ConnectionReport, tt.want.ConnectionReport) {
t.Errorf("(ignoreLine)parse() does not match. got = %v, want = %v", tt.args, tt.want)
}
})
}
}
func Test_requestLine_parse(t *testing.T) {
tests := []struct {
name string
r virtualHostLine
args LiteSpeedReport
want LiteSpeedReport
wantErr bool
}{
{
name: "ok_vhost",
r: virtualHostLine("REQ_RATE [hoge.jp]: REQ_PROCESSING: 1, REQ_PER_SEC: 0.1, TOT_REQS: 2, PUB_CACHE_HITS_PER_SEC: 0.2, TOTAL_PUB_CACHE_HITS: 3, " +
"PRIVATE_CACHE_HITS_PER_SEC: 0.3, TOTAL_PRIVATE_CACHE_HITS: 4, STATIC_HITS_PER_SEC: 0.4, TOTAL_STATIC_HITS: 5"),
args: LiteSpeedReport{},
want: LiteSpeedReport{
VirtualHostReport: map[string]map[string]float64{
"hoge.jp": {
"REQ_PROCESSING": 1,
"REQ_PER_SEC": 0.1,
"TOT_REQS": 2,
"PUB_CACHE_HITS_PER_SEC": 0.2,
"TOTAL_PUB_CACHE_HITS": 3,
"PRIVATE_CACHE_HITS_PER_SEC": 0.3,
"TOTAL_PRIVATE_CACHE_HITS": 4,
"STATIC_HITS_PER_SEC": 0.4,
"TOTAL_STATIC_HITS": 5,
},
},
},
wantErr: false,
},
{
name: "ok_vhost_and_port",
r: virtualHostLine("REQ_RATE [hoge.jp:80]: REQ_PROCESSING: 1, REQ_PER_SEC: 0.1, TOT_REQS: 2, PUB_CACHE_HITS_PER_SEC: 0.2, TOTAL_PUB_CACHE_HITS: 3, " +
"PRIVATE_CACHE_HITS_PER_SEC: 0.3, TOTAL_PRIVATE_CACHE_HITS: 4, STATIC_HITS_PER_SEC: 0.4, TOTAL_STATIC_HITS: 5"),
args: LiteSpeedReport{},
want: LiteSpeedReport{
VirtualHostReport: map[string]map[string]float64{
"hoge.jp:80": {
"REQ_PROCESSING": 1,
"REQ_PER_SEC": 0.1,
"TOT_REQS": 2,
"PUB_CACHE_HITS_PER_SEC": 0.2,
"TOTAL_PUB_CACHE_HITS": 3,
"PRIVATE_CACHE_HITS_PER_SEC": 0.3,
"TOTAL_PRIVATE_CACHE_HITS": 4,
"STATIC_HITS_PER_SEC": 0.4,
"TOTAL_STATIC_HITS": 5,
},
},
},
wantErr: false,
},
{
name: "ok_Server",
r: virtualHostLine("REQ_RATE []: REQ_PROCESSING: 2, REQ_PER_SEC: 0.3, TOT_REQS: 5, PUB_CACHE_HITS_PER_SEC: 0.6, TOTAL_PUB_CACHE_HITS: 3, " +
"PRIVATE_CACHE_HITS_PER_SEC: 0.3, TOTAL_PRIVATE_CACHE_HITS: 4, STATIC_HITS_PER_SEC: 0.4, TOTAL_STATIC_HITS: 5"),
args: LiteSpeedReport{},
want: LiteSpeedReport{
VirtualHostReport: map[string]map[string]float64{
"Server": {
"REQ_PROCESSING": 2,
"REQ_PER_SEC": 0.3,
"TOT_REQS": 5,
"PUB_CACHE_HITS_PER_SEC": 0.6,
"TOTAL_PUB_CACHE_HITS": 3,
"PRIVATE_CACHE_HITS_PER_SEC": 0.3,
"TOTAL_PRIVATE_CACHE_HITS": 4,
"STATIC_HITS_PER_SEC": 0.4,
"TOTAL_STATIC_HITS": 5,
},
},
},
wantErr: false,
},
{
name: "ng",
r: virtualHostLine("REQ_RATE [hoge.jp]:: REQ_PROCESSING: 1, REQ_PER_SEC: 0.1, TOT_REQS: 2, PUB_CACHE_HITS_PER_SEC: 0.2, TOTAL_PUB_CACHE_HITS: 3"),
args: LiteSpeedReport{},
want: LiteSpeedReport{},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tt.args.VirtualHostReport = make(map[string]map[string]float64)
tt.r.parse(&tt.args)
if (tt.args.error != nil) != tt.wantErr {
t.Errorf("(virtualHostLine)parse() error = %v, wantErr %v", tt.args.error, tt.wantErr)
}
if !tt.wantErr && !cmp.Equal(tt.args.VirtualHostReport, tt.want.VirtualHostReport) {
t.Errorf("(virtualHostLine)parse() does not match. got = %v, want = %v", tt.args.VirtualHostReport, tt.want.VirtualHostReport)
}
})
}
}
func Test_extAppLine_parse(t *testing.T) {
tests := []struct {
name string
e extAppLine
args LiteSpeedReport
want LiteSpeedReport
wantErr bool
}{
{
name: "ok_vhost",
e: extAppLine("EXTAPP [LSAPI] [fuga.com] [fuga.com_php73]: CMAXCONN: 1, EMAXCONN: 2, POOL_SIZE: 3, INUSE_CONN: 4, IDLE_CONN: 5, WAITQUE_DEPTH: 6, REQ_PER_SEC: 0.7, TOT_REQS: 8"),
args: LiteSpeedReport{},
want: LiteSpeedReport{
ExtAppReports: map[string]map[string]map[string]map[string]float64{
"LSAPI": {
"fuga.com": {
"fuga.com_php73": {
"CMAXCONN": 1,
"EMAXCONN": 2,
"POOL_SIZE": 3,
"INUSE_CONN": 4,
"IDLE_CONN": 5,
"WAITQUE_DEPTH": 6,
"REQ_PER_SEC": 0.7,
"TOT_REQS": 8,
},
},
},
},
},
wantErr: false,
},
{
name: "ok_vhost_port",
e: extAppLine("EXTAPP [LSAPI] [fuga.com:80] [fuga.com_php73]: CMAXCONN: 1, EMAXCONN: 2, POOL_SIZE: 3, INUSE_CONN: 4, IDLE_CONN: 5, WAITQUE_DEPTH: 6, REQ_PER_SEC: 0.7, TOT_REQS: 8"),
args: LiteSpeedReport{},
want: LiteSpeedReport{
ExtAppReports: map[string]map[string]map[string]map[string]float64{
"LSAPI": {
"fuga.com:80": {
"fuga.com_php73": {
"CMAXCONN": 1,
"EMAXCONN": 2,
"POOL_SIZE": 3,
"INUSE_CONN": 4,
"IDLE_CONN": 5,
"WAITQUE_DEPTH": 6,
"REQ_PER_SEC": 0.7,
"TOT_REQS": 8,
},
},
},
},
},
wantErr: false,
},
{
name: "ok_cgi",
e: extAppLine("EXTAPP [CGI] [] [lscgid]: CMAXCONN: 2, EMAXCONN: 3, POOL_SIZE: 4, INUSE_CONN: 5, IDLE_CONN: 6, WAITQUE_DEPTH: 7, REQ_PER_SEC: 0.8, TOT_REQS: 9"),
args: LiteSpeedReport{},
want: LiteSpeedReport{
ExtAppReports: map[string]map[string]map[string]map[string]float64{
"CGI": {
"Server": {
"lscgid": {
"CMAXCONN": 2,
"EMAXCONN": 3,
"POOL_SIZE": 4,
"INUSE_CONN": 5,
"IDLE_CONN": 6,
"WAITQUE_DEPTH": 7,
"REQ_PER_SEC": 0.8,
"TOT_REQS": 9,
},
},
},
},
},
wantErr: false,
},
{
name: "ng",
e: extAppLine("EXTAPP [LSA:PI] [fuga.com]: CMAXCONN: 2, EMAXCONN: 3, POOL_SIZE: 4, INUSE_CONN: 5, IDLE_CONN: 6, WAITQUE_DEPTH: 7, REQ_PER_SEC: 0.8, TOT_REQS: 9"),
args: LiteSpeedReport{},
want: LiteSpeedReport{},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tt.args.ExtAppReports = make(map[string]map[string]map[string]map[string]float64)
tt.e.parse(&tt.args)
if (tt.args.error != nil) != tt.wantErr {
t.Errorf("(extAppLine)parse() error = %v, wantErr %v", tt.args.error, tt.wantErr)
}
if !tt.wantErr && !cmp.Equal(tt.args.ExtAppReports, tt.want.ExtAppReports) {
t.Errorf("(extAppLine)parse() does not match. got = %v, want = %v", tt.args.ExtAppReports, tt.want.ExtAppReports)
}
})
}
}
func Test_pickUpStringName(t *testing.T) {
tests := []struct {
name string
args string
want []string
}{
{
name: "ok_single",
args: "REQ_RATE [hoge.com]: REQ_PROCESSING: 1",
want: []string{"hoge.com"},
},
{
name: "ok_multi",
args: "EXTAPP [LSAPI] [hoge.com] [hoge.com_php7.3]: CMAXCONN: 1000,",
want: []string{"LSAPI", "hoge.com", "hoge.com_php7.3"},
},
{
name: "ok_vhost_and_port",
args: "EXTAPP [LSAPI] [hoge.com:80] [hoge.com_php7.3]: CMAXCONN: 1000,",
want: []string{"LSAPI", "hoge.com:80", "hoge.com_php7.3"},
},
{
name: "ok_trim_space",
args: "EXTAPP [LSAPI] [ hoge.com] [hoge.com_php7.3]: CMAXCONN: 1000,",
want: []string{"LSAPI", "hoge.com", "hoge.com_php7.3"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := pickUpStringName(tt.args); !cmp.Equal(got, tt.want) {
t.Errorf("pickUpStringName() = %v, want %v", got, tt.want)
}
})
}
}
func Test_convertStringToMap(t *testing.T) {
tests := []struct {
name string
args string
want map[string]float64
wantErr bool
}{
{
name: "ok_single",
args: "xxxx: 1234",
want: map[string]float64{"xxxx": 1234},
wantErr: false,
},
{
name: "ok_multi",
args: "xxxx: 1234, oooo: 432.1",
want: map[string]float64{"xxxx": 1234, "oooo": 432.1},
wantErr: false,
},
{
name: "ng",
args: "xxxx: 1234 oooo: 432.1",
want: map[string]float64{},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := convertStringToMap(tt.args)
if (err != nil) != tt.wantErr {
t.Errorf("convertStringToMap() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !tt.wantErr && !cmp.Equal(got, tt.want) {
t.Errorf("convertStringToMap() got = %v, want %v", got, tt.want)
}
})
}
}
<file_sep>package rtreport
import (
"testing"
"github.com/google/go-cmp/cmp"
)
func Test_mergeSingleMap(t *testing.T) {
type args struct {
a map[string]float64
b map[string]float64
}
tests := []struct {
name string
args args
want map[string]float64
}{
{
name: "ok",
args: args{
a: map[string]float64{"hoge": 10, "fuga": 0.1},
b: map[string]float64{"hoge": 3, "aaa": 123},
},
want: map[string]float64{"hoge": 13, "fuga": 0.1, "aaa": 123},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
mergeSingleMap(tt.args.a, tt.args.b)
if !cmp.Equal(tt.args.a, tt.want) {
t.Errorf("mergeSingleMap() does not match. got = %v, want = %v", tt.args.a, tt.want)
}
})
}
}
func Test_mergeDoubleMap(t *testing.T) {
type args struct {
a map[string]map[string]float64
b map[string]map[string]float64
}
tests := []struct {
name string
args args
want map[string]map[string]float64
}{
{
name: "ok",
args: args{
a: map[string]map[string]float64{
"hoge": {"fuga": 100, "xxxx": 123},
"aaaa": {"abc": 1.1},
},
b: map[string]map[string]float64{
"hoge": {"fuga": 321, "cccc": 10},
"bbb": {"dddd": 3456},
},
},
want: map[string]map[string]float64{
"hoge": {"fuga": 421, "xxxx": 123, "cccc": 10},
"aaaa": {"abc": 1.1},
"bbb": {"dddd": 3456},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
mergeDoubleMap(tt.args.a, tt.args.b)
if !cmp.Equal(tt.args.a, tt.want) {
t.Errorf("mergeDoubleMap() does not match. got = %v, want = %v", tt.args.a, tt.want)
}
})
}
}
func Test_mergeTripleMap(t *testing.T) {
type args struct {
a map[string]map[string]map[string]float64
b map[string]map[string]map[string]float64
}
tests := []struct {
name string
args args
want map[string]map[string]map[string]float64
}{
{
name: "ok",
args: args{
a: map[string]map[string]map[string]float64{"aaa": {"bb": {"a": 1000, "b": 202}}},
b: map[string]map[string]map[string]float64{"aaa": {"bb": {"a": 21}}, "ccc": {"dd": {"e": 100}}},
},
want: map[string]map[string]map[string]float64{"aaa": {"bb": {"a": 1021, "b": 202}}, "ccc": {"dd": {"e": 100}}},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
mergeTripleMap(tt.args.a, tt.args.b)
if !cmp.Equal(tt.args.a, tt.want) {
t.Errorf("mergeTripleMap() does not match. got = %v, want = %v", tt.args.a, tt.want)
}
})
}
}
func Test_mergeQuadrupleMap(t *testing.T) {
type args struct {
a map[string]map[string]map[string]map[string]float64
b map[string]map[string]map[string]map[string]float64
}
tests := []struct {
name string
args args
want map[string]map[string]map[string]map[string]float64
}{
{
name: "ok",
args: args{
a: map[string]map[string]map[string]map[string]float64{"aaaa": {"bbb": {"aa": {"a": 1000, "h": 544}, "bb": {"a": 1345}}}},
b: map[string]map[string]map[string]map[string]float64{"aaaa": {"bbb": {"aa": {"h": 21}}}, "cccc": {"ddd": {"ee": {"z:": 456}}}},
},
want: map[string]map[string]map[string]map[string]float64{"aaaa": {"bbb": {"aa": {"a": 1000, "h": 565}, "bb": {"a": 1345}}}, "cccc": {"ddd": {"ee": {"z:": 456}}}},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
mergeQuadrupleMap(tt.args.a, tt.args.b)
if !cmp.Equal(tt.args.a, tt.want) {
t.Errorf("mergeTripleMap() does not match. got = %v, want = %v", tt.args.a, tt.want)
}
})
}
}
<file_sep># litespeed_exporter
Prometheus exporter for LiteSpeed server metrics.
## Building and running
### Compatibility
- Go 1.14+
### Build
```bash
make build
# or
make cross_build
```
## Usage
```bash
usage: litespeed_exporter [<flags>]
Flags:
-h, --help Show context-sensitive help (also try --help-long and --help-man).
--web.listen-address=":9104"
Listen address for web interface and telemetry.
--web.telemetry-path="/metrics"
URL path under which to expose metrics.
--lsws.report-path="/tmp/lshttpd"
Filesystem path under which exist lsws real-time statistics reports.
--log.level="info" Only log messages with the given severity or above. Valid levels: [debug, info, warn, error, fatal]
--log.format="logger:stderr"
Set the log target and format. Example: "logger:syslog?appname=bob&local=7" or "logger:stdout?json=true"
--version Show application version.
```
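For example, to run the exporter against the default report directory and check that it is serving metrics (the flag values below are the documented defaults; `litespeed_up` is the exporter's own gauge reporting whether the real-time report could be read):
```bash
./litespeed_exporter --lsws.report-path=/tmp/lshttpd --web.listen-address=":9104"
curl -s http://localhost:9104/metrics | grep ^litespeed_up
```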
## Author
@myokoo
<file_sep>package rtreport
import (
"reflect"
"testing"
"github.com/google/go-cmp/cmp"
)
func Test_sum(t *testing.T) {
type args struct {
a *LiteSpeedReport
b *LiteSpeedReport
}
tests := []struct {
name string
args args
want *LiteSpeedReport
}{
{
name: "ok",
args: args{
a: &LiteSpeedReport{
Uptime: 123,
Version: "5.4",
NetworkReport: map[string]float64{"BPS_IN": 123, "BPS_OUT": 713819, "SSL_BPS_IN": 136, "SSL_BPS_OUT": 891290},
ConnectionReport: map[string]float64{"MAXCONN": 10000, "MAXSSL_CONN": 5000, "PLAINCONN": 2331, "AVAILCONN": 7669, "IDLECONN": 0, "SSLCONN": 5, "AVAILSSL": 4995},
VirtualHostReport: map[string]map[string]float64{
"Server": {"REQ_PROCESSING": 3, "REQ_PER_SEC": 3.5, "TOT_REQS": 1533, "PUB_CACHE_HITS_PER_SEC": 0.0, "TOTAL_PUB_CACHE_HITS": 0, "PRIVATE_CACHE_HITS_PER_SEC": 1.1,
"TOTAL_PRIVATE_CACHE_HITS": 123, "STATIC_HITS_PER_SEC": 4.4, "TOTAL_STATIC_HITS": 49},
"hoge.com": {"REQ_PROCESSING": 1, "REQ_PER_SEC": 1.5, "TOT_REQS": 133, "PUB_CACHE_HITS_PER_SEC": 2.1,
"TOTAL_PUB_CACHE_HITS": 345, "PRIVATE_CACHE_HITS_PER_SEC": 4.3, "TOTAL_PRIVATE_CACHE_HITS": 345, "STATIC_HITS_PER_SEC": 5.5, "TOTAL_STATIC_HITS": 813},
},
// EXTAPP [xxxx] [xxxx] [xxxx]: CMAXCONN: 1000, EMAXCONN: 1000, POOL_SIZE: 1, INUSE_CONN: 1, IDLE_CONN: 0, WAITQUE_DEPTH: 0, REQ_PER_SEC: 0.0, TOT_REQS: 0
ExtAppReports: map[string]map[string]map[string]map[string]float64{
"LSAPI": {"hoge.com": {"hoge.com_php7.3": {"CMAXCONN": 1000, "EMAXCONN": 1000, "POOL_SIZE": 1,
"INUSE_CONN": 1, "IDLE_CONN": 0, "WAITQUE_DEPTH": 0, "REQ_PER_SEC": 0.0, "TOT_REQS": 0}}},
},
},
b: &LiteSpeedReport{
Uptime: 123,
Version: "5.4",
NetworkReport: map[string]float64{"BPS_IN": 21213, "BPS_OUT": 343819, "SSL_BPS_IN": 123363, "SSL_BPS_OUT": 913290},
ConnectionReport: map[string]float64{"MAXCONN": 10000, "MAXSSL_CONN": 5000, "PLAINCONN": 1000, "AVAILCONN": 9000, "IDLECONN": 1, "SSLCONN": 100, "AVAILSSL": 4900},
VirtualHostReport: map[string]map[string]float64{
"Server": {"REQ_PROCESSING": 5, "REQ_PER_SEC": 6.6, "TOT_REQS": 903, "PUB_CACHE_HITS_PER_SEC": 3.8, "TOTAL_PUB_CACHE_HITS": 1100, "PRIVATE_CACHE_HITS_PER_SEC": 5.3,
"TOTAL_PRIVATE_CACHE_HITS": 9393, "STATIC_HITS_PER_SEC": 7.9, "TOTAL_STATIC_HITS": 3939},
},
ExtAppReports: map[string]map[string]map[string]map[string]float64{
"LSAPI": {"hoge.com": {"hoge.com_php7.3": {"CMAXCONN": 1000, "EMAXCONN": 1000, "POOL_SIZE": 2,
"INUSE_CONN": 4, "IDLE_CONN": 3, "WAITQUE_DEPTH": 2, "REQ_PER_SEC": 1.2, "TOT_REQS": 3}}},
"CGI": {"Server": {"lscgid": {"CMAXCONN": 1000, "EMAXCONN": 1000, "POOL_SIZE": 1,
"INUSE_CONN": 1, "IDLE_CONN": 0, "WAITQUE_DEPTH": 0, "REQ_PER_SEC": 0.0, "TOT_REQS": 0}}},
},
},
},
want: &LiteSpeedReport{
Uptime: 123,
Version: "5.4",
NetworkReport: map[string]float64{"BPS_IN": 21336, "BPS_OUT": 1057638, "SSL_BPS_IN": 123499, "SSL_BPS_OUT": 1804580},
ConnectionReport: map[string]float64{"MAXCONN": 20000, "MAXSSL_CONN": 10000, "PLAINCONN": 3331, "AVAILCONN": 16669, "IDLECONN": 1, "SSLCONN": 105, "AVAILSSL": 9895},
VirtualHostReport: map[string]map[string]float64{
"Server": {"REQ_PROCESSING": 8, "REQ_PER_SEC": 10.1, "TOT_REQS": 2436, "PUB_CACHE_HITS_PER_SEC": 3.8, "TOTAL_PUB_CACHE_HITS": 1100, "PRIVATE_CACHE_HITS_PER_SEC": 6.4,
"TOTAL_PRIVATE_CACHE_HITS": 9516, "STATIC_HITS_PER_SEC": 12.3, "TOTAL_STATIC_HITS": 3988},
"hoge.com": {"REQ_PROCESSING": 1, "REQ_PER_SEC": 1.5, "TOT_REQS": 133, "PUB_CACHE_HITS_PER_SEC": 2.1,
"TOTAL_PUB_CACHE_HITS": 345, "PRIVATE_CACHE_HITS_PER_SEC": 4.3, "TOTAL_PRIVATE_CACHE_HITS": 345, "STATIC_HITS_PER_SEC": 5.5, "TOTAL_STATIC_HITS": 813},
},
ExtAppReports: map[string]map[string]map[string]map[string]float64{
"LSAPI": {"hoge.com": {"hoge.com_php7.3": {"CMAXCONN": 2000, "EMAXCONN": 2000, "POOL_SIZE": 3,
"INUSE_CONN": 5, "IDLE_CONN": 3, "WAITQUE_DEPTH": 2, "REQ_PER_SEC": 1.2, "TOT_REQS": 3}}},
"CGI": {"Server": {"lscgid": {"CMAXCONN": 1000, "EMAXCONN": 1000, "POOL_SIZE": 1,
"INUSE_CONN": 1, "IDLE_CONN": 0, "WAITQUE_DEPTH": 0, "REQ_PER_SEC": 0.0, "TOT_REQS": 0}}},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := sum(tt.args.a, tt.args.b)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("sum() = %v, want %v", got, tt.want)
}
})
}
}
func Test_load(t *testing.T) {
tests := []struct {
name string
args string
want *LiteSpeedReport
}{
{
name: "ok",
args: "../test/data/load/.rtreport",
want: &LiteSpeedReport{
Version: "5.4",
Uptime: 56070,
NetworkReport: map[string]float64{"BPS_IN": 1, "BPS_OUT": 2, "SSL_BPS_IN": 3, "SSL_BPS_OUT": 4},
ConnectionReport: map[string]float64{"MAXCONN": 10000, "MAXSSL_CONN": 5000, "PLAINCONN": 0, "AVAILCONN": 10000, "IDLECONN": 0, "SSLCONN": 0, "AVAILSSL": 5000},
VirtualHostReport: map[string]map[string]float64{
"Server": {"REQ_PROCESSING": 0, "REQ_PER_SEC": 0.1, "TOT_REQS": 448, "PUB_CACHE_HITS_PER_SEC": 0.0, "TOTAL_PUB_CACHE_HITS": 0, "PRIVATE_CACHE_HITS_PER_SEC": 0.0,
"TOTAL_PRIVATE_CACHE_HITS": 0, "STATIC_HITS_PER_SEC": 0.1, "TOTAL_STATIC_HITS": 133},
"hoge.jp": {"REQ_PROCESSING": 3, "REQ_PER_SEC": 2.1, "TOT_REQS": 121, "PUB_CACHE_HITS_PER_SEC": 4.0,
"TOTAL_PUB_CACHE_HITS": 345, "PRIVATE_CACHE_HITS_PER_SEC": 4.3, "TOTAL_PRIVATE_CACHE_HITS": 345, "STATIC_HITS_PER_SEC": 5.5, "TOTAL_STATIC_HITS": 813},
},
ExtAppReports: make(map[string]map[string]map[string]map[string]float64),
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := load(tt.args)
if !cmp.Equal(got, tt.want, cmp.AllowUnexported(LiteSpeedReport{})) {
t.Errorf("load() = %v, want %v", *got, *tt.want)
}
})
}
}
func TestNew(t *testing.T) {
tests := []struct {
name string
args string
want *LiteSpeedReport
wantErr bool
}{
{
name: "ok",
args: "../test/data/new",
want: &LiteSpeedReport{
Version: "5.4",
Uptime: 56070,
NetworkReport: map[string]float64{"BPS_IN": 2, "BPS_OUT": 4, "SSL_BPS_IN": 6, "SSL_BPS_OUT": 8},
ConnectionReport: map[string]float64{"MAXCONN": 20000, "MAXSSL_CONN": 10000, "PLAINCONN": 0, "AVAILCONN": 20000, "IDLECONN": 0, "SSLCONN": 0, "AVAILSSL": 10000},
VirtualHostReport: map[string]map[string]float64{
"Server": {"REQ_PROCESSING": 0, "REQ_PER_SEC": 0.2, "TOT_REQS": 896, "PUB_CACHE_HITS_PER_SEC": 0.0, "TOTAL_PUB_CACHE_HITS": 0, "PRIVATE_CACHE_HITS_PER_SEC": 0.0,
"TOTAL_PRIVATE_CACHE_HITS": 0, "STATIC_HITS_PER_SEC": 0.2, "TOTAL_STATIC_HITS": 266},
"hoge.jp": {"REQ_PROCESSING": 6, "REQ_PER_SEC": 4.2, "TOT_REQS": 242, "PUB_CACHE_HITS_PER_SEC": 8.0,
"TOTAL_PUB_CACHE_HITS": 690, "PRIVATE_CACHE_HITS_PER_SEC": 8.6, "TOTAL_PRIVATE_CACHE_HITS": 690, "STATIC_HITS_PER_SEC": 11.0, "TOTAL_STATIC_HITS": 1626},
},
ExtAppReports: map[string]map[string]map[string]map[string]float64{
"LSAPI": {"hoge.jp": {"hoge.jp_php73": {"CMAXCONN": 1000, "EMAXCONN": 1000, "POOL_SIZE": 1,
"INUSE_CONN": 1, "IDLE_CONN": 0, "WAITQUE_DEPTH": 0, "REQ_PER_SEC": 0.0, "TOT_REQS": 0}}},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := New(tt.args)
if (err != nil) != tt.wantErr {
t.Errorf("New() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !cmp.Equal(got, tt.want, cmp.AllowUnexported(LiteSpeedReport{})) {
t.Errorf("New() got = %v, want %v", got, tt.want)
}
})
}
}
<file_sep>package rtreport
import (
"bufio"
"errors"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
)
// Constants
const (
DefaultReportPath = "/tmp/lshttpd"
reportFileNamePrefix = ".rtreport"
)
// MapKey
const (
NetworkReportKeyBpsIn = "BPS_IN"
NetworkReportKeyBpsOut = "BPS_OUT"
NetworkReportKeySslBpsIn = "SSL_BPS_IN"
NetworkReportKeySslBpsOut = "SSL_BPS_OUT"
ConnectionReportKeyMaxConn = "MAXCONN"
ConnectionReportKeyMaxConnSsl = "MAXSSL_CONN"
ConnectionReportKeyUsedConn = "PLAINCONN"
ConnectionReportKeyIdleConn = "IDLECONN"
ConnectionReportKeyUsedConnSsl = "SSLCONN"
VHostReportKeyProcessing = "REQ_PROCESSING"
VhostReportKeyReqPerSec = "REQ_PER_SEC"
VHostReportKeyReqTotal = "TOT_REQS"
VHostReportKeyPubCacheHits = "TOTAL_PUB_CACHE_HITS"
VHostReportKeyPteCacheHits = "TOTAL_PRIVATE_CACHE_HITS"
VHostReportKeyStaticHits = "TOTAL_STATIC_HITS"
ExtAppKeyMaxConn = "CMAXCONN"
ExtAppKeyEffectiveMaxConn = "EMAXCONN"
ExtAppKeyPoolSize = "POOL_SIZE"
ExtAppKeyInUseConn = "INUSE_CONN"
ExtAppKeyIdleConn = "IDLE_CONN"
ExtAppKeyWaitQueue = "WAITQUE_DEPTH"
ExtAppKeyReqPerSec = "REQ_PER_SEC"
ExtAppKeyReqTotal = "TOT_REQS"
)
// LiteSpeedReport
type LiteSpeedReport struct {
error error
Version string
Uptime float64
NetworkReport map[string]float64
ConnectionReport map[string]float64
VirtualHostReport map[string]map[string]float64
ExtAppReports map[string]map[string]map[string]map[string]float64
}
// New returns a new real-time report aggregated from all report files under path, or an error.
func New(path string) (*LiteSpeedReport, error) {
reportFiles, err := searchReportFiles(path)
if err != nil {
return nil, err
}
counter := len(reportFiles)
ch := make(chan *LiteSpeedReport, counter)
defer close(ch)
done := make(chan interface{})
defer close(done)
loadReportFiles(done, ch, reportFiles)
sumReportData(done, ch, counter)
r := <-ch
return r, r.error
}
// searchReportFiles finds the real-time report files under the given path.
func searchReportFiles(path string) ([]string, error) {
files, err := ioutil.ReadDir(path)
if err != nil {
return nil, err
}
var reportFiles []string
for _, file := range files {
if file.IsDir() || !strings.HasPrefix(file.Name(), reportFileNamePrefix) {
continue
}
reportFiles = append(reportFiles, filepath.Join(path, file.Name()))
}
return reportFiles, nil
}
func loadReportFiles(done <-chan interface{}, ch chan<- *LiteSpeedReport, reportFiles []string) {
for _, reportFile := range reportFiles {
go func(filePath string) {
select {
case <-done:
return
case ch <- load(filePath):
}
}(reportFile)
}
}
func load(filePath string) *LiteSpeedReport {
fp, err := os.Open(filePath)
if err != nil {
return &LiteSpeedReport{error: err}
}
defer fp.Close()
v := &LiteSpeedReport{
NetworkReport: make(map[string]float64),
ConnectionReport: make(map[string]float64),
VirtualHostReport: make(map[string]map[string]float64),
ExtAppReports: make(map[string]map[string]map[string]map[string]float64),
}
scanner := bufio.NewScanner(fp)
for scanner.Scan() {
NewLineParser(scanner.Text()).parse(v)
if v.error != nil {
break
}
}
return v
}
func sumReportData(done <-chan interface{}, ch chan *LiteSpeedReport, counter int) {
for counter > 1 {
report1 := <-ch
counter--
report2 := <-ch
		// The read of report2 (counter--) is offset by writing the merged result back to ch (counter++), so only one explicit decrement is needed per iteration.
go func(a, b *LiteSpeedReport) {
select {
case <-done:
return
case ch <- sum(a, b):
}
}(report1, report2)
}
}
func sum(a, b *LiteSpeedReport) *LiteSpeedReport {
	// If either report carries an error, return only the error(s).
if a.error != nil || b.error != nil {
v := &LiteSpeedReport{}
if a.error == nil {
v.error = b.error
} else if b.error == nil {
v.error = a.error
} else {
v.error = errors.New(fmt.Sprintf("%s\n--------------%s", a.error.Error(), b.error.Error()))
}
return v
}
// merge map value.
mergeSingleMap(a.NetworkReport, b.NetworkReport)
mergeSingleMap(a.ConnectionReport, b.ConnectionReport)
mergeDoubleMap(a.VirtualHostReport, b.VirtualHostReport)
mergeQuadrupleMap(a.ExtAppReports, b.ExtAppReports)
return a
}
<file_sep>package collector
import (
"sync"
"github.com/prometheus/client_golang/prometheus"
"github.com/myokoo/litespeed_exporter/pkg/rtreport"
)
const (
namespace = "litespeed" // For Prometheus metrics.
)
var (
errorDesc = prometheus.NewDesc(
prometheus.BuildFQName(namespace, "", "up"),
"Whether the realtime report could be read", nil, nil,
)
	uptimeDesc = prometheus.NewDesc(
prometheus.BuildFQName(namespace, "", "uptime_seconds_total"),
"Current uptime in seconds.", nil, nil,
)
)
type Exporter struct {
mutex sync.Mutex
reportPath *string
scrapers []Scraper
}
func New(path *string) *Exporter {
return &Exporter{
reportPath: path,
scrapers: []Scraper{
connection{},
network{},
virtualHost{},
extApp{},
},
}
}
// Describe implements prometheus.Collector.
func (e *Exporter) Describe(ch chan<- *prometheus.Desc) {
	ch <- uptimeDesc
}
// Collect implements prometheus.Collector.
func (e *Exporter) Collect(ch chan<- prometheus.Metric) {
e.mutex.Lock()
defer e.mutex.Unlock()
report, err := rtreport.New(*e.reportPath)
if err != nil {
ch <- metricsIsLitespeedUp(float64(0))
return
}
ch <- metricsIsLitespeedUp(float64(1))
	ch <- prometheus.MustNewConstMetric(uptimeDesc, prometheus.CounterValue, report.Uptime)
for _, scraper := range e.scrapers {
scraper.scrape(ch, report)
}
}
func metricsIsLitespeedUp(i float64) prometheus.Metric {
return prometheus.MustNewConstMetric(errorDesc, prometheus.GaugeValue, i)
}
func newMetric(namespace, subsystem, name, help string, label []string, metricType prometheus.ValueType, value float64, labelValues ...string) prometheus.Metric {
return prometheus.MustNewConstMetric(prometheus.NewDesc(prometheus.BuildFQName(namespace, subsystem, name), help, label, nil), metricType, value, labelValues...)
}
<file_sep>## 0.1.6 / 2021-10-05
### Change
- Corrected some spelling/grammar
## 0.1.5 / 2021-05-04
### Add
- Added support for port number in virtual host name
### Change
- Corrected spelling
- Used idiomatic naming
- Made error messages clearer
## 0.1.4 / 2019-11-05
### Add
- Added 'litespeed_virtual_host_requests_per_sec' metric
- Added 'litespeed_external_applicationt_requests_per_sec' metric
## 0.1.3 / 2019-11-05
### Change
- Refactored code
## 0.1.2 / 2019-08-29
### Fixed
- Fixed a problem where old metrics data remained
## 0.1.1 / 2019-08-27
### Change
- Changed LiteSpeedReport.Version to string from float64
## 0.1.0 / 2019-08-27
Initial release
<file_sep>package collector
import (
"github.com/prometheus/client_golang/prometheus"
"github.com/myokoo/litespeed_exporter/pkg/rtreport"
)
var (
vhostLabels = []string{"vhost"}
vName = "virtual_host"
)
type virtualHost struct{}
func (v virtualHost) scrape(ch chan<- prometheus.Metric, report *rtreport.LiteSpeedReport) {
for vhost, valueMap := range report.VirtualHostReport {
for key, value := range valueMap {
switch key {
case rtreport.VHostReportKeyProcessing:
ch <- newMetric(
namespace, vName, "running_processe",
"The number of running processes by vhost.",
vhostLabels, prometheus.GaugeValue, value, vhost,
)
case rtreport.VhostReportKeyReqPerSec:
ch <- newMetric(
namespace, vName, "requests_per_sec",
"The total requests per second by vhost.",
vhostLabels, prometheus.GaugeValue, value, vhost,
)
case rtreport.VHostReportKeyReqTotal:
ch <- newMetric(
namespace, vName, "requests_total",
"The total requests by vhost.",
vhostLabels, prometheus.GaugeValue, value, vhost,
)
case rtreport.VHostReportKeyStaticHits:
ch <- newMetric(
namespace, vName, "hists_total",
"The number of static requests by vhost.",
vhostLabels, prometheus.GaugeValue, value, vhost,
)
case rtreport.VHostReportKeyPubCacheHits:
ch <- newMetric(
namespace, vName, "public_cache_hists_total",
"The number of public cache hits by vhost.",
vhostLabels, prometheus.GaugeValue, value, vhost,
)
case rtreport.VHostReportKeyPteCacheHits:
ch <- newMetric(
namespace, vName, "private_cache_hists_total",
"The number of private cache hits by vhost.",
vhostLabels, prometheus.GaugeValue, value, vhost,
)
}
}
}
}
| 25cf77a752ef4bb5ab9bdb4d1f299346ae623f61 | [
"Markdown",
"Go Module",
"Go"
] | 17 | Markdown | myokoo/litespeed_exporter | 7a9b3779bda455b6ff99b942c3cf2afb92d23006 | ea891f15d1c9908de3291256bd3bafe4feb579ac | |
refs/heads/master | <repo_name>oliverhager29/readable<file_sep>/src/utils/ReadableAPI.js
const api = "http://localhost:3001"
const uuidv4 = require('uuid/v4');
// Generate a unique token for storing your forum data on the backend server.
let token = localStorage.token
if (!token)
token = localStorage.token = Math.random().toString(36).substr(-8)
const headers = {
'Accept': 'application/json',
'Authorization': 'Basic bmFtZTpwYXNzd29yZA=='
}
export const getAllPosts = () =>
fetch(`${api}/posts`, { headers })
.then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export const getAllCategories = () =>
fetch(`${api}/categories`, { headers })
.then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data.categories)
export const getPostById = (postId) =>
fetch(`${api}/posts/${postId}`, { headers })
.then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export const getPostsForCategory = (category) =>
fetch(`${api}/${category}/posts`, { headers })
.then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export const getCommentById = (commentId) =>
fetch(`${api}/comments/${commentId}`, { headers })
.then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export const getCommentsForPost = (postId) =>
fetch(`${api}/posts/${postId}/comments`, { headers })
.then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export const updatePost = (post) =>
fetch(`${api}/posts/${post.id}`, {
method: 'PUT',
headers: {
...headers,
'Content-Type': 'application/json'
},
body: JSON.stringify(post)
}).then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export const updateComment = (comment) =>
fetch(`${api}/comments/${comment.id}`, {
method: 'PUT',
headers: {
...headers,
'Content-Type': 'application/json'
},
body: JSON.stringify(comment)
}).then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export const upVoteComment = (commentId) =>
fetch(`${api}/comments/${commentId}`, {
method: 'POST',
headers: {
...headers,
'Content-Type': 'application/json'
},
body: JSON.stringify({'option': 'upVote'})
}).then(res => res.json())
.then(data => data)
.catch(error => console.log(error))
export const downVoteComment = (commentId) =>
fetch(`${api}/comments/${commentId}`, {
method: 'POST',
headers: {
...headers,
'Content-Type': 'application/json'
},
body: JSON.stringify({'option': 'downVote'})
}).then(res => res.json())
.then(data => data)
.catch(error => console.log(error))
export const upVotePost = (postId) =>
fetch(`${api}/posts/${postId}`, {
method: 'POST',
headers: {
...headers,
'Content-Type': 'application/json'
},
body: JSON.stringify({'option': 'upVote'})
}).then(res => res.json())
.then(data => data)
.catch(error => console.log(error))
export const downVotePost = (postId) =>
fetch(`${api}/posts/${postId}`, {
method: 'POST',
headers: {
...headers,
'Content-Type': 'application/json'
},
body: JSON.stringify({'option': 'downVote'})
}).then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export const createPost = (post) =>
fetch(`${api}/posts`, {
method: 'POST',
headers: {
...headers,
'Content-Type': 'application/json'
},
body: JSON.stringify(post)
}).then(res => res.json())
.catch(error => console.error('Error:', error))
        .then(data => data)
export const createComment = (comment) =>
fetch(`${api}/comments`, {
method: 'POST',
headers: {
...headers,
'Content-Type': 'application/json'
},
body: JSON.stringify(comment)
}).then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export const deletePost = (postId) =>
fetch(`${api}/posts/${postId}`, {
method: 'DELETE',
headers: {
...headers,
'Content-Type': 'application/json'
}
}).then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export const deleteComment = (commentId) =>
fetch(`${api}/comments/${commentId}`, {
method: 'DELETE',
headers: {
...headers,
'Content-Type': 'application/json'
}
}).then(res => res.json())
.catch(error => console.error('Error:', error))
.then(data => data)
export function getId() {
let newId=uuidv4();
newId=newId.replace(/-/g, "")
return newId;
}<file_sep>/src/components/PostListByCategory.js
import React, { Component } from 'react';
import {Link} from 'react-router-dom'
import Modal from 'react-modal'
import PostCreateEdit from "./PostCreateEdit";
import * as ReadableAPI from "../utils/ReadableAPI";
import {
addPostAction, removePostAction, updatePostAction, upVotePostAction, downVotePostAction,
addCommentAction, updateCommentAction, removeCommentAction, upVoteCommentAction, downVoteCommentAction,
addCategoriesAction
} from '../actions'
import {connect} from "react-redux";
Modal.setAppElement('#root');
class PostListByCategory extends Component {
static contextTypes = {
router: () => true, // replace with PropTypes.object if you use them
}
state = {
isCreatePostModalOpen: false,
isEditPostModalOpen: [],
isSortedByTimestamp: true,
isSortedByVoteScore: false,
isAscending: true
}
openCreatePostModal() {
this.setState(() => ({isCreatePostModalOpen: true}))
}
closeCreatePostModal(){
this.setState(() => ({isCreatePostModalOpen: false}))
}
openEditPostModal(index) {
const isEditPostModalOpen = this.state.isEditPostModalOpen.map(f => false)
isEditPostModalOpen[index] = true
this.setState(() => ({isEditPostModalOpen: isEditPostModalOpen}))
}
closeEditPostModal(){
const isEditPostModalOpen = this.state.isEditPostModalOpen.map(f => false)
this.setState(() => ({isEditPostModalOpen: isEditPostModalOpen}))
}
handleDownVote = (postId, index) => {
const {actions} = this.props
ReadableAPI.downVotePost(postId).then(
(post) => {
actions.downVotePostProp({id: post.id, voteScore: post.voteScore})
}
)
}
handleUpVote = (postId, index) => {
const {actions} = this.props
ReadableAPI.upVotePost(postId).then(
(post) => {
actions.upVotePostProp({id: post.id, voteScore: post.voteScore})
}
)
}
handleDelete = (postId, index) => {
const {actions} = this.props
ReadableAPI.deletePost(postId).then(
(post) => {
actions.removePostProp(post.id)
}
)
}
handleSortByTimestamp = () => {
let isAscending = true
if(this.state.isSortedByTimestamp) {
isAscending=!this.state.isAscending
}
this.setState({isSortedByTimestamp: true, isSortedByVoteScore: false, isAscending: isAscending})
}
handleSortByVoteScore = () => {
let isAscending = true
if(this.state.isSortedByVoteScore) {
isAscending=!this.state.isAscending
}
this.setState({isSortedByTimestamp: false, isSortedByVoteScore: true, isAscending: isAscending})
}
constructor(props) {
super(props)
this.openCreatePostModal = this.openCreatePostModal.bind(this);
this.closeCreatePostModal = this.closeCreatePostModal.bind(this);
this.openEditPostModal = this.openEditPostModal.bind(this);
this.closeEditPostModal = this.closeEditPostModal.bind(this);
this.handleSortByTimestamp = this.handleSortByTimestamp.bind(this);
this.handleSortByVoteScore = this.handleSortByVoteScore.bind(this);
}
render() {
let {actions, categories, posts, comments} = this.props
const path = this.props.location.pathname
const category = path.substring(path.lastIndexOf('/')+1, path.length)
if(actions==null) {
actions = {}
}
if(categories==null) {
categories = []
}
if(posts==null) {
posts = {allIds: [], byId: {}, byCategory: {}}
}
if(comments==null) {
comments = {allIds: [], byId: {}}
}
const {isSortedByTimestamp, isSortedByVoteScore, isAscending} = this.state
const handleDownVote = this.handleDownVote
const handleUpVote = this.handleUpVote
const handleDelete = this.handleDelete
const isEditPostModalOpen = this.state.isEditPostModalOpen
const closeEditPostModal = this.closeEditPostModal
const openEditPostModal = this.openEditPostModal
return (
<div>
<div id="categories">
<h1>Posts by Categories:</h1>
<table className="form">
<tbody>
{categories.map(function (category, index) {
return (
<tr key={category.name} >
<td>
<Link to={{
pathname: '/'+category.path
}}>
View Posts in Category {category.name}
</Link>
</td>
</tr>
)})}
<tr key="all">
<td>
<Link to={{
pathname: '/'
}}>
View Posts in all Categories
</Link>
</td>
</tr>
</tbody>
</table>
</div>
<div id="postsByCategory">
<h1>Posts in Category {category}:</h1>
<table>
<thead className={"table-header"}>
<tr>
<th>Id</th>
<th>Timestamp<button><img height='20' width='20' src={(isSortedByTimestamp?(isAscending?'./sort-up.svg':'./sort-down.svg'):'./sort-arrows-couple-pointing-up-and-down.svg')} alt="sort by timestamp" onClick={this.handleSortByTimestamp} /></button></th>
<th>Title</th>
<th>Body</th>
<th>Author</th>
<th>Category</th>
<th>Vote Score<button><img height='20' width='20' src={(isSortedByVoteScore?(isAscending?'./sort-up.svg':'./sort-down.svg'):'./sort-arrows-couple-pointing-up-and-down.svg')} alt="sort by vote score" onClick={this.handleSortByVoteScore} /></button></th>
<th>Comment Count</th>
<th>Delete</th>
<th>Down Vote</th>
<th>Up Vote</th>
<th>Edit Post</th>
</tr>
</thead>
<tbody>
{(posts===null || posts.allIds==null || posts.allIds.length===0 || posts.byCategory==null || posts.byCategory[category]==null || posts.byCategory[category].length===0) ?
<tr><td colSpan={12}>no posts available</td></tr>
:posts.byCategory[category].map(postId => posts.byId[postId] )
.filter(post => post.deleted===false)
.sort(function(a,b) {
if(isSortedByTimestamp) {
if(isAscending) {
return a.timestamp - b.timestamp
}
else {
return b.timestamp - a.timestamp
}
}
else if(isSortedByVoteScore) {
if(isAscending) {
return a.voteScore - b.voteScore
}
else {
return b.voteScore - a.voteScore
}
}
return 0
})
.map(function (post, index) {
const postId = post.id
const dateTime = new Date(post.timestamp)
const dateTimeStr=dateTime.toLocaleString()
return (
<tr key={postId} className={index%2===0?"table-row-even":"table-row-odd"}>
<td>
<Link to={{
pathname: '/'+post.category+'/'+post.id
}}>
{post.id}
</Link>
</td>
<td>{dateTimeStr}</td>
<td>{post.title}</td>
<td>{post.body}</td>
<td>{post.author}</td>
<td>{post.category}</td>
<td>{post.voteScore}</td>
<td>{post.commentCount}</td>
<td><button onClick={() => handleDelete(post.id, index)} name="Delete">Delete</button></td>
<td><button onClick={() => handleDownVote(post.id, index)} name="Vote Down">Vote Down</button></td>
<td><button onClick={() => handleUpVote(post.id, index)} name="Vote Up">Vote Up</button></td>
<td>
<button
className='button'
onClick={() => openEditPostModal(index)}>
Edit
</button>
<div>
<Modal
className='modal'
overlayClassName='overlay'
isOpen={isEditPostModalOpen[index]}
onRequestClose={closeEditPostModal}
contentLabel='Edit Post'
backdropColor = {'white'}
backdropOpacity = {1}
animationIn={'slideInLeft'}
animationOut={'slideOutRight'}
>
{isEditPostModalOpen[index] && <PostCreateEdit closeModal={closeEditPostModal} postId={post.id}/>}
</Modal>
</div>
</td>
</tr>
)
}
)
}
</tbody>
</table>
</div>
<button
className="button"
onClick={this.context.router.history.goBack}>
Back
</button>
<button
className='button'
onClick={() => this.openCreatePostModal()}>
Create
</button>
<div>
<Modal
className='modal'
overlayClassName='overlay'
isOpen={this.state.isCreatePostModalOpen}
onRequestClose={this.closeCreatePostModal}
contentLabel='Create Post'
backdropColor = {'white'}
backdropOpacity = {1}
animationIn={'slideInLeft'}
animationOut={'slideOutRight'}
>
{this.state.isCreatePostModalOpen && <PostCreateEdit closeModal={this.closeCreatePostModal}/>}
</Modal>
</div>
</div>
);
}
}
function mapStateToProps ({ forum }) {
return {
categories: forum.categories,
posts: forum.posts,
comments: forum.comments
}
}
function mapDispatchToProps (dispatch) {
return {
actions: {
addPostProp: (data) => dispatch(addPostAction(data)),
removePostProp: (data) => dispatch(removePostAction(data)),
updatePostProp: (data) => dispatch(updatePostAction(data)),
upVotePostProp: (data) => dispatch(upVotePostAction(data)),
downVotePostProp: (data) => dispatch(downVotePostAction(data)),
addCommentProp: (data) => dispatch(addCommentAction(data)),
removeCommentProp: (data) => dispatch(removeCommentAction(data)),
updateCommentProp: (data) => dispatch(updateCommentAction(data)),
upVoteCommentProp: (data) => dispatch(upVoteCommentAction(data)),
downVoteCommentProp: (data) => dispatch(downVoteCommentAction(data)),
addCategoriesProp: (data) => dispatch(addCategoriesAction(data))
}
}
}
export default connect(
mapStateToProps,
mapDispatchToProps
)(PostListByCategory)
<file_sep>/README.md
Readable is a React application that implements a forum with posts and comments. Both can be created, deleted, modified, and up/down voted. Each post is assigned to one category.
The main page consists of links to the posts by category and to all posts. Each post entry links to the post details with its associated comments and has up vote, down vote, edit, and delete buttons; there is also a create button for adding a new post. The posts-by-category page displays posts in the same way. The post details page shows the details of a post and its comments: it offers edit, delete, and up/down vote buttons for the post, lists the associated comments with up vote, down vote, delete, and edit buttons, and provides a create button for adding a new comment. The create/edit forms for posts and comments are implemented as modal dialogs.
All valid routes are defined in the root component (App). Posts by category have the URL /{category} and post details have the URL /{category}/{post.id}. If post.id refers to a deleted or non-existent post, or the category does not exist, a 404 NotFound page is displayed (with a link back to the home page, i.e. /); the same applies to any other invalid URL. A minimal sketch of such a route setup is shown below.
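The snippet below is only an illustrative sketch of such a route configuration with react-router-dom's `Switch`/`Route`; the component names and import paths are placeholders, not the actual contents of App.js:
```js
// Sketch only: component names and import paths are illustrative placeholders.
import React from 'react'
import { Route, Switch } from 'react-router-dom'
import PostList from './components/PostList'                     // hypothetical "all posts" view
import PostListByCategory from './components/PostListByCategory'
import PostDetails from './components/PostDetails'
import NotFound from './components/NotFound'                     // hypothetical 404 page

const AppRoutes = () => (
    <Switch>
        <Route exact path='/' component={PostList} />
        <Route exact path='/:category' component={PostListByCategory} />
        <Route exact path='/:category/:postId' component={PostDetails} />
        {/* any unmatched URL falls through to the 404 page */}
        <Route component={NotFound} />
    </Switch>
)

export default AppRoutes
```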
Furthermore, the root component provides a Redux store that is propagated into the child components together with the action creator functions.
Posts and comments can be sorted either by timestamp or by vote score, in ascending or descending order; the comparator behind both sort toggles is sketched below.
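The following snippet is only an illustrative sketch of that sorting logic (the helper name `sortItems` is made up; the field names match the store model shown below):
```js
// Illustrative sketch of the sorting used in the post and comment lists.
// 'items' is an array of posts or comments taken from the Redux store.
function sortItems(items, byVoteScore, ascending) {
    const field = byVoteScore ? 'voteScore' : 'timestamp'
    return items
        .slice() // copy first so the data from the store is not mutated
        .sort((a, b) => (ascending ? a[field] - b[field] : b[field] - a[field]))
}

// Example: newest posts first
// const newestFirst = sortItems(posts, false, false)
```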
Redux store model (with sample data):
```js
categories: [
{
"name": "react",
"path": "react"
},
{
"name": "redux",
"path": "redux"
},
{
"name": "udacity",
"path": "udacity"
}
],
posts: {
byId: {
"8xf0y6ziyjabvozdd253nd": {
"id": "8xf0y6ziyjabvozdd253nd",
"timestamp": 1467166872634,
"title": "Udacity is the best place to learn React",
"body": "Everyone says so after all.",
"author": "thingtwo",
"category": "react",
"voteScore": 6,
"deleted": false,
"commentCount": 2,
"commentIds": ["894tuq4ut84ut8v4t8wun89g", "8tu4bsun805n8un48ve89"]
},
"6ni6ok3ym7mf1p33lnez": {
"id": "6ni6ok3ym7mf1p33lnez",
"timestamp": 1468479767190,
"title": "Learn Redux in 10 minutes!",
"body": "Just kidding. It takes more than 10 minutes to learn technology.",
"author": "thingone",
"category": "redux",
"voteScore": -5,
"deleted": false,
"commentCount": 0,
"commentIds": []
}
},
byCategory: {
"react" : ["8xf0y6ziyjabvozdd253nd"],
"redux" : ["6ni6ok3ym7mf1p33lnez"],
"udacity" : []
},
allIds: ["8xf0y6ziyjabvozdd253nd", "6ni6ok3ym7mf1p33lnez"]
},
comments: {
byId: {
"894tuq4ut84ut8v4t8wun89g" : {
"id": "894tuq4ut84ut8v4t8wun89g",
"parentId": "8xf0y6ziyjabvozdd253nd",
"timestamp": 1468166872634,
"body": "Hi there! I am a COMMENT.",
"author": "thingtwo",
"voteScore": 6,
"deleted": false,
"parentDeleted": false
},
"8tu4bsun805n8un48ve89" : {
"id": "8tu4bsun805n8un48ve89",
"parentId": "8xf0y6ziyjabvozdd253nd",
"timestamp": 1469479767190,
"body": "Comments. Are. Cool.",
"author": "thingone",
"voteScore": -5,
"deleted": false,
"parentDeleted": false
}
},
allIds: ["894tuq4ut84ut8v4t8wun89g", "8tu4bsun805n8un48ve89"]
}
```
Instructions to run:
```
# install the latest Node.js and npm first
unzip projetc2_oliver_hager.zip
cd readable
# installs all necessary Node.js packages
npm install
cd src
# starts the web server with the React app
npm start
```
<file_sep>/src/reducers/index.js
import {
ADD_POST,
UPDATE_POST,
REMOVE_POST,
UP_VOTE_POST,
DOWN_VOTE_POST,
ADD_COMMENT,
UPDATE_COMMENT,
REMOVE_COMMENT,
UP_VOTE_COMMENT,
DOWN_VOTE_COMMENT,
ADD_CATEGORIES
} from '../actions'
import { combineReducers } from 'redux';
const initialState = {
categories: [],
posts: {
byId: {
},
byCategory: {
},
allIds: []
},
comments: {
byId: {
},
allIds: []
}
}
export function addPost(state=initialState, action) {
if(action.type!==ADD_POST) {
return state
}
let {id, title, body, author, category, timestamp, voteScore, deleted} = action
let postIdsByCategory = state.posts.byCategory[category]
if(postIdsByCategory==null) {
postIdsByCategory = []
}
if(voteScore===null) {
voteScore = 0
}
if(deleted===null) {
deleted = false
}
const newPostIdsByCategory = postIdsByCategory.concat([id])
const allPostIds = state.posts.allIds
const newAllPostIds = allPostIds.concat([id])
return {
...state,
posts: {
byId: {
[id]: {
id,
title,
body,
author,
category,
timestamp,
voteScore,
deleted,
commentCount: 0,
commentIds: []
},
...state.posts.byId
},
byCategory: {
...state.posts.byCategory,
[category]: newPostIdsByCategory
},
allIds: newAllPostIds
}
}
}
export function updatePost(state=initialState, action) {
if(action.type!==UPDATE_POST) {
return state
}
const {id, title, body, author, category, timestamp} = action
let postIdsByCategory = state.posts.byCategory[category]
if(postIdsByCategory==null) {
postIdsByCategory = []
}
const post = state.posts.byId[id]
const allPostIds = state.posts.allIds
let postCommentIds = post.commentIds
if(postCommentIds==null) {
postCommentIds = []
}
return {
...state,
posts: {
byId: {
...state.posts.byId,
[id]: {
id,
title,
body,
author,
category,
timestamp,
commentIds: postCommentIds,
voteScore: post.voteScore,
deleted: post.deleted,
commentCount: post.commentCount
}
},
byCategory: {
...state.posts.byCategory,
[category]: postIdsByCategory
},
allIds: allPostIds
}
}
}
export function removePost(state=initialState, action) {
if(action.type!==REMOVE_POST) {
return state
}
const {id, category} = state.posts.byId[action.id]
const postIdsByCategory = state.posts.byCategory[category]
const newPostIdsByCategory = postIdsByCategory.filter(e => e!==id)
const allPostIds = state.posts.allIds
const newAllPostIds = allPostIds.filter(e => e!==id)
    // Rebuild the comment maps without the comments that belong to the removed post.
    const remainingComments = Object.keys(state.comments.byId)
        .map(k => state.comments.byId[k])
        .filter(c => c != null && c.parentId !== id)
    const newCommentsById = remainingComments.reduce((acc, c) => ({...acc, [c.id]: c}), {})
    const newAllCommentIds = remainingComments.map(c => c.id)
return {
...state,
posts: {
byId: {
...state.posts.byId,
[id]: null
},
byCategory: {
...state.posts.byCategory,
[category]: newPostIdsByCategory
},
allIds: newAllPostIds
},
comments: {
byId: newCommentsById,
allIds: newAllCommentIds
}
}
}
export function upVotePost(state=initialState, action) {
if(action.type!==UP_VOTE_POST) {
return state
}
const {id, voteScore} = action
const post = state.posts.byId[id]
const postIdsByCategory = state.posts.byCategory[post.category]
const allPostIds = state.posts.allIds
return {
...state,
posts: {
byId: {
...state.posts.byId,
[id]: {
id,
title: post.title,
body: post.body,
author: post.author,
category: post.category,
timestamp: post.timestamp,
commentIds: post.commentIds,
voteScore: voteScore,
deleted: post.deleted,
commentCount: post.commentCount
}
},
byCategory: {
...state.posts.byCategory,
[post.category]: postIdsByCategory
},
allIds: allPostIds
}
}
}
export function downVotePost(state=initialState, action) {
if(action.type!==DOWN_VOTE_POST) {
return state
}
const {id, voteScore} = action
const post = state.posts.byId[id]
const postIdsByCategory = state.posts.byCategory[post.category]
const allPostIds = state.posts.allIds
return {
...state,
posts: {
byId: {
...state.posts.byId,
[id]: {
id,
title: post.title,
body: post.body,
author: post.author,
category: post.category,
timestamp: post.timestamp,
commentIds: post.commentIds,
voteScore: voteScore,
deleted: post.deleted,
commentCount: post.commentCount
}
},
byCategory: {
...state.posts.byCategory,
[post.category]: postIdsByCategory
},
allIds: allPostIds
}
}
}
export function addComment(state=initialState, action) {
if(action.type!==ADD_COMMENT) {
return state
}
let {id, timestamp, voteScore, deleted, parentDeleted, body, author, parentId} = action
const post = state.posts.byId[parentId]
if(post==null) {
return state
}
if(voteScore===null) {
voteScore = 0
}
if(deleted===null) {
deleted = false
}
if(parentDeleted===null) {
parentDeleted = false
}
const allCommentIds = state.comments.allIds
const newAllCommandIds = allCommentIds.concat([id])
let allCommentIdsByPost = []
if(post!=null && post.commentIds!=null) {
allCommentIdsByPost = post.commentIds
}
const newAllCommentIdsByPost = allCommentIdsByPost.concat([id])
return {
...state,
posts: {
...state.posts,
byId: {
...state.posts.byId,
[post.id]: {
id: post.id,
title: post.title,
body: post.body,
author: post.author,
category: post.category,
timestamp: post.timestamp,
commentIds: newAllCommentIdsByPost,
voteScore: post.voteScore,
commentCount: newAllCommentIdsByPost.length,
deleted: post.deleted
}
}
},
comments: {
byId: {
...state.comments.byId,
[id]: {
id: id,
timestamp: timestamp,
body: body,
author: author,
parentId: parentId,
voteScore: voteScore,
deleted: deleted,
parentDeleted: parentDeleted
}
},
allIds: newAllCommandIds
}
}
}
export function updateComment(state=initialState, action) {
if(action.type!==UPDATE_COMMENT) {
return state
}
const { id, timestamp, body} = action
const comment = state.comments.byId[id]
const allCommentIds = state.comments.allIds
return {
...state,
comments: {
byId: {
...state.comments.byId,
[id]: {
id: id,
timestamp: timestamp,
body: body,
author: comment.author,
parentId: comment.parentId,
voteScore: comment.voteScore,
deleted: comment.deleted,
parentDeleted: comment.parentDeleted
}
},
allIds: allCommentIds
}
}
}
export function removeComment(state=initialState, action) {
if(action.type!==REMOVE_COMMENT) {
return state
}
const { id, parentId} = state.comments.byId[action.id]
const newCommentsByPost = state.posts.byId[parentId].commentIds.filter(c => c!==id)
const newAllCommentIds = state.comments.allIds.filter(c => c!==id)
const post = state.posts.byId[parentId]
const postIdsByCategory = state.posts.byCategory
const allPostIds = state.posts.allIds
return {
...state,
posts: {
byId: {
...state.posts.byId,
[post.id]: {
id: post.id,
title: post.title,
body: post.body,
author: post.author,
category: post.category,
timestamp: post.timestamp,
commentIds: newCommentsByPost,
voteScore: post.voteScore,
commentCount: newCommentsByPost.length,
deleted: post.deleted
}
},
            byCategory: postIdsByCategory,
allIds: allPostIds
},
comments: {
byId: {
...state.comments.byId,
[id]: null
},
allIds: newAllCommentIds
}
}
}
export function upVoteComment(state=initialState, action) {
if(action.type!==UP_VOTE_COMMENT) {
return state
}
const {id, voteScore} = action
const comment = state.comments.byId[id]
const allCommentIds = state.comments.allIds
return {
...state,
comments: {
byId: {
...state.comments.byId,
[id]: {
id: id,
timestamp: comment.timestamp,
body: comment.body,
author: comment.author,
parentId: comment.parentId,
voteScore: voteScore,
deleted: comment.deleted,
parentDeleted: comment.parentDeleted
}
},
allIds: allCommentIds
}
}
}
export function downVoteComment(state=initialState, action) {
if(action.type!==DOWN_VOTE_COMMENT) {
return state
}
const {id, voteScore} = action
const comment = state.comments.byId[id]
const allCommentIds = state.comments.allIds
return {
...state,
comments: {
byId: {
...state.comments.byId,
[id]: {
id: id,
timestamp: comment.timestamp,
body: comment.body,
author: comment.author,
parentId: comment.parentId,
voteScore: voteScore,
deleted: comment.deleted,
parentDeleted: comment.parentDeleted
}
},
allIds: allCommentIds
}
}
}
export function addCategories(state=initialState, action) {
if(action.type!==ADD_CATEGORIES) {
return state
}
const categories = action.categories
return {
...state,
categories: categories
}
}
// function getCommentsByPost(state=initialState, action) {
// return state.posts.byId[action.id].commentIds.map(i => state.comments.byId[i]);
// }
// function getAllPosts(state=initialState, action) {
// return state.posts.allIds.map(i => state.posts.byId[i]);
// }
// function getPostsByCategory(state=initialState, action) {
// return state.posts.byCategory[action.category].map(i => state.posts.byId[i]);
// }
//
// function getPostById(state=initialState, action) {
// return state.posts.byId[action.id];
// }
//
// function getAllCategories(state=initialState, action) {
// return state.categories.map(c => c.name)
// }
function forum(state = initialState, action) {
switch(action.type) {
case ADD_POST :
return addPost(state, action)
case UPDATE_POST :
return updatePost(state, action)
case REMOVE_POST :
return removePost(state, action)
case UP_VOTE_POST :
return upVotePost(state, action)
case DOWN_VOTE_POST :
return downVotePost(state, action)
case ADD_COMMENT :
return addComment(state, action)
case UPDATE_COMMENT :
return updateComment(state, action)
case REMOVE_COMMENT :
return removeComment(state, action)
case UP_VOTE_COMMENT :
return upVoteComment(state, action)
case DOWN_VOTE_COMMENT :
return downVoteComment(state, action)
case ADD_CATEGORIES :
return addCategories(state, action)
default :
return state
}
}
export default combineReducers({
forum
})<file_sep>/src/components/CommentCreateEdit.js
import React, { Component } from 'react';
import * as ReadableAPI from "../utils/ReadableAPI";
import {
addCommentAction, updateCommentAction
} from "../actions";
import {connect} from "react-redux";
import {getId} from "../utils/ReadableAPI";
class CommentCreateEdit extends Component {
state = {
id: "",
parentId: "",
body: "",
author: "",
}
constructor(props) {
super(props)
const {commentId, comments} = this.props
if(commentId!=null) {
this._body = comments.byId[commentId].body
this._author = comments.byId[commentId].author
}
this.handleSaveClick = this.handleSaveClick.bind(this);
this.handleCloseClick = this.handleCloseClick.bind(this);
}
handleSaveClick() {
const body = this._body.value
const author = this._author.value
const {closeModal, actions} = this.props
if(this.state.id===null) {
const newId=getId()
ReadableAPI.createComment(
{
id: newId,
parentId: this.props.parentId,
body: body,
author: author,
timestamp: Date.now(),
voteScore: 0,
deleted: false,
parentDeleted: false
}).then(
(comment) => {
this.setState({
id: newId,
parentId: this.props.parentId,
body: body,
author: author,
timestamp: comment.timestamp,
voteScore: comment.voteScore,
deleted: false,
parentDeleted: false
}
)
actions.addCommentProp({
id: newId,
                    parentId: this.props.parentId,
body: body,
author: author,
timestamp: comment.timestamp,
voteScore: comment.voteScore,
deleted: false,
parentDeleted: false
})
if(closeModal!=null) {
closeModal()
}
}
)
}
else {
ReadableAPI.updateComment(
{
id: this.state.id,
author: author,
body: body,
timestamp: Date.now()
}).then(
(comment) => {
this.setState({
id: this.state.id,
parentId: this.props.parentId,
body: body,
author: author,
timestamp: comment.timestamp
}
)
actions.updateCommentProp({
id: this.state.id,
parentId: this.props.parentId,
body: body,
author: author,
timestamp: comment.timestamp})
if(closeModal!=null) {
closeModal()
}
}
)
}
}
handleCloseClick() {
const {closeModal} = this.props
if (closeModal != null) {
closeModal()
}
}
componentDidMount() {
const {closeModal, commentId, comments} = this.props
this._body = this.state.body
this._author = this.state.author
this.setState({
closeModal: closeModal==null?null:closeModal,
id: commentId==null?null:commentId,
body: commentId==null?"":comments.byId[commentId].body,
author: commentId==null?"":comments.byId[commentId].author,
parentId: commentId==null?"":comments.byId[commentId].parentId,
timestamp: commentId==null?"":comments.byId[commentId].timestamp
})
}
render() {
return (
<div className="form" id="commentCreateEdit">
<fieldset title={this.state.id===null?"Create Comment":"Modify Comment"}>
<label className="field-label">Body:</label><textarea rows={4} cols={80} className="field" type="text" minLength="255" maxLength="255"
ref={input => this._body = input} defaultValue={this._body}/><br/>
{this.state.id === null ? (
<div>
<label className="field-label">Author:</label><input className="field" type="text" minLength="60" maxLength="60"
ref={input => this._author = input} defaultValue={this._author}/><br/>
</div>) : (
<div>
<label className="field-label">Author:</label><input readOnly={true} className="field" type="text" minLength="60" maxLength="60" ref={input => this._author = input} defaultValue={this._author}/>
</div>
)
}
<button className="button" onClick={this.handleCloseClick}>Close</button>
<button className="button" onClick={this.handleSaveClick}>Save</button>
</fieldset>
</div>
)
}
}
function mapStateToProps ({ forum }) {
return {
categories: forum.categories,
posts: forum.posts,
comments: forum.comments
}
}
function mapDispatchToProps (dispatch) {
return {
actions: {
addCommentProp: (data) => dispatch(addCommentAction(data)),
updateCommentProp: (data) => dispatch(updateCommentAction(data))
}
}
}
export default connect(
mapStateToProps,
mapDispatchToProps
)(CommentCreateEdit)<file_sep>/src/components/PostDetails.js
import React, { Component } from 'react'
import CommentCreateEdit from "./CommentCreateEdit"
import PostCreateEdit from "./PostCreateEdit"
import {Link} from 'react-router-dom'
import * as ReadableAPI from "../utils/ReadableAPI"
import {
addCategoriesAction, addCommentAction, addPostAction, downVoteCommentAction, downVotePostAction,
removeCommentAction, removePostAction, updateCommentAction,
updatePostAction,
upVoteCommentAction,
upVotePostAction
} from "../actions";
import {connect} from "react-redux";
import Modal from "react-modal";
Modal.setAppElement('#root');
class PostDetails extends Component {
static contextTypes = {
router: () => true // replace with PropTypes.object if you use them
}
state = {
isCreateCommentModalOpen: false,
isEditCommentModalOpen: [],
isCreatePostModalOpen: false,
isEditPostModalOpen: false,
}
openCreatePostModal() {
this.setState(() => ({isCreatePostModalOpen: true}))
}
closeCreatePostModal(){
this.setState(() => ({isCreatePostModalOpen: false}))
}
openEditPostModal(index) {
this.setState(() => ({isEditPostModalOpen: true}))
}
closeEditPostModal(){
this.setState(() => ({isEditPostModalOpen: false}))
}
handleDownVotePost = (postId, index) => {
const {actions} = this.props
ReadableAPI.downVotePost(postId).then(
(post) => {
actions.downVotePostProp({id: post.id, voteScore: post.voteScore})
}
)
}
handleUpVotePost = (postId, index) => {
const {actions} = this.props
ReadableAPI.upVotePost(postId).then(
(post) => {
actions.upVotePostProp({id: post.id, voteScore: post.voteScore})
}
)
}
handleDeletePost = (postId, index) => {
const {actions} = this.props
ReadableAPI.deletePost(postId).then(
(post) => {
actions.removePostProp(post.id)
this.context.router.history.goBack()
}
)
}
componentDidMount() {
const path = this.props.location.pathname
const postId = path.substring(path.lastIndexOf('/')+1, path.length)
const post = this.props.posts.byId[postId]
const comments = this.props.posts.byId[postId].commentIds.map( i => this.props.comments.byId[i]).filter(c => (c.deleted===false && c.parentDeleted===false))
this.setState(
{
post: post,
comments: comments,
isSortedByTimestamp: true,
isSortedByVoteScore: false,
isAscending: true
})
}
openCreateCommentModal() {
this.setState(() => ({isCreateCommentModalOpen: true}))
}
closeCreateCommentModal(){
this.setState(() => ({isCreateCommentModalOpen: false}))
}
openEditCommentModal(index) {
const isEditCommentModalOpen = this.state.isEditCommentModalOpen.map(f => false)
isEditCommentModalOpen[index] = true
this.setState(() => ({isEditCommentModalOpen: isEditCommentModalOpen}))
}
closeEditCommentModal(){
const isEditCommentModalOpen = this.state.isEditCommentModalOpen.map(f => false)
this.setState(() => ({isEditCommentModalOpen: isEditCommentModalOpen}))
}
handleDownVoteComment = (commentId, index) => {
const {actions} = this.props
ReadableAPI.downVoteComment(commentId).then(
(comment) => {
actions.downVoteCommentProp({id: comment.id, voteScore: comment.voteScore})
}
)
}
handleUpVoteComment = (commentId, index) => {
const {actions} = this.props
ReadableAPI.upVoteComment(commentId).then(
(comment) => {
actions.upVoteCommentProp({id: comment.id, voteScore: comment.voteScore})
}
)
}
handleDeleteComment = (commentId, index) => {
const {actions} = this.props
ReadableAPI.deleteComment(commentId).then(
(comment) => {
let commentsCloned=this.state.comments.slice()
commentsCloned[index] = comment
this.setState(
{
comments: commentsCloned
})
actions.removeCommentProp(commentId)
}
)
}
handleSortByTimestamp = () => {
let isAscending = true
if(this.state.isSortedByTimestamp) {
isAscending=!this.state.isAscending
}
this.setState({isSortedByTimestamp: true, isSortedByVoteScore: false, isAscending: isAscending})
}
handleSortByVoteScore = () => {
let isAscending = true
if(this.state.isSortedByVoteScore) {
isAscending=!this.state.isAscending
}
this.setState({isSortedByTimestamp: false, isSortedByVoteScore: true, isAscending: isAscending})
}
constructor(props) {
super(props)
this.openCreateCommentModal = this.openCreateCommentModal.bind(this);
this.closeCreateCommentModal = this.closeCreateCommentModal.bind(this);
this.openEditCommentModal = this.openEditCommentModal.bind(this);
this.closeEditCommentModal = this.closeEditCommentModal.bind(this);
this.openCreatePostModal = this.openCreatePostModal.bind(this);
this.closeCreatePostModal = this.closeCreatePostModal.bind(this);
this.openEditPostModal = this.openEditPostModal.bind(this);
this.closeEditPostModal = this.closeEditPostModal.bind(this);
this.handleSortByTimestamp = this.handleSortByTimestamp.bind(this);
this.handleSortByVoteScore = this.handleSortByVoteScore.bind(this);
}
render() {
const path = this.props.location.pathname
const postId = path.substring(path.lastIndexOf('/')+1, path.length)
const post = this.props.posts.byId[postId]
let {actions, categories, posts, comments} = this.props
if(actions==null) {
actions = {}
}
if(categories==null) {
categories = []
}
if(posts==null) {
posts = {allIds: [], byId: {}, byCategory: {}}
}
if(comments==null) {
comments = {allIds: [], byId: {}}
}
const {isSortedByTimestamp, isSortedByVoteScore, isAscending} = this.state
const handleDeleteComment = this.handleDeleteComment
const isEditCommentModalOpen = this.state.isEditCommentModalOpen
const closeEditCommentModal = this.closeEditCommentModal
const openEditCommentModal = this.openEditCommentModal
const handleDownVoteComment = this.handleDownVoteComment
const handleUpVoteComment = this.handleUpVoteComment
const handleDeletePost = this.handleDeletePost
const isEditPostModalOpen = this.state.isEditPostModalOpen
const closeEditPostModal = this.closeEditPostModal
const openEditPostModal = this.openEditPostModal
const handleDownVotePost = this.handleDownVotePost
const handleUpVotePost = this.handleUpVotePost
const postDateTime = new Date(post.timestamp)
const postDateTimeStr=postDateTime.toLocaleString()
return (
<div>
<h1>Post Details:</h1>
<div className="form" id="postDetails">
<table>
<tbody>
<tr className="table-row-even"><td className="table-column-label"><label className="field-label">Id:</label></td><td className="table-column-value"><label className="field-value">{post.id}</label></td></tr>
<tr className="table-row-odd"><td className="table-column-label"><label className="field-label">Timestamp:</label></td><td className="table-column-value"><label className="field-value">{postDateTimeStr}</label></td></tr>
<tr className="table-row-even"><td className="table-column-label"><label className="field-label">Title:</label></td><td className="table-column-value"><label className="field-value">{post.title}</label></td></tr>
<tr className="table-row-odd"><td className="table-column-label"><label className="field-label">Body:</label></td><td className="table-column-value"><label className="field-value">{post.body}</label></td></tr>
<tr className="table-row-even"><td className="table-column-label"><label className="field-label">Author:</label></td><td className="table-column-value"><label className="field-value">{post.author}</label></td></tr>
<tr className="table-row-odd"><td className="table-column-label"><label className="field-label">Category:</label></td><td className="table-column-value"><label className="field-value">{post.author}</label></td></tr>
<tr className="table-row-even"><td className="table-column-label"><label className="field-label">Vote Score:</label></td><td className="table-column-value"><label className="field-value">{post.voteScore}</label></td></tr>
<tr className="table-row-odd"><td className="table-column-label"><label className="field-label">Comment Count:</label></td><td className="table-column-value"><label className="field-value">{post.commentCount}</label></td></tr>
</tbody>
</table>
                <table>
                    <tbody>
                    <tr>
<td><button onClick={() => handleDeletePost(post.id)} name="Delete">Delete</button></td>
<td><button onClick={() => handleDownVotePost(post.id)} name="Vote Down">Vote Down</button></td>
<td><button onClick={() => handleUpVotePost(post.id)} name="Vote Up">Vote Up</button></td>
<td>
<button
className='button'
onClick={() => openEditPostModal()}>
Edit
</button>
<div>
<Modal
className='modal'
overlayClassName='overlay'
isOpen={isEditPostModalOpen}
onRequestClose={closeEditPostModal}
contentLabel='Edit Post'
backdropColor = {'white'}
backdropOpacity = {1}
animationIn={'slideInLeft'}
animationOut={'slideOutRight'}
>
{isEditPostModalOpen && <PostCreateEdit closeModal={closeEditPostModal} postId={post.id}/>}
</Modal>
</div>
</td>
                    </tr>
                    </tbody>
                </table>
</div>
<div id="commentsForPost">
<h1>Comments:</h1>
<table>
<thead className="table-header">
<tr>
<th>Id</th>
<th>Timestamp<button><img height='20' width='20' src={(isSortedByTimestamp?(isAscending?'../sort-up.svg':'../sort-down.svg'):'../sort-arrows-couple-pointing-up-and-down.svg')} alt="sort by timestamp" onClick={this.handleSortByTimestamp} /></button></th>
<th>Body</th>
<th>Author</th>
<th>Vote Score<button><img height='20' width='20' src={(isSortedByVoteScore?(isAscending?'../sort-up.svg':'../sort-down.svg'):'../sort-arrows-couple-pointing-up-and-down.svg')} alt="sort by vote score" onClick={this.handleSortByVoteScore} /></button></th>
<th>Down Vote</th>
<th>Up Vote</th>
<th>Delete</th>
<th>Edit</th>
</tr>
</thead>
<tbody>
{(comments===null || comments.allIds==null || comments.allIds.length===0 || post===null || post.commentCount===0) ?
<tr><td colSpan={12}>no comments available</td></tr>
:comments.allIds.map(commentId => comments.byId[commentId] )
.filter(comment => comment.deleted===false)
.filter(comment => comment.parentId===postId)
.sort(function(a,b) {
if(isSortedByTimestamp) {
if(isAscending) {
return a.timestamp - b.timestamp
}
else {
return b.timestamp - a.timestamp
}
}
else if(isSortedByVoteScore) {
if(isAscending) {
return a.voteScore - b.voteScore
}
else {
return b.voteScore - a.voteScore
}
}
return 0
})
.map(function (comment, index) {
const commentDateTime = new Date(comment.timestamp)
const commentDateTimeStr=commentDateTime.toLocaleString()
return (
<tr className={index%2===0?"table-row-even":"table-row-odd"} key={comment.id}>
<td>{comment.id}</td>
<td>{commentDateTimeStr}</td>
<td>{comment.body}</td>
<td>{comment.author}</td>
<td>{comment.voteScore}</td>
<td><button onClick={() => handleDownVoteComment(comment.id, index)} name="Vote Down">Vote Down</button></td>
<td><button onClick={() => handleUpVoteComment(comment.id, index)} name="Vote Up">Vote Up</button></td>
<td><button onClick={() => handleDeleteComment(comment.id, index)} name="Delete">Delete</button></td>
<td>
<button
className='button'
onClick={() => openEditCommentModal(index)}>
Edit
</button>
<div>
<Modal
className='modal'
overlayClassName='overlay'
isOpen={isEditCommentModalOpen[index]}
onRequestClose={closeEditCommentModal}
contentLabel='Edit Comment'
backdropColor = {'white'}
backdropOpacity = {1}
animationIn={'slideInLeft'}
animationOut={'slideOutRight'}
>
{isEditCommentModalOpen[index] && <CommentCreateEdit closeModal={closeEditCommentModal} commentId={comment.id}/>}
</Modal>
</div>
</td>
</tr>
)})}
</tbody>
</table>
</div>
<div id="addComment">
<table>
<tbody>
<tr>
<td>
<button
className="button"
onClick={this.context.router.history.goBack}>
Back
</button>
</td>
<td>
<Link to={{
pathname: '/'
}}>
Home
</Link>
</td>
<td>
<button
className='button'
onClick={() => this.openCreateCommentModal()}>
Create
</button>
<div>
<Modal
className='modal'
overlayClassName='overlay'
isOpen={this.state.isCreateCommentModalOpen}
onRequestClose={this.closeCreateCommentModal}
contentLabel='Create Comment'
backdropColor = {'white'}
backdropOpacity = {1}
animationIn={'slideInLeft'}
animationOut={'slideOutRight'}
>
{this.state.isCreateCommentModalOpen && <CommentCreateEdit closeModal={this.closeCreateCommentModal} parentId={post.id}/>}
</Modal>
</div>
</td>
</tr>
</tbody>
</table>
</div>
</div>
);
}
}
function mapStateToProps ({ forum }) {
return {
categories: forum.categories,
posts: forum.posts,
comments: forum.comments
}
}
function mapDispatchToProps (dispatch) {
return {
actions: {
addPostProp: (data) => dispatch(addPostAction(data)),
removePostProp: (data) => dispatch(removePostAction(data)),
updatePostProp: (data) => dispatch(updatePostAction(data)),
upVotePostProp: (data) => dispatch(upVotePostAction(data)),
downVotePostProp: (data) => dispatch(downVotePostAction(data)),
addCommentProp: (data) => dispatch(addCommentAction(data)),
removeCommentProp: (data) => dispatch(removeCommentAction(data)),
updateCommentProp: (data) => dispatch(updateCommentAction(data)),
upVoteCommentProp: (data) => dispatch(upVoteCommentAction(data)),
downVoteCommentProp: (data) => dispatch(downVoteCommentAction(data)),
addCategoriesProp: (data) => dispatch(addCategoriesAction(data))
}
}
}
export default connect(
mapStateToProps,
mapDispatchToProps
)(PostDetails) | e1ef87798a5af8520ecb53ee43be357528e60af6 | [
"JavaScript",
"Markdown"
] | 6 | JavaScript | oliverhager29/readable | bab7073d105560f4594b8f8c19f2e86d664b11ee | 0873a8ba9a19fddb4747800f3f816529e037cdfe | |
refs/heads/main | <repo_name>Z0neNp/maxim_chanturiay<file_sep>/frontend/src/app/education/education.component.spec.ts
import { DebugNode } from '@angular/core';
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { environment } from "../../environments/environment";
import { AppModule } from "../app.module";
import { EducationComponent } from "./education.component";
import { EducationRsmElDirective } from "./rsm-el.directive";
import { EducationRsmElTitleDirective } from "./rsm-el-title.directive";
describe('EducationComponent', () => {
let component: EducationComponent;
let fixture: ComponentFixture<EducationComponent>;
let educationRsmElTitleDirectiveQueryResponse: DebugNode[];
let educationRsmElDirectiveQueryResponse: DebugNode[];
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [ EducationComponent, EducationRsmElDirective, EducationRsmElTitleDirective ],
imports: [ AppModule ]
})
.compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(EducationComponent);
component = fixture.componentInstance;
fixture.detectChanges();
educationRsmElTitleDirectiveQueryResponse = fixture.debugElement.queryAll(
By.directive(EducationRsmElTitleDirective)
);
educationRsmElDirectiveQueryResponse = fixture.debugElement.queryAll(
By.directive(EducationRsmElDirective)
)
});
it('should create', () => {
expect(component).toBeTruthy();
});
it("should have as title \"Education\"", () => {
expect(component.title).toEqual('Education');
});
it("should contain the directives", () => {
expect(educationRsmElTitleDirectiveQueryResponse).toHaveSize(1);
expect(educationRsmElDirectiveQueryResponse).toHaveSize(1);
});
it("should render the EducationRsmElTitleDirective with default style", () => {
let directive: DebugNode = educationRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
expect(el.innerHTML).toContain("Education");
expect(el.classList).toContain("rsm-el-title");
});
it("should show EducationRsmElTitleDirective as pressed when touch starts", () => {
let directive: DebugNode = educationRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
expect(el.classList).toContain("pressed");
});
it("should show EducationRsmElTitleDirective as non pressed when touch ends", (done) => {
let directive: DebugNode = educationRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
el.dispatchEvent(new Event("touchend"));
setTimeout(() => {
expect(el.classList).not.toContain("pressed");
done();
}, environment.event.rendered_delay);
});
it(
"should show EducationRsmElDirective when EducationRsmElTitleDirective touch ends",
(done) => {
let directive: DebugNode = educationRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
el.dispatchEvent(new Event("touchend"));
directive = educationRsmElDirectiveQueryResponse[0];
el = directive.nativeNode;
setTimeout(() => {
expect(el.classList).toContain("rsm-el-shown");
expect(el.classList).not.toContain("rsm-el-not-shown");
done();
}, environment.event.rendered_delay);
}
);
});
<file_sep>/README.md
# About
This is the source code for my interactive resume.
You can visit it at http://172.16.31.10
<file_sep>/frontend/src/app/reusable-directives/rsm-el-title.directive.ts
import { Directive, ElementRef, HostListener, OnInit } from '@angular/core';
import { environment } from '../../environments/environment';
import { AppModule } from '../app.module';
import { LoggerService } from '../reusable-services/logger.service';
@Directive()
export class RsmElTitleDirective implements OnInit {
protected logger: LoggerService;
constructor(private el: ElementRef) {
this.logger = AppModule.injector.get(LoggerService);
}
ngOnInit(): void {
this.defaultStyle();
}
@HostListener('touchend')
onTouchEnd() {
this.showNonPressed();
}
@HostListener('touchstart')
onTouchStart() {
this.showPressed();
}
protected defaultStyle() {
this.el.nativeElement.classList.add("rsm-el-title");
}
protected showNonPressed() {
setTimeout(() => {
this.el.nativeElement.classList.remove("pressed");
this.el.nativeElement.scrollIntoView();
}, environment.event.pressed_delay);
}
protected showPressed() {
this.el.nativeElement.classList.add("pressed");
}
}
<file_sep>/frontend/src/app/contact-info/chip.spec.ts
import { Chip } from './chip';
describe('Chip', () => {
it('should create an instance', () => {
    expect(new Chip('', '', '', '')).toBeTruthy();
});
});
<file_sep>/frontend/src/app/contact-info/contact-info.component.spec.ts
import { DebugNode } from '@angular/core';
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { environment } from "../../environments/environment";
import { AppModule } from "../app.module";
import { ContactInfoComponent } from './contact-info.component';
import { ContactInfoRsmElTitleDirective } from './rsm-el-title.directive';
import { ContactInfoRsmElDirective } from './rsm-el.directive';
describe('ContactInfoComponent', () => {
let component: ContactInfoComponent;
let fixture: ComponentFixture<ContactInfoComponent>;
let contactInfoRsmElTitleDirectiveQueryResponse: DebugNode[];
let contactInfoRsmElDirectiveQueryResponse: DebugNode[];
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [
ContactInfoComponent, ContactInfoRsmElTitleDirective, ContactInfoRsmElDirective
],
imports: [ AppModule ]
})
.compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(ContactInfoComponent);
component = fixture.componentInstance;
fixture.detectChanges();
contactInfoRsmElTitleDirectiveQueryResponse = fixture.debugElement.queryAll(
By.directive(ContactInfoRsmElTitleDirective)
);
contactInfoRsmElDirectiveQueryResponse = fixture.debugElement.queryAll(
By.directive(ContactInfoRsmElDirective)
)
});
it('should create', () => {
expect(component).toBeTruthy();
});
it("should have as title \"Contact Details\"", () => {
expect(component.title).toEqual('Contact Details');
});
it("should contain the directives", () => {
expect(contactInfoRsmElTitleDirectiveQueryResponse).toHaveSize(1);
expect(contactInfoRsmElDirectiveQueryResponse).toHaveSize(1);
});
it("should render the ContactInfoRsmElTitleDirective with default style", () => {
let directive: DebugNode = contactInfoRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
expect(el.innerHTML).toContain("Contact Details");
expect(el.classList).toContain("rsm-el-title");
});
it("should show ContactInfoRsmElTitleDirective as pressed when touch starts", () => {
let directive: DebugNode = contactInfoRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
expect(el.classList).toContain("pressed");
});
it(
"should show ContactInfoRsmElTitleDirective as non pressed when touch ends",
(done) => {
let directive: DebugNode = contactInfoRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
el.dispatchEvent(new Event("touchend"));
setTimeout(() => {
expect(el.classList).not.toContain("pressed");
done();
}, environment.event.rendered_delay);
}
);
it(
"should show ContactInfoRsmElDirective when ContactInfoRsmElTitleDirective touch ends",
(done) => {
let directive: DebugNode = contactInfoRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
el.dispatchEvent(new Event("touchend"));
directive = contactInfoRsmElDirectiveQueryResponse[0];
el = directive.nativeNode;
setTimeout(() => {
expect(el.classList).toContain("rsm-el-shown");
expect(el.classList).not.toContain("rsm-el-not-shown");
done();
}, environment.event.rendered_delay);
}
);
});
<file_sep>/frontend/src/app/enterprise-experience/software-tool.ts
import { Icon } from "./icon";
export class SoftwareTool {
icon: Icon;
link: string;
name: string;
constructor(src: string, alt: string, link: string, name: string) {
this.icon = new Icon(src, alt);
this.link = link;
this.name = name;
}
}
<file_sep>/frontend/src/app/reusable-directives/rsm-el.directive.ts
import { Directive, ElementRef, OnInit } from '@angular/core';
import { environment } from '../../environments/environment';
import { AppModule } from '../app.module';
import { LoggerService } from '../reusable-services/logger.service';
@Directive()
export class RsmElDirective implements OnInit {
protected logger: LoggerService;
constructor(private el: ElementRef) {
this.logger = AppModule.injector.get(LoggerService);
}
ngOnInit(): void {
if (window.innerWidth < 1024)
this.doNotShowDirective();
}
protected doNotShowDirective(): void {
this.el.nativeElement.classList.remove("rsm-el-shown");
this.el.nativeElement.classList.add("rsm-el-not-shown");
setTimeout(() => {
this.el.nativeElement.classList.remove("rendered");
this.el.nativeElement.classList.add("not-rendered");
}, environment.event.rendered_delay);
}
protected showDirective(): void {
this.el.nativeElement.classList.remove("not-rendered");
this.el.nativeElement.classList.add("rendered");
setTimeout(() => {
this.el.nativeElement.classList.remove("rsm-el-not-shown");
this.el.nativeElement.classList.add("rsm-el-shown");
}, environment.event.rendered_delay);
}
}
<file_sep>/frontend/src/environments/environment.prod.ts
export const environment = {
event: {
pressed_delay: 100,
rendered_delay: 70
},
logger: {
debug: false
},
production: true
};
<file_sep>/frontend/src/app/reusable-services/logger.service.ts
import { Injectable } from '@angular/core';
import { environment } from '../../environments/environment';
@Injectable({
providedIn: 'root'
})
export class LoggerService {
constructor() {}
public debug(class_name: String, method_name: String, msg: String): void {
if (environment.logger.debug)
console.debug(`DEBUG -- ${class_name} -- ${method_name}() -- ${msg}`);
}
public error(class_name: String, method_name: String, msg: String): void {
console.error(`ERROR -- ${class_name} -- ${method_name}() -- ${msg}`);
}
}
<file_sep>/frontend/src/app/contact-info/chip.ts
import { Icon } from "./icon";
export class Chip {
icon: Icon;
link: string;
value: string;
constructor(src: string, alt: string, link: string, value: string) {
this.icon = new Icon(src, alt);
this.link = link;
this.value = value;
}
public hasLink(): boolean {
return this.link.length !== 0;
}
}
<file_sep>/frontend/src/app/enterprise-experience/rsm-el.directive.ts
import { Directive, ElementRef } from '@angular/core';
import { Subscription } from 'rxjs';
import { RsmElDirective } from "../reusable-directives/rsm-el.directive";
import { RsmElState } from "../reusable-enums/rsm-el-state";
import { EntExpRsmElTitleToRsmElCommService } from "./rsm-el-title-to-rsm-el-comm.service";
@Directive({
selector: '[appEntExpRsmEl]'
})
export class EntExpRsmElDirective extends RsmElDirective {
private commToRsmElTitle: Subscription;
constructor(
el: ElementRef,
private commToRsmElTitleService: EntExpRsmElTitleToRsmElCommService
) {
super(el);
this.commToRsmElTitle = this.commToRsmElTitleService.rsmElTitleToRsmEl
.subscribe((state: RsmElState) => { this.consumeRsmElState(state); }
);
}
ngOnDestroy(): void {
this.logger.debug("EntExpRsmElDirective", "ngOnDestroy", "Start");
this.commToRsmElTitle.unsubscribe();
this.logger.debug("EntExpRsmElDirective", "ngOnDestroy", "Finish");
}
private consumeRsmElState(state: RsmElState): void {
this.logger.debug("EntExpRsmElDirective", "consumeRsmElState", `Start - state: ${state}`);
if (state == RsmElState.SHOWN)
this.showDirective();
else if (state == RsmElState.NOT_SHOWN)
this.doNotShowDirective();
this.logger.debug("EntExpRsmElDirective", "consumeRsmElState", "Finish");
}
protected doNotShowDirective(): void {
this.logger.debug("EntExpRsmElDirective", "doNotShowDirective", "Start");
super.doNotShowDirective();
this.logger.debug("EntExpRsmElDirective", "doNotShowDirective", "Finish");
}
protected showDirective(): void {
this.logger.debug("EntExpRsmElDirective", "showDirective", "Start");
super.showDirective();
this.logger.debug("EntExpRsmElDirective", "doNotShowDirective", "Finish");
}
}
<file_sep>/frontend/src/app/reusable-services/rsm-el-title-to-rsm-el-comm.service.ts
import { Observable, Subject } from 'rxjs';
import { RsmElState } from "../reusable-enums/rsm-el-state";
export class RsmElTitleToRsmElCommService {
public rsmElTitleToRsmEl: Observable<RsmElState>;
private rsmElTitleToRsmElSource: Subject<RsmElState>;
constructor() {
this.rsmElTitleToRsmElSource = new Subject<RsmElState>();
this.rsmElTitleToRsmEl = this.rsmElTitleToRsmElSource.asObservable();
}
public showRsmEl(): void {
this.rsmElTitleToRsmElSource.next(RsmElState.SHOWN);
}
public doNotShowRsmEl(): void {
this.rsmElTitleToRsmElSource.next(RsmElState.NOT_SHOWN);
}
}
<file_sep>/frontend/src/app/enterprise-experience/rsm-el-title-to-rsm-el-titles-comm.service.ts
import { Injectable } from '@angular/core';
import { RsmElTitleToRsmElTitlesCommService } from "../reusable-services/rsm-el-title-to-rsm-el-titles-comm.service";
@Injectable({
providedIn: 'root'
})
export class EntExpRsmElTitleToRsmElTitlesCommService extends RsmElTitleToRsmElTitlesCommService {
}
<file_sep>/frontend/src/app/enterprise-experience/software-tool.spec.ts
import { SoftwareTool } from './software-tool';
describe('SoftwareTool', () => {
it('should create an instance', () => {
expect(new SoftwareTool()).toBeTruthy();
});
});
<file_sep>/frontend/src/environments/environment.test.ts
export const environment = {
event: {
pressed_delay: 10,
rendered_delay: 10
},
logger: {
debug: false
},
production: false
};<file_sep>/frontend/src/app/enterprise-experience/enterprise-experience.component.ts
import { Component, OnInit } from '@angular/core';
import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
import { Position } from "./position";
import { POSITIONS } from "./mock-positions";
@Component({
selector: 'app-enterprise-experience',
templateUrl: './enterprise-experience.component.html',
styleUrls: ['./enterprise-experience.component.sass']
})
export class EnterpriseExperienceComponent implements OnInit {
title: String;
positions: Position[];
constructor(private sanitizer: DomSanitizer) {
this.title = "Enterprise Experience";
this.positions = POSITIONS;
}
ngOnInit(): void {
}
pngSrcFromBase64(base64Val: string): SafeResourceUrl {
return this.sanitizer.bypassSecurityTrustResourceUrl(
'data:image/png;base64,' + base64Val
)
}
}
<file_sep>/frontend/src/app/portfolio/rsm-el-title.directive.ts
import { Directive, ElementRef, OnDestroy } from '@angular/core';
import { Subscription } from 'rxjs';
import { AbtMeRsmElTitleToRsmElTitlesCommService } from "../about-me/rsm-el-title-to-rsm-el-titles-comm.service";
import { ContactInfoRsmElTitleToRsmElTitlesCommService } from "../contact-info/rsm-el-title-to-rsm-el-titles-comm.service";
import { EducationRsmElTitleToRsmElTitlesCommService } from "../education/rsm-el-title-to-rsm-el-titles-comm.service";
import { EntExpRsmElTitleToRsmElTitlesCommService } from "../enterprise-experience/rsm-el-title-to-rsm-el-titles-comm.service";
import { RsmElTitleDirective } from "../reusable-directives/rsm-el-title.directive";
import { PortfolioRsmElTitleToRsmElCommService } from "./rsm-el-title-to-rsm-el-comm.service";
import { PortfolioRsmElTitleToRsmElTitlesCommService } from "./rsm-el-title-to-rsm-el-titles-comm.service";
@Directive({
selector: '[appPortfolioRsmElTitle]'
})
export class PortfolioRsmElTitleDirective extends RsmElTitleDirective implements OnDestroy {
private commToAbtMeRsmElTitle: Subscription;
private commToContactInfoRsmElTitle: Subscription;
private commToEducationRsmElTitle: Subscription;
private commToEntExpRsmElTitle: Subscription;
constructor(
el: ElementRef,
private commToRsmElService: PortfolioRsmElTitleToRsmElCommService,
private commToRsmElTitlesService: PortfolioRsmElTitleToRsmElTitlesCommService,
private commToAbtMeRsmElTitleService: AbtMeRsmElTitleToRsmElTitlesCommService,
private commToContactInfoRsmElTitleService: ContactInfoRsmElTitleToRsmElTitlesCommService,
private commToEducationRsmElTitleService: EducationRsmElTitleToRsmElTitlesCommService,
private commToEntExpRsmElTitleService: EntExpRsmElTitleToRsmElTitlesCommService
) {
super(el);
this.commToAbtMeRsmElTitle = this.commToAbtMeRsmElTitleService
.rsmElTitleToRsmElTitles.subscribe(() => { this.consumeEventFromRsmElTitle(); });
this.commToContactInfoRsmElTitle = this.commToContactInfoRsmElTitleService
.rsmElTitleToRsmElTitles.subscribe(() => { this.consumeEventFromRsmElTitle(); });
this.commToEducationRsmElTitle = this.commToEducationRsmElTitleService
.rsmElTitleToRsmElTitles.subscribe(() => { this.consumeEventFromRsmElTitle(); });
this.commToEntExpRsmElTitle = this.commToEntExpRsmElTitleService
.rsmElTitleToRsmElTitles.subscribe(() => { this.consumeEventFromRsmElTitle(); });
}
ngOnDestroy(): void {
this.logger.debug("PortfolioRsmElTitleDirective", "ngOnDestroy", "Start");
this.commToAbtMeRsmElTitle.unsubscribe();
this.commToContactInfoRsmElTitle.unsubscribe();
this.commToEducationRsmElTitle.unsubscribe();
this.commToEntExpRsmElTitle.unsubscribe();
this.logger.debug("PortfolioRsmElTitleDirective", "ngOnDestroy", "Finish");
}
protected consumeEventFromRsmElTitle(): void {
this.logger.debug("PortfolioRsmElTitleDirective", "consumeEventFromRsmElTitle", "Start");
this.commToRsmElService.doNotShowRsmEl();
this.logger.debug("PortfolioRsmElTitleDirective", "consumeEventFromRsmElTitle", "Finish");
}
protected showPressed() {
this.logger.debug("PortfolioRsmElTitleDirective", "showPressed", "Start");
super.showPressed();
this.logger.debug("PortfolioRsmElTitleDirective", "showPressed", "Finish");
}
protected showNonPressed() {
this.logger.debug("PortfolioRsmElTitleDirective", "showNonPressed", "Start");
super.showNonPressed();
this.commToRsmElService.showRsmEl();
this.commToRsmElTitlesService.doNotShowRsmEl();
this.logger.debug("PortfolioRsmElTitleDirective", "showNonPressed", "Finish");
}
}
<file_sep>/frontend/src/app/education/rsm-el-title.directive.ts
import { Directive, ElementRef, OnDestroy } from '@angular/core';
import { Subscription } from 'rxjs';
import { AbtMeRsmElTitleToRsmElTitlesCommService } from "../about-me/rsm-el-title-to-rsm-el-titles-comm.service";
import { ContactInfoRsmElTitleToRsmElTitlesCommService } from "../contact-info/rsm-el-title-to-rsm-el-titles-comm.service";
import { EntExpRsmElTitleToRsmElTitlesCommService } from "../enterprise-experience/rsm-el-title-to-rsm-el-titles-comm.service";
import { PortfolioRsmElTitleToRsmElTitlesCommService } from "../portfolio/rsm-el-title-to-rsm-el-titles-comm.service";
import { RsmElTitleDirective } from "../reusable-directives/rsm-el-title.directive";
import { EducationRsmElTitleToRsmElCommService } from "./rsm-el-title-to-rsm-el-comm.service";
import { EducationRsmElTitleToRsmElTitlesCommService } from "./rsm-el-title-to-rsm-el-titles-comm.service";
@Directive({
selector: '[appEducationRsmElTitle]'
})
export class EducationRsmElTitleDirective extends RsmElTitleDirective implements OnDestroy {
private commToAbtMeRsmElTitle: Subscription;
private commToContactInfoRsmElTitle: Subscription;
private commToEntExpRsmElTitle: Subscription;
private commToPortfolioRsmElTitle: Subscription;
constructor(
el: ElementRef,
private commToRsmElService: EducationRsmElTitleToRsmElCommService,
private commToRsmElTitlesService: EducationRsmElTitleToRsmElTitlesCommService,
private commToAbtMeRsmElTitleService: AbtMeRsmElTitleToRsmElTitlesCommService,
private commToContactInfoRsmElTitleService: ContactInfoRsmElTitleToRsmElTitlesCommService,
private commToEntExpRsmElTitleService: EntExpRsmElTitleToRsmElTitlesCommService,
private commToPortfolioRsmElTitleService: PortfolioRsmElTitleToRsmElTitlesCommService
) {
super(el);
this.commToAbtMeRsmElTitle = this.commToAbtMeRsmElTitleService.rsmElTitleToRsmElTitles
.subscribe(() => { this.consumeEventFromRsmElTitle(); });
this.commToContactInfoRsmElTitle = this.commToContactInfoRsmElTitleService
.rsmElTitleToRsmElTitles.subscribe(() => { this.consumeEventFromRsmElTitle(); });
this.commToEntExpRsmElTitle = this.commToEntExpRsmElTitleService
.rsmElTitleToRsmElTitles.subscribe(() => { this.consumeEventFromRsmElTitle(); });
this.commToPortfolioRsmElTitle = this.commToPortfolioRsmElTitleService
.rsmElTitleToRsmElTitles.subscribe(() => { this.consumeEventFromRsmElTitle(); });
}
ngOnDestroy(): void {
this.logger.debug("EducationRsmElTitleDirective", "ngOnDestroy", "Start");
this.commToAbtMeRsmElTitle.unsubscribe();
this.commToContactInfoRsmElTitle.unsubscribe();
this.commToEntExpRsmElTitle.unsubscribe();
this.commToPortfolioRsmElTitle.unsubscribe();
this.logger.debug("EducationRsmElTitleDirective", "ngOnDestroy", "Finish");
}
protected consumeEventFromRsmElTitle(): void {
this.logger.debug("EducationRsmElTitleDirective", "consumeEventFromRsmElTitle", "Start");
this.commToRsmElService.doNotShowRsmEl();
this.logger.debug("EducationRsmElTitleDirective", "consumeEventFromRsmElTitle", "Finish");
}
protected showPressed() {
this.logger.debug("EducationRsmElTitleDirective", "showPressed", "Start");
super.showPressed();
this.logger.debug("EducationRsmElTitleDirective", "showPressed", "Finish");
}
protected showNonPressed() {
this.logger.debug("EducationRsmElTitleDirective", "showNonPressed", "Start");
super.showNonPressed();
this.commToRsmElService.showRsmEl();
this.commToRsmElTitlesService.doNotShowRsmEl();
this.logger.debug("EducationRsmElTitleDirective", "showNonPressed", "Finish");
}
}
<file_sep>/frontend/src/app/about-me/about-me.component.ts
import { Component, OnInit } from '@angular/core';
import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
import { Icon } from "./icon";
import { PROFILE } from "./mock-profile";
@Component({
selector: 'app-about-me',
templateUrl: './about-me.component.html',
styleUrls: ['./about-me.component.sass']
})
export class AboutMeComponent implements OnInit {
title: String;
profile: Icon;
constructor(private sanitizer: DomSanitizer) {
this.title = "About Me";
this.profile = PROFILE;
}
ngOnInit(): void {
}
pngSrcFromBase64(base64Val: string): SafeResourceUrl {
return this.sanitizer.bypassSecurityTrustResourceUrl(
'data:image/png;base64,' + base64Val
)
}
}
<file_sep>/frontend/src/app/reusable-services/rsm-el-title-to-rsm-el-titles-comm.service.ts
import { Observable, Subject } from 'rxjs';
export class RsmElTitleToRsmElTitlesCommService {
public rsmElTitleToRsmElTitles: Observable<void>;
private rsmElTitleToRsmElTitlesSource: Subject<void>;
constructor() {
this.rsmElTitleToRsmElTitlesSource = new Subject<void>();
this.rsmElTitleToRsmElTitles = this.rsmElTitleToRsmElTitlesSource.asObservable();
}
public doNotShowRsmEl(): void {
this.rsmElTitleToRsmElTitlesSource.next();
}
}
<file_sep>/frontend/src/app/about-me/icon.ts
import { ImageAbs } from "../reusable-classes/image-abs";
export class Icon extends ImageAbs {
constructor(src: string, alt: string) {
super(src, alt);
}
}
<file_sep>/frontend/src/app/app.module.ts
import { Injector, NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { AboutMeComponent } from './about-me/about-me.component';
import { AbtMeRsmElDirective } from './about-me/rsm-el.directive';
import { AbtMeRsmElTitleDirective } from './about-me/rsm-el-title.directive';
import { ContactInfoComponent } from './contact-info/contact-info.component';
import { ContactInfoRsmElDirective } from './contact-info/rsm-el.directive';
import { ContactInfoRsmElTitleDirective } from './contact-info/rsm-el-title.directive';
import { EducationComponent } from './education/education.component';
import { EducationRsmElDirective } from './education/rsm-el.directive';
import { EducationRsmElTitleDirective } from './education/rsm-el-title.directive';
import { EnterpriseExperienceComponent } from './enterprise-experience/enterprise-experience.component';
import { EntExpRsmElTitleDirective } from './enterprise-experience/rsm-el-title.directive';
import { EntExpRsmElDirective } from './enterprise-experience/rsm-el.directive';
import { PortfolioComponent } from './portfolio/portfolio.component';
import { PortfolioRsmElTitleDirective } from './portfolio/rsm-el-title.directive';
import { PortfolioRsmElDirective } from './portfolio/rsm-el.directive';
@NgModule({
declarations: [
AppComponent,
AboutMeComponent,
AbtMeRsmElDirective,
AbtMeRsmElTitleDirective,
ContactInfoComponent,
ContactInfoRsmElDirective,
ContactInfoRsmElTitleDirective,
EducationComponent,
EducationRsmElTitleDirective,
EducationRsmElDirective,
EnterpriseExperienceComponent,
EntExpRsmElTitleDirective,
EntExpRsmElDirective,
PortfolioComponent,
PortfolioRsmElTitleDirective,
PortfolioRsmElDirective
],
imports: [
BrowserModule,
AppRoutingModule
],
providers: [],
bootstrap: [AppComponent]
})
export class AppModule {
static injector: Injector;
constructor(injector: Injector) {
AppModule.injector = injector;
}
}
<file_sep>/frontend/src/app/contact-info/mock-chips.ts
import { Icon } from "./icon";
import { Chip } from "./chip";
export const CHIPS: Chip[] = [
new Chip(
'<KEY>
"LinkedIn icon",
"https://www.linkedin.com/in/maxim-ch/",
"LinkedIn"
),
new Chip(
'<KEY>NsV<KEY>
"Gmail icon",
"",
"<EMAIL>"
)
]<file_sep>/frontend/src/app/contact-info/contact-info.component.ts
import { Component, OnInit } from '@angular/core';
import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
import { Chip } from "./chip";
import { CHIPS } from "./mock-chips";
@Component({
selector: 'app-contact-info',
templateUrl: './contact-info.component.html',
styleUrls: ['./contact-info.component.sass']
})
export class ContactInfoComponent implements OnInit {
title: String;
chips: Chip[];
constructor(private sanitizer: DomSanitizer) {
this.title = "Contact Details";
this.chips = CHIPS;
}
ngOnInit(): void {
}
pngSrcFromBase64(base64Val: string): SafeResourceUrl {
return this.sanitizer.bypassSecurityTrustResourceUrl(
'data:image/png;base64,' + base64Val
)
}
}
<file_sep>/frontend/src/app/contact-info/rsm-el-title-to-rsm-el-comm.service.ts
import { Injectable } from '@angular/core';
import { RsmElTitleToRsmElCommService } from "../reusable-services/rsm-el-title-to-rsm-el-comm.service";
@Injectable({
providedIn: 'root'
})
export class ContactInfoRsmElTitleToRsmElCommService extends RsmElTitleToRsmElCommService {
}
<file_sep>/frontend/src/app/portfolio/portfolio.component.spec.ts
import { DebugNode } from '@angular/core';
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { environment } from "../../environments/environment";
import { AppModule } from "../app.module";
import { PortfolioComponent } from "./portfolio.component";
import { PortfolioRsmElDirective } from './rsm-el.directive';
import { PortfolioRsmElTitleDirective } from './rsm-el-title.directive';
describe('PortfolioComponent', () => {
let component: PortfolioComponent;
let fixture: ComponentFixture<PortfolioComponent>;
let portfolioRsmElTitleDirectiveQueryResponse: DebugNode[];
let portfolioRsmElDirectiveQueryResponse: DebugNode[];
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [ PortfolioComponent, PortfolioRsmElDirective, PortfolioRsmElTitleDirective ],
imports: [ AppModule ]
})
.compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(PortfolioComponent);
component = fixture.componentInstance;
fixture.detectChanges();
portfolioRsmElTitleDirectiveQueryResponse = fixture.debugElement.queryAll(
By.directive(PortfolioRsmElTitleDirective)
);
portfolioRsmElDirectiveQueryResponse = fixture.debugElement.queryAll(
By.directive(PortfolioRsmElDirective)
)
});
it('should create', () => {
expect(component).toBeTruthy();
});
it("should have as title \"Portfolio\"", () => {
expect(component.title).toEqual('Portfolio');
});
it("should contain the directives", () => {
expect(portfolioRsmElTitleDirectiveQueryResponse).toHaveSize(1);
expect(portfolioRsmElDirectiveQueryResponse).toHaveSize(1);
});
it("should render the PortfolioRsmElTitleDirective with default style", () => {
let directive: DebugNode = portfolioRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
expect(el.innerHTML).toContain("Portfolio");
expect(el.classList).toContain("rsm-el-title");
});
it("should show PortfolioRsmElTitleDirective as pressed when touch starts", () => {
let directive: DebugNode = portfolioRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
expect(el.classList).toContain("pressed");
});
it(
"should show PortfolioRsmElTitleDirective as non pressed when touch ends",
(done) => {
let directive: DebugNode = portfolioRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
el.dispatchEvent(new Event("touchend"));
setTimeout(() => {
expect(el.classList).not.toContain("pressed");
done();
}, environment.event.rendered_delay);
}
);
it(
"should show PortfolioRsmElDirective when PortfolioRsmElTitleDirective touch ends",
(done) => {
let directive: DebugNode = portfolioRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
el.dispatchEvent(new Event("touchend"));
directive = portfolioRsmElDirectiveQueryResponse[0];
el = directive.nativeNode;
setTimeout(() => {
expect(el.classList).toContain("rsm-el-shown");
expect(el.classList).not.toContain("rsm-el-not-shown");
done();
}, environment.event.rendered_delay);
}
);
});
<file_sep>/frontend/src/app/enterprise-experience/mock-positions.ts
import { Position } from "./position";
import { TOOLS } from "./mock-sw-tools";
export const POSITIONS: Position[] = [
new Position(
"2020",
"Present",
"IBM - SaaS QA Automation Engineer",
[
"Automation Framework Backend Maintenance",
"Automation Framework Frontend Maintenance",
"Automation Workers Management",
"Features Development for Automation Framework"
],
TOOLS
),
new Position(
"2017",
"2020",
"IBM - SaaS QA Student Position",
[
"Backend API testing",
"Prediction Algorithms testing",
"Frontend features testing",
"Contributing automated tests to Regression"
],
[]
)
]<file_sep>/frontend/src/app/reusable-enums/rsm-el-state.ts
export enum RsmElState {
NOT_SHOWN,
SHOWN
}
<file_sep>/frontend/src/app/enterprise-experience/position.ts
import { SoftwareTool } from "./software-tool";
export class Position {
  end: String;
  responsibilities: String[];
  start: String;
  softwareTools: SoftwareTool[];
title: String;
constructor(
start: String,
end: String,
title: String,
responsibilities: String[],
softwareTools: SoftwareTool[]
) {
this.start = start;
this.end = end;
this.title = title;
this.responsibilities = responsibilities;
this.softwareTools = softwareTools;
}
public hasResponsibilities(): Boolean {
return this.responsibilities.length > 0;
}
public hasSoftwareTools(): Boolean {
return this.softwareTools.length > 0;
}
}<file_sep>/frontend/src/app/about-me/about-me.component.spec.ts
import { DebugNode } from '@angular/core';
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { environment } from "../../environments/environment";
import { AppModule } from "../app.module";
import { AboutMeComponent } from "./about-me.component";
import { AbtMeRsmElDirective } from './rsm-el.directive';
import { AbtMeRsmElTitleDirective } from './rsm-el-title.directive';
describe('AboutMeComponent', () => {
let component: AboutMeComponent;
let fixture: ComponentFixture<AboutMeComponent>;
let abtMeRsmElTitleDirectiveQueryResponse: DebugNode[];
let abtMeRsmElDirectiveQueryResponse: DebugNode[];
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [ AboutMeComponent, AbtMeRsmElTitleDirective, AbtMeRsmElDirective ],
imports: [ AppModule ]
})
.compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(AboutMeComponent);
component = fixture.componentInstance;
fixture.detectChanges();
abtMeRsmElTitleDirectiveQueryResponse = fixture.debugElement.queryAll(
By.directive(AbtMeRsmElTitleDirective)
);
abtMeRsmElDirectiveQueryResponse = fixture.debugElement.queryAll(
By.directive(AbtMeRsmElDirective)
)
});
it("should create", () => {
expect(component).toBeTruthy();
});
it("should have as title \"About Me\"", () => {
expect(component.title).toEqual('About Me');
});
it("should contain the directives", () => {
expect(abtMeRsmElTitleDirectiveQueryResponse).toHaveSize(1);
expect(abtMeRsmElDirectiveQueryResponse).toHaveSize(1);
});
it("should render the AbtMeRsmElTitleDirective with default style", () => {
let directive: DebugNode = abtMeRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
expect(el.innerHTML).toContain("About Me");
expect(el.classList).toContain("rsm-el-title");
});
it("should show AbtMeRsmElTitleDirective as pressed when touch starts", () => {
let directive: DebugNode = abtMeRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
expect(el.classList).toContain("pressed");
});
it(
"should show AbtMeRsmElTitleDirective as non pressed when touch ends",
(done) => {
let directive: DebugNode = abtMeRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
el.dispatchEvent(new Event("touchend"));
setTimeout(() => {
expect(el.classList).not.toContain("pressed");
done();
}, environment.event.rendered_delay);
}
);
it(
"should show AbtMeRsmElDirective when AbtMeRsmElTitleDirective touch ends",
(done) => {
let directive: DebugNode = abtMeRsmElTitleDirectiveQueryResponse[0];
let el: HTMLElement = directive.nativeNode;
el.dispatchEvent(new Event("touchstart"));
el.dispatchEvent(new Event("touchend"));
directive = abtMeRsmElDirectiveQueryResponse[0];
el = directive.nativeNode;
setTimeout(() => {
expect(el.classList).toContain("rsm-el-shown");
expect(el.classList).not.toContain("rsm-el-not-shown");
done();
}, environment.event.rendered_delay);
}
);
});
| c6d5e2c9495e80d01516ed98a1d5094da6e8f9f0 | [
"Markdown",
"TypeScript"
] | 30 | TypeScript | Z0neNp/maxim_chanturiay | ec65719f734fce5ec8c5bf4d08121ead0be565c9 | e1e5e20b58176ed55de0e0141e18c3707ef8de83 | |
refs/heads/master | <file_sep>import React, {useState} from 'react'
import contacts from '../contacts.json'
import styled from "styled-components"
const StyledDiv = styled.div`
img {
width: 50px;
}
`
const Contacts = () => {
let firstFive = []
for(let i = 0; i < 5; i ++){
firstFive.push(contacts[i])
}
const [contactActors, setContactActors] = useState(firstFive)
const handleRandom = () => {
const randomIndex = Math.floor(Math.random() * (contacts.length))
setContactActors([...contactActors, contacts[randomIndex]])
}
const sortName = () => {
setContactActors([...contactActors].sort((a,b) =>
a.name.localeCompare(b.name)
))
}
const sortPopularity = () => {
setContactActors([...contactActors].sort((a,b) =>
b.popularity - a.popularity
))
}
const handleRemove = (index) => {
// const contactsCopy = contactActors
// contactActors.splice(index, 1)
// setContactActors([...contactsCopy])
setContactActors(contactActors.filter((contact, i) => i !== index))
}
return (
<StyledDiv>
<h1><b>Iron Contacts</b></h1>
<button onClick={handleRandom}>Add random contact</button>
<button onClick={sortName}>Sort by name</button>
<button onClick={sortPopularity}>Sort by popularity</button>
<table>
<thead>
<tr>
<th><b>Picture</b></th>
<th><b>Name</b></th>
<th><b>Popularity</b></th>
</tr>
</thead>
<tbody>
{contactActors.map((star, index) => (
<tr key={index}>
              <td><img src={star.pictureUrl} alt={star.name}/></td>
<td>{star.name}</td>
<td>{star.popularity}</td>
<td><button onClick={() => handleRemove(index)}>Delete</button></td>
</tr>
))}
</tbody>
</table>
</StyledDiv>
)
}
export default Contacts | c116546cc046399b1322cba341fbca56ad9e137f | [
"JavaScript"
] | 1 | JavaScript | AdrianoOGarda/lab-react-ironcontacts | 9a2ea4f649e104e9fbdded902cbef20b9ac5103f | e7b5c082dc2a66ccf5e085d4b355085d7b31f045 | |
refs/heads/master | <repo_name>Aladin-mf/New-folder<file_sep>/hell.py
n = int(input("Enter a number: "))
if n > 0:
    print("is positive")
elif n < 0:
    print("is negative")
else:
print("is zero")<file_sep>/NEW.c
#include <stdio.h>
int main(void){
printf("hello,seriuzly");
}
| db85932e8337e60139749299d5bd6dce05b0de9e | [
"C",
"Python"
] | 2 | Python | Aladin-mf/New-folder | e80a2fba8252d33be8ecb63c53b795bb4665b805 | fb0eec8db71a18b1993cf292689a9cfe83cc8e57 | |
refs/heads/master | <file_sep>package fr.dashboard.server;
import org.eclipse.egit.github.core.Repository;
import org.eclipse.egit.github.core.service.RepositoryService;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
@RestController
public class Gith {
/**
     * Lists the GitHub repositories matching the search
     * @param search Name of the repository searched for
     * @return List of repositories matching the search
*/
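    // Example call (illustrative; assumes the default localhost:8080 setup described in the README):
    //   GET http://localhost:8080/githubRepoSearch?rechearch=<search term>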
@RequestMapping({"/githubRepoSearch"})
public static ArrayList<ArrayList<String>> githubRepoSearch(@RequestParam("rechearch") String search) {
ArrayList<ArrayList<String>> results = new ArrayList<ArrayList<String>>();
ArrayList<String> tmp = new ArrayList<String>();
RepositoryService service = new RepositoryService();
try {
for (Repository repo : service.getRepositories(search)) {
tmp = new ArrayList<String>();
tmp.add(repo.getName());
tmp.add(repo.getUrl());
tmp.add(repo.getHomepage());
tmp.add(repo.getDescription());
tmp.add(repo.getOwner().getName());
tmp.add(repo.getOwner().getUrl());
                results.add(tmp);
            }
} catch (IOException e) {
e.printStackTrace();
}
return results;
}
}<file_sep>package fr.dashboard.server;
//import org.springframework.data.annotation.Id;
public class MyUser {
/* @Id
public String id;
public String email;
public String password;
public MyUser() {
}
public MyUser(String firstName, String lastName) {
this.email = firstName;
this.password = <PASSWORD>;
}
@Override
public String toString() {
return String.format(
"Customer[id=%s, firstName='%s', lastName='%s']",
id, email, password);
}*/
}<file_sep>buildscript {
repositories {
maven { url "https://repo.spring.io/libs-milestone" }
maven { url "https://repo.spring.io/libs-snapshot" }
mavenLocal()
}
dependencies {
classpath('gradle.plugin.com.palantir.gradle.docker:gradle-docker:0.13.0')
}
}
plugins {
id 'org.springframework.boot' version '2.2.0.RELEASE'
id 'io.spring.dependency-management' version '1.0.8.RELEASE'
id 'java'
}
group = 'fr.dashboard.server'
version = '0.0.1-SNAPSHOT'
sourceCompatibility = '1.8'
repositories {
mavenCentral()
maven { url "https://repo.spring.io/milestone" }
maven { url "https://repo.spring.io/libs-milestone" }
}
dependencies {
implementation 'org.springframework.boot:spring-boot-starter-web'
compile("org.springframework.boot:spring-boot-starter-thymeleaf")
compile("org.springframework.boot:spring-boot-devtools")
compile("org.springframework.boot:spring-boot-starter-test")
compile 'org.twitter4j:twitter4j-stream:4.0.6'
compile 'org.springframework.social:spring-social-facebook:3.0.0.M1'
compile 'org.springframework.social:spring-social-core:2.0.0.M2'
compile 'org.springframework.social:spring-social-config:2.0.0.M2'
implementation 'org.eclipse.mylyn.github:org.eclipse.egit.github.core:1.1.2'
compile group: 'org.facebook4j', name: 'facebook4j-core', version: '2.4.13'
implementation 'org.mariadb.jdbc:mariadb-java-client:2.1.2'
// https://mvnrepository.com/artifact/com.google.api-client/google-api-client
compile group: 'com.google.api-client', name: 'google-api-client', version: '1.25.0'
// https://mvnrepository.com/artifact/com.google.apis/google-api-services-youtube
compile group: 'com.google.apis', name: 'google-api-services-youtube', version: 'v3-rev212-1.25.0'
compile group: 'com.google.apis', name: 'google-api-services-youtubeAnalytics', version: 'v1-rev14-1.13.2-beta'
// https://mvnrepository.com/artifact/com.google.apis/google-api-services-youtubereporting
compile group: 'com.google.apis', name: 'google-api-services-youtubereporting', version: 'v1-rev1-1.20.0'
// https://mvnrepository.com/artifact/com.google.oauth-client/google-oauth-client-jetty
compile("com.google.oauth-client:google-oauth-client-jetty:1.25.0") {
exclude group: 'org.mortbay.jetty', module: 'servlet-api'
}
// https://mvnrepository.com/artifact/com.google.http-client/google-http-client-jackson2
compile group: 'com.google.http-client', name: 'google-http-client-jackson2', version: '1.25.0'
// https://mvnrepository.com/artifact/com.google.collections/google-collections
compile group: 'com.google.collections', name: 'google-collections', version: '1.0'
implementation ("org.springframework.boot:spring-boot-starter-web:2.0.1.RELEASE")
implementation ("org.springframework.security.oauth.boot:spring-security-oauth2-autoconfigure:2.0.0.RELEASE")
implementation ("org.webjars:js-cookie:2.1.0")
implementation ("org.webjars:jquery:2.1.1")
implementation("org.springframework.boot:spring-boot-starter-security")
implementation ("org.webjars:bootstrap:3.2.0")
implementation ("org.webjars:webjars-locator-core:0.35")
implementation ("org.springframework.boot:spring-boot-configuration-processor:2.0.1.RELEASE")
testImplementation('org.springframework.boot:spring-boot-starter-test') {
exclude group: 'org.junit.vintage', module: 'junit-vintage-engine'
}
}
test {
useJUnitPlatform()
}
bootJar {
mainClassName = 'fr.dashboard.server.DashboardServer'
}
apply plugin: 'com.palantir.docker'
task unpack(type: Copy) {
dependsOn bootJar
from(zipTree(tasks.bootJar.outputs.files.singleFile))
into("build/dependency")
}
docker {
name "${project.group}/${bootJar.baseName}"
copySpec.from(tasks.unpack.outputs).into("dependency")
buildArgs(['DEPENDENCY': "dependency"])
}<file_sep>package fr.dashboard.server;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.InputStream;
import java.net.DatagramSocket;
import java.net.InetAddress;
/**
 * Provides access to the about.json page
*/
@RestController
public class About {
static String convertStreamToString(java.io.InputStream is) {
java.util.Scanner s = new java.util.Scanner(is).useDelimiter("\\A");
return s.hasNext() ? s.next() : "";
}
/**
     * Reads the about.json file and updates the IP address and the current time before mapping it
* @return about.json
* @throws Exception
*/
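    // Example call (illustrative; assumes the default localhost:8080 setup described in the README):
    //   GET http://localhost:8080/about.json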
@RequestMapping({"/about.json"})
public static String about() throws Exception {
ClassLoader classloader = Thread.currentThread().getContextClassLoader();
InputStream is = classloader.getResourceAsStream("about.json");
String to_find = "\"current_time\": ";
String to_find_2 = "\"host\": ";
String data = convertStreamToString(is);
StringBuilder tmp = new StringBuilder(data);
tmp.insert(data.indexOf(to_find) + to_find.length(), System.currentTimeMillis());
try(final DatagramSocket socket = new DatagramSocket()){
socket.connect(InetAddress.getByName("8.8.8.8"), 8080);
String ip = socket.getLocalAddress().getHostAddress();
            tmp.insert(tmp.indexOf(to_find_2) + to_find_2.length(), "\"" + ip + "\"");
}
return tmp.toString();
}
}<file_sep>rootProject.name = 'epidashboard'
<file_sep>package fr.dashboard.server.Twitter;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import twitter4j.*;
import twitter4j.conf.ConfigurationBuilder;
import java.util.List;
import java.util.stream.Collectors;
@RestController
public class TwitTweetSearch {
public static Twitter getTwitterinstance() {
/**
     * if not using a properties file, we can set the access token the following way
*/
ConfigurationBuilder cb = new ConfigurationBuilder();
cb.setDebugEnabled(true)
.setOAuthConsumerKey("")
.setOAuthConsumerSecret("")
.setOAuthAccessToken("")
.setOAuthAccessTokenSecret("");
TwitterFactory tf = new TwitterFactory(cb.build());
Twitter twitter = tf.getSingleton();
/*
Twitter twitter = TwitterFactory.getSingleton();
*/
return twitter;
}
@RequestMapping({"/twitterTweetCreation"})
public static String createTweet(@RequestParam("rechearch") String tweet) throws TwitterException {
Twitter twitter = getTwitterinstance();
System.out.println(tweet);
Status status = twitter.updateStatus(tweet);
return status.getText();
}
@RequestMapping({"/twitterTimeline"})
public static List<String> getTimeLine() throws TwitterException {
Twitter twitter = getTwitterinstance();
List<Status> statuses = twitter.getHomeTimeline();
return statuses.stream().map(
item -> item.getText()).collect(
Collectors.toList());
}
@RequestMapping({"/twitterSendMessage"})
public static String sendDirectMessage(@RequestParam("name") String recipientName, @RequestParam("rechearch") String msg) throws TwitterException {
Twitter twitter = getTwitterinstance();
DirectMessage message = twitter.sendDirectMessage(recipientName, msg);
return message.getText();
}
@RequestMapping({"/twitterTweetSearch"})
public static List<String> searchtweets(@RequestParam("rechearch") String search) throws TwitterException {
Twitter twitter = getTwitterinstance();
Query query = new Query(search);
QueryResult result = twitter.search(query);
List<Status> statuses = result.getTweets();
return statuses.stream().map(
item -> item.getText()).collect(
Collectors.toList());
}
/*
@RequestMapping({"/twitter"})
public static List<String> searchtweets() throws TwitterException {
Twitter twitter = getTwitterinstance();
Query query = new Query("source:twitter4j baeldung");
QueryResult result = twitter.search(query);
List<Status> statuses = result.getTweets();
System.out.println(statuses.stream().map(
item -> item.getText()).collect(
Collectors.toList()));
return statuses.stream().map(
item -> item.getText()).collect(
Collectors.toList());
}
*/
public static void streamFeed() {
StatusListener listener = new StatusListener(){
@Override
public void onException(Exception e) {
e.printStackTrace();
}
@Override
public void onDeletionNotice(StatusDeletionNotice arg) {
System.out.println("Got a status deletion notice id:" + arg.getStatusId());
}
@Override
public void onScrubGeo(long userId, long upToStatusId) {
System.out.println("Got scrub_geo event userId:" + userId + " upToStatusId:" + upToStatusId);
}
@Override
public void onStallWarning(StallWarning warning) {
System.out.println("Got stall warning:" + warning);
}
@Override
public void onStatus(Status status) {
System.out.println(status.getUser().getName() + " : " + status.getText());
}
@Override
public void onTrackLimitationNotice(int numberOfLimitedStatuses) {
System.out.println("Got track limitation notice:" + numberOfLimitedStatuses);
}
};
TwitterStream twitterStream = new TwitterStreamFactory().getInstance();
twitterStream.addListener(listener);
twitterStream.sample();
}
}
<file_sep># Dashboard
Web application for managing various services, each containing at least one widget.
## Technologies used
- Java
- Html
- Css
- Jquery
- Ajax
- Docker
- Spring boot
- IntelliJ
## Installation
Prerequisites
- docker
- Java 1.8
Fedora
```bash
sudo yum install docker-ce
```
or
```bash
sudo dnf install docker-ce
```
Ubuntu
```bash
sudo apt-get install docker-ce
```
## Docker
Build the Docker images:
```bash
cd DEV_dashboard_2019
sudo docker-compose build
```
Run the Docker container:
```bash
sudo docker-compose up
```
### Compile without Docker
If you prefer not to use Docker, you can run the program as shown below:
```bash
rm -rf build && ./gradlew build bootJar && java -jar build/libs/epidashboard-0.0.1-SNAPSHOT.jar > .log
```
## View the result
http://localhost:8080
## Available authentication methods
- Github
- Facebook
- Google
- Youtube
- Twitter
- Login/password
## Available services (12)
- Weather
- Cinema
- Steam
- Reddit
- Twitter
- Deezer
- MemeCat
- Giphy
- Youtube
- Github
- IpLocalize
- Twitch
## Available widgets (19)
- Weather: current conditions
- Weather: forecast
- Cinema: movies currently showing
- Cinema: movie search
- Steam: number of connected players
- Steam: game news search
- Reddit: subreddit search
- Twitter: tweet search
- Twitter: user search
- Twitter: post a tweet
- Deezer: search for and play music
- MemeCat: random cat memes by theme
- Giphy: GIF search
- Youtube: YouTube channel search
- Youtube: YouTube video search
- Github: repository search
- IpLocalize: IP geolocation
- Twitch: displays the N most popular top streamers
- Twitch: displays the N most popular top streams
## Development
Epitech project carried out over a one-month period in a team of two (<NAME> and <NAME>).<file_sep>version: '3'
services:
server:
build: epidashboard
ports:
- "8080:8080"
depends_on:
- db
environment:
- MYSQL_HOST=db
- MYSQL_PORT=3306
db:
image: mariadb
restart: always
container_name: db
environment:
- MYSQL_DATABASE=dashboard
- MYSQL_ROOT_PASSWORD=<PASSWORD>
ports:
- "3307:3306"
volumes:
- ./mysql-dump:/docker-entrypoint-initdb.d
- mysql-db:/var/lib/mysql
volumes:
mysql-db: | 3ed081cdaad3d0c870bfa1e5fbfd18344805f77a | [
"Markdown",
"Java",
"YAML",
"Gradle"
] | 8 | Java | Daykem/Dashboard_2019 | 5cab3073453d0bb4d4fe5dbd8d7640a14f0c4614 | ce046100b68e73dfc62d90e13be7ca2a9426dae2 | |
refs/heads/master | <repo_name>ha-cs20-course-activities/efficient-quiz<file_sep>/README.md
# efficient-quiz
CS20 Example - Functions (Define & Invoke, Local & Global Variables, Parameters & Arguments, Return Values, Organization and Efficiency)
<file_sep>/main.js
// Quiz Program
"use strict";
// Add Event Listener
document.getElementById("mark").addEventListener("click", markQuiz);
// Event Functions
function markQuiz() {
// Initialize Score Variable
let score = 0;
// Mark questions
score += markQuestion("q1", "A");
score += markQuestion("q2", "B");
score += markQuestion("q3", "C");
score += markQuestion("q4", "D");
// Output final score
document.getElementById("score").innerHTML = score;
}
// Question Functions
function markQuestion(qNum, qAnswer) {
let answer = document.getElementById(qNum + "Answer").value;
if (answer == qAnswer) {
document.getElementById(qNum + "Result").innerHTML = "Correct";
document.getElementById(qNum + "Answer").style.backgroundColor = "green";
document.getElementById(qNum + "Div").style.border = "2px solid green";
return 1;
} else {
document.getElementById(qNum + "Result").innerHTML = "Incorrect";
document.getElementById(qNum + "Answer").style.backgroundColor = "red";
document.getElementById(qNum + "Div").style.border = "2px solid red";
return 0;
}
}
| 1c56711bcc5ffe6b21004c419e0701a96642c3d6 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | ha-cs20-course-activities/efficient-quiz | 17b7be33b8322cf10796d45b9c0332c2ff487126 | 6c64b3f3c184ef76afc818c9593954a4667673a2 | |
refs/heads/master | <file_sep>//
// ViewController.swift
// dartmarker
//
// Created by <NAME> on 2018/08/06.
// Copyright © 2018年 com.takumi0kimura. All rights reserved.
//
import UIKit
import NCMB
class ViewController: UIViewController, UITableViewDelegate, UITableViewDataSource, UIApplicationDelegate {
private var magnifyView: MagnifyView?
@IBOutlet weak var myView: UIView!
@IBOutlet var captionTableView: UITableView!
var captionArray = [NCMBObject]()
override func viewWillAppear(_ animated: Bool) {
loadData()
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
loadData()
self.myView = UIView()
        self.myView?.backgroundColor = UIColor(white: 0x2E / 255.0, alpha: 1.0) // dark gray (#2E2E2E)
self.view.backgroundColor = UIColor.black
        // -> change the background color to black
        // ↓ table view setup
        // handle the table view delegate/data source methods in this file
captionTableView.delegate = self
captionTableView.dataSource = self
        // remove the unnecessary separator lines from the list.
        // captionTableView.tableFooterView = UIView()
        /* // register the custom cell
let nib = UINib(nibName: "ResultsTableViewCell", bundle: Bundle.main)
captionTableView.register(nib, forCellReuseIdentifier: "Cell")
}
*/
    // table view methods from here
}
    // 1. Number of rows to display in the TableView
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return captionArray.count
}
    // 2. Content of the data to display in the TableView
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "Cell")!
cell.textLabel?.text = captionArray[indexPath.row].object(forKey: "Distance") as? String
        // return the cell
return cell
}
func loadData() {
let query = NCMBQuery(className: "distance")
query?.findObjectsInBackground({ (result, error) in
if error != nil {
print("error")
} else {
self.captionArray = result as! [NCMBObject]
self.captionTableView.reloadData()
}
})
let trialnum = String(captionArray.count)
trials.text = trialnum
}
    // end of table view methods
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
@IBOutlet var label: UILabel!
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
let point = touches.first?.location(in: self.view)
if magnifyView == nil
{
magnifyView = MagnifyView.init(frame: CGRect(x: 0, y: 0, width: 100, height: 100))
magnifyView?.viewToMagnify = self.view
magnifyView?.setTouchPoint(pt: point!)
self.view.addSubview(magnifyView!)
}
loadData()
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
if let allTouches = event?.allTouches {
for touch in allTouches {
                let location = touch.location(in: self.view) // the top-most parent view
                Swift.print(location)
                // output the coordinates of the tapped location
//let rect = CGRect(x: 0, y: 146, width: 375, height: 375)
let circleLayer = CAShapeLayer.init()
let circleFrame = CGRect.init(x: 53, y: 195, width: 280, height: 280)
circleLayer.frame = circleFrame
                // make the hit area the same as the target board
let isContainsPoint = circleFrame.contains(location)
                // check whether the tapped coordinates are inside the hit area
if isContainsPoint == true {
                    // compute the distance only when the tapped point is inside the hit area
let distance = sqrt(pow(location.x - 188,2) + pow(location.y - 336,2))
Swift.print(distance)
                    // the center of the dartboard is (188.0, 336.0)
                    // compute the distance from the center coordinate and the tapped point
let distanceint = Int(distance)
label.text = String(distanceint)
                    // display the distance in the label
let distancetext = String(distanceint)
let object = NCMBObject(className: "distance")
object?.setObject(distancetext, forKey: "Distance")
object?.saveInBackground({ (error) in
if error != nil {
                            // an error occurred
print("error")
}else {
                            // the save succeeded
print("success")
}
})
}
let trialnum = String(captionArray.count)
trials.text = trialnum
if magnifyView != nil
{
magnifyView?.removeFromSuperview()
magnifyView = nil
}
}
}
loadData()
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
let point = touches.first?.location(in: self.view)
magnifyView?.setTouchPoint(pt: point!)
magnifyView?.setNeedsDisplay()
}
@IBOutlet var trials: UILabel!
@IBAction func addImage(sender: UITapGestureRecognizer) {
        // create the UIImageView (image)
        let circle = UIImageView(image: UIImage(named: "circle")) /* named: "image name" */
        // set the center of the UIImageView to the tapped position
circle.center = sender.location(in: self.view)
        // add the UIImageView (image)
self.view.addSubview(circle)
}
}
<file_sep>//
// ResultsTableViewCell.swift
// dartmarker
//
// Created by <NAME> on 2018/08/07.
// Copyright © 2018年 com.takumi0kimura. All rights reserved.
//
import UIKit
class ResultsTableViewCell: UITableViewCell {
@IBOutlet weak var trial: UILabel!
@IBOutlet weak var distance: UILabel!
}
| 626a9ad6138ef4a6beb4bfbaa0932d7244f80987 | [
"Swift"
] | 2 | Swift | shachitaku/dartmarker | f15cbb458e2ac850f71b3abde20d1ca815db2b14 | 36abcb42b69977b260b3f5d4ba6cb6bc31e66b4e | |
refs/heads/master | <file_sep>export interface OAuth2Options {
loginUrl?: string;
clientId?: string;
clientSecret?: string;
redirectUri?: string;
}
export interface TokenResponse {
access_token: string;
refresh_token: string;
}
export class OAuth2 {
constructor(options: OAuth2Options)
public getAuthorizationUrl(params: { scope?: string, response_type?: string, client_id?: string, redirect_uri?: string }): string;
}<file_sep># Installation
> `npm install --save @types/jsforce`
# Summary
This package contains type definitions for jsforce (https://github.com/jsforce/jsforce).
# Details
Files were exported from https://www.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/jsforce
Additional Details
* Last updated: Thu, 09 Nov 2017 08:43:08 GMT
* Dependencies: fs, stream, express, glob
* Global values: none
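# Example usage
A minimal sketch (not part of the published typings) showing how the `OAuth2` class declared in these definitions might be used; the client id, secret and redirect URI below are placeholder assumptions:
```ts
import { OAuth2 } from "jsforce";

// Build an OAuth2 helper from the options declared in OAuth2Options.
const oauth2 = new OAuth2({
    clientId: "<your client id>",          // placeholder
    clientSecret: "<your client secret>",  // placeholder
    redirectUri: "https://example.com/oauth2/callback"
});

// getAuthorizationUrl matches the signature declared in index.d.ts.
const authUrl = oauth2.getAuthorizationUrl({ scope: "api refresh_token" });
console.log(authUrl);
```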
# Credits
These definitions were written by <NAME> <https://github.com/dolanmiu>, <NAME> <https://github.com/netes>.
| 85c9d73c4b1939ddadb8818c9d33668eaa97c548 | [
"Markdown",
"TypeScript"
] | 2 | TypeScript | rk-7/types-jsforce | 02a613fe9ef7fbb73273403d5dca19d925ce37ab | e57f49cd83cd45ced291b94234a9732838985e8c | |
refs/heads/master | <repo_name>itsJustAbe/BerrySensor<file_sep>/BluetoothClient.py
import bluetooth
import sensor
import datetime
import sys
import time
def bluetooth_msg():
# Kruno's Bluetooth
bd_addr = "B8:27:EB:94:1F:03"
port = 1
sock = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
sock.connect((bd_addr, port))
while True:
try:
            # assembling data received from the sensor
            humidity, temperature, user_id = sensor.temp()
            log_line = '{} SensorID={}, temperature={}, humidity={} \n'.format(str(datetime.datetime.now()), user_id, temperature, humidity)
            # sending data
            sock.send(log_line)
# sleep time
time.sleep(5)
except:
print("connection error")
bluetooth_msg()
<file_sep>/testClient.py
import paho.mqtt.client as mqtt
import time
# This is the Publisher
# Add client Ip address on the same network
CLIENT_IP = ""
client = mqtt.Client()
client.connect(CLIENT_IP, 1883, 60)
i = 0
word = '<PASSWORD>!'
while i < 10:
word = word + " " + str(i)
print "sending data"
client.publish("topic/test", word)
i = i + 1
time.sleep(2)
print "disconnecting..."
client.disconnect()
<file_sep>/server.py
#!/usr/bin/python
import time
import os
import select
from datetime import datetime
import bluetooth
import paho.mqtt.client as mqtt
from logentries import LogentriesHandler
import logging
import pyowm
# sensor utils
import sensor
# Add log entries API key here
LOG_ENTRIES_API_KEY = ''
# Getting external weather data to compare with internal temperature and humidity
def get_weather():
user_id = os.uname()[1]
protocol="EXTERNAL"
owm = pyowm.OWM('1d11da8b7fd7895acd7d78bf97be9839')
observation = owm.weather_at_place('Dublin,IE')
weather = observation.get_weather()
humidity = weather.get_humidity()
temperature = weather.get_temperature('celsius')["temp"]
format_string = '{} Protocol={} SensorID={}, temperature={}, humidity={} \n'.format(str(datetime.now()), protocol,
user_id,temperature, humidity)
log.info(format_string)
# Format string to be sent to log entries
def get_reading(protocol):
humidity, temperature, user_id = sensor.temp()
format_string = '{} Protocol={} SensorID={}, temperature={}, humidity={} \n'.format(str(datetime.now()), protocol,
user_id,temperature, humidity)
return format_string
# Connect to the local MQTT broker
def mqtt_connect():
# create mqtt connection
client = mqtt.Client()
client.connect("127.0.0.1",1883,60)
client.loop_start()
return client
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
client.subscribe("topic/test")
def on_message(client, userdata, msg):
# get mqtt message and send to logentries
mqtt_msg = msg.payload.decode()
if mqtt_msg:
print "Mqtt message:", mqtt_msg
log.info(mqtt_msg)
def send_mqtt_data(client):
client.on_connect = on_connect
client.on_message = on_message
# Creating bluetooth server
def bluetooth_connect():
# set bluetooth
server_sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
port = 1
server_sock.bind(("",port))
server_sock.listen(1)
client_sock,address = server_sock.accept()
print "Accepted connection from ",address
return client_sock
# Receive data coming from bluetooth and send to log entries
def send_bluetooth_data(client_sock):
try:
print "checking for bluetooth message!"
client_sock.setblocking(0)
ready = select.select([client_sock], [], [], 1)
if ready[0]:
remote_data = client_sock.recv(4096)
#remote_data = client_sock.recv(1024)
if remote_data:
print "Remote data:", remote_data
log.info(remote_data)
except:
pass
# send data from local gateway server
def send_local_log():
log_line = get_reading("LOCAL")
log.info(log_line)
# Set up Logentries
log = logging.getLogger('logentries')
log.setLevel(logging.INFO)
log.addHandler(LogentriesHandler(LOG_ENTRIES_API_KEY))
client_sock = bluetooth_connect()
client = mqtt_connect()
send_mqtt_data(client)
while True:
get_weather()
send_local_log()
print "Receive data from MQTT\n"
send_mqtt_data(client)
print "Receive data from Bluetooth\n"
send_bluetooth_data(client_sock)
time.sleep(2)
client_sock.close()
server_sock.close()
<file_sep>/servermqtt.py
#!/usr/bin/env python
import time
import paho.mqtt.client as mqtt
# This is the Subscriber
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
client.subscribe("topic/test")
def on_message(client, userdata, msg):
#if msg.payload.decode() == "Hello world!":
print(msg.payload.decode())
client = mqtt.Client()
#client.connect("172.16.31.10",1883,60)
client.connect("127.0.0.1",1883,60)
client.on_connect = on_connect
client.on_message = on_message
client.loop_start()  # start the network loop so subscribed messages are actually processed
while True:
    print "sleep for 1 sec"
    time.sleep(1)
#client.loop_forever()
#client.disconnect()<file_sep>/README.md
# IoT_sensor_monitor
Raspberry Pi sensor monitor
<file_sep>/SensorScript.py
# assembler for all the script data
def assembler(dt, my_list, user_id):
# converting received values to float
humidity = float(my_list[0])
humidity_decimal = float(my_list[1])/10
temperature = float(my_list[2])
temperature_decimal = float(my_list[3])/10
# adding the decimal bits
humidity = humidity + humidity_decimal
temperature = temperature + temperature_decimal
# formatting data to be sent to the bluetooth device
    format_string = '{} SensorID={}, temperature={}, humidity={} \n'.format(dt.now(), user_id, temperature, humidity)
return format_string
<file_sep>/Testlog.py
from logentries import LogentriesHandler
import logging
import time
LOGENTRIES_TOKEN = "<PASSWORD>"
log = logging.getLogger('logentries')
log.setLevel(logging.INFO)
test = LogentriesHandler(LOGENTRIES_TOKEN)
log.addHandler(test)
log.warn("Warning message")
log.info("Info message")
time.sleep(10)
<file_sep>/main_client.py
import time
from datetime import datetime
import bluetooth
import sensor
import paho.mqtt.client as mqtt
CLIENT_IP = ""
def mqtt_connect():
# Connect to MQTT
client = mqtt.Client()
client.connect(CLIENT_IP, 1883, 60)
return client
def bluetooth_connect(sock):
# Kruno's Bluetooth
bd_addr = "B8:27:EB:94:1F:03"
port = 1
# connecting to the said device
sock.connect((bd_addr, port))
def get_reading(protocol):
humidity, temperature, user_id = sensor.temp()
# formatting data to be sent to the bluetooth device
format_string = '{} Protocol={} SensorID={}, temperature={}, humidity={} \n'.format(str(datetime.now()), protocol,
user_id,temperature, humidity)
return format_string
def bluetooth_msg(sock):
try:
# assembling data received from the sensor
log_line = get_reading("RFCOMM")
# sending data
sock.send(log_line)
# sleep time
time.sleep(5)
except:
print("connection error")
def mqtt_msg(client):
# The message
log_line = get_reading("MQTT")
print ("sending data")
# Publishing message to be recived by the other pi
client.publish("topic/test", log_line)
# SLeep timer
time.sleep(5)
# Connect to the bluetooth device
sock = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
bluetooth_connect(sock)
while True:
# The time constraint 1 hour = 10 seconds, runs for 2 minutes
t_end = time.time() + 60 * 2
client = mqtt_connect()
#start sending with MQTT
while time.time() < t_end:
mqtt_msg(client)
# Console Output
print ("Handing off to RFCOMM")
# Time constraint running for 2 minutes
t_end = time.time() + 60 * 2
# Start Bluetooth
while time.time() < t_end:
bluetooth_msg(sock)
# Console output
print ("Handing off to MQTT")
<file_sep>/sensor.py
# client script
import RPi.GPIO as GPIO
import time
import os
gpio = 4
user_id = os.uname()[1]
def bin2dec(string_num):
return str(int(string_num, 2))
def temp():
data = []
GPIO.setmode(GPIO.BCM)
GPIO.setup(gpio, GPIO.OUT)
GPIO.output(gpio, GPIO.HIGH)
time.sleep(0.025)
GPIO.output(gpio, GPIO.LOW)
time.sleep(0.02)
GPIO.setup(gpio, GPIO.IN, pull_up_down=GPIO.PUD_UP)
for i in range(0, 3500):
        data.append(GPIO.input(gpio))
bit_count = 0
tmp = 0
count = 0
HumidityBit = ""
TemperatureBit = ""
HumidityDecimalBit = ""
TemperatureDecimalBit = ""
crc = ""
try:
while data[count] == 1:
tmp = 1
count = count + 1
for i in range(0, 32):
bit_count = 0
while data[count] == 0:
tmp = 1
count = count + 1
while data[count] == 1:
bit_count = bit_count + 1
count = count + 1
if bit_count > 16:
# Humidity bit
if 0 <= i < 8:
HumidityBit = HumidityBit + "1"
# Humidity decimal
if 8 <= i < 16:
HumidityDecimalBit = HumidityDecimalBit + "1"
# Temperature
if 16 <= i < 24:
TemperatureBit = TemperatureBit + "1"
# Temperature decimal
if 24 <= i < 32:
TemperatureDecimalBit = TemperatureDecimalBit + "1"
else:
# Humidity bit
if 0 <= i < 8:
HumidityBit = HumidityBit + "0"
# Humidity decimal
if 8 <= i < 16:
HumidityDecimalBit = HumidityDecimalBit + "0"
# Temperature
if 16 <= i < 24:
TemperatureBit = TemperatureBit + "0"
# Temperature decimal
if 24 <= i < 32:
TemperatureDecimalBit = TemperatureDecimalBit + "0"
except:
#print("ERR_RANGE")
return temp()
try:
for i in range(0, 8):
bit_count = 0
while data[count] == 0:
tmp = 1
count = count + 1
while data[count] == 1:
bit_count = bit_count + 1
count = count + 1
if bit_count > 16:
crc = crc + "1"
else:
crc = crc + "0"
except:
#print("ERR_RANGE")
return temp()
# Display
humidity = float(bin2dec(HumidityBit))
humidity_decimal = float(bin2dec(HumidityDecimalBit)) / 10
temperature_decimal = float(bin2dec(TemperatureDecimalBit)) / 10
temperature = float(bin2dec(TemperatureBit))
# adding the decimal bits
humidity = humidity + humidity_decimal
temperature = temperature + temperature_decimal
# Returning the values parsed from the sensor
return humidity, temperature, user_id
| 4469a7274e3a5aa809a2ec2d41cff721393d6c85 | [
"Markdown",
"Python"
] | 9 | Python | itsJustAbe/BerrySensor | 092486795c924faae5d941cf870447ec2ee1ce74 | 706f2666e393ccc12b847393e4bdb8247b3b094a | |
refs/heads/master | <repo_name>Oguzhanka/face_attractiveness<file_sep>/helper.py
"""
This script is inspired from the work of:
https://danijar.com/structuring-your-tensorflow-models/
Author: <NAME>
Lazy property decorator is used in the model of my project.
"""
import functools
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
def cache_func(func):
attr = '_cache_' + func.__name__
@property
@functools.wraps(func)
def decorator(self):
if not hasattr(self, attr):
setattr(self, attr, func(self))
return getattr(self, attr)
return decorator
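# Illustrative usage (assumption, not part of the original file): `cache_func`
# turns a method into a lazily evaluated, cached property, e.g.
#
#     class SomeModel:
#         @cache_func
#         def prediction(self):
#             return build_graph()   # hypothetical helper; runs only once
#
# so later accesses to `some_model.prediction` reuse the cached result.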
INIT_METHODS = {"xavier": tf.initializers.glorot_normal,
"gaussian": tf.random_normal_initializer,
"uniform": tf.uniform_unit_scaling_initializer}
<file_sep>/config.py
"""
Configuration file to change the model & experiment parameters.
"""
IMAGE_SIZE = (80, 80, 3) # Spatial dimensions of the image.
class DataParams:
"""
Configuration Parameters related to data preprocessing.
"""
def __init__(self):
self.input_size = IMAGE_SIZE[0]
self.input_dims = IMAGE_SIZE[2]
class ModelParams:
"""
Model parameters.
"""
def __init__(self):
"""
:attr model_type: Name of the model type. Choose large or compact.
:attr batch_size: Batch size for the images.
:attr num_epochs: Number of epochs.
:attr weight_init: Weight initialization method. Choose xavier or gaussian.
:attr learning_rate: Learning rate.
:attr batch_norm: Flag toggling the batch normalization layers.
        :attr loss_type: Loss type for the training. Choose l1 or l2.
:attr l2_loss: Flag toggling the L2 weight regularization.
:attr alpha: L2 regularization parameter.
:attr dropout: Flag toggling the dropout layers.
:attr keep_rate: Dropout keep probability.
"""
self.model_type = "compact"
self.batch_size = 32
self.num_epochs = 25
self.weight_init = "xavier"
self.learning_rate = 1e-3
self.batch_norm = False
self.loss_type = "l2"
self.l2_loss = False
self.alpha = 1e-3
self.dropout = False
self.keep_rate = 0.5
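# Illustrative usage (assumption, not taken from the original repo): experiments
# are configured by instantiating these classes and overriding attributes, e.g.
#
#     model_params = ModelParams()
#     model_params.model_type = "large"   # switch to the LargeNet variant
#     model_params.loss_type = "l1"
#     data_params = DataParams()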
<file_sep>/models/cnn_model.py
"""
Model implementation.
"""
from helper import cache_func, INIT_METHODS
import tensorflow as tf
class CNNModel:
"""
CNN model implementation. Covers the implementations for both the large and the
compact network.
"""
def __init__(self, data, target, model_params, data_params):
self.data = data
self.target = target
self.params = model_params
self.data_params = data_params
# Weight initialization object arguments.
if self.params["weight_init"] == "gaussian":
init_args = {"mean": 0.0,
"stddev": 1}
else:
init_args = {}
if model_params["model_type"] == "large": # LargeNet Implementation.
self.num_layers = 9 # Number of conv. layers.
self.num_deep = 4 # Number of dense layers.
self.strides = [1, 1, 1, 1, 1, 1, 1, 1]
self.weight_dict = {"W_c_1": tf.compat.v1.get_variable(shape=(7, 7, self.data_params["input_dims"], 16),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_c_1"),
"b_c_1": tf.Variable(tf.zeros([16])),
"W_c_2": tf.compat.v1.get_variable(shape=(7, 7, 16, 16),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_c_2"),
"b_c_2": tf.Variable(tf.zeros([16])),
"W_c_3": tf.compat.v1.get_variable(shape=(5, 5, 16, 32),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_c_3"),
"b_c_3": tf.Variable(tf.zeros([32])),
"W_c_4": tf.compat.v1.get_variable(shape=(5, 5, 32, 32),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_c_4"),
"b_c_4": tf.Variable(tf.zeros([32])),
"W_c_5": tf.compat.v1.get_variable(shape=(5, 5, 32, 64),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_c_5"),
"b_c_5": tf.Variable(tf.zeros([64])),
"W_c_6": tf.compat.v1.get_variable(shape=(3, 3, 64, 128),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_c_6"),
"b_c_6": tf.Variable(tf.zeros([128])),
"W_c_7": tf.compat.v1.get_variable(shape=(3, 3, 128, 256),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_c_7"),
"b_c_7": tf.Variable(tf.zeros([256])),
"W_c_8": tf.compat.v1.get_variable(shape=(3, 3, 256, 512),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_c_8"),
"b_c_8": tf.Variable(tf.zeros([512])),
"W_1": tf.compat.v1.get_variable(shape=(34 ** 2 * 512, 512),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_1"),
"b_1": tf.Variable(tf.zeros([512])),
"W_2": tf.compat.v1.get_variable(shape=[512, 256],
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_2"),
"b_2": tf.Variable(tf.zeros([256])),
"W_3": tf.compat.v1.get_variable(shape=(256, 1),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_3"),
"b_3": tf.Variable(tf.zeros([1]))}
else: # Implementation of the compact network.
self.num_layers = 5
self.num_deep = 4
self.strides = [3, 1, 1, 1]
self.weight_dict = {"W_c_1": tf.compat.v1.get_variable(shape=(5, 5, self.data_params["input_dims"], 16),
initializer=INIT_METHODS[self.params["weight_init"]](
**init_args),
name="W_c_1"),
"b_c_1": tf.Variable(tf.zeros([16])),
"W_c_2": tf.compat.v1.get_variable(shape=(5, 5, 16, 32),
initializer=INIT_METHODS[self.params["weight_init"]](
**init_args),
name="W_c_2"),
"b_c_2": tf.Variable(tf.zeros([32])),
"W_c_3": tf.compat.v1.get_variable(shape=(3, 3, 32, 64),
initializer=INIT_METHODS[self.params["weight_init"]](
**init_args),
name="W_c_3"),
"b_c_3": tf.Variable(tf.zeros([64])),
"W_c_4": tf.compat.v1.get_variable(shape=(3, 3, 64, 128),
initializer=INIT_METHODS[self.params["weight_init"]](
**init_args),
name="W_c_4"),
"b_c_4": tf.Variable(tf.zeros([128])),
"W_1": tf.compat.v1.get_variable(shape=(10 ** 2 * 128, 1024),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_1"),
"b_1": tf.Variable(tf.zeros([1024])),
"W_2": tf.compat.v1.get_variable(shape=[1024, 256],
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_2"),
"b_2": tf.Variable(tf.zeros([256])),
"W_3": tf.compat.v1.get_variable(shape=(256, 1),
initializer=INIT_METHODS[self.params["weight_init"]](**init_args),
name="W_3"),
"b_3": tf.Variable(tf.zeros([1]))}
for key, val in self.weight_dict.items(): # Rename the layers.
self.weight_dict[key] = tf.identity(self.weight_dict[key], name=key)
# Flag indicating whether the current session is training or prediction.
self.training = tf.compat.v1.placeholder(tf.bool, shape=[])
self.layer_out = None
self.convs = {}
self.biass = {}
self.activations = {}
self.pools = {}
self.weight = {"W_c_1": None}
self.densed = {}
self.biased = {}
self.activated = {}
self.dense_weights = {}
self.bias_weights = {}
self._loss = None
# Initialize the lazy properties.
_ = self.optimize
_ = self.eval_loss
self.epoch_counter = 0
@cache_func
def predict(self):
"""
Forward function for the models.
:return: Output of the model.
"""
for c in range(1, self.num_layers):
if c == 1:
input_ = self.data
self.weight["W_c_1"] = tf.transpose(self.weight_dict["W_c_1"], [3, 0, 1, 2])
else:
input_ = pool
conv = tf.nn.conv2d(input_, self.weight_dict[f"W_c_{c}"], self.strides[c-1], "VALID")
self.convs[c] = tf.transpose(conv, [3, 0, 1, 2])
bias = tf.nn.bias_add(conv, self.weight_dict[f"b_c_{c}"])
self.biass[c] = tf.transpose(bias, [3, 0, 1, 2])
if self.params["batch_norm"]:
bias = tf.compat.v1.layers.batch_normalization(bias, axis=-1, training=self.training,
momentum=0.7)
activation = tf.nn.relu(bias)
self.activations[c] = tf.transpose(activation, [3, 0, 1, 2])
pool = tf.nn.pool(activation, (3, 3),
"MAX", padding="VALID")
self.pools[c] = tf.transpose(pool, [3, 0, 1, 2])
flatten = tf.compat.v1.layers.flatten(pool, name=None, data_format='channels_last')
layer_out = flatten
for d in range(1, self.num_deep):
densed = tf.matmul(layer_out, self.weight_dict[f"W_{d}"],
transpose_a=False, transpose_b=False)
self.densed[d] = densed
self.dense_weights[d] = self.weight_dict[f"W_{d}"]
layer_out = densed + self.weight_dict[f"b_{d}"]
self.biased[d] = layer_out
self.bias_weights = self.weight_dict[f"b_{d}"]
if self.params["batch_norm"]:
layer_out = tf.compat.v1.layers.batch_normalization(layer_out, axis=-1, training=self.training,
momentum=0.7)
if d != self.num_deep - 1:
layer_out = tf.nn.relu(layer_out)
self.activated[d] = layer_out
if self.params["dropout"]:
layer_out = tf.cond(self.training, lambda: tf.nn.dropout(layer_out, self.params["keep_rate"]),
lambda: layer_out)
return layer_out
@cache_func
def optimize(self):
"""
One step optimization for the specified loss function.
:return: optimizer.
"""
loss = self.loss
optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate=self.params["learning_rate"],
beta1=0.8, beta2=0.8)
train_op = optimizer.minimize(loss)
return train_op
@cache_func
def loss(self):
"""
Overall loss function that contains the data loss and the regularization losses.
:return: Overall loss.
"""
self._loss = self.data_loss
if self.params["l2_loss"]:
self._loss += self.l2_loss
return self._loss
@cache_func
def data_loss(self):
"""
Data loss from the label predictions.
:return: data loss.
"""
if self.params["loss_type"] == "l1":
loss = tf.reduce_mean(tf.abs(tf.subtract(self.target, self.predict)))
elif self.params["loss_type"] == "l2":
loss = tf.reduce_mean(tf.pow(tf.subtract(self.target, self.predict), 2))
else:
loss = 0.0
return loss
@cache_func
def eval_loss(self):
"""
Evaluation loss, L1.
:return: evaluation loss.
"""
loss = tf.reduce_mean(tf.abs(tf.subtract(self.target, tf.math.round(self.predict))))
return loss
@cache_func
def l2_loss(self):
"""
L2 regularization loss.
:return: Regularization loss.
"""
l2_loss = 0.0
for key, val in self.weight_dict.items():
l2_loss += self.params["alpha"] * tf.nn.l2_loss(val)
return l2_loss
def evaluate(self):
"""
:return: Mean absolute error between the rounded predictions and the targets.
"""
return tf.reduce_mean(tf.abs(tf.subtract(self.target, tf.math.round(self.predict))))
<file_sep>/README.md
# Assessing Face Attractiveness With Convolutional Networks
## Author
<NAME>
## Folder structure
### images/
This folder contains the input data for all the training, validation and the test splits. Therefore,
this folder has three subfolders:
- **training/**
- **validation/**
- **test/**
Each of these folders contains the input images, whose file names encode their regression
labels. For example, for the file name "1_AF5.jpg", the regression label is the leading 1.
### models/
Contains the model implementation. There is only one model script in this folder; it implements
the forward pass of the model and the loss functions.
### outs/
This is a temporary folder for the checkpoints generated by the model summary functions. These functions
record the model weights and the layer outputs at each iteration. These checkpoints are loaded
by TensorBoard.
### config.py
This script contains the model and data preprocessing parameters. Modify the parameters here
before running the code. The available parameters are listed below (see the example after this list):
- **Model type**: Type of the model to run. In the report, two models are introduced, the
LargeNet (selected with "large") and the CharmNet (selected with "compact").
- **Batch Size**: Size of the mini-batches.
- **Epoch Numbers**: Number of training epochs.
- **Weight Initialization**: Weight initialization method. Can be either "gaussian" or "xavier".
- **Learning Rate**: Learning rate of the parameters.
- **Batch Normalization**: Flag for toggling the batch normalization layers between the convolutional
and the dense layers.
- **Loss Type**: Type of the loss function to be used in the training. Can be "l1" or "l2" for the
L1 and L2 losses.
- **L2 Loss**: Flag toggling the L2 weight regularization.
- **alpha**: Loss weight for the regularization loss.
- **Dropout**: Toggles the dropout layers between the dense layers.
- **Keep Rate**: Keep probability for the neurons in the dense layers.
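
For example, switching to the compact model with dropout enabled only requires changing a few
attributes inside `ModelParams.__init__` (a minimal sketch; the attribute names follow `config.py`,
the values are illustrative):

```python
# inside ModelParams.__init__ in config.py
self.model_type = "compact"
self.dropout = True
self.keep_rate = 0.6
```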
### data_processor.py
This script contains the implementation of the data processor class. This class reads the images
from the default data folder path, which is "./images/". For each split (training, validation, or
test), all images are read and their labels are stored separately.
During training, random mini-batches are generated from the whole list of images, and one batch is
returned to the model per call. For the validation and test sets, the whole data is returned
without generating mini-batches.
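
A minimal usage sketch (mirroring the calls made in `main.py`):

```python
from data_processor import DataProcessor

data = DataProcessor("train")            # reads ./images/training/
data.init_random_batches(batch_size=32)  # shuffle and split into mini-batches
batch_x, batch_y = data.next_batches()   # returns (None, None) once all batches are consumed
```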
### helper.py
This script provides a wrapper function used in the model implementation: the lazy-property
(cached property) decorator described at
https://danijar.com/structuring-your-tensorflow-models/
### main.py
This is the main script of the project. It does not accept any arguments and takes the run
configurations from the "config.py". Here, the parameter and layer logging functions are initialized.
Moreover, the model and the data are prepared.
Training is done in a main for loop which iterates over the whole data for the specified number of
epochs.
## How to run?
The project uses TensorFlow 2.2 with GPU support. In addition, scikit-image is needed. After
the requirements are installed with the following command,
```bash
pip3 install -r requirements.txt
```
the main script can be run with the following command,
```bash
python3 main.py
```
When the training starts, print statements will inform the user about the training procedure. In addition
to the verbosity provided by the print statements, it is possible to inspect the model weights and
the layer histograms via tensorboard with the following command,
```bash
tensorboard --logdir ./
```
<file_sep>/requirements.txt
absl-py==0.9.0
asn1crypto==0.24.0
astunparse==1.6.3
cachetools==4.0.0
certifi==2019.11.28
chardet==3.0.4
click==7.1.1
cryptography==2.1.4
cycler==0.10.0
decorator==4.4.2
Flask==1.1.1
gast==0.3.3
google-auth==1.12.0
google-auth-oauthlib==0.4.1
google-pasta==0.2.0
grpcio==1.27.2
h5py==2.10.0
idna==2.6
imageio==2.8.0
itsdangerous==1.1.0
Jinja2==2.11.1
Keras-Preprocessing==1.1.0
keyring==10.6.0
keyrings.alt==3.0
kiwisolver==1.1.0
Markdown==3.2.1
MarkupSafe==1.1.1
matplotlib==3.2.1
networkx==2.4
numpy==1.18.2
oauthlib==3.1.0
opt-einsum==3.2.0
Pillow==7.0.0
protobuf==3.11.3
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycrypto==2.6.1
pygobject==3.26.1
pyparsing==2.4.6
python-apt==1.6.5+ubuntu0.2
python-dateutil==2.8.1
PyWavelets==1.1.1
pyxdg==0.25
requests==2.23.0
requests-oauthlib==1.3.0
rsa==4.0
scikit-image==0.16.2
scipy==1.4.1
SecretStorage==2.3.1
six==1.14.0
tensorboard==2.2.0
tensorboard-plugin-wit==1.6.0.post2
tensorflow-estimator==2.2.0rc0
tensorflow-gpu==2.2.0rc2
termcolor==1.1.0
urllib3==1.25.8
Werkzeug==1.0.0
wrapt==1.12.1
<file_sep>/main.py
import os
import numpy as np
import matplotlib.pyplot as plt
import tensorflow.compat.v1 as tf
from data_processor import DataProcessor
from models.cnn_model import CNNModel
import config
tf.disable_v2_behavior()
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
if __name__ == "__main__":
data_params = config.DataParams().__dict__
model_params = config.ModelParams().__dict__
# Data generation for the training, validation and the test.
data = DataProcessor("train")
data.init_random_batches(model_params["batch_size"])
val_data = DataProcessor("val")
test_data = DataProcessor("test")
# Placeholders for the model input and output.
x = tf.placeholder("float", (None, data_params["input_size"],
data_params["input_size"], data_params["input_dims"]))
y = tf.placeholder("float", (None, 1))
# Model class and weight initialization. Also, the step initialization for the epochs.
model = CNNModel(model_params=model_params, data_params=data_params, data=x, target=y)
global_step = tf.Variable(0, dtype=tf.int32, trainable=False)
init = tf.global_variables_initializer()
# Initiation of the main Session.
with tf.Session() as sess:
# Summary writer folder.
file_writer = tf.summary.FileWriter('./outs', sess.graph)
sess.run(init)
if not data.has_next():
print("All data finished")
data.init_random_batches(model_params["batch_size"])
# Enable updating through batch normalization and dropout layers.
extra_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
# Namefield descriptions of the parameters that are logged through each iteration.
with tf.name_scope("Layer-1") as scope:
tf.summary.image(f"Filters", model.weight[f"W_c_{1}"]) # First conv. layer
for c in range(1, model.num_layers):
with tf.name_scope("ConvLayer-" + str(c)) as scope:
tf.summary.histogram(f"Convolved Image", model.convs[c]) # Image after convolution.
tf.summary.histogram(f"Filter", model.weight_dict[f"W_c_{1}"]) # Filter histogram.
tf.summary.histogram(f"Biased Image", model.biass[c]) # Biased image histogram.
tf.summary.histogram(f"Bias", model.weight_dict[f"b_c_{c}"]) # Bias histogram.
tf.summary.histogram(f"Activation Image", model.activations[c]) # Image hist. after the activation.
tf.summary.histogram(f"Pooled Image", model.pools[c]) # Image hist. after the pooling.
for d in range(1, model.num_deep):
with tf.name_scope("DenseLayer-" + str(d)) as scope:
tf.summary.histogram("Dense Output", model.densed[d]) # Output hist. after the weights.
tf.summary.histogram("Biased Output", model.biased[d]) # Output hist after the bias.
try:
tf.summary.histogram("Activated Output", model.activated[d]) # Output after batch_norm layer.
except KeyError:
pass
merged_summary_op = tf.summary.merge_all() # Merges all summaries from different logs.
for epoch in range(model_params["num_epochs"]):
global_step += 1
print("\nEPOCH: " + str(epoch))
data.init_random_batches(batch_size=model_params["batch_size"]) # Shuffle the training data.
mean_cost = 0.0
len_ = 0
while True: # For all mini-batches...
batch_xs, batch_ys = data.next_batches() # Get the next mini-batch.
if batch_xs is None:
break
sess.run([model.optimize, extra_update_ops], feed_dict={model.data: batch_xs,
model.target: batch_ys,
model.training: True})
# Optimize the model for a single step.
cost = sess.run(model.eval_loss, feed_dict={model.data: batch_xs,
model.target: batch_ys,
model.training: False})
# Compute the loss function.
# And print for every batch.
print(f"\rTraining Loss: {cost}", end="", flush=True)
mean_cost += cost
len_ += 1
# Print the average loss for all batches in the training set.
print(f"\rTraining Loss: {mean_cost / len_}")
val_x, val_y = val_data.get_data()
val_loss, summ = sess.run([model.eval_loss, merged_summary_op], feed_dict={model.data: val_x,
model.target: val_y,
model.training: False})
file_writer.add_summary(summ, global_step.eval(session=sess))
file_writer.add_summary(summary=tf.Summary(value=[tf.Summary.Value(tag="Train-Loss",
simple_value=mean_cost / len_),
]), global_step=global_step.eval(session=sess))
file_writer.add_summary(summary=tf.Summary(value=[tf.Summary.Value(tag="Reg-Loss",
simple_value=model.l2_loss.eval()),
]), global_step=global_step.eval(session=sess))
file_writer.add_summary(summary=tf.Summary(value=[tf.Summary.Value(tag="Val-Loss", simple_value=val_loss),
]), global_step=global_step.eval(session=sess))
print(f"Validation Loss: {val_loss}")
file_writer.flush()
test_x, test_y = test_data.get_data()
test_loss = sess.run([model.eval_loss], feed_dict={model.data: test_x,
model.target: test_y,
model.training: False})
print("Test Loss: " + str(test_loss))
random_indices = np.random.choice(list(range(test_x.shape[0])), 9)
random_images = [test_x[idx][None, :] for idx in random_indices]
random_preds = [sess.run(model.predict, feed_dict={model.data: image, model.training: False})
for image in random_images]
random_labels = [test_y[idx] for idx in random_indices]
for i, image, prediction, label in zip(range(9), random_images, random_preds, random_labels):
plt.subplot(3, 3, i+1)
plt.tight_layout()
plt.imshow(np.mean(((image[0] - image.min()) / (image.max() - image.min()) * 255), axis=-1).astype(int))
plt.title("True Score: {}".format(label))
plt.xlabel("Model prediction: \n{}".format(prediction[0]))
plt.savefig("./samples.png", figsize=(70, 70))
<file_sep>/data_processor.py
"""
Data processing class. Reads the raw data from the folder and
generates mini-batches and labels.
"""
from skimage import transform, io
import numpy as np
import pathlib
import random
import time
class DataProcessor:
"""
Data processing class implementation.
"""
def __init__(self, data_type):
"""
Initialization.
:param data_type: Type of the split of the data. Choose train, validation or test.
"""
if data_type == "train":
self.data_list = list(pathlib.Path("images/training/").glob('*'))
elif data_type == "validation":
self.data_list = list(pathlib.Path("images/validation/").glob('*'))
else:
self.data_list = list(pathlib.Path("images/test/").glob('*'))
self.data = []
self.label = []
self.__read_data_to_tensor()
self.data_copy = []
self.label_copy = []
self.middle_data = None
self.middle_label = None
self.cursor = 0
def get_data(self):
"""
Stacks the list of data and labels read from the files.
:return: Tensor of images and tensor of labels.
"""
return np.stack(self.data, axis=0), np.stack(self.label, axis=0)[:, None]
def init_random_batches(self, batch_size):
"""
Creates random mini-batches from the read and transformed data.
:param batch_size: Size of the random mini-batches.
:return: None.
"""
self.__reset_data()
self.cursor = 0
index = self.__get_index(self.data)
random.shuffle(index)
self.middle_data = list()
self.middle_label = list()
for item in range(len(index)):
self.middle_data.append(self.data[index[item]])
self.middle_label.append(self.label[index[item]])
self.data_copy = self.__divide_batches(self.middle_data, batch_size)
self.label_copy = self.__divide_batches(self.middle_label, batch_size)
def next_batches(self):
"""
Returns the next mini-batch. If there are no mini-batches left, returns None.
:return: (batch_x, batch_y)
"""
self.cursor += 1
if self.has_next():
return self.data_copy[self.cursor - 1], np.array(self.label_copy[self.cursor - 1])[:, None]
else:
return None, None
def has_next(self):
"""
Flag to check whether there are any more batches to return.
:return: Flag.
"""
return self.cursor != len(self.data_copy)
def __reset_data(self):
"""
Stores the current data to an additional array.
:return: None.
"""
self.data_copy = self.data.copy()
self.label_copy = self.label.copy()
def __read_data_to_tensor(self):
"""
Reads the raw data from files to a single list of tensors.
:return: None.
"""
start_time = time.time()
for image_path in self.data_list:
self.data.append(self.__decode_img(io.imread(str(image_path))))
self.label.append(self.__get_label(image_path.stem))
whole_data = np.stack(self.data, axis=0)
data_mean = whole_data.mean(axis=0)
data_std = whole_data.std(axis=0)
norm_data = (whole_data - data_mean) / data_std
self.data = [data for data in norm_data]
print("Reading images takes %s\n" % (time.time() - start_time))
@staticmethod
def __get_index(data):
"""
Returns the indices of the current data list.
:param data: list of image tensors.
:return: List of indices.
"""
return list(range(len(data)))
@staticmethod
def __divide_batches(data, batch_size):
"""
Divides and returns the batches for the specified batch size.
:param data: List of image tensors.
:param batch_size: Batch size for mini-batches.
:return: Mini-batch.
"""
data = np.stack(data, axis=0)
return [data[i:i + batch_size] for i in range(0, int(data.shape[0]), batch_size)]
@staticmethod
def __decode_img(img):
"""
Decodes the image arrays to float type with fixed size of 80x80.
:param img: Image array.
:return: Decoded image array.
"""
img = img.astype(float)
return transform.resize(img, [80, 80])
@staticmethod
def __get_label(file_path):
"""
Reads the label from the corresponding file path of the image.
:param file_path: File path of the image.
:return: label.
"""
parts = file_path.split("_")[0]
return parts
| de694e19a666f242a6e1a0e392d2a123e0f3bdf1 | [
"Markdown",
"Python",
"Text"
] | 7 | Python | Oguzhanka/face_attractiveness | 87977a6935f9a12d1188cbdc382f7a57ad3ef169 | 6d8e1e8af783a8b91d32fe0a35c62efb8083c5dd | |
refs/heads/master | <repo_name>sanin25/test-3.ru<file_sep>/app/Http/routes.php
<?php
/*
|--------------------------------------------------------------------------
| Application Routes
|--------------------------------------------------------------------------
|
| Here is where you can register all of the routes for an application.
| It's a breeze. Simply tell Laravel the URIs it should respond to
| and give it the controller to call when that URI is requested.
|
*/
Route::get('/', function () {
return view('welcome');
});
Route::auth();
Route::get('/admin', 'AdminController@index');
Route::group(["" => "admin"], function(){
Route::get("addpage", [
"as" => "addPage",
"uses" => "AdminController@addpage",
]);
Route::post("/savepage",["as" => "savePage", "uses" => "adminController@savepage"]);
Route::get("/getpage",["as" => "getPage", "uses" => "adminController@getpage"]);
});
<file_sep>/packages/src/Phpner/Lafiles/Http/routes.php
<?php
Route::group(['middleware' => config('lafiles.middleware') ? config('lafiles.middleware') : null], function () {
Route::get('lafiles/containers', [
'as' => 'lafiles.containers',
'uses' => '\Jasekz\Laradrop\Http\Controllers\LaradropController@getContainers'
]);
Route::post('lafiles/move', [
'as' => 'lafiles.move',
'uses' => '\Jasekz\Laradrop\Http\Controllers\LaradropController@move'
]);
Route::post('lafiles/create', [
'as' => 'lafiles.create',
'uses' => '\Jasekz\Laradrop\Http\Controllers\LaradropController@create'
]);
Route::resource('lafiles', '\Jasekz\Laradrop\Http\Controllers\LaradropController');
});<file_sep>/readme.md
# Laravel sanin25 pack
<file_sep>/app/Http/Controllers/AdminController.php
<?php
namespace App\Http\Controllers;
use App\Http\Requests;
use App\Pages;
use DB;
use Illuminate\Http\Request;
class AdminController extends Controller
{
/**
* Create a new controller instance.
*
* @return void
*/
public function __construct()
{
$this->middleware('auth');
}
/**
* Show the application dashboard.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
return view('admin/admin');
}
public function addpage(){
return view('admin/add_page');
}
public function savepage(Request $request)
{
// $fil = explode();
var_dump($request->input("img"));
// return redirect()->route('addPage')->with('status', $title);
}
public function getpage()
{
$page = DB::table('pages')->select('img_id')->get();
$img_id = explode("|",$page[0]->img_id);
$file = DB::table('laradrop_files')->whereIn('id', $img_id )->get();
dd($file);
}
}
| 69a8819b941a3b261b68eb71259dc82912a4ec00 | [
"Markdown",
"PHP"
] | 4 | PHP | sanin25/test-3.ru | 116d3cc125e0ddefd6264555adc48b0369ae49b7 | 23c288512c52b4e72cf91645598208a1e59e8ad0 | |
refs/heads/master | <file_sep>var turn = 0;
var currentGame = 0;
function player(){
if(turn % 2){
return 'O'
}else{
return 'X'
}
}
function updateState(square){
token = player();
$(square).text(token)
}
function setMessage(string){
$('div#message').text(string)
}
function checkWinner(){
var won = false;
let board = {};
$('td').text(function(index, string){
board[index] = string
});
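// The eight winning lines by cell index: three rows, three columns and two diagonals.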
const WIN_COMBOS = [[0,1,2], [3,4,5], [6,7,8], [0,3,6],
[1,4,7], [2,5,8], [0,4,8], [2,4,6]];
WIN_COMBOS.some(function(combo){
if(board[combo[0]] != "" && board[combo[0]] == board[combo[1]] && board[combo[1]] == board[combo[2]]){
setMessage(`Player ${board[combo[0]]} Won!`);
won = true;
return true;
}
});
return won
}
function resetBoard(){
$('td').text('')
turn = 0
currentGame = 0
}
function attachListeners(){
$('td').on('click', function() {
if(this.textContent == "" && !checkWinner()){
doTurn(this)
}
});
$('button#previous').on('click', previousGames);
$('button#save').on('click', saveGame);
$('button#clear').on('click', resetBoard);
}
function saveGame(){
let board = []
$('td').text(function(index, currentcontent){
board.push(currentcontent)
})
var dataForCreate = { state: board }
if(currentGame){
$.ajax({
type: 'PATCH',
url: `/games/${currentGame}`,
data: dataForCreate
});
}else{
$.post('/games', dataForCreate, function(game){
currentGame = game.data.id;
addButtonForGame(game)
})
}
}
function doTurn(square){
updateState(square);
turn++;
if(checkWinner()){
saveGame();
resetBoard();
}else if (turn == 9){
setMessage("Tie game.")
saveGame();
resetBoard();
}
}
function previousGames(){
$('#games').empty();
$.get('/games', (games) => {
if (games.data.length) {
games.data.forEach((game) => addButtonForGame(game));
}
});
}
function addButtonForGame(game){
$('#games').append(`<button id="gameid-${game.id}">${game.id}</button><br>`);
$(`#gameid-${game.id}`).on('click', () => reloadGame(game.id))
}
function reloadGame(id){
$('#message').empty();
$.get(`/games/${id}`, function(response){
currentGame = response.data.id
var state = response.data.attributes["state"]
turn = state.join('').length;
let index = 0;
for (let y = 0; y < 3; y++) {
for (let x = 0; x < 3; x++) {
document.querySelector(`[data-x="${x}"][data-y="${y}"]`).innerHTML = state[index];
index++;
}
}
})
}
$(document).ready(function(){
attachListeners()
})
| a7df52e9a779f315833f1dd1f7bf442c626ceecb | [
"JavaScript"
] | 1 | JavaScript | sgibson47/js-tictactoe-rails-api-v-000 | 095c65f3b1a07cdc94f26d2fff515fd2ad27aec6 | 33fcdc8b2dae105e87edb7be18b68e7231122d29 | |
refs/heads/main | <file_sep># **Password generator with strength bar and custom password options**

**HTML + SCSS + JS**
<file_sep>const passInput = document.querySelector(".passwordWrap__password");
const passButton = document.querySelector(".generateButton");
const howMany = document.querySelector(".howManyChar")
const rangeBar = document.querySelector(".rangeBar")
const upperCheck = document.querySelector(".uppercaseCheck")
const lowerCheck = document.querySelector(".lowercaseCheck")
const numbersCheck = document.querySelector(".numbersCheck")
const symbolsCheck = document.querySelector(".symbolsCheck")
const strengthBar = document.querySelector(".strengthBar")
const copyBtn = document.querySelector(".copyBtn")
howMany.addEventListener('input', syncChar)
rangeBar.addEventListener('input', syncChar)
copyBtn.addEventListener("click", () => {
passInput.select()
document.execCommand("copy");
})
function syncChar(e) {
howMany.value = e.target.value
rangeBar.value = e.target.value
passInput.value = generatePassword();
checkStrength()
}
passButton.addEventListener('click', () => {
passInput.value = generatePassword()
checkStrength()
})
function checkStrength() {
if (howMany.value < 5) {
strengthBar.style = "width:20%;background:rgb(238, 70, 70);box-shadow: 0 2px 5px rgb(238, 70, 70);"
} else if (howMany.value < 10) {
strengthBar.style = "width:45%;background:rgb(248, 175, 58);box-shadow: 0 2px 5px rgb(248, 175, 58);"
} else if (howMany.value < 15) {
strengthBar.style = "width:65%;background:rgb(169, 246, 67);box-shadow: 0 2px 5px rgb(169, 246, 67);"
} else if (howMany.value < 20) {
strengthBar.style = "width:85%;background:rgb(169, 246, 67);box-shadow: 0 2px 5px rgb(169, 246, 67);"
} else if ((howMany.value > 20) && (numbersCheck.checked || symbolsCheck.checked)) {
strengthBar.style = "width:100%;background:rgb(169, 246, 67);box-shadow: 0 2px 5px rgb(169, 246, 67);"
}
}
function generatePassword() {
let password = ''
let myChars = lowerCaseChars
if (upperCheck.checked) myChars = myChars.concat(upperCaseChars)
if (numbersCheck.checked) myChars = myChars.concat(numbersChars)
if (symbolsCheck.checked) myChars = myChars.concat(specialChars)
myChars.sort((a, b) => {
return a - b
})
for (i = 0; i <= parseInt(howMany.value - 1); i++) {
let randVal = Math.floor(Math.random() * myChars.length)
password += String.fromCharCode(myChars[randVal])
}
return password
}
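// ASCII code ranges: 65-90 = 'A'-'Z', 97-122 = 'a'-'z', 48-57 = '0'-'9', 33-47 = punctuation symbols.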
const upperCaseChars = arrayCharacters(65, 90)
const lowerCaseChars = arrayCharacters(97, 122)
const numbersChars = arrayCharacters(48, 57)
const specialChars = arrayCharacters(33, 47)
function randomNumber(tab) {
const passChar = Math.floor(Math.random() * (tab[tab.length - 1] - tab[0]) + tab[0])
return passChar
}
function arrayCharacters(low, high) {
const array = []
for (i = low; i <= high; i++) {
array.push(i)
}
return array
}
passInput.value = generatePassword();
checkStrength(); | fd968c33a29293b6bf8e6bf872c2b244d7a802c5 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | Frontmaniaac/Password-Generator | 67e5d6c0c3969b5313cb18a355d604dc82bb282d | 60c1ac5ace2c256afa4d5e2729db8867aefc5725 | |
refs/heads/master | <file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/*
* File: MinHeap.h
* Author: carloco
*
* Created on December 20, 2017, 2:41 PM
*/
#ifndef MINHEAP_H
#define MINHEAP_H
class MinHeap {
//private:
double** heap;
int numElementos = 0;
int maxElementos = 0;
void Intercambia(int i, int j);
void minHeapify(int i, int j);
public:
//MinHeap(const MinHeap& orig);
//virtual ~MinHeap();
MinHeap(int maxsize);
void insert(long posElement, float distanceToQuery);
void buildHeap();
int getMinPoint();
double getMinScore() ;
double** getArray();
int getCantidad();
void showArray();
void imprimirDistancias();
};
#endif /* MINHEAP_H */
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/*
* File: NodeSKQ.cpp
* Author: carlos
*
* Created on January 13, 2018, 3:57 AM
*/
#include "NodeSKQ.h"
NodeSKQ::NodeSKQ() {
}
NodeSKQ::NodeSKQ(double* c) {
this->coordenadas = c;
}
NodeSKQ::NodeSKQ(double* c, vector<int> &p) {
this->coordenadas = c;
this->palabras = p;
}
<file_sep># cBiK (Compact Bitmaps and Implicit KD-Tree)
Compact data structure to solve spatio-textual queries (Boolean kNN, Ranked kNN and Range Searching).
The main file has different instructions to make the calls both to build the index and to execute the queries found in the Build_cBiK file.
### The datasets to be tested must have the following format
**ID ID Latitude Longitude Keywords, for example:**
* 1 1 26.353048008480023 -80.14893944389951 Barnes Noble Cafe Coffee Shop Coffee
* 2 2 41.198486328125 36.720489501953125 Efor Arcade
* 3 3 -6.2474358618473085 106.92876179761407 Kampus Kelapa Dua College Administrative Building Administrative
* 4 4 25.536826784689794 -103.45327377319336 Cimaco Department Store Department
### The queries format to kNN:
**Latitude Longitude Keywords**
* 27.60043156821203 -89.82445466748851 Tucuruvi
* 28.93397095670383 44.05666482108714 Adanali
* 31.94235423517235 -68.08543093424582 Raks
### Query format for range searching:
**Latitude1 Longitude1 Latitude2 Longitude2 Keywords**
* 14.3956825620347 120.9119237743221 14.45925975974265 120.9775720752873 Im<NAME>
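
### Minimal usage sketch
The calls below mirror `main.cpp` and the declarations in `Build_cBiK.h`; the dataset name and the query values are placeholders to adapt to your data:
```cpp
// Build the index from a raw dataset and export the cBiK_<dataset>_* files
Build_cBiK builder("datasetPOIs");

// Load a previously built index and run a Boolean top-k query
Build_cBiK cBiK;
cBiK.load_cBiK("datasetPOIs");
double query[2] = {4.1, 3.2};
std::vector<std::string> keywords = {"coffee"};
cBiK.findBKNN(query, keywords, 3);
```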
<file_sep>/*
* File: main.cpp
* Author: carlos
*
* Created on 26 de noviembre de 2017, 0:05
*/
#include <vector>
#include <list>
#include <iostream>
#include "Build_cBiK.h"
#include "NodeSKQ.h"
//To read files
#include <fstream>
#include <string>
using namespace std;
using namespace std::chrono;
using timer = std::chrono::high_resolution_clock;
int main(int argc, char **argv) {
//General parameters
string dataSet = "datasetTweets3M";
//string dataSet = "datasetPOIs";
//string dataSet = "dataset15Puntos";
//Query parameters
//double query[2] = {4.1, 3.2};
//double query2[2] = {4.1, 3.2};
//int k = 1;
int numKeys = 3;
int nTest = 1000;
//double alpha = 0.3;
//string queryDataSet = dataSet+"_query"+to_string(numKeys);
//string queryDataSet = "datasetTweets0M_query"+to_string(numKeys);
string queryDataSet = dataSet+"_rQuery_20K_"+to_string(numKeys);
/**************************************************************************/
/***************************** BUILD cBiK *********************************/
/**************************************************************************/
/*memory_monitor::start();
auto start = timer::now();
Build_cBiK cBiK = Build_cBiK(dataSet);
auto stop = timer::now();
memory_monitor::stop();
cout << "\n =================== ESTADISTICAS ===================" << endl;
cout << "construction time in seconds: " << duration_cast<seconds>(stop-start).count() << endl;
std::cout << "peak usage = " << memory_monitor::peak() / (1024*1024) << " MB" << std::endl;
cBiK.printMapa();*/
/**************************************************************************/
/****************************** FIND KNN *********************************/
/**************************************************************************/
/*Build_cBiK cBiK = Build_cBiK();
cBiK.load_cBiK(dataSet);
cBiK.printMapa();
vector<std::string> queryKey;
int numToken;
int ciclo=0;
string palabras = "";
vector<vector<std::string>> arregloPalabras(nTest);
double arregloPuntos[nTest][2];
ifstream lecturaQuery;
lecturaQuery.open(queryDataSet,ios::in);
for(string linea; getline(lecturaQuery, linea); ) {
stringstream registro(linea);
if(ciclo < nTest) {
palabras = "";
numToken = 0;
queryKey.clear();
for (string dato; getline(registro, dato, ' '); ) {
if(numToken == 0) {
//LATITUD
arregloPuntos[ciclo][0] = stod(dato);
}else if(numToken == 1) {
//LONGITUD
arregloPuntos[ciclo][1] = stod(dato);
}else if(numToken > 1){
//PALABRAS CLAVES
palabras = palabras + dato + " - ";
queryKey.push_back(dato);
//cout << dato << ", ";
}
numToken++;
}
//Se cargan los datos
arregloPalabras[ciclo] = queryKey;
}
ciclo++;
}
lecturaQuery.close();
//Ejecuta las pruebas
auto start = timer::now();
for(int i=0; i<arregloPalabras.size(); i++) {
//cout << (ciclo+1) << "/" << nTest << " >> " << palabras << "(" << query[0] << " , " << query[1] << ") ===>> ";
//cBiK.findRKNN(arregloPuntos[i], arregloPalabras[i], k, alpha);
cBiK.findBKNN(arregloPuntos[i], arregloPalabras[i], k);
}
auto stop = timer::now();
cout << "******************************* TEST *******************************" << endl;
cout << "Tiempo BKNN: " << (duration_cast<milliseconds>(stop-start).count())/1000.0 << " == Errores: " << cBiK.contFailB << endl;
cout << "********************************************************************" << endl;
*/
/*cout << "******************************* TEST *******************************" << endl;
cout << "Tiempo RKNN: " << (duration_cast<milliseconds>(stop-start).count())/1000.0 << " == Errores: " << cBiK.contFailR << endl;
cout << "********************************************************************" << endl;*/
/**************************************************************************/
/*************************** RANGE SEARCH *********************************/
/**************************************************************************/
Build_cBiK cBiK = Build_cBiK();
cBiK.load_cBiK(dataSet);
cBiK.printMapa();
double timeRangeSearch = 0.0;
vector<std::string> queryKey;
int numToken;
int ciclo=0;
string palabras = "";
vector<vector<std::string>> arregloPalabras(nTest);
double arregloPuntos[nTest][4];
ifstream lecturaQuery;
lecturaQuery.open(queryDataSet,ios::in);
for(string linea; getline(lecturaQuery, linea); ) {
stringstream registro(linea);
if(ciclo < nTest) {
palabras = "";
numToken = 0;
queryKey.clear();
for (string dato; getline(registro, dato, ' '); ) {
if(numToken == 0) {
//Latitude of the first corner of the range query
//query[0] = stod(dato);
arregloPuntos[ciclo][0] = stod(dato);
}else if(numToken == 1) {
//Longitude of the first corner
//query[1] = stod(dato);
arregloPuntos[ciclo][1] = stod(dato);
}else if(numToken == 2) {
//Latitude of the second corner
//query2[0] = stod(dato);
arregloPuntos[ciclo][2] = stod(dato);
}else if(numToken == 3) {
//Longitude of the second corner
//query2[1] = stod(dato);
arregloPuntos[ciclo][3] = stod(dato);
}else if(numToken > 3){
//Query keywords
palabras = palabras + dato + " - ";
queryKey.push_back(dato);
//cout << dato << ", ";
}
numToken++;
}
//Store the parsed query
arregloPalabras[ciclo] = queryKey;
}
ciclo++;
}
lecturaQuery.close();
//Run the benchmark queries
auto start = timer::now();
for(int i=0; i<arregloPalabras.size(); i++) {
cBiK.rangeQuery(arregloPuntos[i], arregloPalabras[i]);
}
auto stop = timer::now();
cout << "******************************* TEST *******************************" << endl;
cout << "Tiempo RSKQ: " << (duration_cast<milliseconds>(stop-start).count())/1000.0 << " == Errores: " << cBiK.contFailRS << endl;
cout << "********************************************************************" << endl;
//Free the memory used by the coordinates (currently disabled)
/*for (int i = 0; i < cantidadElementos; i++) {
delete [] coordinates[i];
}
delete [] coordinates;*/
return 0;
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/*
* File: MaxHeap.cpp
* Author: carloco
*
* Created on December 20, 2017, 2:41 PM
*/
#include "MinHeap.h"
#include <iostream>
#include <limits>
using namespace std;
/*MinHeap::MinHeap(const MinHeap& orig) {
}
//Destructor
MinHeap::~MinHeap() {
}*/
//Constructor taking the maximum heap size
MinHeap::MinHeap(int maxsize) {
heap = new double *[maxsize];
for (int i = 0; i < maxsize; i++) {
heap[i] = new double[2];
heap[i][0] = 0;//index of the candidate point
heap[i][1] = numeric_limits<double>::min();//score of the point
}
maxElementos = maxsize;
}
void MinHeap::Intercambia(int i, int j) {
double temp[2];
temp[0] = heap[i][0];
temp[1] = heap[i][1];
heap[i][0] = heap[j][0];
heap[i][1] = heap[j][1];
heap[j][0] = temp[0];
heap[j][1] = temp[1];
}
void MinHeap::minHeapify(int i, int j) {
if ((2 * i + 1) <= j) {
int k;
if ((2 * i + 2) <= j) {
k = (heap[2 * i + 2][1] <= heap[2 * i + 1][1]) ? 2 * i + 2 : 2 * i + 1;
} else {
k = 2 * i + 1;
}
//Order by the score stored for the point
if (heap[i][1] > heap[k][1]) {
Intercambia(i, k);
minHeapify(k, j);
}
}
}
void MinHeap::insert(long posElement, float distanceToQuery) {
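// Replaces the current minimum (the root) with the incoming element and sifts it down;
// callers are expected to insert only elements meant to displace that minimum.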
//Checks whether the heap has been filled (an earlier incremental version is kept below)
/*if(numElementos < maxElementos) {
heap[numElementos][0] = posElement;
heap[numElementos][1] = distanceToQuery;
numElementos++;
buildHeap();
}else {
//Se asume que el punto que viene es menor al MinHeap
heap[0][0] = posElement;
heap[0][1] = distanceToQuery;
minHeapify(0, maxElementos - 1);
}*/
if(numElementos < maxElementos) {
numElementos++;
}
heap[0][0] = posElement;
heap[0][1] = distanceToQuery;
minHeapify(0, maxElementos - 1);
}
void MinHeap::buildHeap() {
//for (int i = (sizeof (heap) / sizeof (heap[0])) / 2; i >= 0; i--) {
//cout << "Max Elementos: " << maxElementos << endl;
for (int i = (maxElementos * 0.5); i >= 0; i--) {
//cout << "I: " << i << endl;
minHeapify(i, maxElementos - 1);
}
}
int MinHeap::getMinPoint() {
return heap[0][0];
}
double MinHeap::getMinScore() {
return heap[0][1];
}
double** MinHeap::getArray() {
return heap;
}
int MinHeap::getCantidad() {
return numElementos;
}
void MinHeap::showArray() {
for(int i=0; i<maxElementos; i++) {
cout << heap[i][0] << "|";
}
cout << endl;
}
void MinHeap::imprimirDistancias() {
for(int i=0; i<maxElementos; i++) {
cout << heap[i][1] << "|";
}
cout << endl;
}<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/*
* File: Build_cBiK.h
* Author: carlos
*
* Created on January 16, 2018, 3:28 PM
*/
#ifndef BUILD_CBIK_H
#define BUILD_CBIK_H
#include <stdbool.h>
#include <stdlib.h>
#include <vector>
#include <list>
#include <iostream>
#include <queue>
#include "NodeSKQ.h"
#include "MaxHeap.h"
#include "MinHeap.h"
#include <set>
//Para los tiempos
#include <time.h>
#include <sys/time.h>
#include <sdsl/bit_vectors.hpp>
//Para leer archivos
#include <fstream>
//Para la funcion hash
#include <unordered_map>
#include <string>
#include <locale>
using namespace sdsl;
using namespace std;
class Build_cBiK {
public:
int contFailB = 0;
int contFailR = 0;
int contFailRS = 0;
Build_cBiK(string datasetName);
Build_cBiK();
void load_cBiK(string datasetName);
void printMapa();
double findBKNN(const double* query, const vector<std::string> &queryKey, int k);
double findRKNN(const double* query, const vector<std::string> &queryKey, int k, double alpha);
double rangeQuery(const double* query, const vector<std::string> &queryKey);
void rangeSearchQuery(const double* query1, const double* query2, vector<int> queryKey, int start, int end, int profundidad, vector<int> &result);
private:
//General construction attributes
string datasetName;
//Points received from the dataset are stored here
vector<NodeSKQ> nodosSKQ;
//Final KD-Tree references are stored here
vector<NodeSKQ> nodosKdTree;
long long int numKeys = 0;
bit_vector mapa;
sd_vector<> sdVectorKeywords;
sd_vector<> sdVectorResume;
//vector<long long int> vectorResume;
set<long long int> vectorResumeFinal;
set<long long int> vectorKeys;
//Global variable for the maximum distance of a point
double flag[2];
//Used to lower-case the keywords
locale loc;
//For rank support
rank_support_v<0> mapRank;
rank_support_v<1> mapRank_1;
//cBiK attributes
std::unordered_map<std::string,int> hashMapKeys;
//Maximum distance between two points
double dmax = 162000;
void loadDataset();
void create_cBiK();
void export_cBiK();
string toLower(string &palabra);
void initializeReference(vector<NodeSKQ>& coordinates, vector<NodeSKQ>& reference);
double superKeyCompare(const double *a, const double *b, const long p, const long dim);
void mergeSort(vector<NodeSKQ> &reference, vector<NodeSKQ>& temporary, const long low, const long high, const long p, const long dim);
long removeDuplicates(vector<NodeSKQ>& reference, const long i, const long dim);
void buildKdTree(vector< vector<NodeSKQ> >& references, vector<NodeSKQ>& temporary, const long start, const long end, const long dim, const long depth);
set<int> generateResume(int start, int end);
double searchEuclideanDistance(const double* p1, const double* p2);
bool checkKeywords(vector<int> query, int pos);
bool checkSummaryLeft(vector<int> query, int pos, int start, int end);
bool checkSummaryRight(vector<int> query, int pos, int start, int end);
double timeval_diff(struct timeval *a, struct timeval *b);
//cBiK index helpers
long long int getNewSummaryIndex(int i);
void printVector();
void printTree();
void printKdTree(int start, int end, int depth);
void printTuple(double* tuple);
void printKeys(vector<bit_vector> arrBits);
void printKey(bit_vector arrBits);
//Scoring functions
double getSpatialScore(const double* query, int pos);
double getKeywordsTextualScore(vector<int> queryKey, int pos);
double getTotalScore(double &spatialScore, double &textualScore, double &alpha);
double* getSummariesTextualScores(vector<int> queryKey, int pos, int start, int end);
MaxHeap searchBKNN(const double* query, vector<int> queryKey, int start, int end, const int profundidad, MaxHeap &heap, int totalK);
MinHeap searchRKNN(const double* query, vector<int> queryKey, int start, int end, MinHeap &heap, int totalK, double alpha);
};
#endif /* BUILD_CBIK_H */
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/*
* File: NodeSKQ.h
* Author: carlos
*
* Created on January 13, 2018, 3:57 AM
*/
#ifndef NODESKQ_H
#define NODESKQ_H
#include <vector>
#include <list>
#include <iostream>
using namespace std;
//#include <sdsl/bit_vectors.hpp>
//using namespace sdsl;
class NodeSKQ {
//private:
public:
double* coordenadas;
vector<int> palabras;
NodeSKQ();
NodeSKQ(double* c);
NodeSKQ(double* c, vector<int> &p);
};
#endif /* NODESKQ_H */
<file_sep>/*
* File: Build_cBiK.cpp
* Author: carlos
*
* Created on January 16, 2018, 3:28 PM
*/
#include "Build_cBiK.h"
Build_cBiK::Build_cBiK(string datasetName) {
this->datasetName = datasetName;
loadDataset();
create_cBiK();
export_cBiK();
}
Build_cBiK::Build_cBiK() {
}
string Build_cBiK::toLower(string &palabra) {
string str;
for (string::size_type i=0; i<palabra.length(); i++){
str = str + tolower(palabra[i],loc);
}
return str;
}
void Build_cBiK::load_cBiK(string datasetName) {
this->datasetName = datasetName;
/**************************************************************************/
/********************** CARGAR HASHMAP(KEY,ID) **************************/
/**************************************************************************/
cout << "CARGANDO HASHMAP >> " << "cBiK_"+datasetName+"_hashmap" << endl;
ifstream lecturaHashMap;
lecturaHashMap.open("cBiK_"+datasetName+"_hashmap",ios::in);
//KEYWORD-ID HASHING
int contTkn=0;
string keyHash = "";
int idHash = 0;
//Read each line of the text file
for(string linea; getline(lecturaHashMap, linea); ) {
stringstream registro(linea);
contTkn = 0;
keyHash = "";
idHash = 0;
//Read each token of the line
for (string dato; getline(registro, dato, ' '); ) {
if(contTkn == 0) {
//KEYWORD
keyHash = dato;
}else if(contTkn == 1) {
//ID
idHash = stoi(dato);
}
contTkn++;
}
hashMapKeys[keyHash] = idHash;
}
lecturaHashMap.close();
cout << "TOTAL PALABRAS:" << hashMapKeys.size() << endl;
this->numKeys = hashMapKeys.size();
/**************************************************************************/
/************************** CARGAR PUNTOS *******************************/
/**************************************************************************/
cout << "CARGANDO PUNTOS >> " << "cBiK_"+datasetName+"_points" << endl;
long long int totalObjects = 0;
ifstream lecturaPalabras;
lecturaPalabras.open("cBiK_"+datasetName+"_points",ios::in);
//Read each line of the text file
for(string linea; getline(lecturaPalabras, linea); ) {
totalObjects++;
}
lecturaPalabras.close();
cout << "Total de objetos >> " << totalObjects << endl;
lecturaPalabras.open("cBiK_"+datasetName+"_points",ios::in);
//Array to store the points
double **coordinates;
coordinates = new double *[totalObjects];
for(int i = 0; i<totalObjects; i++) {
coordinates[i] = new double[2];//two dimensions
}
//KEYWORD-ID HASHING
int contToken=0;
int i=0;
int idKey = 0;
//Read each line of the text file
for(string linea; getline(lecturaPalabras, linea); ) {
stringstream registro(linea);
contToken = 0;
//Read each token of the line
for (string dato; getline(registro, dato, ' '); ) {
if(contToken == 0) {
//LATITUDE
coordinates[i][0] = stod(dato);
}else if(contToken == 1) {
//LONGITUDE
coordinates[i][1] = stod(dato);
}
contToken++;
}
i++;
}
lecturaPalabras.close();
nodosKdTree.resize(totalObjects);
for (int i = 0; i < nodosKdTree.size(); i++) {
nodosKdTree[i] = NodeSKQ(&(coordinates[i][0]));
}
/**************************************************************************/
/********************** CARGAR BITMAP MAPA ******************************/
/**************************************************************************/
cout << "CARGANDO BITMAP MAPA >> " << "cBiK_"+datasetName+"_map.sdsl" << endl;
load_from_file(mapa, "cBiK_"+datasetName+"_map.sdsl");
cout << "\t NUMERO ELEMENTOS MAPA >> " << mapa.size()*0.5 << endl;
//Create rank support for the map bitmap
mapRank_1 = rank_support_v<1>(&mapa);
/**************************************************************************/
/********************** CARGAR BITMAP KEYWORDS **************************/
/**************************************************************************/
cout << "CARGANDO BITMAP KEYWORDS >> " << "cBiK_"+datasetName+"_keywords.sdsl" << endl;
load_from_file(sdVectorKeywords, "cBiK_"+datasetName+"_keywords.sdsl");
cout << "\t NUMERO ELEMENTOS KEYWORDS >> " << sdVectorKeywords.size() << endl;
/**************************************************************************/
/********************** CARGAR BITMAP SUMMARY ***************************/
/**************************************************************************/
cout << "CARGANDO BITMAP SUMMARY >> " << "cBiK_"+datasetName+"_summary.sdsl" << endl;
load_from_file(sdVectorResume, "cBiK_"+datasetName+"_summary.sdsl");
cout << "\t NUMERO ELEMENTOS SUMMARY >> " << sdVectorResume.size() << endl;
}
void Build_cBiK::export_cBiK() {
/**************************************************************************/
/********************** EXPORTAR HASHMAP(KEY,ID) **************************/
/**************************************************************************/
cout << "EXPORTANDO HASHMAP >> " << "cBiK_"+datasetName+"_hashmap" << endl;
ofstream archivoHM("cBiK_"+datasetName+"_hashmap");
for (auto& x: hashMapKeys) {
archivoHM << x.first << " " << x.second << endl;
}
archivoHM.close();
/**************************************************************************/
/************************** EXPORTAR PUNTOS *******************************/
/**************************************************************************/
cout << "EXPORTANDO PUNTOS >> " << "cBiK_"+datasetName+"_points" << endl;
ofstream archivoP("cBiK_"+datasetName+"_points");
for(int i=0; i<nodosKdTree.size(); i++) {
archivoP << setprecision(16) << nodosKdTree[i].coordenadas[0] << " " << nodosKdTree[i].coordenadas[1] << endl;
}
archivoP.close();
/**************************************************************************/
/********************** EXPORTAR BITMAP MAPA ******************************/
/**************************************************************************/
cout << "EXPORTANDO BITMAP MAPA >> " << "cBiK_"+datasetName+"_map.sdsl" << endl;
store_to_file(mapa, "cBiK_"+datasetName+"_map.sdsl");
/**************************************************************************/
/********************** EXPORTAR BITMAP KEYWORDS **************************/
/**************************************************************************/
cout << "EXPORTANDO BITMAP KEYWORDS >> " << "cBiK_"+datasetName+"_keywords.sdsl" << endl;
store_to_file(sdVectorKeywords, "cBiK_"+datasetName+"_keywords.sdsl");
/**************************************************************************/
/********************** EXPORTAR BITMAP SUMMARY ***************************/
/**************************************************************************/
cout << "EXPORTANDO BITMAP SUMMARY >> " << "cBiK_"+datasetName+"_summary.sdsl" << endl;
store_to_file(sdVectorResume, "cBiK_"+datasetName+"_summary.sdsl");
}
void Build_cBiK::loadDataset() {
long long int totalObjects = 0;
cout << "LEYENDO DATASET >> " << datasetName << endl;
/**************************************************************************/
/*************************** CUENTA OBJETOS *******************************/
/**************************************************************************/
ifstream lecturaPalabras;
lecturaPalabras.open(datasetName,ios::in);
//Read each line of the text file
for(string linea; getline(lecturaPalabras, linea); ) {
totalObjects++;
}
lecturaPalabras.close();
cout << "Total de objetos >> " << totalObjects << endl;
/**************************************************************************/
/*************************** CARGAR OBJETOS *******************************/
/**************************************************************************/
lecturaPalabras.open(datasetName,ios::in);
//Array to store the points
double **coordinates;
coordinates = new double *[totalObjects];
for(int i = 0; i<totalObjects; i++) {
coordinates[i] = new double[2];//two dimensions
}
//Array to store the keywords
vector< vector<int> > indexKeys;
//KEYWORD-ID HASHING
int contToken=0;
int i=0;
int idKey = 0;
vector<int> arregloClaves;
std::unordered_map<std::string,int>::const_iterator got;
//Read each line of the text file
for(string linea; getline(lecturaPalabras, linea); ) {
stringstream registro(linea);
contToken = 0;
arregloClaves.clear();
//Read each token of the line
for (string dato; getline(registro, dato, ' '); ) {
if(contToken == 2) {
//LATITUDE
coordinates[i][0] = stod(dato);
}else if(contToken == 3) {
//LONGITUDE
coordinates[i][1] = stod(dato);
}else if(contToken > 3){
//KEYWORDS
string laPalabra = toLower(dato);
got = hashMapKeys.find(laPalabra);
if(got == hashMapKeys.end()) {
//New keyword: assign it the next id
hashMapKeys[laPalabra] = idKey;
arregloClaves.push_back(idKey);
idKey++;
}else {
//Keyword already known: reuse its id
arregloClaves.push_back(got->second);
}
}
contToken++;
}
indexKeys.push_back(arregloClaves);
i++;
}
lecturaPalabras.close();
/**************************************************************************/
/*************************** VECTOR nodosSKQ*******************************/
/**************************************************************************/
cout << "TOTAL PALABRAS:" << hashMapKeys.size() << endl;
this->numKeys = hashMapKeys.size();
//Store the coordinates and keywords in node objects
this->nodosSKQ.clear();
this->nodosSKQ.resize(totalObjects);
for (int i = 0; i < this->nodosSKQ.size(); i++) {
this->nodosSKQ[i] = NodeSKQ(&(coordinates[i][0]), (indexKeys[i]));
}
}
void Build_cBiK::create_cBiK(){
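// Construction pipeline: sort one reference array per dimension, remove duplicate
// coordinates, build the implicit KD-Tree by medians while marking the node bitmap,
// and finally materialize the compact (sd_vector) keyword and summary bitmaps.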
cout << "\nCREANDO INDICE cBiK >> " << endl;
struct timeval iniTime2, endTime2;
double secs;
//Mark the start time
gettimeofday(&iniTime2, NULL);
vector<NodeSKQ>& coordinates = this->nodosSKQ;
const long numDimensions = 2;//number of dimensions
//Initialize and sort one reference array per dimension
vector< vector<NodeSKQ> > references(numDimensions, vector<NodeSKQ>(coordinates.size()));
vector<NodeSKQ> temporary(coordinates.size());
for (long i = 0; i < references.size(); i++) {
initializeReference(coordinates, references.at(i));
mergeSort(references[i], temporary, 0, references[i].size() - 1, i, numDimensions);
}
//Remove references to duplicate coordinates with one pass over each reference array.
vector<long> end(references.size());
for (long i = 0; i < end.size(); i++) {
end[i] = removeDuplicates(references[i], i, numDimensions);
}
//Verify that the same number of references was removed from every reference array.
for (long i = 0; i < end.size() - 1; i++) {
for (long j = i + 1; j < end.size(); j++) {
if (end.at(i) != end.at(j)) {
cout << "reference removal error" << endl;
exit(1);
}
}
}
//Create the final array
long long int totalSize = (end.at(0) + 1);
nodosKdTree.resize(totalSize);
//Create the map with twice the number of elements:
//one half maps internal nodes and leaves, the other half marks the nodes that carry summaries
mapa = bit_vector(totalSize*2,0);
cout << "\t...CONSTRUYENDO iKD-TREE >> " << endl;
//Build the KD-Tree by medians and mark the map
buildKdTree(references, temporary, 0, end.at(0), numDimensions, 0);
//Create the builder for the compact keyword bitvector
sd_vector_builder localCompactKeywords((totalSize*numKeys), vectorKeys.size());
//Copy the keyword bits into the compact bitvector
for (set<long long int>::iterator it=vectorKeys.begin(); it!=vectorKeys.end(); ++it) {
localCompactKeywords.set(*it);
}
//Clear vectorKeys to save memory
vectorKeys.clear();
//Materialize the compact keyword bitvector
sd_vector<> sdKeywords(localCompactKeywords);
sdVectorKeywords = sdKeywords;
//Create rank support for the map bitmap
mapRank_1 = rank_support_v<1> (&mapa);
long long int nodosInterno = mapRank_1(mapa.size()*0.5);
cout << "\t...CONSTRUYENDO SUMMARY >> " << endl;
//Count the summary bits that are set in order to size the array
set<int> totalBitsResumen = generateResume(0, totalSize-1);
totalBitsResumen.clear();
cout << "\t...CONSTRUYENDO COMPACT SUMMARY >> " << endl;
//cout << "TOTAL DE UNOS:" << vectorResumeFinal.size() << endl;
long long int nodosInternosResumen = mapRank_1(mapa.size());
//Create the builder for the compact summary bitvector
sd_vector_builder localCompactResume(((nodosInternosResumen-nodosInterno)*2*numKeys), vectorResumeFinal.size());
//Copy the bits into the compact bitvector
int i = 0;
for (set<long long int>::iterator it=vectorResumeFinal.begin(); it!=vectorResumeFinal.end(); ++it) {
localCompactResume.set(*it);
}
//Clear vectorResumeFinal to save memory
vectorResumeFinal.clear();
sd_vector<> sdResume(localCompactResume);
sdVectorResume = sdResume;
    // Mark the end time
gettimeofday(&endTime2, NULL);
secs = timeval_diff(&endTime2, &iniTime2);
printf("Tiempo: %.16g microseg -- %.16g miliseg -- %.16g seg\n", secs * 1000000.0, secs * 1000.0, secs);
cout << endl;
// Verify the k-d tree and report the number of KdNodes.
/*long numberOfNodes = root->verifyKdTree(numDimensions, 0);
cout << endl << "Number of nodes = " << numberOfNodes << endl;*/
}
/*
* Initialize a reference array by creating references into the coordinates array.
*
* calling parameters:
*
* coordinates - a vector<NodeSKQ> of pointers to each of the (x, y, z, w...) tuples
* reference - a vector<NodeSKQ> that represents one of the reference arrays
*/
void Build_cBiK::initializeReference(vector<NodeSKQ>& coordinates, vector<NodeSKQ>& reference) {
for (long j = 0; j < coordinates.size(); j++) {
reference.at(j) = coordinates.at(j);
}
}
/*
* The superKeyCompare method compares two double arrays in all k dimensions,
* and uses the sorting or partition coordinate as the most significant dimension.
*
* calling parameters:
*
* a - a double*
* b - a double*
* p - the most significant dimension
* dim - the number of dimensions
*
* returns: a double result of comparing two double arrays
*/
double Build_cBiK::superKeyCompare(const double *a, const double *b, const long p, const long dim) {
double diff = 0;
for (long i = 0; i < dim; i++) {
long r = i + p;
// A fast alternative to the modulus operator for (i + p) < 2 * dim.
r = (r < dim) ? r : r - dim;
diff = a[r] - b[r];
if (diff != 0) {
break;
}
}
return diff;
}
/*
* The mergeSort function recursively subdivides the array to be sorted
* then merges the elements. Adapted from <NAME>'s "Algorithms
* in C++" p. 166. Addison-Wesley, Reading, MA, 1992.
*
* calling parameters:
*
* reference - a vector<NodeSKQ> that represents the reference array to sort
* temporary - a temporary array into which to copy intermediate results;
* this array must be as large as the reference array
* low - the start index of the region of the reference array to sort
* high - the high index of the region of the reference array to sort
* p - the sorting partition (x, y, z, w...)
* dim - the number of dimensions
* depth - the depth of subdivision
*/
void Build_cBiK::mergeSort(vector<NodeSKQ> &reference, vector<NodeSKQ>& temporary, const long low, const long high, const long p, const long dim) {
long i, j, k;
if (high > low) {
        // Avoid overflow when computing the median.
const long mid = low + ((high - low) >> 1);
        // Recursively subdivide the lower and upper halves of the array.
mergeSort(reference, temporary, low, mid, p, dim);
mergeSort(reference, temporary, mid + 1, high, p, dim);
        // Merge the results for this level of subdivision.
for (i = mid + 1; i > low; i--) {
temporary[i - 1] = reference[i - 1];
}
for (j = mid; j < high; j++) {
            temporary[mid + (high - j)] = reference[j + 1]; // Avoid address overflow.
}
for (k = low; k <= high; k++) {
if(superKeyCompare(temporary[i].coordenadas, temporary[j].coordenadas, p, dim) < 0) {
reference[k] = temporary[i++];
}else {
reference[k] = temporary[j--];
}
}
}
}
/*
* Check the validity of the merge sort and remove duplicates from a reference array.
*
* calling parameters:
*
* reference - a vector<NodeSKQ> that represents one of the reference arrays
* i - the leading dimension for the super key
* dim - the number of dimensions
*
* returns: the end index of the reference array following removal of duplicate elements
*/
long Build_cBiK::removeDuplicates(vector<NodeSKQ>& reference, const long i, const long dim) {
long end = 0;
for (long j = 1; j < reference.size(); j++) {
double compare = superKeyCompare(reference[j].coordenadas, reference[j - 1].coordenadas, i, dim);
if (compare < 0) {
cout << "merge sort failure: superKeyCompare(ref[" << j << "], ref["
<< j - 1 << "], (" << i << ") = " << compare << endl;
exit(1);
} else if (compare > 0) {
reference[++end] = reference[j];
}
}
return end;
}
/*
* This function builds a k-d tree by recursively partitioning the
* reference arrays and adding kdNodes to the tree. These arrays
* are permuted cyclically for successive levels of the tree in
* order that sorting occur on x, y, z, w...
*
* calling parameters:
*
* references - a vector< vector<NodeSKQ> > of pointers to each of the (x, y, z, w...) tuples
* temporary - a vector<NodeSKQ> that is used as a temporary array
* start - start element of the reference arrays
* end - end element of the reference arrays
* dim - the number of dimensions
* depth - the depth in the tree
*
* returns: a KdNode pointer to the root of the k-d tree
*/
void Build_cBiK::buildKdTree(vector< vector<NodeSKQ> >& references, vector<NodeSKQ>& temporary, const long start, const long end, const long dim, const long depth) {
// The axis permutes as x, y, z, w... and addresses the referenced data.
long axis = depth % dim;
if (end == start) {
        // Store the point's keywords; this is always a leaf
nodosKdTree[end] = references[0][end];
        // Copy the keywords according to their position in the future bit array
for(int i=0; i<nodosKdTree[end].palabras.size(); i++) {
vectorKeys.insert((end*numKeys)+nodosKdTree[end].palabras[i]);
}
//cout << "Start: " << start << endl;
} else if (end == start + 1) {
// Two references were passed to this function in sorted order, so store the start
// element at this level of the tree and store the end element as the > child.
nodosKdTree[start] = references[0][start];
nodosKdTree[end] = references[0][end];
        // Mark the internal nodes in the map
mapa[start] = 1;
        // Copy the keywords according to their position in the future bit array
for(int i=0; i<nodosKdTree[start].palabras.size(); i++) {
vectorKeys.insert((start*numKeys)+nodosKdTree[start].palabras[i]);
}
for(int i=0; i<nodosKdTree[end].palabras.size(); i++) {
vectorKeys.insert((end*numKeys)+nodosKdTree[end].palabras[i]);
}
//cout << "Start: " << start << " End: " << end << endl;
} else if (end == start + 2) {
// Three references were passed to this function in sorted order, so
// store the median element at this level of the tree, store the start
// element as the < child and store the end element as the > child.
nodosKdTree[start + 1] = references[0][start + 1];
nodosKdTree[start] = references[0][start];
nodosKdTree[end] = references[0][end];
        // Mark the internal nodes in the map
mapa[start + 1] = 1;
        // Copy the keywords according to their position in the future bit array
for(int i=0; i<nodosKdTree[(start + 1)].palabras.size(); i++) {
vectorKeys.insert(((start + 1)*numKeys)+nodosKdTree[(start + 1)].palabras[i]);
}
for(int i=0; i<nodosKdTree[start].palabras.size(); i++) {
vectorKeys.insert((start*numKeys)+nodosKdTree[start].palabras[i]);
}
for(int i=0; i<nodosKdTree[end].palabras.size(); i++) {
vectorKeys.insert((end*numKeys)+nodosKdTree[end].palabras[i]);
}
//cout << "Start: " << start << " Start + 1: " << (start+1) << " End: " << end << endl;
} else if (end > start + 2) {
// More than three references were passed to this function, so
// the median element of references[0] is chosen as the tuple about
// which the other reference arrays will be partitioned. Avoid
// overflow when computing the median.
const long median = start + ((end - start) / 2);
        // Store the median point of the array
nodosKdTree[median] = references[0][median];
        // Mark the internal nodes in the map
mapa[median] = 1;
        // Mark internal nodes that carry explicit summaries, offset by the total number of elements
mapa[median+nodosKdTree.size()] = 1;
        // Copy the keywords according to their position in the future bit array
for(int i=0; i<nodosKdTree[median].palabras.size(); i++) {
vectorKeys.insert((median*numKeys)+nodosKdTree[median].palabras[i]);
}
//cout << "Median: " << median << endl;
// Copy references[0] to the temporary array before partitioning.
for (long i = start; i <= end; i++) {
temporary[i] = references[0][i];
}
// Process each of the other reference arrays in a priori sorted order
// and partition it by comparing super keys. Store the result from
// references[i] in references[i-1], thus permuting the reference
// arrays. Skip the element of references[i] that that references
// a point that equals the point that is stored in the new k-d node.
long lower, upper, lowerSave, upperSave;
for (long i = 1; i < dim; i++) {
// Process one reference array. Compare once only.
lower = start - 1;
upper = median;
for (long j = start; j <= end; j++) {
double compare = superKeyCompare(references[i][j].coordenadas, nodosKdTree[median].coordenadas, axis, dim);
if (compare < 0) {
references[i - 1][++lower] = references[i][j];
} else if (compare > 0) {
references[i - 1][++upper] = references[i][j];
}
}
// Check the new indices for the reference array.
if (lower < start || lower >= median) {
cout << "incorrect range for lower at depth = " << depth << " : start = "
<< start << " lower = " << lower << " median = " << median << endl;
exit(1);
}
if (upper <= median || upper > end) {
cout << "incorrect range for upper at depth = " << depth << " : median = "
<< median << " upper = " << upper << " end = " << end << endl;
exit(1);
}
if (i > 1 && lower != lowerSave) {
cout << " lower = " << lower << " != lowerSave = " << lowerSave << endl;
exit(1);
}
if (i > 1 && upper != upperSave) {
cout << " upper = " << upper << " != upperSave = " << upperSave << endl;
exit(1);
}
lowerSave = lower;
upperSave = upper;
}
// Copy the temporary array to references[dim-1] to finish permutation.
for (long i = start; i <= end; i++) {
references[dim - 1][i] = temporary[i];
}
// Recursively build the < branch of the tree.
buildKdTree(references, temporary, start, lower, dim, depth + 1);
// Recursively build the > branch of the tree.
buildKdTree(references, temporary, median + 1, upper, dim, depth + 1);
} else if (end < start) {
// This is an illegal condition that should never occur, so test for it last.
cout << "error has occurred at depth = " << depth << " : end = " << end
<< " < start = " << start << endl;
exit(1);
}
}
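/*
 * getNewSummaryIndex maps the kd-tree array index of an internal node that
 * carries an explicit summary to the first bit of its summary block inside
 * sdVectorResume. Each such node owns 2*numKeys consecutive bits (left
 * summary followed by right summary); the position is obtained with rank
 * queries over `mapa`, whose second half flags the explicit-summary nodes.
 */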
long long int Build_cBiK::getNewSummaryIndex(int i) {
long long int nodosInternos = mapRank_1(mapa.size()*0.5);
long long int posResumen = mapRank_1(i+nodosKdTree.size());
return 2*numKeys*(posResumen-nodosInternos);
}
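/*
 * generateResume recursively computes the set of keyword ids present in the
 * subtree covering [start, end]. For every explicit internal node it also
 * records the keyword sets of its left and right subtrees into
 * vectorResumeFinal, at the absolute bit positions given by
 * getNewSummaryIndex(median), so they can later be loaded into the compact
 * summary bitmap.
 */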
set<int> Build_cBiK::generateResume(int start, int end) {
set<int> vectorRetorno;
if (end == start) {
        // Collect the point's keywords; this is always a leaf
for(int i=0; i<nodosKdTree[end].palabras.size(); i++) {
vectorRetorno.insert(nodosKdTree[end].palabras[i]);
}
} else if (end == start + 1) {
        // Parent: start
        // Right child: end
        // (the variable pos would hold the starting bit position of the corresponding summary)
for(int i=0; i<nodosKdTree[end].palabras.size(); i++) {
vectorRetorno.insert(nodosKdTree[end].palabras[i]);
}
} else if (end == start + 2) {
        // Parent: start + 1
        // Left child: start
        // Right child: end
for(int i=0; i<nodosKdTree[start].palabras.size(); i++) {
vectorRetorno.insert(nodosKdTree[start].palabras[i]);
}
for(int i=0; i<nodosKdTree[end].palabras.size(); i++) {
vectorRetorno.insert(nodosKdTree[end].palabras[i]);
}
} else if (end > start + 2) {
        // Parent: median
const long median = start + ((end - start) / 2);
long long int pos = getNewSummaryIndex(median);
// Recursively build the < branch of the tree.
set<int> resumenI = generateResume(start, median-1);
        // Copy the left child's keywords
int hijoI = (start+median-1) * 0.5;
for(int i=0; i<nodosKdTree[hijoI].palabras.size(); i++) {
resumenI.insert(nodosKdTree[hijoI].palabras[i]);
}
        // Add the positions of the left summary to the summary vector
for (set<int>::iterator it=resumenI.begin(); it!=resumenI.end(); ++it){
vectorRetorno.insert(*it);
vectorResumeFinal.insert(pos+*it);
}
resumenI.clear();
// Recursively build the > branch of the tree.
set<int> resumenD = generateResume(median + 1, end);
        // Copy the right child's keywords
int hijoD = (median + 1 + end) * 0.5;
for(int i=0; i<nodosKdTree[hijoD].palabras.size(); i++) {
resumenD.insert(nodosKdTree[hijoD].palabras[i]);
}
        // Add the positions of the right summary to the summary vector
for (set<int>::iterator it=resumenD.begin(); it!=resumenD.end(); ++it){
vectorRetorno.insert(*it);
vectorResumeFinal.insert(pos+numKeys+*it);
}
resumenD.clear();
} else if (end < start) {
// This is an illegal condition that should never occur, so test for it last.
cout << "error has occurred at : end = " << end << " < start = " << start << endl;
exit(1);
}
return vectorRetorno;
}
double Build_cBiK::searchEuclideanDistance(const double* p1, const double* p2) {
    // Computes the squared Euclidean distance for two dimensions
return (p2[0] - p1[0])*(p2[0] - p1[0]) + (p2[1] - p1[1])*(p2[1] - p1[1]);
}
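/*
 * checkKeywords returns true iff the object stored at array position `pos`
 * contains every keyword id in `query`, checked against the compact keyword
 * bitmap (numKeys bits per node).
 */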
bool Build_cBiK::checkKeywords(vector<int> query, int pos) {
bool resp = true;
int i=0;
long long int posInicial = pos*numKeys;
while(i<query.size() && resp){
if(sdVectorKeywords[(posInicial+query[i])] == 0) {
resp = false;
}
i++;
}
return resp;
}
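/*
 * checkSummaryLeft returns true iff every keyword in `query` may appear in
 * the left subtree of the node at `pos`. Implicit internal nodes (subtrees of
 * two or three elements) are checked directly against the left child's
 * keyword bits, explicit internal nodes use the left half of their summary
 * block, and leaves (which have no left subtree) always return false.
 */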
bool Build_cBiK::checkSummaryLeft(vector<int> query, int pos, int start, int end) {
bool resp = true;
if(mapa[pos] == 0) {
        // It is a leaf
resp = false;
}else {
        // It is an internal node; check whether it is a final (implicit) internal node
if (end == start) {
            // Always a leaf; it has no summary
resp = false;
} else if (end == start + 1) {
            // Internal node: start
            // Right child: end
            // There is no left summary
resp = false;
} else if (end == start + 2) {
            // Internal node: start + 1
            // Left child: start
            // Right child: end
            // The left summary corresponds to the left child's keywords
int i=0;
long long int posInicial = start*numKeys;
while(i<query.size() && resp){
if(sdVectorKeywords[(posInicial+query[i])] == 0) {
resp = false;
}
i++;
}
} else if (end > start + 2) {
            // Internal node: pos
            // The left summary corresponds to the explicit summary of pos
int i=0;
long long int posInicial = getNewSummaryIndex(pos);
while(i<query.size() && resp){
if(sdVectorResume[(posInicial+query[i])] == 0) {
resp = false;
}
i++;
}
} else if (end < start) {
// This is an illegal condition that should never occur, so test for it last.
cout << "error has occurred at : end = " << end << " < start = " << start << endl;
exit(1);
}
}
return resp;
}
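/*
 * checkSummaryRight is the mirror of checkSummaryLeft: it returns true iff
 * every keyword in `query` may appear in the right subtree of the node at
 * `pos`, using either the right child's keyword bits (implicit internal
 * nodes) or the right half of the node's summary block (explicit internal
 * nodes).
 */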
bool Build_cBiK::checkSummaryRight(vector<int> query, int pos, int start, int end) {
bool resp = true;
if(mapa[pos] == 0) {
        // It is a leaf
resp = false;
}else {
        // It is an internal node; check whether it is a final (implicit) internal node
if (end == start) {
            // Always a leaf; it has no summary
resp = false;
} else if (end == start + 1) {
            // Internal node: start
            // Right child: end
            // The right summary corresponds to the right child's keywords
int i=0;
long long int posInicial = end*numKeys;
while(i<query.size() && resp){
if(sdVectorKeywords[(posInicial+query[i])] == 0) {
resp = false;
}
i++;
}
} else if (end == start + 2) {
            // Internal node: start + 1
            // Left child: start
            // Right child: end
            // The right summary corresponds to the right child's keywords
int i=0;
long long int posInicial = end*numKeys;
while(i<query.size() && resp){
if(sdVectorKeywords[(posInicial+query[i])] == 0) {
resp = false;
}
i++;
}
} else if (end > start + 2) {
            // Internal node: pos
            // The right summary corresponds to the explicit summary of pos
int i=0;
long long int posInicial = getNewSummaryIndex(pos);
while(i<query.size() && resp){
if(sdVectorResume[(posInicial+numKeys+query[i])] == 0) {
resp = false;
}
i++;
}
} else if (end < start) {
// This is an illegal condition that should never occur, so test for it last.
cout << "error has occurred at : end = " << end << " < start = " << start << endl;
exit(1);
}
}
return resp;
}
double Build_cBiK::timeval_diff(struct timeval *a, struct timeval *b){
return
(double)(a->tv_sec + (double)a->tv_usec/1000000) -
(double)(b->tv_sec + (double)b->tv_usec/1000000);
}
void Build_cBiK::printMapa() {
//printTree();
cout << endl;
cout << "******************************* MAPA *******************************" << endl;
cout << "Largo mapa:" << mapa.size()*0.5 << endl;
cout<< "Mapa size original: " << size_in_bytes(mapa)<< endl;
//cout << mapa << endl;
cout << "***************************** KEYWORDS *****************************" << endl;
cout << "Largo:" << sdVectorKeywords.size() << endl;
cout<< "[keywords size]: " << size_in_bytes(sdVectorKeywords)<< endl;
/*for(int i=0; i<mapa.size()*0.5; i++) {
for(int j=0; j<numKeys; j++) {
cout << sdVectorKeywords[(i*numKeys)+j];
}
cout << endl;
}*/
//cout << sdVectorKeywords << endl;
cout << endl;
cout << "***************************** RESUMENES ****************************" << endl;
cout << "Largo:" << sdVectorResume.size() << endl;
cout<< "[Resume size]: " << size_in_bytes(sdVectorResume)<< endl;
/*long long int nodosInterno = mapRank_1(mapa.size()*0.5);
long long int nodosInternosResumen = mapRank_1(mapa.size());
for(int i=0; i<(nodosInternosResumen-nodosInterno); i++) {
for(int j=0; j<numKeys; j++) {
cout << sdVectorResume[(i*2*numKeys)+j];
}
cout << " - ";
for(int j=0; j<numKeys; j++) {
cout << sdVectorResume[(i*2*numKeys)+j+numKeys];
}
cout << endl;
}*/
//cout << sdVectorResume << endl;
cout << endl;
}
void Build_cBiK::printVector() {
/*cout << endl;
cout << "****************************** VECTOR ******************************" << endl;
for (int i = 0; i < nodosKdTree.size(); i++) {
cout << i << ".- ";
printTuple(nodosKdTree[i].coordenadas);
cout << " - ";
//Imprime las keys propias del punto
cout << *nodosKdTree[i].palabras << endl;
}
cout << "********************************************************************" << endl;
cout << endl;*/
}
void Build_cBiK::printTree() {
cout << endl;
cout << "************************** ARBOL KD-TREE ***************************" << endl;
printKdTree(0, (nodosKdTree.size() - 1), 0);
cout << "********************************************************************" << endl;
cout << endl;
}
void Build_cBiK::printKdTree(int start, int end, int depth) {
int mid = (start + end) / 2;
if (end > mid) {
printKdTree(mid + 1, end, depth + 1);
}
for (int i = 0; i < depth; i++) {
cout << " ";
}
cout << start << "," << end << " Pos:" << mid << " == ";
printTuple(nodosKdTree[mid].coordenadas);
cout << " == ";
int pos = numKeys*mid;
for(int i=0; i<numKeys; i++) {
if(sdVectorKeywords[i+pos] == 1) {
cout << "1";
}else {
cout << "0";
}
}
/*for(int i=0; i<numKeys ; i++) {
bool existe = false;
for(int j=0; j<nodosKdTree[mid].palabras.size(); j++) {
//cout << "Revisa: " << nodosKdTree[mid].palabras[j] << " - " << i << endl;
if(nodosKdTree[mid].palabras[j] == i) {
existe = true;
}
}
if(existe) {
cout << "1";
}else {
cout << "0";
}
}*/
cout << endl;
if (start < mid) {
printKdTree(start, mid - 1, depth + 1);
}
}
void Build_cBiK::printTuple(double* tuple) {
printf("%.16g, %.16g", tuple[0], tuple[1]);
//cout << "(" << tuple[0] << "," << tuple[1] << ")";
}
void Build_cBiK::printKeys(vector<bit_vector> arrBits) {
for(int i=0; i<arrBits.size(); i++) {
cout << "(" << arrBits[i] << ")";
}
}
void Build_cBiK::printKey(bit_vector arrBits) {
cout << "(" << arrBits << ")";
}
double Build_cBiK::rangeQuery(const double* query, const vector<std::string> &queryKey) {
//struct timeval iniTime2, endTime2;
//double secs;
    // Basic attributes for the search
int start = 0;
int end = nodosKdTree.size() - 1;
int depth = 0;
    // Indices of the keywords
vector<int> indexQueryKey;
string str;
    // Holds the indices of the points that fall inside the region
vector<int> result;
//gettimeofday(&iniTime2, NULL);
    // Look up the indices of the received keywords
for(int i=0; i<queryKey.size(); i++) {
//printTuple(query);
str = queryKey[i];
str = toLower(str);
indexQueryKey.push_back(hashMapKeys[str]);
//cout << str << " >> " << hashMapKeys[str] << endl;
}
    // Order the two corner points of the rectangle
double minPoint[2];
double maxPoint[2];
    // query[0] = latitude of p1
    // query[1] = longitude of p1
    // query[2] = latitude of p2
    // query[3] = longitude of p2
minPoint[0] = (query[0] < query[2]) ? query[0] : query[2];
maxPoint[0] = (query[0] > query[2]) ? query[0] : query[2];
minPoint[1] = (query[1] < query[3]) ? query[1] : query[3];
maxPoint[1] = (query[1] > query[3]) ? query[1] : query[3];
    // Perform the range search
rangeSearchQuery(minPoint, maxPoint, indexQueryKey, start, end, depth, result);
//gettimeofday(&endTime2, NULL);
/*if(result.empty()) {
cout << "\nNo hay puntos dentro del rango dado." << endl;
contFailRS++;
}else {
for(int i=0; i<result.size(); i++) {
printTuple(nodosKdTree[result[i]].coordenadas);
cout << endl;
}
}*/
//cout << endl;
//secs = timeval_diff(&endTime2, &iniTime2);
return 0;//secs * 1000.0;//
}
void Build_cBiK::rangeSearchQuery(const double* query1, const double* query2, vector<int> queryKey, int start, int end, int profundidad, vector<int> &result) {
    // Determine the x/y axis from the depth
    const int p = profundidad % 2; // because there are two dimensions
int mid = (start + end) * 0.5;
    // Check whether the point is contained in the rectangle
if( (query1[0] <= nodosKdTree[mid].coordenadas[0] && nodosKdTree[mid].coordenadas[0] <= query2[0]) &&
(query1[1] <= nodosKdTree[mid].coordenadas[1] && nodosKdTree[mid].coordenadas[1] <= query2[1])){
        // Check whether it has the query keywords
if(checkKeywords(queryKey, mid)) {
result.push_back(mid);
}
}
//cout << "Punto visitado: ";
//printTuple(nodosKdTree[mid].coordenadas);
//cout << endl;
    // If start != end this is an internal node
if(start != end) {
        // Make sure to get the correct smaller and larger values
double menor;
double mayor;
if(query1[p] > query2[p]) {
mayor = query1[p];
menor = query2[p];
}else {
mayor = query2[p];
menor = query1[p];
}
if(mayor <= nodosKdTree[mid].coordenadas[p]) {
            // Check whether the keywords may exist on the left
if(checkSummaryLeft(queryKey, mid, start, end)) {
rangeSearchQuery(query1, query2, queryKey, start, mid-1, profundidad + 1, result);
}
}else if(menor > nodosKdTree[mid].coordenadas[p]) {
            // Check whether the keywords may exist on the right
if(checkSummaryRight(queryKey, mid, start, end)) {
rangeSearchQuery(query1, query2, queryKey, mid+1, end, profundidad + 1, result);
}
}else {
            // Check both sides
if(checkSummaryLeft(queryKey, mid, start, end)) {
rangeSearchQuery(query1, query2, queryKey, start, mid-1, profundidad + 1, result);
}
if(checkSummaryRight(queryKey, mid, start, end)) {
rangeSearchQuery(query1, query2, queryKey, mid+1, end, profundidad + 1, result);
}
}
}
}
double Build_cBiK::findBKNN(const double* query, const vector<std::string> &queryKey, int k) {
//cout << "**************************** KNN KDTREE ****************************" << endl;
struct timeval iniTime2, endTime2;
    double secs = 0; // initialized: the timing block below is commented out, so avoid returning an uninitialized value
    // Basic attributes for the search
int start = 0;
int end = nodosKdTree.size() - 1;
int depth = 0;
string str;
int totalPuntos = nodosKdTree.size();
/*long long int posInicial = 187361*numKeys;
for(int i=0; i<numKeys; i++) {
if(sdVectorKeywords[posInicial+i] == 1){
cout << "indice: " << i << endl;
}
}*/
//cout << "ESPECIAL: " << mapa[187361] << endl;
    // Indices of the keywords
vector<int> indexQueryKey;
if (k < 1) {
cout << "k debe ser mayor o igual a 1" << endl;
}else if (k > totalPuntos) {
cout << "k debe ser menor o igual al total de puntos (" << totalPuntos << ")" << endl;
}else{
MaxHeap heap = MaxHeap(k);
gettimeofday(&iniTime2, NULL);
for(int i=0; i<queryKey.size(); i++) {
//printTuple(query);
str = queryKey[i];
str = toLower(str);
indexQueryKey.push_back(hashMapKeys[str]);
//cout << str << " >> " << hashMapKeys[str] << endl;
}
heap = searchBKNN(query, indexQueryKey, start, end, depth, heap, k);
/*gettimeofday(&endTime2, NULL);
secs = timeval_diff(&endTime2, &iniTime2);
//Muestra el tiempo por cada consulta
cout << secs * 1000.0 << endl;
//Se pide el arreglo (HEAP) para mostrar los puntos
double** elArreglo;
elArreglo = heap.getArray();
for (int i = 0; i < k; i++) {
if(elArreglo[i][1] == numeric_limits<double>::max()) {
cout << "\tK=" << (i+1) << " => No existe." << endl;
//contFailB++;
}else {
cout << "\tK=" << (i+1);
cout << " => Punto: ";
printTuple(nodosKdTree[(int)elArreglo[i][0]].coordenadas);
cout << " -- ";
//printKey(*nodosKdTree[(int)elArreglo[i][0]].palabras);
cout << " Distancia: " << elArreglo[i][1];
cout << endl;
}
}*/
}
//cout << "********************************************************************" << endl;
//cout << endl;
    return secs * 1000.0; // return milliseconds
}
MaxHeap Build_cBiK::searchBKNN(const double* query, vector<int> queryKey, int start, int end, const int profundidad, MaxHeap &heap, int totalK) {
    // Determine the x/y axis from the depth
    const int p = profundidad % 2; // because there are two dimensions
int mid = (start + end) * 0.5;
//cout << "Punto visitado: ";
//printTuple(nodosKdTree[mid].coordenadas);
//cout << endl;
    // It is an internal node
if(start != end) {
if(query[p] <= nodosKdTree[mid].coordenadas[p]) {
            // Search the left side if the keywords may exist there
//cout << "START: " << start << " == MID: " << mid << " == END: " << end << " == CHECK LEFT: " << checkSummaryLeft(queryKey, mid, start, end) << endl;
if(checkSummaryLeft(queryKey, mid, start, end)) {
                // Update the heap
searchBKNN(query, queryKey, start, mid-1, profundidad + 1, heap, totalK);
                // Check whether the search sphere intersects the other subspace
if ((abs(nodosKdTree[mid].coordenadas[p] - query[p]) < heap.getDistanceMax()) || (heap.getCantidad() < totalK)) {
                    // It intersects, so look for keyword matches on the right
if(checkSummaryRight(queryKey, mid, start, end)) {
searchBKNN(query, queryKey, mid+1, end, profundidad + 1, heap, totalK);
}
}
}else {
//cout << "START: " << start << " == MID: " << mid << " == END: " << end << " == CHECK RIGHT: " << checkSummaryRight(queryKey, mid, start, end) << endl;
if(checkSummaryRight(queryKey, mid, start, end)) {
                    // Update the heap
searchBKNN(query, queryKey, mid+1, end, profundidad + 1, heap, totalK);
}
}
}
if(query[p] >= nodosKdTree[mid].coordenadas[p]) {
            // Search the right side if the keywords may exist there
//cout << "START: " << start << " == MID: " << mid << " == END: " << end << " == CHECK RIGHT: " << checkSummaryRight(queryKey, mid, start, end) << endl;
if(checkSummaryRight(queryKey, mid, start, end)) {
                // Update the heap
searchBKNN(query, queryKey, mid+1, end, profundidad + 1, heap, totalK);
                // Check whether the search sphere intersects the other subspace
if ((abs(nodosKdTree[mid].coordenadas[p] - query[p]) < heap.getDistanceMax()) || (heap.getCantidad() < totalK)) {
                    // It intersects, so look for keyword matches on the left
if(checkSummaryLeft(queryKey, mid, start, end)) {
                        // Update the heap
searchBKNN(query, queryKey, start, mid-1, profundidad + 1, heap, totalK);
}
}
}else {
//cout << "START: " << start << " == MID: " << mid << " == END: " << end << " == CHECK LEFT: " << checkSummaryLeft(queryKey, mid, start, end) << endl;
if(checkSummaryLeft(queryKey, mid, start, end)) {
                    // Update the heap
searchBKNN(query, queryKey, start, mid-1, profundidad + 1, heap, totalK);
}
}
}
}
    // Check the point itself
if ((searchEuclideanDistance(query, nodosKdTree[mid].coordenadas) < heap.getDistanceMax()) || (heap.getCantidad() < totalK)) {
if(checkKeywords(queryKey, mid)) {
heap.insert(mid, searchEuclideanDistance(query, nodosKdTree[mid].coordenadas));
}
}
return heap;
}
double Build_cBiK::findRKNN(const double* query, const vector<std::string> &queryKey, int k, double alpha) {
//cout << "**************************** KNN KDTREE ****************************" << endl;
//struct timeval iniTime2, endTime2;
    double secs = 0; // initialized: the timing block below is commented out, so avoid returning an uninitialized value
    // Basic attributes for the search
int start = 0;
int end = nodosKdTree.size() - 1;
string str;
int totalPuntos = nodosKdTree.size();
    // Indices of the keywords
vector<int> indexQueryKey;
    // Validate k
if (k < 1) {
cout << "k debe ser mayor o igual a 1" << endl;
}else if (k > totalPuntos) {
cout << "k debe ser menor o igual al total de puntos (" << totalPuntos << ")" << endl;
}else{
        // Validate alpha
if(alpha < 0) {
cout << "El valor de alfa no puede ser negativo" << endl;
}else if(alpha > 1) {
cout << "El valor de alfa no puede ser mayor a 1" << endl;
}else {
MinHeap heap = MinHeap(k);
//gettimeofday(&iniTime2, NULL);
for(int i=0; i<queryKey.size(); i++) {
//printTuple(query);
str = queryKey[i];
str = toLower(str);
indexQueryKey.push_back(hashMapKeys[str]);
//cout << str << " >> " << hashMapKeys[str] << endl;
}
heap = searchRKNN(query, indexQueryKey, start, end, heap, k, alpha);
//gettimeofday(&endTime2, NULL);
//secs = timeval_diff(&endTime2, &iniTime2);
//Muestra el tiempo por cada consulta
/*cout << secs * 1000.0 << endl;
//Se pide el arreglo (HEAP) para mostrar los puntos
double** elArreglo;
elArreglo = heap.getArray();
for (int i = 0; i < k; i++) {
if(elArreglo[i][1] == numeric_limits<double>::min()) {
cout << "\tK=" << (i+1) << " => No existe." << endl;
//contFailR++;
}else {
cout << "\tK=" << (i+1);
cout << " => Punto: ";
printTuple(nodosKdTree[(int)elArreglo[i][0]].coordenadas);
cout << " -- ";
//printKey(*nodosKdTree[(int)elArreglo[i][0]].palabras);
cout << " Score: " << elArreglo[i][1];
cout << endl;
}
}*/
}
}
//cout << endl;
    return secs * 1000.0; // return milliseconds
}
MinHeap Build_cBiK::searchRKNN(const double* query, vector<int> queryKey, int start, int end, MinHeap &heap, int totalK, double alpha) {
int mid = (start + end) * 0.5;
    // Compute the ranking score of the point
double spatialScore = getSpatialScore(query, mid);
double textualScore = getKeywordsTextualScore(queryKey, mid);
double totalScore = getTotalScore(spatialScore, textualScore, alpha);
//cout << "MID: " << mid << " ++ Punto visitado: ";
//printTuple(nodosKdTree[mid].coordenadas);
//cout << " >> SPATIALSCORE: " << spatialScore << " == TEXTUAL SCORE: " << textualScore << " == TOTAL SCORE: " << totalScore;
//cout << endl;
    // It is an internal node
if(start != end) {
double *summaryTextualScores = getSummariesTextualScores(queryKey, mid, start, end);
double scoreLeft = getTotalScore(spatialScore, summaryTextualScores[0], alpha);
        double scoreRight = getTotalScore(spatialScore, summaryTextualScores[1], alpha);
        delete[] summaryTextualScores; // getSummariesTextualScores allocates with new[]; free it to avoid leaking on every visited node
//cout << "SCORE LEFT: " << scoreLeft << " == SCORE RIGHT: " << scoreRight << endl;
        if(scoreLeft > scoreRight) {// The score is higher on the left
            // Since it is larger, keywords must exist on that side; no need to also test scoreLeft > 0
            // Search the left side
searchRKNN(query, queryKey, start, mid-1, heap, totalK, alpha);
            // If the k elements have not been gathered yet, search the right side too
//cout << "Cantidad >> " << heap.getCantidad() << endl;
if((scoreRight > heap.getMinScore()) || (scoreRight > 0 && (heap.getCantidad() < totalK))) {
searchRKNN(query, queryKey, mid+1, end, heap, totalK, alpha);
}
        }else if(scoreRight > scoreLeft){// The score is strictly higher on the right
            // Search the right side
searchRKNN(query, queryKey, mid+1, end, heap, totalK, alpha);
            // If the k elements have not been gathered yet, search the left side too
//cout << "Cantidad >> " << heap.getCantidad() << endl;
if((scoreLeft > heap.getMinScore()) || (scoreLeft > 0 && (heap.getCantidad() < totalK))) {
searchRKNN(query, queryKey, start, mid-1, heap, totalK, alpha);
}
}else {
            // The scores are equal, so either side could be chosen; the right side is taken by default
            // Since left and right are equal, only make sure they are non-zero
            // If they are zero there is no need to search either side
if(scoreRight != 0) {
                // Search the right side
searchRKNN(query, queryKey, mid+1, end, heap, totalK, alpha);
                // Since they are equal, the other side must be searched as well because it has the same score
searchRKNN(query, queryKey, start, mid-1, heap, totalK, alpha);
}
}
}
if ((totalScore > heap.getMinScore()) || (heap.getCantidad() < totalK)) {
        // A non-zero score means at least one of the query terms is present
if(totalScore != 0) {
heap.insert(mid, totalScore);
}
}
return heap;
}
double Build_cBiK::getTotalScore(double &spatialScore, double &textualScore, double &alpha) {
    // If none of the keywords is present the score is zero; the spatial score is ignored
return textualScore == 0 ? 0 : ((alpha*spatialScore)+(1-alpha)*(textualScore));
}
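/*
 * Worked example for getTotalScore (hypothetical values, for illustration
 * only): with alpha = 0.3, spatialScore = 0.9 and textualScore = 0.5 the
 * combined score is 0.3*0.9 + 0.7*0.5 = 0.62; if textualScore == 0 the node
 * scores 0 regardless of how close it is.
 */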
double Build_cBiK::getSpatialScore(const double* query, int pos) {
//cout << setprecision(16) << "SPATIAL SCOOOOOOOOOOOOOOOOOOORE: " << (1 - (searchEuclideanDistance(query, nodosKdTree[pos].coordenadas)/dmax)) << endl;
return (1 - (searchEuclideanDistance(query, nodosKdTree[pos].coordenadas)/dmax));
}
double Build_cBiK::getKeywordsTextualScore(vector<int> queryKey, int pos) {
long long int posInicial = pos*numKeys;
    // Textual relevance
double scoreText = 0;
int i=0;
while(i<queryKey.size()){
if(sdVectorKeywords[(posInicial+queryKey[i])] == 1) {
scoreText += (((double) 1)/((double) queryKey.size()));
}
i++;
}
return scoreText;
}
double* Build_cBiK::getSummariesTextualScores(vector<int> queryKey, int pos, int start, int end) {
    // Array layout: left score and right score, respectively
double* textualScores = new double[2];
textualScores[0] = 0;
textualScores[1] = 0;
    // It is an internal node
if(mapa[pos] == 1) {
        if (end == start + 1) {// Implicit internal node; check the keyword bitmap directly
            // Internal node: start
            // Right child: end
            // The right summary corresponds to the right child's keywords; there is no left summary
long long int posInicial = end*numKeys;
int i=0;
while(i<queryKey.size()){
                // Right summary
if(sdVectorKeywords[(posInicial+queryKey[i])] == 1) {
textualScores[1] += (((double) 1)/((double) queryKey.size()));
}
i++;
}
        } else if (end == start + 2) {// Implicit internal node; check the keyword bitmap directly
            // Internal node: start + 1
            // Left child: start
            // Right child: end
            // The left summary corresponds to the left child's keywords
long long int posInicialHI = start*numKeys;
            // The right summary corresponds to the right child's keywords
long long int posInicialHD = end*numKeys;
int i=0;
while(i<queryKey.size()){
                // Left summary
if(sdVectorKeywords[(posInicialHI+queryKey[i])] == 1) {
textualScores[0] += (((double) 1)/((double) queryKey.size()));
}
                // Right summary
if(sdVectorKeywords[(posInicialHD+queryKey[i])] == 1) {
textualScores[1] += (((double) 1)/((double) queryKey.size()));
}
i++;
}
        }else if(end > start + 2) {// Explicit internal node
long long int posInicial = getNewSummaryIndex(pos);
int i=0;
while(i<queryKey.size()){
                // Left summary
if(sdVectorResume[(posInicial+queryKey[i])] == 1) {
textualScores[0] += (((double) 1)/((double) queryKey.size()));
}
                // Right summary
if(sdVectorResume[(posInicial+numKeys+queryKey[i])] == 1) {
textualScores[1] += (((double) 1)/((double) queryKey.size()));
}
i++;
}
}
}
return textualScores;
} | c8cb12434edcb2a5671a37ed555a41678c63103e | [
"Markdown",
"C++"
] | 8 | C++ | csanjuanc/cBiK | d7d4c3160b1184dc7c32bccebd2e4ed47e80e54c | 33fa79bcfdcd2ebc4d3f341ff2754e73c1f6b7f8 | |
refs/heads/master | <file_sep>var z = 9;
var myLL = L.latLng(43.4459, 2.8606);
function init() {
// create a map in the "map" div, set the view to a given place and zoom
var map = L.map('map', {
center: myLL,
zoom: z
});
//
// Map Layers
//
var esriLayer = L.tileLayer('http://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
attribution: 'Tiles © Esri — Source: Esri, i-cubed, USDA, '+
'USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, '+
'and the GIS User Community'
}).addTo(map);
var stamenLayer = L.tileLayer('http://{s}.tile.stamen.com/toner-lite/{z}/{x}/{y}.png', {
attribution: 'Map tiles by <a href="http://stamen.com">Stamen Design</a>, <a href="http://creativecommons.org/licenses/by/3.0">CC BY 3.0</a> — Map data © <a href="http://openstreetmap.org">OpenStreetMap</a> contributors, <a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>',
subdomains: 'abcd',
minZoom: 0,
maxZoom: 20
});
var osmLayer = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
attribution: 'OpenStreetMap'
});
var mapqLayer = L.tileLayer('http://{s}.mqcdn.com/tiles/1.0.0/osm/{z}/{x}/{y}.png', {
attribution: 'MapQuest OpenStreetMap',
subdomains: ['otile1', 'otile2', 'otile3', 'otile4']
});
//
// POIS Overlays
//
var overpassDynLayer = new OverpassDynLayer(map).getLayer();
overpassDynLayer.addTo(map);
var t4t35Layer = new T4T35Layer(map).getLayer();
t4t35Layer.addTo(map);
//
// Map Controls and Layers
//
var baseLayers = {
"Satellite": esriLayer,
"OSM": osmLayer,
"MapBox": mapqLayer,
"Basic": stamenLayer,
};
var overLays = {
"Pierres": overpassDynLayer,
"T4T35 Tarn": t4t35Layer,
};
// layers switcher
L.control.layers(
baseLayers,
overLays, {
collapsed: false,
}
).setPosition('topright').addTo(map);
// scale at bottom left
L.control.scale().addTo(map);
// rewrite url to show lat/lon/zoom
// (uses leaflet-hash plugin as submodule)
var hash = new L.Hash(map);
// search field to find place
// (use leaflet-geocoding plugin as submodule)
new L.Control.GeoSearch({
provider: new L.GeoSearch.Provider.OpenStreetMap(),
zoomLevel: 15,
}).addTo(map);
}
init();
<file_sep># pierres
Display dolmens, various standing stones, and other megaliths
## Install
This project has leaflet dependencies included as git
submodules.
The right way to install would be
```bash
git clone [email protected]:RichardHitier/pierres.git
cd pierres/
git submodule init
git submodule update
```
## DATA Sources
* OSM historic=archeological_site nodes
* overpass api
## Hints
* leaflet-layer-overpass plugin
## Features
## Next
* look for url key for link in info
* allow type filtering (display dolmen only for ex)
* add a disable clustering button
* anchor pin before zooming out or openstreetmap link
* import wikipedia poi's from Megalithic portal
* add T4T35 layer
* insert OSM editing poi link
* and look for OSM fixing tools (bad site_type, missing names, etc ...) ?
<file_sep>function OverpassDynLayer(map) {
var jsonMinZoom = 10;
var clusterMinZoom = 12;
var loader = L.DomUtil.get('loader');
function overpassPopup(data, marker) {
var divPopup = "";
if (data.tags.name) divPopup += "<b>name: </b>" + data.tags.name + "<br/>";
if (data.tags.site_type) divPopup += "<b>site_type: </b>" + data.tags.site_type + "<br/>";
if (data.tags.megalith_type) divPopup += "<b>megalith_type: </b>" + data.tags.megalith_type + "<br/>";
if (data.tags.source) divPopup += "<b>source: </b>" + toHref(data.tags.source) + "<br/>";
return divPopup;
}
function jsonLoading(e) {
// show loader
loader.style.display = 'block';
// Disable drag and zoom handlers.
map.dragging.disable();
map.touchZoom.disable();
map.doubleClickZoom.disable();
map.scrollWheelZoom.disable();
//
}
function jsonLoaded(e) {
loader.style.display = 'none';
// Enable drag and zoom handlers.
map.dragging.enable();
map.touchZoom.enable();
map.doubleClickZoom.enable();
map.scrollWheelZoom.enable();
}
function toHref(ref) {
var href = ref;
if (ref.indexOf("http") > -1) href = "<a href=\"" + ref + "\">lien</a>";
return href;
}
function overpassIcon(data, title) {
var imageBase = "";
switch (data.tags.site_type) {
case 'dolmen':
imageBase = 'dolmen';
break;
case "megalih":
case "meglith":
case 'megalith':
imageBase = 'menhir';
switch (data.tags.megalith_type) {
case 'dolmen':
case 'grosssteingrab':
imageBase = 'dolmen';
break;
case 'menhir':
imageBase = 'menhir'
break;
case 'alignment':
imageBase = 'alignment'
break;
case 'stone_circle':
imageBase = 'circle'
break;
case 'passage_grave':
case 'chamber':
default:
imageBase = 'ruine'
}
break;
case 'fortification':
imageBase = 'fortifications';
break;
case 'tumulus':
imageBase = 'tumulus';
break;
default:
imageBase = 'ruine';
break;
/*
"site_type": "petroglyph",
"site_type": "quarry"
"site_type": "villa",
*/
}
if (data.tags.source && data.tags.source.indexOf("t4t35") > -1) imageBase += '_red';
var iconUrl = 'images/' + imageBase + '.png';
return new L.Icon({
iconUrl: iconUrl,
iconSize: new L.Point(32, 37),
iconAnchor: new L.Point(18, 37),
popupAnchor: new L.Point(0, -37)
});
}
this._clusterLayer = new L.MarkerClusterGroup({
disableClusteringAtZoom: clusterMinZoom,
});
var jsonLayer = L.layerJSON({
url: 'http://overpass-api.de/api/interpreter?data=[out:json];node({lat1},{lon1},{lat2},{lon2})[historic=archaeological_site];out;',
propertyItems: 'elements',
propertyTitle: 'tags.name',
propertyLoc: ['lat', 'lon'],
buildIcon: overpassIcon,
buildPopup: overpassPopup,
minZoom: jsonMinZoom,
layerTarget: this._clusterLayer,
}).on('dataloading', jsonLoading)
.on('dataloaded', jsonLoaded).addTo(map);
this.getLayer = function (){
return this._clusterLayer;
}
}
| 0ee7177bed63bad1022ac2de9c7e6aeaf7b922ca | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | RichardHitier/pierres | b7175c27b51b3a90da812e2b9bd98d1a54d1691a | 6952c5cf99e8fadb8794d28143b656dc4647ce3a | |
refs/heads/master | <repo_name>wgtdkp/nvds<file_sep>/test/test_boost_asio.cc
#include <bits/stdc++.h>
#include <boost/asio.hpp>
using namespace boost::asio;
int main() {
io_service io_service;
ip::tcp::resolver resolver(io_service);
ip::tcp::resolver::query query("www.wgtdkp.com", "http");
auto iter = resolver.resolve(query);
decltype(iter) end;
while (iter != end) {
ip::tcp::endpoint ep = *iter++;
std::cout << ep << std::endl;
}
return 0;
}
<file_sep>/src/infiniband.h
#ifndef _NVDS_INFINIBAND_H_
#define _NVDS_INFINIBAND_H_
#include "common.h"
#include "spinlock.h"
#include "transport.h"
#include <cstring>
#include <mutex>
#include <infiniband/verbs.h>
#include <netinet/ip.h>
namespace nvds {
struct Request;
struct Response;
// Derived from RAMCloud Infiniband.h
class Infiniband {
public:
static const uint16_t kPort = 1;
static const uint32_t kMaxSendSge = 12;
static const uint32_t kMaxRecvSge = 12;
static const uint32_t kMaxInlineData = 128;
Infiniband(uint32_t registered_mem_size=1024);
~Infiniband();
// This class includes informations for constructing a connection
struct QueuePairInfo {
uint16_t lid; // Local id
uint32_t qpn; // Queue pair number
uint32_t psn; // Packet sequence number
uint32_t rkey; // Memory region key
uint64_t vaddr; // Virtual address of memory regions
// DEBUG
void Print() const;
};
struct QueuePair {
static const uint32_t kDefaultPsn = 33;
Infiniband& ib;
ibv_qp_type type;
ibv_pd* pd;
ibv_srq* srq;
ibv_qp* qp;
ibv_cq* scq;
ibv_cq* rcq;
uint32_t psn;
QueuePair(Infiniband& ib, ibv_qp_type type,
uint32_t max_send, uint32_t max_recv);
~QueuePair();
DISALLOW_COPY_AND_ASSIGN(QueuePair);
uint32_t GetLocalQPNum() const { return qp->qp_num; }
uint32_t GetPeerQPNum() const;
int GetState() const;
void Activate();
void SetStateRTR(const QueuePairInfo& peer_info);
void SetStateRTS(const QueuePairInfo& peer_info);
};
struct Address {
int32_t ib_port;
uint16_t lid;
uint32_t qpn;
std::string ToString() const;
bool operator==(const Address& other) const {
return ib_port == other.ib_port &&
lid == other.lid &&
qpn == other.qpn;
}
bool operator!=(const Address& other) const {
return !(*this == other);
}
};
struct Buffer {
Buffer* next;
char* buf;
uint32_t size; // Buffer size
uint32_t msg_len; // message length
ibv_mr* mr; // IBV memory region
Address peer_addr; // Peer address of this request
bool is_recv; // Is this a recv buffer
Buffer(char* b, uint32_t size, ibv_mr* mr, bool is_recv=false)
: next(nullptr), buf(b), size(size), mr(mr), is_recv(is_recv) {}
Buffer() : next(nullptr), buf(nullptr), size(0), msg_len(0),
mr(nullptr), is_recv(false) {}
DISALLOW_COPY_AND_ASSIGN(Buffer);
Request* MakeRequest() {
return reinterpret_cast<Request*>(buf + kIBUDPadding);
}
Response* MakeResponse() {
// TODO(wgtdkp): are you sure ?
return reinterpret_cast<Response*>(buf + kIBUDPadding);
}
};
// A registered buffer pool
class RegisteredBuffers {
public:
RegisteredBuffers(ibv_pd* pd, uint32_t buf_size,
uint32_t buf_num, bool is_recv=false);
~RegisteredBuffers();
DISALLOW_COPY_AND_ASSIGN(RegisteredBuffers);
// Used as buffer pool
Buffer* Alloc() {
std::lock_guard<Spinlock> _(spinlock_);
auto ret = root_;
if (root_ != nullptr) {
root_ = root_->next;
}
return ret;
}
void Free(Buffer* b) {
std::lock_guard<Spinlock> _(spinlock_);
b->next = root_;
root_ = b;
}
const Buffer* root() const { return root_; }
private:
uint32_t buf_size_;
uint32_t buf_num_;
void* ptr_;
Buffer* bufs_;
Buffer* root_;
Spinlock spinlock_;
};
uint16_t GetLid(uint16_t port);
Buffer* TryReceive(QueuePair* qp) { return PollCQ(qp->rcq, 1); }
Buffer* TrySend(QueuePair* qp) { return PollCQ(qp->scq, 1); }
Buffer* PollCQ(ibv_cq* cq, int num_entries);
Buffer* Receive(QueuePair* qp);
void PostReceive(QueuePair* qp, Buffer* b);
void PostSend(QueuePair* qp, Buffer* b, uint32_t len,
const Address* peer_addr);
void PostSendAndWait(QueuePair* qp, Buffer* b, uint32_t len,
const Address* peer_addr);
ibv_ah* GetAddrHandler(const Address& addr);
void PostWrite(QueuePair& qp, QueuePairInfo& peer_info);
void PostWriteAndWait(QueuePair& qp, QueuePairInfo& peer_info);
ibv_context* ctx() { return ctx_; }
ibv_pd* pd() { return pd_; }
ibv_mr* mr() { return mr_; }
char* raw_mem() { return raw_mem_; }
uint32_t raw_mem_size() { return raw_mem_size_; }
private:
ibv_context* ctx_;
ibv_pd* pd_;
ibv_mr* mr_;
char* raw_mem_;
uint32_t raw_mem_size_;
std::array<ibv_ah*, UINT16_MAX + 1> addr_handlers_;
};
} // namespace nvds
#endif // _NVDS_INFINIBAND_H_
<file_sep>/src/tablet.cc
#include "tablet.h"
#include "index.h"
#include "request.h"
#include "status.h"
#define OFFSETOF_NVMOBJECT(obj, member) \
offsetof(NVMObject, member) + obj
namespace nvds {
Tablet::Tablet(const IndexManager& index_manager,
NVMPtr<NVMTablet> nvm_tablet, bool is_backup)
: index_manager_(index_manager),
nvm_tablet_(nvm_tablet), allocator_(&nvm_tablet->data) {
info_.is_backup = is_backup;
// Memory region
// FIXME(wgtdkp): how to simulate latency of RDMA read/write to NVM?
mr_ = ibv_reg_mr(ib_.pd(), nvm_tablet_.ptr(), kNVMTabletSize,
IBV_ACCESS_LOCAL_WRITE | IBV_ACCESS_REMOTE_WRITE);
assert(mr_ != nullptr);
for (size_t i = 0; i < kNumReplicas; ++i) {
qps_[i] = new Infiniband::QueuePair(ib_, IBV_QPT_RC,
kMaxIBQueueDepth, kMaxIBQueueDepth);
//qps_[i]->Plumb();
info_.qpis[i] = {
ib_.GetLid(Infiniband::kPort),
qps_[i]->GetLocalQPNum(),
Infiniband::QueuePair::kDefaultPsn,
mr_->rkey,
reinterpret_cast<uint64_t>(nvm_tablet_.ptr())
};
}
}
Tablet::~Tablet() {
for (ssize_t i = kNumReplicas - 1; i >= 0; --i) {
delete qps_[i];
}
int err = ibv_dereg_mr(mr_);
assert(err == 0);
}
Status Tablet::Put(const Request* r, ModificationList& modifications) {
allocator_.set_modifications(&modifications);
assert(r->type == Request::Type::PUT);
auto idx = r->key_hash % kHashTableSize;
uint32_t slot = offsetof(NVMTablet, hash_table) + sizeof(uint32_t) * idx;
auto head = allocator_.Read<uint32_t>(slot);
auto q = slot, p = head;
while (p) {
if (r->key_len != allocator_.Read<uint16_t>(OFFSETOF_NVMOBJECT(p, key_len)) ||
allocator_.Memcmp(OFFSETOF_NVMOBJECT(p, data), r->Key(), r->key_len)) {
q = p;
p = allocator_.Read<uint32_t>(OFFSETOF_NVMOBJECT(p, next));
continue;
}
// There is already the same key, overwrite it.
if (r->val_len < allocator_.Read<uint16_t>(OFFSETOF_NVMOBJECT(p, val_len))) {
// The new value is shorter than the older, store data at its original place.
allocator_.Write(OFFSETOF_NVMOBJECT(p, val_len), r->val_len);
allocator_.Memcpy(OFFSETOF_NVMOBJECT(p, data) + r->key_len, r->Val(), r->val_len);
} else {
auto next = allocator_.Read<uint32_t>(OFFSETOF_NVMOBJECT(p, next));
allocator_.Free(p);
auto size = sizeof(NVMObject) + r->key_len + r->val_len;
p = allocator_.Alloc(size);
// TODO(wgtdkp): handle the situation: `no space`.
assert(p != 0);
allocator_.Write<NVMObject>(p, {next, r->key_len, r->val_len, r->key_hash});
allocator_.Memcpy(OFFSETOF_NVMOBJECT(p, data), r->data, r->key_len + r->val_len);
allocator_.Write(OFFSETOF_NVMOBJECT(q, next), p);
}
return Status::OK;
}
auto size = sizeof(NVMObject) + r->key_len + r->val_len;
p = allocator_.Alloc(size);
// TODO(wgtdkp): handle the situation: `no space`.
assert(p != 0);
// TODO(wgtdkp): use single `memcpy`
allocator_.Write<NVMObject>(p, {head, r->key_len, r->val_len, r->key_hash});
allocator_.Memcpy(OFFSETOF_NVMOBJECT(p, data), r->data, r->key_len + r->val_len);
// Insert the new item to head of the bucket list
allocator_.Write(slot, p);
return Status::OK;
}
Status Tablet::Add(const Request* r, ModificationList& modifications) {
allocator_.set_modifications(&modifications);
assert(r->type == Request::Type::ADD);
auto idx = r->key_hash % kHashTableSize;
uint32_t slot = offsetof(NVMTablet, hash_table) + sizeof(uint32_t) * idx;
auto head = allocator_.Read<uint32_t>(slot);
auto p = head;
while (p) {
if (r->key_len != allocator_.Read<uint16_t>(OFFSETOF_NVMOBJECT(p, key_len)) ||
allocator_.Memcmp(OFFSETOF_NVMOBJECT(p, data), r->Key(), r->key_len)) {
p = allocator_.Read<uint32_t>(OFFSETOF_NVMOBJECT(p, next));
continue;
}
// There is already the same key, return Status::ERROR.
return Status::ERROR;
}
auto size = sizeof(NVMObject) + r->key_len + r->val_len;
p = allocator_.Alloc(size);
// TODO(wgtdkp): handle the situation: `no space`.
assert(p != 0);
// TODO(wgtdkp): use single `memcpy`
allocator_.Write<NVMObject>(p, {head, r->key_len, r->val_len, r->key_hash});
allocator_.Memcpy(OFFSETOF_NVMOBJECT(p, data), r->data, r->key_len + r->val_len);
// Insert the new item to head of the bucket list
allocator_.Write(slot, p);
return Status::OK;
}
Status Tablet::Get(Response* resp, const Request* r,
ModificationList& modifications) {
allocator_.set_modifications(&modifications);
assert(r->type == Request::Type::GET);
auto idx = r->key_hash % kHashTableSize;
auto p = offsetof(NVMTablet, hash_table) + sizeof(uint32_t) * idx;
p = allocator_.Read<uint32_t>(p);
while (p) {
// TODO(wgtdkp): use a different hash to speedup searching
if (r->key_len == allocator_.Read<uint16_t>(OFFSETOF_NVMOBJECT(p, key_len)) &&
allocator_.Memcmp(OFFSETOF_NVMOBJECT(p, data), r->Key(), r->key_len) == 0) {
auto len = allocator_.Read<uint16_t>(OFFSETOF_NVMOBJECT(p, val_len));
allocator_.Memcpy(resp->val, OFFSETOF_NVMOBJECT(p, data) + r->key_len, len);
resp->val_len = len;
return Status::OK;
}
p = allocator_.Read<uint32_t>(OFFSETOF_NVMOBJECT(p, next));
}
return Status::ERROR;
}
Status Tablet::Del(const Request* r, ModificationList& modifications) {
allocator_.set_modifications(&modifications);
assert(r->type == Request::Type::DEL);
auto idx = r->key_hash % kHashTableSize;
auto q = offsetof(NVMTablet, hash_table) + sizeof(uint32_t) * idx;
auto p = allocator_.Read<uint32_t>(q);
  while (p) {
    if (r->key_len == allocator_.Read<uint16_t>(OFFSETOF_NVMOBJECT(p, key_len)) &&
        allocator_.Memcmp(OFFSETOF_NVMOBJECT(p, data), r->Key(), r->key_len) == 0) {
      auto next = allocator_.Read<uint32_t>(OFFSETOF_NVMOBJECT(p, next));
      allocator_.Write(OFFSETOF_NVMOBJECT(q, next), next);
      allocator_.Free(p);
      return Status::OK;
    }
    // Advance along the hash chain; without this the loop never terminates
    // when the bucket's first object is not the one being deleted.
    q = p;
    p = allocator_.Read<uint32_t>(OFFSETOF_NVMOBJECT(p, next));
  }
return Status::ERROR;
}
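// SettingupQPConnect wires this tablet's queue pairs to its replication
// peers: a backup tablet moves its single QP to RTR against the matching QP
// slot on its master, while a master moves each of its per-replica QPs to RTS
// against the first QP of the corresponding backup.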
void Tablet::SettingupQPConnect(TabletId id, const IndexManager& index_manager) {
info_ = index_manager.GetTablet(id);
if (info_.is_backup) {
auto master_info = index_manager.GetTablet(info_.master);
assert(!master_info.is_backup);
for (uint32_t i = 0; i < kNumReplicas; ++i) {
if (master_info.backups[i] == id) {
qps_[0]->SetStateRTR(master_info.qpis[i]);
break;
}
assert(i < kNumReplicas - 1);
}
} else {
for (uint32_t i = 0; i < kNumReplicas; ++i) {
auto backup_id = info_.backups[i];
auto backup_info = index_manager.GetTablet(backup_id);
assert(backup_info.is_backup);
qps_[i]->SetStateRTS(backup_info.qpis[0]);
}
}
}
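// Sync replicates the pending modifications of this master tablet to its
// backups: the list is first coalesced by MergeModifications, the merged
// ranges are posted as RDMA WRITEs against each backup's registered region
// (note that the trailing `// DEBUG` break currently limits each post to the
// first merged range), and one send completion is then polled per backup QP.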
int Tablet::Sync(ModificationList& modifications) {
if (modifications.size() == 0) {
return 0;
}
MergeModifications(modifications);
for (size_t k = 0; k < info_.backups.size(); ++k) {
auto backup = index_manager_.GetTablet(info_.backups[k]);
assert(backup.is_backup);
size_t i = 0;
for (const auto& m : modifications) {
sges[i] = {m.src, m.len, mr_->lkey};
wrs[i].wr.rdma.remote_addr = backup.qpis[0].vaddr + m.des;
wrs[i].wr.rdma.rkey = backup.qpis[0].rkey;
// TODO(wgtdkp): use unique id
wrs[i].wr_id = 1;
wrs[i].sg_list = &sges[i];
wrs[i].num_sge = 1;
wrs[i].opcode = IBV_WR_RDMA_WRITE;
// TODO(wgtdkp): do we really need to signal each send?
wrs[i].send_flags = IBV_SEND_INLINE;
wrs[i].next = &wrs[i+1];
++i;
// DEBUG
break;
}
wrs[i-1].next = nullptr;
wrs[i-1].send_flags |= IBV_SEND_SIGNALED;
struct ibv_send_wr* bad_wr;
int err = ibv_post_send(qps_[k]->qp, &wrs[0], &bad_wr);
if (err != 0) {
throw TransportException(HERE, "ibv_post_send failed", err);
}
}
for (auto& qp : qps_) {
ibv_wc wc;
int r;
while ((r = ibv_poll_cq(qp->scq, 1, &wc)) != 1) {}
if (wc.status != IBV_WC_SUCCESS) {
throw TransportException(HERE, wc.status);
} else {
// std::clog << "success" << std::endl << std::flush;
}
}
return 0;
}
/*
// DEBUG
static void PrintModifications(const ModificationList& modifications) {
for (const auto& m : modifications) {
m.Print();
}
std::cout << std::endl << std::flush;
}
*/
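// MergeModifications sorts the modification list by destination offset and
// coalesces entries whose destination ranges lie within 16 bytes of each
// other, so that nearby NVM updates are shipped as a single RDMA WRITE.
// Example (hypothetical offsets): {des=0, len=8} and {des=12, len=4} are
// merged into one entry covering bytes [0, 16).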
void Tablet::MergeModifications(ModificationList& modifications) {
auto n = modifications.size();
assert(n > 0);
//PrintModifications(modifications);
// Baby, don't worry, n <= 12.
// This simple algorithm should be faster than `union-find`;
for (size_t i = 0; i < n; ++i) {
for (size_t j = i + 1; j < n; ++j) {
if (modifications[j] < modifications[i])
std::swap(modifications[j], modifications[i]);
}
}
//PrintModifications(modifications);
static const size_t gap = 16;
size_t i = 0;
uint64_t src = modifications[0].src;
uint32_t begin = modifications[0].des;
uint32_t end = modifications[0].des + modifications[0].len;
for (const auto& m : modifications) {
if (m.des > end + gap) {
modifications[i++] = {begin, src, end - begin};
begin = m.des;
end = m.des + m.len;
src = m.src;
} else {
end = std::max(end, m.des + m.len);
}
}
modifications[i++] = {begin, src, end - begin};
modifications.resize(i);
//PrintModifications(modifications);
}
} // namespace nvds
<file_sep>/script/startup.sh
#!/bin/bash
../src/build/coordinator > coordinator.txt 2> err_coordinator.txt < /dev/null &
# Waiting for coordinator
sleep 0.1s
first_line=$(head -n 1 coordinator.txt)
first_line_arr=($first_line)
# Compatible with the server's bash
#coord_addr=${first_line_arr[-1]}
coord_addr=${first_line_arr[2]}
while read line; do
server=($line)
echo "starting server [${server[0]}:${server[1]}]"
dir="/home/wgtdkp/nvds/"
bin="./src/build/server"
out="./script/server_${server[1]}.txt"
err="./script/err_server_${server[1]}.txt"
in="/dev/null"
ssh root@${server[0]} "cd ${dir}; nohup ${bin} ${server[1]} ${coord_addr} > ${out} 2> ${err} < ${in} &" < /dev/null
done < servers.txt
<file_sep>/test/test.c
extern int test_main();
int main() {
return test_main();
}
<file_sep>/doc/command.md
1. **_ifconfig -a_** to find the nic device; **_ethtool device_** to show nic information; The machines are 56Gb/s infiniband;
<file_sep>/src/message.h
#ifndef _NVDS_MESSAGE_H_
#define _NVDS_MESSAGE_H_
#include "common.h"
#include "infiniband.h"
#include "json.hpp"
namespace nvds {
class IndexManager;
/*
The message body is in json format, it looks like:
{
"name-0": value,
"name-1": value
}
0. REQ_JOIN :
  {
    "size": int,
    "ib_addr": {
      "ib_port": int,
      "lid": int,
      "qpn": int,
    },
"tablets_vaddr": [int],
"tablets_rkey": [int]
}
1. RES_JOIN :
{
"id": int,
"index_manager": IndexManager
}
*/
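/*
  For illustration only (the field values below are made up, not taken from a
  real run), a REQ_JOIN body would look like:
  {
    "size": 1073741824,
    "ib_addr": { "ib_port": 1, "lid": 7, "qpn": 101 },
    "tablets_vaddr": [140235112448000, 140235113496576],
    "tablets_rkey": [4919, 4920]
  }
*/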
/*
struct Backup {
ServerId server_id;
TabletId tablet_id;
bool operator==(const Backup& other) const {
return server_id == other.server_id &&
tablet_id == other.tablet_id;
}
bool operator!=(const Backup& other) const {
return !(*this == other);
}
};
using Master = Backup;
*/
struct TabletInfo {
TabletId id;
ServerId server_id;
bool is_backup;
std::array<Infiniband::QueuePairInfo, kNumReplicas> qpis;
union {
// If `is_backup` == true,
// `master` is the master tablet of this backup tablet
TabletId master;
// Else, `backups` is the backups of this makster tablet
std::array<TabletId, kNumReplicas> backups;
};
bool operator==(const TabletInfo& other) const {
return id == other.id &&
server_id == other.server_id &&
is_backup == other.is_backup;
}
bool operator!=(const TabletInfo& other) const {
return !(*this == other);
}
// DEBUG
void Print() const;
};
/* Json representation
{
"id": int,
"active": bool,
"addr": string,
"ib_addr": {
"ib_port": int,
"lid": int,
"qpn": int,
}
"tablets": array[TabletInfo],
}
*/
struct ServerInfo {
ServerId id;
bool active;
std::string addr; // Ip address
Infiniband::Address ib_addr; // Infiniband address
std::array<TabletId, kNumTabletAndBackupsPerServer> tablets;
};
class Message {
public:
enum class SenderType : uint8_t {
COORDINATOR,
SERVER,
CLIENT,
};
enum class Type : uint8_t {
    REQ_JOIN,          // server/client ---> coordinator
REQ_LEAVE, // server/client ---> coordinator
RES_JOIN, // coordinator ---> server
RES_CLUSTER_INFO, // coordinator ---> client
QP_INFO_EXCH, // server/client ---> server
ACK_REJECT, // acknowledgement: reject
ACK_ERROR, // aknowledgement: error
ACK_OK, // acknowledgement: ok
};
PACKED(struct Header {
SenderType sender_type;
Type type;
uint32_t body_len;
});
Message() {}
Message(Header header, std::string body)
: header_(std::move(header)), body_(std::move(body)) {
header_.body_len = body_.size();
}
//explicit Message(const char* raw=nullptr);
~Message() {}
static const size_t kHeaderSize = sizeof(Header);
const Header& header() const { return header_; }
Header& header() { return header_; }
const std::string& body() const { return body_; }
std::string& body() { return body_; }
SenderType sender_type() const { return header_.sender_type; }
Type type() const { return header_.type; }
uint32_t body_len() const { return header_.body_len; }
private:
Header header_;
std::string body_;
};
void to_json(nlohmann::json& j, const Infiniband::Address& ia);
void from_json(const nlohmann::json& j, Infiniband::Address& ia);
void to_json(nlohmann::json& j, const Infiniband::QueuePairInfo& qpi);
void from_json(const nlohmann::json& j, Infiniband::QueuePairInfo& qpi);
void to_json(nlohmann::json& j, const TabletInfo& ti);
void from_json(const nlohmann::json& j, TabletInfo& ti);
void to_json(nlohmann::json& j, const ServerInfo& si);
void from_json(const nlohmann::json& j, ServerInfo& si);
void to_json(nlohmann::json& j, const IndexManager& im);
void from_json(const nlohmann::json& j, IndexManager& im);
} // namespace nvds
#endif // _NVDS_MESSAGE_
<file_sep>/script/connect_machine.sh
#!/bin/bash
# How to connect to lab's servers;
sudo route add 172.16.31.10 gw 192.168.1.1
<file_sep>/src/hash.h
#ifndef _NVDS_HASH_H_
#define _NVDS_HASH_H_
#include "MurmurHash2.h"
#include <string>
namespace nvds {
using KeyHash = uint64_t;
static const KeyHash kMaxKeyHash = static_cast<KeyHash>(-1);
struct KeyHashRange {
KeyHash begin;
KeyHash end;
};
static inline KeyHash Hash(const std::string& key) {
return MurmurHash64B(key.c_str(), key.size(), 103);
}
static inline KeyHash Hash(const char* key, size_t key_len) {
return MurmurHash64B(key, key_len, 103);
}
} // namespace nvds
#endif // _NVDS_HASH_H_
<file_sep>/src/experiments/memc.cc
// clang++ -std=c++11 -Wall -o memc -g memc.cc -lmemcached -lrt
#include <iostream>
#include <random>
#include <string>
#include <vector>
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <libmemcached/memcached.h>
using namespace std;
static vector<string> GenRandomStrings(size_t len, size_t n) {
vector<string> ans;
default_random_engine g;
uniform_int_distribution<int> dist('a', 'z');
for (size_t i = 0; i < n; ++i) {
ans.emplace_back(len, 0);
for (size_t j = 0; j < len; ++j) {
ans[i][j] = dist(g);
}
}
return ans;
}
static inline int64_t TimeDiff(struct timespec* lhs, struct timespec* rhs) {
return (lhs->tv_sec - rhs->tv_sec) * 1000000000 +
(lhs->tv_nsec - rhs->tv_nsec);
}
void Test(const char* server, uint32_t len) {
auto cfg = "--SERVER=" + string(server);
memcached_st* memc = memcached(cfg.c_str(), cfg.size());
assert(memc != nullptr);
static const size_t n = 10000;
auto keys = GenRandomStrings(16, n);
auto vals = GenRandomStrings(len, n);
struct timespec begin, end;
assert(clock_gettime(CLOCK_REALTIME, &begin) == 0);
for (size_t i = 0; i < keys.size(); ++i) {
auto& key = keys[i];
auto& val = vals[i];
memcached_return_t rc = memcached_set(memc, key.c_str(), key.size(), val.c_str(), val.size(), 0, 0);
assert(rc == MEMCACHED_SUCCESS);
}
assert(clock_gettime(CLOCK_REALTIME, &end) == 0);
double t_set = TimeDiff(&end, &begin) * 1.0 / n / 1000;
assert(clock_gettime(CLOCK_REALTIME, &begin) == 0);
for (size_t i = 0; i < keys.size(); ++i) {
auto& key = keys[i];
size_t val_len;
uint32_t flags;
memcached_return_t rc;
memcached_get(memc, key.c_str(), key.size(), &val_len, &flags, &rc);
assert(rc == MEMCACHED_SUCCESS || rc == MEMCACHED_NOTFOUND);
}
assert(clock_gettime(CLOCK_REALTIME, &end) == 0);
double t_get = TimeDiff(&end, &begin) * 1.0 / n / 1000;
memcached_free(memc);
printf("%u, %f, %f\n", len, t_set, t_get);
}
int main(int argc, const char* argv[]) {
assert(argc == 3);
const char* server = argv[1];
uint32_t len = atoi(argv[2]);
//for (uint32_t len = 16; len <= 1024; len <<= 1) {
Test(server, len);
//}
return 0;
}
<file_sep>/src/tablet.h
#ifndef _NVDS_TABLET_H_
#define _NVDS_TABLET_H_
#include "allocator.h"
#include "common.h"
#include "hash.h"
#include "message.h"
#include "modification.h"
#include "response.h"
namespace nvds {
struct Request;
class IndexManager;
// A reasonable prime number
static const uint32_t kHashTableSize = 1000003;
struct NVMObject {
uint32_t next;
uint16_t key_len;
uint16_t val_len;
KeyHash key_hash;
char data[0];
};
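// Layout sketch (an interpretation of the structures above and below, not a
// spec): `hash_table[key_hash % kHashTableSize]` holds the offset, inside
// `data`, of the first NVMObject of that bucket (0 means an empty bucket),
// and `next` chains colliding objects. `data[0]` of an NVMObject stores the
// key bytes immediately followed by the value bytes.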
struct NVMTablet {
char data[Allocator::kSize];
std::array<uint32_t, kHashTableSize> hash_table;
NVMTablet() {
hash_table.fill(0);
}
};
static const uint32_t kNVMTabletSize = sizeof(NVMTablet);
class Tablet {
public:
//Tablet(const TabletInfo& info, NVMPtr<NVMTablet> nvm_tablet);
Tablet(const IndexManager& index_manager,
NVMPtr<NVMTablet> nvm_tablet,
bool is_backup=false);
~Tablet();
DISALLOW_COPY_AND_ASSIGN(Tablet);
const TabletInfo& info() const { return info_; }
Status Get(Response* resp, const Request* r,
ModificationList& modifications);
Status Del(const Request* r, ModificationList& modifications);
Status Put(const Request* r, ModificationList& modifications);
// Return: Status::ERROR, if there is already the same key; else, Status::OK;
Status Add(const Request* r, ModificationList& modifications);
void SettingupQPConnect(TabletId id, const IndexManager& index_manager);
int Sync(ModificationList& modifications);
private:
static void MergeModifications(ModificationList& modifications);
const IndexManager& index_manager_;
TabletInfo info_;
NVMPtr<NVMTablet> nvm_tablet_;
Allocator allocator_;
// Infiniband
static const uint32_t kMaxIBQueueDepth = 128;
Infiniband ib_;
//Infiniband::Address ib_addr_;
ibv_mr* mr_;
std::array<Infiniband::QueuePair*, kNumReplicas> qps_;
// `kNumReplica` queue pairs share this `rcq_` and `scq_`
static const uint32_t kNumScatters = 16;
std::array<struct ibv_sge, kNumScatters> sges;
std::array<struct ibv_send_wr, kNumScatters> wrs;
};
} // namespace nvds
#endif // _NVDS_TABLET_H_
<file_sep>/src/session.cc
#include "session.h"
namespace nvds {
void Session::Start() {
AsyncRecvMessage(std::make_shared<Message>());
}
void Session::AsyncRecvMessage(std::shared_ptr<Message> msg) {
// Avoid out-of-range of this session
auto self = shared_from_this();
auto body_handler = [this, self, msg](
const boost::system::error_code& err,
size_t bytes_transferred) {
if (!err) {
recv_msg_handler_(self, msg);
} else {
NVDS_ERR(err.message().c_str());
}
};
auto header_handler = [this, self, msg, body_handler](
const boost::system::error_code& err,
size_t bytes_transferred) {
if (!err) {
assert(bytes_transferred >= Message::kHeaderSize);
// Read message body
size_t len = Message::kHeaderSize + msg->body_len() - bytes_transferred;
msg->body().resize(len);
boost::asio::async_read(conn_sock_,
boost::asio::buffer(&msg->body()[0], msg->body_len()),
body_handler);
} else {
NVDS_ERR(err.message().c_str());
}
};
// Read message header
boost::asio::async_read(conn_sock_,
boost::asio::buffer(&msg->header(), Message::kHeaderSize),
header_handler);
}
void Session::AsyncSendMessage(std::shared_ptr<Message> msg) {
// Avoid out-of-range of this session
auto self = shared_from_this();
auto body_handler = [this, self, msg](
const boost::system::error_code& err,
size_t bytes_transferred) {
if (!err) {
send_msg_handler_(self, msg);
} else {
NVDS_ERR(err.message().c_str());
}
};
auto header_handler = [this, self, msg, body_handler](
const boost::system::error_code& err,
size_t bytes_transferred) {
if (!err) {
assert(bytes_transferred >= Message::kHeaderSize);
// Send message body
boost::asio::async_write(conn_sock_,
boost::asio::buffer(msg->body()), body_handler);
} else {
NVDS_ERR(err.message().c_str());
}
};
  // Send message header
boost::asio::async_write(conn_sock_,
boost::asio::buffer(&msg->header(), Message::kHeaderSize),
header_handler);
}
void Session::SendMessage(const Message& msg) {
boost::asio::write(conn_sock_,
boost::asio::buffer(&msg.header(), Message::kHeaderSize));
boost::asio::write(conn_sock_, boost::asio::buffer(msg.body()));
}
Message Session::RecvMessage() {
Message msg;
boost::asio::read(conn_sock_,
boost::asio::buffer(&msg.header(), Message::kHeaderSize));
msg.body().resize(msg.body_len());
boost::asio::read(conn_sock_,
boost::asio::buffer(&msg.body()[0], msg.body_len()));
return msg;
}
} // namespace nvds
<file_sep>/doc/note.md
# My Notes
## Plan
1. Start with communication and queries between two nodes, without replication at first; measure the QPS of a single node.
2. Work out the persistence design.
3. Extend it to a distributed architecture.
4. Then consider failure detection, failure recovery, and related issues.
## RDMA
1. [libvma](https://github.com/Mellanox/libvma): Enable standard socket API over Ethernet or Infiniband from user-space with full network stack bypass.
2. rdma_cma.h: Used to establish communication over RDMA transports.
## Understanding RDMA
1. Channel Adapter: effectively just the NIC.
2. RDMA offers two typical semantics: memory semantics and channel semantics. With memory semantics the initiator specifies the remote address to read or write, and the remote host's CPU is not involved at all; this is called one-sided and corresponds to RDMA READ/WRITE. Channel semantics correspond to RDMA SEND/RECV. A minimal sketch of both follows below.
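A minimal libibverbs sketch of the two semantics, assuming `qp`, `mr`, `remote_addr` and `rkey` have already been set up and exchanged out of band (error handling and completion polling omitted):

```cpp
#include <infiniband/verbs.h>
#include <cstdint>

// One-sided (memory semantic): the initiator names the remote address;
// the remote CPU is not involved.
static int PostWrite(ibv_qp* qp, ibv_mr* mr, void* buf, uint32_t len,
                     uint64_t remote_addr, uint32_t rkey) {
  ibv_sge sge {};
  sge.addr   = reinterpret_cast<uintptr_t>(buf);
  sge.length = len;
  sge.lkey   = mr->lkey;

  ibv_send_wr wr {};
  wr.opcode              = IBV_WR_RDMA_WRITE;
  wr.sg_list             = &sge;
  wr.num_sge             = 1;
  wr.send_flags          = IBV_SEND_SIGNALED;
  wr.wr.rdma.remote_addr = remote_addr;
  wr.wr.rdma.rkey        = rkey;

  ibv_send_wr* bad = nullptr;
  return ibv_post_send(qp, &wr, &bad);
}

// Two-sided (channel semantic): the receiver must have posted a matching
// RECV, and its CPU consumes the completion.
static int PostSend(ibv_qp* qp, ibv_mr* mr, void* buf, uint32_t len) {
  ibv_sge sge {};
  sge.addr   = reinterpret_cast<uintptr_t>(buf);
  sge.length = len;
  sge.lkey   = mr->lkey;

  ibv_send_wr wr {};
  wr.opcode     = IBV_WR_SEND;
  wr.sg_list    = &sge;
  wr.num_sge    = 1;
  wr.send_flags = IBV_SEND_SIGNALED;

  ibv_send_wr* bad = nullptr;
  return ibv_post_send(qp, &wr, &bad);
}
```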
## Measurements
### RDMA latency
Between two machines in the same rack, RDMA write latency is 1.21 us and read latency is 1.20 us, with 64-byte reads and writes.
<file_sep>/src/test_stl_map.cc
#include "hash.h"
#include "measurement.h"
#include <gtest/gtest.h>
#include <bits/stdc++.h>
using namespace std;
using namespace nvds;
static vector<string> GenRandomStrings(size_t len, size_t n) {
vector<string> ans;
std::default_random_engine g;
std::uniform_int_distribution<int> dist('a', 'z');
for (size_t i = 0; i < n; ++i) {
ans.emplace_back(len, 0);
for (size_t j = 0; j < len; ++j) {
ans[i][j] = dist(g);
}
}
return ans;
}
TEST (AllocatorTest, Alloc) {
const size_t n = 1000 * 1000;
auto vals = GenRandomStrings(40, n);
vector<uint64_t> keys(n, 0);
transform(vals.begin(), vals.end(), keys.begin(), [](const string& x) -> uint64_t {
return Hash(x.substr(0, 16));
});
Measurement m;
unordered_map<uint64_t, string> um;
um.reserve(n);
m.begin();
for (size_t i = 0; i < n; ++i) {
um[keys[i]] = vals[i];
}
m.end();
m.Print();
}
int main(int argc, char* argv[]) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
<file_sep>/src/index.cc
#include "index.h"
namespace nvds {
const ServerInfo& IndexManager::AddServer(const std::string& addr,
nlohmann::json& msg_body) {
auto id = AllocServerId();
Infiniband::Address ib_addr = msg_body["ib_addr"];
servers_[id] = {id, true, addr, ib_addr};
std::vector<Infiniband::QueuePairInfo> qpis = msg_body["tablet_qpis"];
auto i = id;
for (uint32_t j = 0; j < kNumReplicas + 1; ++j) {
for (uint32_t k = 0; k < kNumTabletsPerServer; ++k) {
auto tablet_id = CalcTabletId(i, j, k);
auto tablet_idx = CalcTabletId(0, j, k);
tablets_[tablet_id].id = tablet_id;
tablets_[tablet_id].server_id = i;
tablets_[tablet_id].is_backup = j > 0;
copy(qpis.begin() + tablet_idx * kNumReplicas,
qpis.begin() + (tablet_idx + 1) * kNumReplicas,
tablets_[tablet_id].qpis.begin());
if (!tablets_[tablet_id].is_backup) {
auto idx = CalcTabletId(0, i, k);
key_tablet_map_[idx] = tablet_id;
} else {
auto backup_id = tablet_id;
auto master_id = CalcTabletId((kNumServers + i - j) % kNumServers, 0, k);
// It is tricky here
assert(backup_id != 0);
tablets_[master_id].backups[j-1] = backup_id;
tablets_[backup_id].master = master_id;
}
servers_[i].tablets[tablet_idx] = tablet_id;
}
}
return servers_[id];
}
void IndexManager::PrintTablets() const {
for (auto& tablet : tablets_) {
tablet.Print();
std::cout << std::endl;
}
}
} // namespace nvds
<file_sep>/src/allocator.h
/*
* This allocator is a simplified version of `Doug Lea's Malloc`.
* Reference: http://g.oswego.edu/dl/html/malloc.html
*/
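/*
 * A minimal usage sketch (a hedged illustration, not code taken from
 * tablet.cc; offsets are relative to the `base` passed in):
 *
 *   void* base = ...;             // a region of Allocator::kSize bytes, e.g. mapped NVM
 *   Allocator a {base};           // formats the region on construction
 *
 *   ModificationList mods;
 *   a.set_modifications(&mods);   // Write()/Memcpy() record NVM modifications here
 *   uint32_t off = a.Alloc(64);   // offset of a 64-byte object, 0 on failure
 *   if (off != 0) {
 *     a.Free(off);
 *   }
 */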
#ifndef _NVDS_ALLOCATOR_H_
#define _NVDS_ALLOCATOR_H_
#include "common.h"
#include "modification.h"
#include <memory.h>
namespace nvds {
class Allocator {
public:
static const uint32_t kMaxBlockSize = 1024 + 128;
static const uint32_t kSize = 64 * 1024 * 1024;
Allocator(void* base) : Allocator(reinterpret_cast<uintptr_t>(base)) {
Format();
}
Allocator(uintptr_t base)
: base_(base), cnt_writes_(0), modifications_(nullptr) {
flm_ = OffsetToPtr<FreeListManager>(0);
}
~Allocator() {}
DISALLOW_COPY_AND_ASSIGN(Allocator);
// Return offset to the object
uint32_t Alloc(uint32_t size) {
auto blk_size = RoundupBlockSize(size + sizeof(uint32_t));
assert(blk_size % 16 == 0);
// The client side should refuse too big kv item.
assert(blk_size <= kMaxBlockSize);
auto blk = AllocBlock(blk_size);
return blk == 0 ? 0 : blk + sizeof(uint32_t);
}
void Free(uint32_t ptr) {
// TODO(wgtdkp): checking if ptr is actually in this Allocator zone.
assert(ptr > sizeof(uint32_t) && ptr <= kSize);
FreeBlock(ptr - sizeof(uint32_t));
}
template<typename T>
T* OffsetToPtr(uint32_t offset) const {
return reinterpret_cast<T*>(base_ + offset);
}
template<typename T>
void Write(uint32_t offset, const T& val) {
assert(offset != 0);
auto ptr = OffsetToPtr<T>(offset);
*ptr = val;
modifications_->emplace_back(offset,
base_ + offset, static_cast<uint32_t>(sizeof(T)));
++cnt_writes_;
}
template<typename T>
T Read(uint32_t offset) {
assert(offset);
return *OffsetToPtr<T>(offset);
}
// TODO(wgtdkp): add latency
int Memcmp(uint32_t lhs, uint32_t rhs, uint32_t len) {
return memcmp(OffsetToPtr<char>(lhs), OffsetToPtr<char>(rhs), len);
}
int Memcmp(uint32_t lhs, const char* rhs, uint32_t len) {
return memcmp(OffsetToPtr<char>(lhs), rhs, len);
}
int Memcmp(const char* lhs, uint32_t rhs, uint32_t len) {
return memcmp(lhs, OffsetToPtr<char>(rhs), len);
}
void Memcpy(uint32_t des, uint32_t src, uint32_t len) {
memcpy(OffsetToPtr<char>(des), OffsetToPtr<char>(src), len);
modifications_->emplace_back(des, base_ + des, len);
}
void Memcpy(uint32_t des, const char* src, uint32_t len) {
memcpy(OffsetToPtr<char>(des), src, len);
modifications_->emplace_back(des, base_ + des, len);
}
void Memcpy(char* des, uint32_t src, uint32_t len) {
memcpy(des, OffsetToPtr<char>(src), len);
// Copy to user, no NVM modification
}
uintptr_t base() const { return base_; }
uint64_t cnt_writes() const { return cnt_writes_; }
void set_modifications(ModificationList* modifications) {
modifications_ = modifications;
}
private:
static const uint32_t kNumFreeLists = kMaxBlockSize / 16 + 1;
void Format();
PACKED(
struct BlockHeader {
// Denoting if previous block is free.
uint32_t free: 1;
// Block size, include block header.
uint32_t size: 31;
// Pointer to previous block in the same free list.
// Relative to base_.
uint32_t prev;
// Pointer to next block in the same free list.
// Relative to base_.
uint32_t next;
static const uint32_t kFreeMask = static_cast<uint32_t>(1) << 31;
BlockHeader() = delete;
});
PACKED(
struct BlockFooter {
// Denoting if current block is free.
uint32_t free: 1;
// Block size, include block header.
uint32_t size: 31;
BlockFooter() = delete;
});
PACKED(
struct FreeListManager {
// The list of these free blocks with same size.
// The last list is all free block greater than kMaxBlockSize.
// So these blocks are not necessary the same size,
// and not sorted(by addr or size).
uint32_t free_lists[kNumFreeLists];
FreeListManager() = delete;
});
uint32_t GetBlockSizeByFreeList(uint32_t free_list) {
free_list /= sizeof(uint32_t);
if (free_list < kNumFreeLists - 1) {
return (free_list + 1) * 16;
}
assert(free_list == kNumFreeLists - 1);
auto head = Read<uint32_t>(free_list * sizeof(uint32_t));
return head == 0 ? 0 : ReadTheSizeTag(head);
}
uint32_t GetFreeListByBlockOffset(uint32_t blk) {
auto blk_size = OffsetToPtr<const BlockHeader>(blk)->size;
return GetFreeListByBlockSize(blk_size);
}
uint32_t GetFreeListByBlockSize(uint32_t blk_size) {
assert(blk_size >= 16);
if (blk_size <= kMaxBlockSize) {
return (blk_size / 16 - 1) * sizeof(uint32_t);
}
return GetLastFreeList();
}
static uint32_t RoundupBlockSize(uint32_t blk_size) {
if (blk_size <= kMaxBlockSize) {
return (blk_size + 16 - 1) / 16 * 16;
}
assert(false);
return 0;
}
static uint32_t GetLastFreeList() {
return (kNumFreeLists - 1) * sizeof(uint32_t);
}
void SetTheFreeTag(uint32_t blk, uint32_t blk_size) {
auto next_blk = blk + blk_size;
Write(next_blk, Read<uint32_t>(next_blk) | BlockHeader::kFreeMask);
}
void ResetTheFreeTag(uint32_t blk, uint32_t blk_size) {
auto next_blk = blk + blk_size;
Write(next_blk, Read<uint32_t>(next_blk) & ~BlockHeader::kFreeMask);
}
uint32_t ReadTheFreeTag(uint32_t blk, uint32_t blk_size) {
return Read<uint32_t>(blk + blk_size) & BlockHeader::kFreeMask;
}
void SetThePrevFreeTag(uint32_t blk) {
Write(blk, BlockHeader::kFreeMask | Read<uint32_t>(blk));
}
void ResetThePrevFreeTag(uint32_t blk) {
Write(blk, ~BlockHeader::kFreeMask & Read<uint32_t>(blk));
}
uint32_t ReadThePrevFreeTag(uint32_t blk) {
return Read<uint32_t>(blk) & BlockHeader::kFreeMask;
}
uint32_t ReadTheSizeTag(uint32_t blk) {
return Read<uint32_t>(blk) & ~BlockHeader::kFreeMask;
}
uint32_t AllocBlock(uint32_t size);
void FreeBlock(uint32_t blk);
// Remove the block from the free list, and it keeps free.
void RemoveBlock(uint32_t blk, uint32_t blk_size);
uint32_t SplitBlock(uint32_t blk, uint32_t blk_size, uint32_t needed_size);
private:
uintptr_t base_;
FreeListManager* flm_;
uint64_t cnt_writes_;
ModificationList* modifications_;
};
} // namespace nvds
#endif // _NVDS_ALLOCATOR_H_
<file_sep>/src/basic_server.cc
#include "basic_server.h"
#include "session.h"
using boost::asio::ip::tcp;
namespace nvds {
BasicServer::BasicServer(uint16_t port)
: tcp_acceptor_(tcp_service_,
tcp::endpoint(tcp::v4(), port)),
conn_sock_(tcp_service_) {
}
void BasicServer::Accept(MessageHandler recv_msg_handler,
MessageHandler send_msg_handler) {
tcp_acceptor_.async_accept(conn_sock_,
[this, recv_msg_handler, send_msg_handler](boost::system::error_code err) {
if (!err) {
std::make_shared<Session>(std::move(conn_sock_),
recv_msg_handler, send_msg_handler)->Start();
} else {
NVDS_ERR(err.message().c_str());
}
Accept(recv_msg_handler, send_msg_handler);
});
}
} // namespace nvds
<file_sep>/src/common.h
#ifndef _NVDS_COMMON_H_
#define _NVDS_COMMON_H_
#include "nvm.h"
#include <unistd.h>
#include <cassert>
#include <cinttypes>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <array>
#include <iostream>
#include <string>
#include <vector>
#define PACKED(s) s __attribute__((packed))
#ifndef DISALLOW_COPY_AND_ASSIGN
#define DISALLOW_COPY_AND_ASSIGN(TypeName) \
TypeName(const TypeName&) = delete; \
TypeName& operator=(const TypeName&) = delete;
#endif
#define NVDS_ERR(format, args...) { \
std::cerr << "error: " \
<< nvds::Format(format, ##args) \
<< std::endl; \
}
#define NVDS_LOG(format, args...) { \
std::cout << nvds::Format(format, ##args) << std::endl; \
}
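// Usage sketch (printf-style formatting; see client_main.cc for real uses):
//   NVDS_LOG("QPS: %.2f", qps);
//   NVDS_ERR("failed to connect to %s", addr.c_str());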
namespace nvds {
using ServerId = uint32_t;
using TabletId = uint32_t;
/*
* Cluster configuration
*/
// Configurable
static const uint32_t kNumReplicas = 1;
static const uint32_t kNumServers = 2;
static const uint32_t kNumTabletsPerServer = 1;
static const uint16_t kCoordPort = 9090;
static const uint32_t kMaxItemSize = 1024;
static const uint32_t kNumTabletAndBackupsPerServer = kNumTabletsPerServer * (1 + kNumReplicas);
static const uint32_t kNumTablets = kNumTabletsPerServer * kNumServers;
static const uint32_t kNumTabletAndBackups = kNumTabletAndBackupsPerServer * kNumServers;
static_assert(kNumTablets % kNumServers == 0,
              "`kNumTablets` must be divisible by `kNumServers`");
/*
* Infiniband configuration
*/
static const uint32_t kSendBufSize = 1024 + 128;
static const uint32_t kRecvBufSize = 1024 * 2 + 128;
static const uint32_t kIBUDPadding = 40;
std::string Format(const char* format, ...);
std::string Demangle(const char* name);
static inline void memset_volatile(volatile char* ptr, char v, size_t size) {
auto end = ptr + size;
while (ptr != end) {
*ptr++ = v;
}
}
} // namespace nvds
#endif // _NVDS_COMMON_H_
<file_sep>/test/test_nvds.c
#include "test.h"
#include "nvds.c"
int test_main() {
return 0;
}
<file_sep>/src/response.h
#ifndef _NVDS_RESPONSE_H_
#define _NVDS_RESPONSE_H_
#include "common.h"
#include "infiniband.h"
#include "request.h"
#include "status.h"
namespace nvds {
struct Response {
using Type = Request::Type;
Type type;
Status status;
uint16_t val_len;
char val[0];
static Response* New(Infiniband::Buffer* b, Type type, Status status) {
return new (b->buf) Response(type, status);
}
static void Del(const Response* r) {
r->~Response();
}
uint32_t Len() const {
return sizeof(Response) + (type == Type::GET ? val_len : 0);
}
void Print() const {
std::cout << "type: " << (type == Type::GET ? "GET" : type == Type::PUT ? "PUT" : "DEL") << std::endl;
std::cout << "status: " << (status == Status::OK ? "OK" : status == Status::ERROR ? "ERROR" : "NO_MEM") << std::endl;
std::cout << "val_len: " << val_len << std::endl;
std::cout << "val: ";
for (size_t i = 0; i < val_len; ++i)
std::cout << val[i];
std::cout << std::endl;
}
private:
Response(Request::Type t, Status s)
: type(t), status(s), val_len(0) {
}
};
} // namespace nvds
#endif // _NVDS_RESPONSE_H_
<file_sep>/src/client.cc
#include "client.h"
#include "request.h"
#include "response.h"
namespace nvds {
using json = nlohmann::json;
Client::Client(const std::string& coord_addr)
: session_(Connect(coord_addr)),
send_bufs_(ib_.pd(), kSendBufSize, kMaxIBQueueDepth, false),
recv_bufs_(ib_.pd(), kRecvBufSize, kMaxIBQueueDepth, true) {
// Infiniband
qp_ = new Infiniband::QueuePair(ib_, IBV_QPT_UD,
kMaxIBQueueDepth, kMaxIBQueueDepth);
qp_->Activate();
Join();
}
Client::~Client() {
Close();
delete qp_;
}
void Client::Join() {
Message msg {
Message::Header {Message::SenderType::CLIENT, Message::Type::REQ_JOIN, 0},
json().dump()
};
session_.SendMessage(msg);
msg = session_.RecvMessage();
assert(msg.sender_type() == Message::SenderType::COORDINATOR);
assert(msg.type() == Message::Type::RES_JOIN);
auto j_body = json::parse(msg.body());
index_manager_ = j_body["index_manager"];
}
tcp::socket Client::Connect(const std::string& coord_addr) {
tcp::resolver resolver {tcp_service_};
tcp::socket conn_sock {tcp_service_};
tcp::resolver::query query {coord_addr, std::to_string(kCoordPort)};
auto ep = resolver.resolve(query);
boost::asio::connect(conn_sock, ep);
return conn_sock;
}
Client::Buffer* Client::RequestAndWait(const char* key, size_t key_len,
const char* val, size_t val_len, Request::Type type) {
assert(key_len + val_len <= kMaxItemSize);
// 0. compute key hash
auto hash = Hash(key, key_len);
// 1. get tablet and server info
//auto& tablet = index_manager_.GetTablet(hash);
auto& server = index_manager_.GetServer(hash);
// 2. post ib send and recv
auto sb = send_bufs_.Alloc();
assert(sb != nullptr);
auto r = Request::New(sb, type, key, key_len, val, val_len, hash);
auto rb = recv_bufs_.Alloc();
assert(rb != nullptr);
ib_.PostReceive(qp_, rb);
ib_.PostSendAndWait(qp_, sb, r->Len(), &server.ib_addr);
Request::Del(r);
send_bufs_.Free(sb);
assert(rb == ib_.Receive(qp_));
return rb;
//std::string ans(resp->val, resp->val_len);
//recv_bufs_.Free(rb);
//return ans;
}
} // namespace nvds
<file_sep>/src/exception.h
#ifndef _NVDS_EXCEPTION_H_
#define _NVDS_EXCEPTION_H_
#include "common.h"
#include "source_location.h"
#include <cstring>
#include <exception>
#include <typeinfo>
namespace nvds {
std::string Demangle(const char* name);
class Exception : public std::exception {
public:
explicit Exception (const SourceLocation& loc)
: msg_(""), err_no_(0), loc_(loc) {}
Exception(const SourceLocation& loc, const std::string& msg)
: msg_(msg), err_no_(0), loc_(loc) {}
Exception(const SourceLocation& loc, int err_no)
: msg_(strerror(err_no)), err_no_(err_no), loc_(loc) {}
Exception(const SourceLocation& loc, const std::string& msg, int err_no)
: msg_(msg + ": " + strerror(err_no)), err_no_(err_no), loc_(loc) {}
virtual ~Exception() {}
std::string ToString() const {
auto ret = Demangle(typeid(*this).name()) + ": " +
msg_ + ", thrown at " + loc_.ToString();
if (err_no_ != 0) {
ret += " [errno: " + std::to_string(err_no_) + "]";
}
return ret;
}
const std::string& msg() const { return msg_; }
int err_no() const { return err_no_; }
const SourceLocation& loc() const { return loc_; }
private:
std::string msg_;
int err_no_;
SourceLocation loc_;
};
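// A usage sketch (HERE comes from source_location.h; the message and errno
// below are made up for illustration):
//
//   if (fd < 0) {
//     throw Exception(HERE, "open failed", errno);
//   }
//   ...
//   try { ... } catch (const Exception& e) {
//     NVDS_ERR(e.ToString().c_str());
//   }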
} // namespace nvds
#endif // _NVDS_EXCEPTION_H_
<file_sep>/test/test_consistent_hash.c
#include "test.h"
#include "consistent_hash.c"
#include <assert.h>
#include <limits.h>
#include <memory.h>
#include <stdlib.h>
#include <time.h>
clock_t begin;
clock_t delete_clk = 0;
static void __rbtree_print(rbtree_node_t* root) {
if (root == rbtree_nil)
return;
__rbtree_print(root->left);
printf("%d ", root->key);
__rbtree_print(root->right);
}
__attribute__((unused)) static void rbtree_print(rbtree_node_t* root) {
__rbtree_print(root);
printf("\n");
}
static void statistic(cluster_t* c, int request_num) {
cluster_node_t** nodes = malloc(sizeof(cluster_node_t*) * c->node_num);
memset(nodes, 0, sizeof(cluster_node_t*) * c->node_num);
int* requests = malloc(sizeof(int) * c->node_num);
memset(requests, 0, sizeof(int) * c->node_num);
int cnt = 0;
srand(time(NULL));
for (int i = 0; i < request_num; ++i) {
hash_t key = rand() % INT_MAX;
cluster_node_t* node = cluster_find(c, key);
assert(node);
int j;
for (j = 0; j < cnt; ++j) {
if (nodes[j] == node)
break;
}
++requests[j];
if (j == cnt) {
nodes[j] = node;
++cnt;
}
}
printf("total requests: %d\n", request_num);
printf("total nodes: %d\n", c->node_num);
printf("average expection: %f%%\n", 100.0 / c->node_num);
for (int i = 0; i < cnt; ++i) {
printf("%s: %d: %f%%\n", nodes[i]->name,
requests[i], 100.0 * requests[i] / request_num);
}
printf("\n");
fflush(stdout);
free(nodes);
free(requests);
}
static void test_cluster_remove(cluster_t* c, const char* name) {
clock_t begin = clock();
cluster_remove(c, name);
delete_clk += clock() - begin;
}
static void test(void) {
cluster_t c;
cluster_init(&c);
const char* node_names[] = {
"192.168.1.87",
"192.168.1.88",
"192.168.1.89",
"192.168.1.90"
};
int node_num = sizeof(node_names) / sizeof(const char*);
for (int i = 0; i < node_num; ++i) {
printf("add %s\n", node_names[i]);
fflush(stdout);
cluster_add(&c, node_names[i]);
}
const int request_num = 500000;
statistic(&c, request_num);
printf("add %s\n", "192.168.1.91");
fflush(stdout);
cluster_add(&c, "192.168.1.91");
statistic(&c, request_num);
printf("add %s\n", "192.168.1.92");
fflush(stdout);
cluster_add(&c, "192.168.1.92");
printf("add %s\n", "192.168.1.93");
fflush(stdout);
cluster_add(&c, "192.168.1.93");
statistic(&c, request_num);
printf("remove %s\n", "192.168.1.87");
fflush(stdout);
test_cluster_remove(&c, "192.168.1.87");
statistic(&c, request_num);
//rbtree_print(c.root);
printf("remove %s\n", "192.168.1.88");
fflush(stdout);
test_cluster_remove(&c, "192.168.1.88");
printf("remove %s\n", "192.168.1.89");
fflush(stdout);
test_cluster_remove(&c, "192.168.1.89");
printf("remove %s\n", "192.168.1.90");
fflush(stdout);
test_cluster_remove(&c, "192.168.1.90");
printf("remove %s\n", "192.168.1.91");
fflush(stdout);
test_cluster_remove(&c, "192.168.1.91");
statistic(&c, request_num);
cluster_destroy(&c);
}
int test_main(void) {
begin = clock();
test();
printf("total time: %fs\n", 1.0 * (clock() - begin) / CLOCKS_PER_SEC);
printf("delete time: %fs\n", 1.0 * delete_clk / CLOCKS_PER_SEC);
return 0;
}
<file_sep>/src/client_main.cc
#include "client.h"
#include <chrono>
#include <iostream>
#include <memory>
#include <random>
#include <thread>
using namespace nvds;
using VecStr = std::vector<std::string>;
static Client* client;
static void SigInt(int signo) {
std::cout << std::endl << "num_send: " << client->num_send() << std::endl;
exit(0);
}
static void Usage() {
std::cout << "usage: " << std::endl;
std::cout << "client <coordinator_addr> <item number per thread> <thread numer>" << std::endl;
}
static VecStr GenRandomStrings(size_t len, size_t n) {
VecStr ans;
std::default_random_engine g;
std::uniform_int_distribution<int> dist('a', 'z');
for (size_t i = 0; i < n; ++i) {
ans.emplace_back(len, 0);
for (size_t j = 0; j < len; ++j) {
ans[i][j] = dist(g);
}
}
return ans;
}
static void Work(double* qps, const std::string& coord_addr, size_t n) {
using namespace std::chrono;
try {
Client c {coord_addr};
client = &c;
auto keys = GenRandomStrings(16, n);
auto vals = VecStr(n, std::string(16, 'a'));
auto begin = high_resolution_clock::now();
for (size_t i = 0; i < keys.size(); ++i) {
c.Put(keys[i], vals[i]);
}
auto end = high_resolution_clock::now();
double t = duration_cast<duration<double>>(end - begin).count();
*qps = keys.size() / t;
} catch (boost::system::system_error& e) {
NVDS_ERR(e.what());
} catch (TransportException& e) {
NVDS_ERR(e.msg().c_str());
NVDS_LOG("initialize infiniband devices failed");
}
}
static int bench_main(int argc, const char* argv[]) {
if (argc < 4) {
Usage();
return -1;
}
const std::string coord_addr = argv[1];
const size_t num_items = std::stoi(argv[2]);
const size_t num_threads = std::stoi(argv[3]);
std::vector<double> qpss(num_threads);
std::vector<std::thread> workers;
for (size_t i = 0; i < num_threads; ++i) {
workers.emplace_back(std::bind(Work, &qpss[i], coord_addr, num_items));
}
for (size_t i = 0; i < num_threads; ++i) {
workers[i].join();
}
for (auto& qps : qpss) {
NVDS_LOG("QPS: %.2f", qps);
}
NVDS_LOG("total QPS: %.2f", std::accumulate(qpss.begin(), qpss.end(), 0.0));
return 0;
}
/*
static int function_test_main(int argc, const char* argv[]) {
assert(argc >= 2);
std::string coord_addr {argv[1]};
try {
Client c {coord_addr};
client = &c;
std::string author = "DDST-SJTU";
std::string version = "0.1.0";
c.Put("author", "DDST-SJTU");
c.Put("version", "0.1.0");
std::cout << "author: " << c.Get("author") << std::endl;
std::cout << "version: " << c.Get("version") << std::endl;
c.Put("author", "wgtdkp");
c.Put("version", "0.2.0");
std::cout << "author: " << c.Get("author") << std::endl;
std::cout << "version: " << c.Get("version") << std::endl;
c.Del("author");
c.Del("version");
std::cout << "author: " << c.Get("author") << std::endl;
std::cout << "version: " << c.Get("version") << std::endl;
} catch (boost::system::system_error& e) {
NVDS_ERR(e.what());
} catch (TransportException& e) {
NVDS_ERR(e.msg().c_str());
NVDS_LOG("initialize infiniband devices failed");
}
return 0;
}
*/
int main(int argc, const char* argv[]) {
signal(SIGINT, SigInt);
//return function_test_main(argc, argv);
return bench_main(argc, argv);
}
<file_sep>/src/coordinator.h
#ifndef _NVDS_COORDINATOR_H_
#define _NVDS_COORDINATOR_H_
#include "basic_server.h"
#include "index.h"
#include <boost/asio.hpp>
namespace nvds {
class Session;
/*
 * The coordinator does not serve frequent requests from clients,
 * such as get/put operations. It accepts various messages,
 * controls server recovery/joining/removal and failure
 * detection, and maintains the metadata of the whole cluster.
 *
 * At any given time, there is at most one active coordinator,
 * but several standby coordinators stand ready to take charge
 * in case the primary coordinator goes down.
*/
class Coordinator : public BasicServer {
public:
Coordinator();
~Coordinator() {}
DISALLOW_COPY_AND_ASSIGN(Coordinator);
uint64_t total_storage() const { return total_storage_; }
IndexManager& index_manager() { return index_manager_; }
const IndexManager& index_manager() const { return index_manager_; }
void Run() override;
private:
void HandleRecvMessage(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg);
void HandleSendMessage(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg);
void HandleMessageFromServer(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg);
void HandleServerRequestJoin(std::shared_ptr<Session> session,
std::shared_ptr<Message> req);
void ResponseAllJoins();
void HandleMessageFromClient(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg);
void HandleClientRequestJoin(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg);
private:
uint32_t num_servers_ = 0;
uint64_t total_storage_ = 0;
IndexManager index_manager_;
std::vector<std::shared_ptr<Session>> sessions_;
};
} // namespace nvds
#endif // _NVDS_COORDINATOR_H_
<file_sep>/cs/Makefile
CC=gcc -std=c99
CFLAGS=-Wall -g -D_GNU_SOURCE
LDFLAGS= -libverbs -lrdmacm
all: common
common: common.o
${CC} -o $@ $^ ${LDFLAGS}
ud: ud.o
${CC} -o $@ $^ ${LDFLAGS}
ud.o: ud.c
common.o: common.c common.h
clean:
rm -f common common.o
<file_sep>/src/common.cc
#include "common.h"
#include <boost/asio.hpp>
#include <cstdarg>
#include <cxxabi.h>
namespace nvds {
static std::string VFormat(const char* format, va_list ap);
/// A safe version of sprintf.
std::string Format(const char* format, ...) {
va_list ap;
va_start(ap, format);
const auto& s = VFormat(format, ap);
va_end(ap);
return s;
}
/// A safe version of vprintf.
static std::string VFormat(const char* format, va_list ap) {
std::string s;
// We're not really sure how big of a buffer will be necessary.
  // Try 1K; if that's not enough, the return value tells us how much is necessary.
int bufSize = 1024;
while (true) {
char buf[bufSize];
// vsnprintf trashes the va_list, so copy it first
va_list aq;
__va_copy(aq, ap);
int r = vsnprintf(buf, bufSize, format, aq);
assert(r >= 0); // old glibc versions returned -1
if (r < bufSize) {
s = buf;
break;
}
bufSize = r + 1;
}
return s;
}
std::string Demangle(const char* name) {
int status;
char* res = abi::__cxa_demangle(name, NULL, NULL, &status);
if (status != 0) {
return name;
}
std::string ret(res);
free(res);
return ret;
}
} // namespace nvds
<file_sep>/src/Makefile
CXX = g++ -std=c++14 -O3
LD = g++
CXXINC = -I./ -I../json/src/
#CXXFLAGS = -g -fsanitize=address -Wall -D_XOPEN_SOURCE -D_GNU_SOURCE $(CXXINC)
CXXFLAGS = -g -Wall -D_XOPEN_SOURCE -D_GNU_SOURCE $(CXXINC)
LDFLAGS =
#LDLIBS = -lasan -libverbs -lboost_system -lboost_thread -lrt -lpthread -lssl -lcrypto
LDLIBS = -libverbs -lboost_system -lboost_thread -lrt -lpthread -lssl -lcrypto
SRCS = allocator.cc \
basic_server.cc \
client.cc \
common.cc \
coordinator.cc \
index.cc \
infiniband.cc \
message.cc \
server.cc \
session.cc \
tablet.cc \
MurmurHash2.cc
OBJS_DIR = build/
OBJS = $(addprefix $(OBJS_DIR), $(SRCS:.cc=.o))
GTEST_DIR = ../googletest/googletest
GTEST_FLAGS = -isystem ${GTEST_DIR}/include -pthread
$(OBJS_DIR)libgtest.a:
$(CXX) $(GTEST_FLAGS) -I${GTEST_DIR} -c ${GTEST_DIR}/src/gtest-all.cc -o $(OBJS_DIR)gtest-all.o
ar -rv $@ $(OBJS_DIR)gtest-all.o
default: all
all:
@mkdir -p $(OBJS_DIR)
@make coordinator server client library
coordinator: $(OBJS) $(OBJS_DIR)coordinator_main.o
$(LD) $(LDFLAGS) -o $(OBJS_DIR)$@ $^ $(LDLIBS)
server: $(OBJS) $(OBJS_DIR)server_main.o
$(LD) $(LDFLAGS) -o $(OBJS_DIR)$@ $^ $(LDLIBS)
client: $(OBJS) $(OBJS_DIR)client_main.o
$(LD) $(LDFLAGS) -o $(OBJS_DIR)$@ $^ $(LDLIBS)
library: $(OBJS)
ar rcs $(OBJS_DIR)libnvds.a $(OBJS)
$(OBJS_DIR)%.o: %.cc
$(CXX) $(CXXFLAGS) -o $@ -c $^
$(OBJS_DIR)coordinator_main.o: coordinator_main.cc
$(CXX) $(CXXFLAGS) -o $@ -c $^
$(OBJS_DIR)server_main.o : server_main.cc
$(CXX) $(CXXFLAGS) -o $@ -c $^
$(OBJS_DIR)client_main.o : client_main.cc
$(CXX) $(CXXFLAGS) -o $@ -c $^
$(OBJS_DIR)test_infiniband.o: test_infiniband.cc infiniband.h
$(CXX) $(CXXFLAGS) $(GTEST_FLAGS) -o $@ -c test_infiniband.cc
test_infiniband: $(OBJS_DIR)test_infiniband.o $(OBJS_DIR)infiniband.o $(OBJS_DIR)common.o $(OBJS_DIR)libgtest.a
$(LD) $(LDFLAGS) $(GTEST_FLAGS) -o $(OBJS_DIR)$@ $^ $(LDLIBS)
$(OBJS_DIR)test_allocator.o: test_allocator.cc allocator.h
$(CXX) $(CXXFLAGS) $(GTEST_FLAGS) -o $@ -c test_allocator.cc
test_allocator: $(OBJS_DIR)test_allocator.o $(OBJS_DIR)allocator.o $(OBJS_DIR)libgtest.a
$(LD) $(LDFLAGS) $(GTEST_FLAGS) -o $(OBJS_DIR)$@ $^ $(LDLIBS)
$(OBJS_DIR)test_stl_map.o: test_stl_map.cc hash.h
$(CXX) $(CXXFLAGS) $(GTEST_FLAGS) -o $@ -c test_stl_map.cc
test_stl_map: $(OBJS_DIR)test_stl_map.o $(OBJS_DIR)MurmurHash2.o $(OBJS_DIR)libgtest.a
$(LD) $(LDFLAGS) $(GTEST_FLAGS) -o $(OBJS_DIR)$@ $^ $(LDLIBS)
$(OBJS_DIR)test_message.o: test_message.cc message.h
$(CXX) $(CXXFLAGS) $(GTEST_FLAGS) -o $@ -c test_message.cc
test_message: $(OBJS_DIR)test_message.o $(OBJS_DIR)message.o $(OBJS_DIR)libgtest.a
$(LD) $(LDFLAGS) $(GTEST_FLAGS) -o $(OBJS_DIR)$@ $^ $(LDLIBS)
install:
@sudo mkdir -p /usr/include/nvds/
@sudo cp ./*.h /usr/include/nvds/
@sudo cp ../json/src/json.hpp /usr/include/nvds/
@sudo cp ./build/libnvds.a /usr/lib/
.PHONY: clean
clean-objs:
@rm -f $(OBJS_DIR)*.o
clean:
@rm -rf $(OBJS_DIR)
<file_sep>/src/transport.h
#ifndef _NVDS_TRANSPORT_H_
#define _NVDS_TRANSPORT_H_
#include "exception.h"
namespace nvds {
class Message;
struct TransportException : public Exception {
explicit TransportException(const SourceLocation& loc)
: Exception(loc) {}
TransportException(const SourceLocation& loc, const std::string& msg)
: Exception(loc, msg) {}
TransportException(const SourceLocation& loc, int err_no)
: Exception(loc, err_no) {}
TransportException(const SourceLocation& loc,
const std::string& msg, int err_no)
: Exception(loc, msg, err_no) {}
};
class Transport {
public:
static const uint32_t kMaxRPCLen = (1 << 23) + 200;
class ServerRPC {
};
};
} // namespace nvds
#endif // _NVDS_TRANSPORT_H_
<file_sep>/src/request.h
#ifndef _NVDS_REQUEST_H_
#define _NVDS_REQUEST_H_
#include "common.h"
#include "infiniband.h"
namespace nvds {
struct Request {
enum class Type : uint8_t {
PUT, ADD, GET, DEL
};
Type type;
uint16_t key_len;
uint16_t val_len;
KeyHash key_hash;
// TODO(wgtdkp):
//uint64_t id;
// Key data followed by value data
char data[0];
static Request* New(Infiniband::Buffer* b, Type type,
const char* key, size_t key_len,
const char* val, size_t val_len, KeyHash key_hash) {
return new (b->buf) Request(type, key, key_len, val, val_len, key_hash);
}
static void Del(const Request* r) {
// Explicitly call destructor(only when pairing with placement new)
r->~Request();
}
size_t Len() const { return sizeof(Request) + key_len + val_len; }
char* Key() { return data; }
const char* Key() const { return data; }
char* Val() { return data + key_len; }
const char* Val() const { return data + key_len; }
void Print() const {
std::cout << "type: " << (type == Type::PUT ? "PUT" : type == Type::GET ? "GET" : "DEL") << std::endl;
std::cout << "key_len: " << key_len << std::endl;
std::cout << "val_len: " << val_len << std::endl;
std::cout << "key_hash: " << key_hash << std::endl;
std::cout << "key: ";
for (size_t i = 0; i < key_len; ++i)
std::cout << data[i];
std::cout << std::endl;
std::cout << "val: ";
for (size_t i = key_len; i < key_len + val_len; ++i)
std::cout << data[i];
std::cout << std::endl;
}
private:
Request(Type type, const char* key, size_t key_len,
const char* val, size_t val_len, KeyHash key_hash)
: type(type), key_len(key_len), val_len(val_len), key_hash(key_hash) {
memcpy(data, key, key_len);
if (val != nullptr && val_len > 0) {
memcpy(data + key_len, val, val_len);
}
}
};
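// A minimal usage sketch, mirroring Client::RequestAndWait() in client.cc
// (the registered send-buffer pool, `qp_` and the target `ib_addr` are set
// up elsewhere; the names here are illustrative):
//
//   auto b = send_bufs_.Alloc();                  // registered Infiniband buffer
//   auto r = Request::New(b, Request::Type::GET,
//                         key, key_len, nullptr, 0, Hash(key, key_len));
//   ib_.PostSendAndWait(qp_, b, r->Len(), &server.ib_addr);
//   Request::Del(r);                              // pairs with the placement new
//   send_bufs_.Free(b);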
} // namespace nvds
#endif // _NVDS_REQUEST_H_
<file_sep>/src/message.cc
#include "message.h"
#include "index.h"
namespace nvds {
void TabletInfo::Print() const {
std::cout << "id: " << id << std::endl;
std::cout << "server id: " << server_id << std::endl;
std::cout << "is backup: " << is_backup << std::endl;
if (is_backup) {
std::cout << "master id: " << master << std::endl;
qpis[0].Print();
} else {
std::cout << "backups id: ";
for (size_t i = 0; i < backups.size(); ++i) {
std::cout << backups[i] << (i + 1 == backups.size() ? "\n" : ", ");
}
for (auto& qpi : qpis) {
qpi.Print();
}
}
}
void to_json(nlohmann::json& j, const Infiniband::Address& ia) {
j = {
{"ib_port", ia.ib_port},
{"lid", ia.lid},
{"qpn", ia.qpn}
};
}
void from_json(const nlohmann::json& j, Infiniband::Address& ia) {
ia = {
j["ib_port"],
j["lid"],
j["qpn"],
};
}
void to_json(nlohmann::json& j, const Infiniband::QueuePairInfo& qpi) {
j["lid"] = qpi.lid;
j["qpn"] = qpi.qpn;
j["psn"] = qpi.psn;
j["rkey"] = qpi.rkey;
j["vaddr"] = qpi.vaddr;
}
void from_json(const nlohmann::json& j, Infiniband::QueuePairInfo& qpi) {
qpi.lid = j["lid"];
qpi.qpn = j["qpn"];
qpi.psn = j["psn"];
qpi.rkey = j["rkey"];
qpi.vaddr = j["vaddr"];
}
void to_json(nlohmann::json& j, const TabletInfo& ti) {
j["id"] = ti.id;
j["server_id"] = ti.server_id;
j["is_backup"] = ti.is_backup;
j["qpis"] = ti.qpis;
if (ti.is_backup) {
j["master"] = ti.master;
} else {
j["backups"] = ti.backups;
}
}
void from_json(const nlohmann::json& j, TabletInfo& ti) {
ti.id = j["id"];
ti.server_id = j["server_id"];
ti.is_backup = j["is_backup"];
ti.qpis = j["qpis"];
if (ti.is_backup) {
ti.master = j["master"];
} else {
ti.backups = j["backups"];
}
}
void to_json(nlohmann::json& j, const ServerInfo& si) {
j = {
{"id", si.id},
{"active", si.active},
{"addr", si.addr},
{"ib_addr", si.ib_addr},
{"tablets", si.tablets}
};
}
void from_json(const nlohmann::json& j, ServerInfo& si) {
si = {
j["id"],
j["active"],
j["addr"],
j["ib_addr"],
j["tablets"]
};
}
void to_json(nlohmann::json& j, const IndexManager& im) {
j = {
{"key_tablet_map", im.key_tablet_map_},
{"tablets", im.tablets_},
{"servers", im.servers_}
};
}
void from_json(const nlohmann::json& j, IndexManager& im) {
im.key_tablet_map_ = j["key_tablet_map"];
im.tablets_ = j["tablets"];
im.servers_ = j["servers"];
}
} // namespace nvds
<file_sep>/src/test_message.cc
#include "json.hpp"
#include "message.h"
#include <gtest/gtest.h>
using namespace std;
using namespace nvds;
using namespace nlohmann;
TEST (MessageTest, Backup) {
string r = R"([123, 456])";
Backup b = json::parse(r);
EXPECT_EQ(b.server_id, 123);
EXPECT_EQ(b.tablet_id, 456);
json j = Backup {111, 222};
EXPECT_EQ(j[0], 111);
EXPECT_EQ(j[1], 222);
}
TEST (MessageTest, TabletInfo) {
string r = R"({
"id": 3,
"is_backup": true,
"master": [123, 456]
})";
TabletInfo t = json::parse(r);
EXPECT_EQ(t.id, 3);
EXPECT_EQ(t.is_backup, true);
EXPECT_EQ(t.master.server_id, 123);
EXPECT_EQ(t.master.tablet_id, 456);
t = TabletInfo { 3, true};
t.master = {123, 456};
json j = t;
EXPECT_EQ(j["id"], 3);
EXPECT_EQ(j["is_backup"], true);
EXPECT_EQ(j["master"][0], 123);
EXPECT_EQ(j["master"][1], 456);
r = R"({
"id": 3,
"is_backup": false,
"backups": [[123, 456], [789, 101112]]
})";
t = json::parse(r);
EXPECT_EQ(t.id, 3);
EXPECT_EQ(t.is_backup, false);
EXPECT_EQ(t.backups[0].server_id, 123);
EXPECT_EQ(t.backups[0].tablet_id, 456);
EXPECT_EQ(t.backups[1].server_id, 789);
EXPECT_EQ(t.backups[1].tablet_id, 101112);
//j = TabletInfo { 3, false};
t = {3, false};
t.backups[0] = {123, 456};
t.backups[1] = {789, 101112};
j = t;
EXPECT_EQ(j["id"], 3);
EXPECT_EQ(j["is_backup"], false);
EXPECT_EQ(j["backups"][0][0], 123);
EXPECT_EQ(j["backups"][0][1], 456);
EXPECT_EQ(j["backups"][1][0], 789);
EXPECT_EQ(j["backups"][1][1], 101112);
}
TEST (MessageTest, InfinibandAddress) {
string r = R"({
"ib_port": 1,
"lid": 2,
"qpn": 3
})";
Infiniband::Address addr = json::parse(r);
EXPECT_EQ(addr.ib_port, 1);
EXPECT_EQ(addr.lid, 2);
EXPECT_EQ(addr.qpn, 3);
json j = Infiniband::Address {1, 2, 3};
EXPECT_EQ(j["ib_port"], 1);
EXPECT_EQ(j["lid"], 2);
EXPECT_EQ(j["qpn"], 3);
}
TEST (MessageTest, ServerInfo) {
string r = R"({
"id": 1,
"active": true,
"addr": "192.168.1.12",
"ib_addr": {
"ib_port": 11,
"lid": 12,
"qpn": 13
},
"tablets": [
{
"id": 0,
"is_backup": true,
"master": [11, 13]
}
]
})";
ServerInfo s = json::parse(r);
EXPECT_EQ(s.id, 1);
EXPECT_EQ(s.active, true);
EXPECT_EQ(s.addr, "192.168.1.12");
EXPECT_EQ(s.ib_addr, Infiniband::Address({11, 12, 13}));
EXPECT_EQ(s.tablets[0].id, 0);
EXPECT_EQ(s.tablets[0].is_backup, true);
EXPECT_EQ(s.tablets[0].master, Backup({11, 13}));
json j = s;
EXPECT_EQ(j["id"], 1);
EXPECT_EQ(j["active"], true);
EXPECT_EQ(j["addr"], "192.168.1.12");
EXPECT_EQ(j["ib_addr"], Infiniband::Address({11, 12, 13}));
EXPECT_EQ(j["tablets"][0]["id"], 0);
EXPECT_EQ(j["tablets"][0]["is_backup"], true);
EXPECT_EQ(j["tablets"][0]["master"][0], 11);
EXPECT_EQ(j["tablets"][0]["master"][1], 13);
}
int main(int argc, char* argv[]) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
return 0;
}
<file_sep>/test/test_print.c
#include <stdio.h>
#include <inttypes.h>
int main (void) {
uint32_t a=1234;
uint16_t b=5678;
printf("%" PRIu32 "\n",a);
printf("%" PRIu16 "\n",b);
return 0;
}
<file_sep>/src/index.h
/*
* Mapping items to servers
*/
#ifndef _NVDS_INDEX_H_
#define _NVDS_INDEX_H_
#include "common.h"
#include "hash.h"
#include "infiniband.h"
#include "json.hpp"
#include "message.h"
#include <map>
#include <unordered_map>
namespace nvds {
class IndexManager {
friend void to_json(nlohmann::json& j, const IndexManager& im);
friend void from_json(const nlohmann::json& j, IndexManager& im);
public:
IndexManager() {}
~IndexManager() {}
const ServerInfo& AddServer(const std::string& addr,
nlohmann::json& msg_body);
const ServerInfo& GetServer(KeyHash key_hash) const {
auto id = GetServerId(key_hash);
const auto& ans = GetServer(id);
assert(ans.id == id);
return ans;
}
const ServerInfo& GetServer(ServerId id) const {
return servers_[id];
}
ServerId GetServerId(KeyHash key_hash) const {
return GetTablet(key_hash).server_id;
}
// Get the id of the tablet that this key hash locates in.
TabletId GetTabletId(KeyHash key_hash) const {
uint32_t idx = key_hash / ((1.0 + kMaxKeyHash) / key_tablet_map_.size());
return key_tablet_map_[idx];
}
const TabletInfo& GetTablet(KeyHash key_hash) const {
return tablets_[GetTabletId(key_hash)];
}
//TabletInfo& GetTablet(KeyHash key_hash) {
// return tablets_[GetTabletId(key_hash)];
//}
const TabletInfo& GetTablet(TabletId id) const {
return tablets_[id];
}
// DEBUG
void PrintTablets() const;
private:
static ServerId AllocServerId() {
static ServerId id = 0;
return id++;
}
// `r_idx`: replication index; `s_idx`: server index; `t_idx`: tablet index;
static uint32_t CalcTabletId(uint32_t s_idx, uint32_t r_idx, uint32_t t_idx) {
return s_idx * kNumTabletAndBackupsPerServer + r_idx * kNumTabletsPerServer + t_idx;
}
std::array<TabletId, kNumTablets> key_tablet_map_;
std::array<TabletInfo, kNumTabletAndBackups> tablets_;
std::array<ServerInfo, kNumServers> servers_;
};
} // namespace nvds
#endif // _NVDS_INDEX_H_
<file_sep>/src/source_location.h
#ifndef _NVDS_SOURCE_LOCATION_H_
#define _NVDS_SOURCE_LOCATION_H_
#include <cstdint>
#include <string>
namespace nvds {
#define HERE (SourceLocation {__FILE__, __func__, __LINE__})
struct SourceLocation {
std::string file;
std::string func;
uint32_t line;
std::string ToString() const {
return file + ":" + func + ":" + std::to_string(line);
}
};
} // namespace nvds
#endif // _NVDS_SOURCE_LOCATION_H_<file_sep>/src/spinlock.h
#ifndef _NVDS_SPINLOCK_H_
#define _NVDS_SPINLOCK_H_
#include "common.h"
#include <atomic>
namespace nvds {
class Spinlock {
public:
Spinlock() = default;
DISALLOW_COPY_AND_ASSIGN(Spinlock);
void lock() {
while (flag_.test_and_set(std::memory_order_acquire)) {}
}
void unlock() {
flag_.clear(std::memory_order_release);
}
private:
std::atomic_flag flag_ = ATOMIC_FLAG_INIT;
};
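// Usage sketch: lock()/unlock() satisfy BasicLockable, so the lock composes
// with std::lock_guard from <mutex>:
//
//   Spinlock lock;
//   {
//     std::lock_guard<Spinlock> guard {lock};
//     // critical section
//   }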
} // namespace nvds
#endif // _NVDS_SPINLOCK_H_
<file_sep>/cs/udpp.c
/*
* Copyright (c) 2005 Topspin Communications. All rights reserved.
* Copyright (c) 2008 Intel Corporation. All rights reserved.
*
* This software is available to you under the OpenIB.org BSD license
* below:
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#include <assert.h>
#include <ctype.h>
#include <errno.h>
#include <malloc.h>
#include <memory.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
#include <arpa/inet.h>
#include <infiniband/verbs.h>
#include <netdb.h>
#include <sys/socket.h>
#include <sys/time.h>
// Interprocess communication access
#include <sys/ipc.h>
// Shared memory facility
#include <sys/shm.h>
enum {
PINGPONG_RECV_WRID = 1,
PINGPONG_SEND_WRID = 2,
};
struct pingpong_context {
struct ibv_context *context;
struct ibv_comp_channel *channel;
struct ibv_pd *pd;
struct ibv_mr *mr;
struct ibv_cq *cq;
struct ibv_qp *qp;
struct ibv_ah *ah;
void *buf;
int size;
int rx_depth;
int pending;
};
struct pingpong_dest {
int lid;
int qpn;
int psn;
};
static int pp_connect_ctx(struct pingpong_context *ctx, int port, int my_psn,
int sl, struct pingpong_dest *dest)
{
struct ibv_ah_attr ah_attr;
struct ibv_qp_attr attr;
memset(&ah_attr, 0, sizeof ah_attr);
ah_attr.is_global = 0;
ah_attr.dlid = (uint16_t) dest->lid;
ah_attr.sl = (uint8_t) sl;
ah_attr.src_path_bits = 0;
ah_attr.port_num = (uint8_t) port;
attr.qp_state = IBV_QPS_RTR;
if (ibv_modify_qp(ctx->qp, &attr, IBV_QP_STATE)) {
fprintf(stderr, "Failed to modify QP to RTR\n");
return 1;
}
attr.qp_state = IBV_QPS_RTS;
attr.sq_psn = my_psn;
if (ibv_modify_qp(ctx->qp, &attr,
IBV_QP_STATE | IBV_QP_SQ_PSN)) {
fprintf(stderr, "Failed to modify QP to RTS\n");
return 1;
}
ctx->ah = ibv_create_ah(ctx->pd, &ah_attr);
if (!ctx->ah) {
fprintf(stderr, "Failed to create AH\n");
return 1;
}
return 0;
}
static struct pingpong_dest *pp_client_exch_dest(const char *servername, int port,
const struct pingpong_dest *my_dest)
{
struct addrinfo *res, *t;
struct addrinfo hints;
char service[6];
char msg[sizeof "0000:000000:000000"];
int n;
SOCKET sockfd = INVALID_SOCKET;
struct pingpong_dest *rem_dest = NULL;
memset(&hints, 0, sizeof hints);
hints.ai_family = AF_UNSPEC;
hints.ai_socktype = SOCK_STREAM;
	sprintf(service, "%d", port);
n = getaddrinfo(servername, service, &hints, &res);
if (n != 0) {
fprintf(stderr, "%s for %s:%d\n", gai_strerror(n), servername, port);
return NULL;
}
for (t = res; t; t = t->ai_next) {
sockfd = socket(t->ai_family, t->ai_socktype, t->ai_protocol);
if (sockfd != INVALID_SOCKET) {
if (!connect(sockfd, t->ai_addr, t->ai_addrlen))
break;
closesocket(sockfd);
sockfd = INVALID_SOCKET;
}
}
freeaddrinfo(res);
if (sockfd == INVALID_SOCKET) {
fprintf(stderr, "Couldn't connect to %s:%d\n", servername, port);
return NULL;
}
sprintf(msg, "%04x:%06x:%06x", my_dest->lid, my_dest->qpn, my_dest->psn);
if (send(sockfd, msg, sizeof msg, 0) != sizeof msg) {
fprintf(stderr, "Couldn't send local address\n");
goto out;
}
if (recv(sockfd, msg, sizeof msg, 0) != sizeof msg) {
perror("client recv");
fprintf(stderr, "Couldn't recv remote address\n");
goto out;
}
send(sockfd, "done", sizeof "done", 0);
rem_dest = malloc(sizeof *rem_dest);
if (!rem_dest)
goto out;
sscanf(msg, "%x:%x:%x", &rem_dest->lid, &rem_dest->qpn, &rem_dest->psn);
out:
closesocket(sockfd);
return rem_dest;
}
static struct pingpong_dest *pp_server_exch_dest(struct pingpong_context *ctx,
int ib_port, int port, int sl,
const struct pingpong_dest *my_dest)
{
struct addrinfo *res, *t;
struct addrinfo hints;
char service[6];
char msg[sizeof "0000:000000:000000"];
int n;
SOCKET sockfd = INVALID_SOCKET, connfd;
struct pingpong_dest *rem_dest = NULL;
memset(&hints, 0, sizeof hints);
hints.ai_flags = AI_PASSIVE;
hints.ai_family = AF_UNSPEC;
hints.ai_socktype = SOCK_STREAM;
	sprintf(service, "%d", port);
n = getaddrinfo(NULL, service, &hints, &res);
if (n != 0) {
fprintf(stderr, "%s for port %d\n", gai_strerror(n), port);
return NULL;
}
for (t = res; t; t = t->ai_next) {
sockfd = socket(t->ai_family, t->ai_socktype, t->ai_protocol);
if (sockfd != INVALID_SOCKET) {
n = 0;
setsockopt(sockfd, IPPROTO_IPV6, IPV6_V6ONLY, (char *) &n, sizeof n);
n = 1;
setsockopt(sockfd, SOL_SOCKET, SO_REUSEADDR, (char *) &n, sizeof n);
if (!bind(sockfd, t->ai_addr, t->ai_addrlen))
break;
closesocket(sockfd);
sockfd = INVALID_SOCKET;
}
}
freeaddrinfo(res);
if (sockfd == INVALID_SOCKET) {
fprintf(stderr, "Couldn't listen to port %d\n", port);
return NULL;
}
listen(sockfd, 1);
connfd = accept(sockfd, NULL, 0);
closesocket(sockfd);
if (connfd == INVALID_SOCKET) {
fprintf(stderr, "accept() failed\n");
return NULL;
}
n = recv(connfd, msg, sizeof msg, 0);
if (n != sizeof msg) {
perror("server recv");
fprintf(stderr, "%d/%d: Couldn't recv remote address\n", n, (int) sizeof msg);
goto out;
}
rem_dest = malloc(sizeof *rem_dest);
if (!rem_dest)
goto out;
sscanf(msg, "%x:%x:%x", &rem_dest->lid, &rem_dest->qpn, &rem_dest->psn);
if (pp_connect_ctx(ctx, ib_port, my_dest->psn, sl, rem_dest)) {
fprintf(stderr, "Couldn't connect to remote QP\n");
free(rem_dest);
rem_dest = NULL;
goto out;
}
sprintf(msg, "%04x:%06x:%06x", my_dest->lid, my_dest->qpn, my_dest->psn);
if (send(connfd, msg, sizeof msg, 0) != sizeof msg) {
fprintf(stderr, "Couldn't send local address\n");
free(rem_dest);
rem_dest = NULL;
goto out;
}
recv(connfd, msg, sizeof msg, 0);
out:
closesocket(connfd);
return rem_dest;
}
static struct pingpong_context *pp_init_ctx(struct ibv_device *ib_dev, int size,
int rx_depth, int port,
int use_event)
{
struct pingpong_context *ctx;
ctx = malloc(sizeof *ctx);
if (!ctx)
return NULL;
ctx->size = size;
ctx->rx_depth = rx_depth;
ctx->buf = malloc(size + 40);
if (!ctx->buf) {
fprintf(stderr, "Couldn't allocate work buf.\n");
return NULL;
}
memset(ctx->buf, 0, size + 40);
ctx->context = ibv_open_device(ib_dev);
if (!ctx->context) {
fprintf(stderr, "Couldn't get context for %s\n",
ibv_get_device_name(ib_dev));
return NULL;
}
if (use_event) {
ctx->channel = ibv_create_comp_channel(ctx->context);
if (!ctx->channel) {
fprintf(stderr, "Couldn't create completion channel\n");
return NULL;
}
} else
ctx->channel = NULL;
ctx->pd = ibv_alloc_pd(ctx->context);
if (!ctx->pd) {
fprintf(stderr, "Couldn't allocate PD\n");
return NULL;
}
ctx->mr = ibv_reg_mr(ctx->pd, ctx->buf, size + 40, IBV_ACCESS_LOCAL_WRITE);
if (!ctx->mr) {
fprintf(stderr, "Couldn't register MR\n");
return NULL;
}
ctx->cq = ibv_create_cq(ctx->context, rx_depth + 1, NULL,
ctx->channel, 0);
if (!ctx->cq) {
fprintf(stderr, "Couldn't create CQ\n");
return NULL;
}
{
struct ibv_qp_init_attr attr;
memset(&attr, 0, sizeof attr);
attr.send_cq = ctx->cq;
attr.recv_cq = ctx->cq;
attr.cap.max_send_wr = 1;
attr.cap.max_recv_wr = rx_depth;
attr.cap.max_send_sge = 1;
attr.cap.max_recv_sge = 1;
		attr.qp_type = IBV_QPT_UD;
ctx->qp = ibv_create_qp(ctx->pd, &attr);
if (!ctx->qp) {
fprintf(stderr, "Couldn't create QP\n");
return NULL;
}
}
{
struct ibv_qp_attr attr;
memset(&attr, 0, sizeof attr);
attr.qp_state = IBV_QPS_INIT;
attr.pkey_index = 0;
attr.port_num = (uint8_t) port;
attr.qkey = 0x11111111;
if (ibv_modify_qp(ctx->qp, &attr,
IBV_QP_STATE |
IBV_QP_PKEY_INDEX |
IBV_QP_PORT |
IBV_QP_QKEY)) {
fprintf(stderr, "Failed to modify QP to INIT\n");
return NULL;
}
}
return ctx;
}
int pp_close_ctx(struct pingpong_context *ctx)
{
if (ibv_destroy_qp(ctx->qp)) {
fprintf(stderr, "Couldn't destroy QP\n");
return 1;
}
if (ibv_destroy_cq(ctx->cq)) {
fprintf(stderr, "Couldn't destroy CQ\n");
return 1;
}
if (ibv_dereg_mr(ctx->mr)) {
fprintf(stderr, "Couldn't deregister MR\n");
return 1;
}
if (ibv_destroy_ah(ctx->ah)) {
fprintf(stderr, "Couldn't destroy AH\n");
return 1;
}
if (ibv_dealloc_pd(ctx->pd)) {
fprintf(stderr, "Couldn't deallocate PD\n");
return 1;
}
if (ctx->channel) {
if (ibv_destroy_comp_channel(ctx->channel)) {
fprintf(stderr, "Couldn't destroy completion channel\n");
return 1;
}
}
if (ibv_close_device(ctx->context)) {
fprintf(stderr, "Couldn't release context\n");
return 1;
}
free(ctx->buf);
free(ctx);
return 0;
}
static int pp_post_recv(struct pingpong_context *ctx, int n)
{
struct ibv_sge list;
struct ibv_recv_wr wr;
struct ibv_recv_wr *bad_wr;
int i;
list.addr = (uintptr_t) ctx->buf;
list.length = ctx->size + 40;
list.lkey = ctx->mr->lkey;
wr.next = NULL;
wr.wr_id = PINGPONG_RECV_WRID;
wr.sg_list = &list;
wr.num_sge = 1;
for (i = 0; i < n; ++i)
if (ibv_post_recv(ctx->qp, &wr, &bad_wr))
break;
return i;
}
static int pp_post_send(struct pingpong_context *ctx, uint32_t qpn)
{
struct ibv_sge list;
struct ibv_send_wr wr;
struct ibv_send_wr *bad_wr;
list.addr = (uintptr_t) ctx->buf + 40;
list.length = ctx->size;
list.lkey = ctx->mr->lkey;
wr.next = NULL;
wr.wr_id = PINGPONG_SEND_WRID;
wr.sg_list = &list;
wr.num_sge = 1;
wr.opcode = IBV_WR_SEND;
wr.send_flags = IBV_SEND_SIGNALED;
wr.wr.ud.ah = ctx->ah;
wr.wr.ud.remote_qpn = qpn;
wr.wr.ud.remote_qkey = 0x11111111;
return ibv_post_send(ctx->qp, &wr, &bad_wr);
}
static void usage(const char *argv0)
{
printf("Usage:\n");
printf(" %s start a server and wait for connection\n", argv0);
printf(" %s -h <host> connect to server at <host>\n", argv0);
printf("\n");
printf("Options:\n");
printf(" -p <port> listen on/connect to port <port> (default 18515)\n");
printf(" -d <dev> use IB device <dev> (default first device found)\n");
printf(" -i <port> use port <port> of IB device (default 1)\n");
printf(" -s <size> size of message to exchange (default 2048)\n");
printf(" -r <dep> number of receives to post at a time (default 500)\n");
printf(" -n <iters> number of exchanges (default 1000)\n");
printf(" -e sleep on CQ events (default poll)\n");
}
int __cdecl main(int argc, char *argv[])
{
struct ibv_device **dev_list;
struct ibv_device *ib_dev;
struct pingpong_context *ctx;
struct pingpong_dest my_dest;
struct pingpong_dest *rem_dest;
LARGE_INTEGER start, end, freq;
char *ib_devname = NULL;
char *servername = NULL;
int port = 18515;
int ib_port = 1;
int size = 2048;
int rx_depth = 500;
int iters = 1000;
int use_event = 0;
int routs;
int rcnt, scnt;
int num_cq_events = 0;
int sl = 0;
WORD version;
WSADATA data;
int err;
srand((unsigned int) time(NULL));
version = MAKEWORD(2, 2);
err = WSAStartup(version, &data);
if (err)
return -1;
while (1) {
int c;
c = getopt(argc, argv, "h:p:d:i:s:r:n:l:e");
if (c == -1)
break;
switch (c) {
case 'p':
port = strtol(optarg, NULL, 0);
if (port < 0 || port > 65535) {
usage(argv[0]);
return 1;
}
break;
case 'd':
ib_devname = _strdup(optarg);
break;
case 'i':
ib_port = strtol(optarg, NULL, 0);
if (ib_port < 0) {
usage(argv[0]);
return 1;
}
break;
case 's':
size = strtol(optarg, NULL, 0);
break;
case 'r':
rx_depth = strtol(optarg, NULL, 0);
break;
case 'n':
iters = strtol(optarg, NULL, 0);
break;
case 'l':
sl = strtol(optarg, NULL, 0);
break;
case 'e':
++use_event;
break;
case 'h':
servername = _strdup(optarg);
break;
default:
usage(argv[0]);
return 1;
}
}
dev_list = ibv_get_device_list(NULL);
if (!dev_list) {
fprintf(stderr, "No IB devices found\n");
return 1;
}
if (!ib_devname) {
ib_dev = *dev_list;
if (!ib_dev) {
fprintf(stderr, "No IB devices found\n");
return 1;
}
} else {
int i;
for (i = 0; dev_list[i]; ++i)
if (!strcmp(ibv_get_device_name(dev_list[i]), ib_devname))
break;
ib_dev = dev_list[i];
if (!ib_dev) {
fprintf(stderr, "IB device %s not found\n", ib_devname);
return 1;
}
}
ctx = pp_init_ctx(ib_dev, size, rx_depth, ib_port, use_event);
if (!ctx)
return 1;
routs = pp_post_recv(ctx, ctx->rx_depth);
if (routs < ctx->rx_depth) {
fprintf(stderr, "Couldn't post receive (%d)\n", routs);
return 1;
}
if (use_event)
if (ibv_req_notify_cq(ctx->cq, 0)) {
fprintf(stderr, "Couldn't request CQ notification\n");
return 1;
}
my_dest.lid = pp_get_local_lid(ctx->context, ib_port);
my_dest.qpn = ctx->qp->qp_num;
my_dest.psn = rand() & 0xffffff;
if (!my_dest.lid) {
fprintf(stderr, "Couldn't get local LID\n");
return 1;
}
printf(" local address: LID 0x%04x, QPN 0x%06x, PSN 0x%06x\n",
my_dest.lid, my_dest.qpn, my_dest.psn);
if (servername)
rem_dest = pp_client_exch_dest(servername, port, &my_dest);
else
rem_dest = pp_server_exch_dest(ctx, ib_port, port, sl, &my_dest);
if (!rem_dest)
return 1;
printf(" remote address: LID 0x%04x, QPN 0x%06x, PSN 0x%06x\n",
rem_dest->lid, rem_dest->qpn, rem_dest->psn);
if (servername)
if (pp_connect_ctx(ctx, ib_port, my_dest.psn, sl, rem_dest))
return 1;
ctx->pending = PINGPONG_RECV_WRID;
if (servername) {
if (pp_post_send(ctx, rem_dest->qpn)) {
fprintf(stderr, "Couldn't post send\n");
return 1;
}
ctx->pending |= PINGPONG_SEND_WRID;
}
if (!QueryPerformanceCounter(&start)) {
perror("QueryPerformanceCounter");
return 1;
}
rcnt = scnt = 0;
while (rcnt < iters || scnt < iters) {
if (use_event) {
struct ibv_cq *ev_cq;
void *ev_ctx;
if (ibv_get_cq_event(ctx->channel, &ev_cq, &ev_ctx)) {
fprintf(stderr, "Failed to get cq_event\n");
return 1;
}
++num_cq_events;
if (ev_cq != ctx->cq) {
fprintf(stderr, "CQ event for unknown CQ %p\n", ev_cq);
return 1;
}
if (ibv_req_notify_cq(ctx->cq, 0)) {
fprintf(stderr, "Couldn't request CQ notification\n");
return 1;
}
}
{
struct ibv_wc wc[2];
int ne, i;
do {
ne = ibv_poll_cq(ctx->cq, 2, wc);
if (ne < 0) {
fprintf(stderr, "poll CQ failed %d\n", ne);
return 1;
}
} while (!use_event && ne < 1);
for (i = 0; i < ne; ++i) {
if (wc[i].status != IBV_WC_SUCCESS) {
fprintf(stderr, "Failed status %s (%d) for wr_id %d\n",
ibv_wc_status_str(wc[i].status),
wc[i].status, (int) wc[i].wr_id);
return 1;
}
switch ((int) wc[i].wr_id) {
case PINGPONG_SEND_WRID:
++scnt;
break;
case PINGPONG_RECV_WRID:
if (--routs <= 1) {
routs += pp_post_recv(ctx, ctx->rx_depth - routs);
if (routs < ctx->rx_depth) {
fprintf(stderr,
"Couldn't post receive (%d)\n",
routs);
return 1;
}
}
++rcnt;
break;
default:
fprintf(stderr, "Completion for unknown wr_id %d\n",
(int) wc[i].wr_id);
return 1;
}
ctx->pending &= ~(int) wc[i].wr_id;
if (scnt < iters && !ctx->pending) {
if (pp_post_send(ctx, rem_dest->qpn)) {
fprintf(stderr, "Couldn't post send\n");
return 1;
}
ctx->pending = PINGPONG_RECV_WRID |
PINGPONG_SEND_WRID;
}
}
}
}
if (!QueryPerformanceCounter(&end) ||
!QueryPerformanceFrequency(&freq)) {
perror("QueryPerformanceCounter/Frequency");
return 1;
}
{
double sec = (double) (end.QuadPart - start.QuadPart) / (double) freq.QuadPart;
long long bytes = (long long) size * iters * 2;
printf("%I64d bytes in %.2f seconds = %.2f Mbit/sec\n",
bytes, sec, bytes * 8. / 1000000. / sec);
printf("%d iters in %.2f seconds = %.2f usec/iter\n",
iters, sec, sec * 1000000. / iters);
}
ibv_ack_cq_events(ctx->cq, num_cq_events);
if (pp_close_ctx(ctx))
return 1;
ibv_free_device_list(dev_list);
free(rem_dest);
return 0;
}
<file_sep>/src/status.h
#ifndef _NVDS_STATUS_H_
#define _NVDS_STATUS_H_
#include "common.h"
namespace nvds {
enum class Status : uint8_t {
OK, ERROR, NO_MEM,
};
} // namespace nvds
#endif // _NVDS_STATUS_H_
<file_sep>/src/server_main.cc
/*
* Startup module of server.
*/
#include "server.h"
using namespace nvds;
static Server* server;
static void SigInt(int signo) {
std::cout << std::endl << "num_recv: " << server->num_recv() << std::endl;
std::cout << "alloc measurement: " << std::endl;
server->alloc_measurement.Print();
std::cout << std::endl;
std::cout << "sync measurement: " << std::endl;
server->sync_measurement.Print();
std::cout << std::endl;
std::cout << "thread measurement: " << std::endl;
server->thread_measurement.Print();
std::cout << std::endl;
std::cout << "send measurement: " << std::endl;
server->send_measurement.Print();
std::cout << std::endl;
std::cout << "recv measurement: " << std::endl;
server->recv_measurement.Print();
std::cout << std::endl;
std::cout << std::flush;
exit(0);
}
static void Usage(int argc, const char* argv[]) {
std::cout << "Usage:" << std::endl
<< " " << argv[0] << " <port> <coord addr>" << std::endl;
}
int main(int argc, const char* argv[]) {
signal(SIGINT, SigInt);
// -2. Argument parsing.
if (argc < 3) {
Usage(argc, argv);
return -1;
}
uint16_t server_port = std::stoi(argv[1]);
std::string coord_addr = argv[2];
// Step 0, self initialization, including formatting nvm storage.
// DRAM emulated NVM.
auto nvm = AcquireNVM<NVMDevice>(kNVMDeviceSize);
if (nvm == nullptr) {
NVDS_ERR("acquire nvm failed: size = %PRIu64", kNVMDeviceSize);
return -1;
}
try {
Server s(server_port, nvm, kNVMDeviceSize);
server = &s;
// Step 1: request to the coordinator for joining in.
if (!s.Join(coord_addr)) {
NVDS_ERR("join cluster failed");
return -1;
}
// Step 2: get the arrangement from coordinator, contacting servers
// that will be affected, performing data migration.
// Step 3: acknowledge the coordinator of complemention
// Step 4: serving request
NVDS_LOG("Server startup");
NVDS_LOG("Listening at: %u", server_port);
NVDS_LOG("......");
s.Run();
} catch (boost::system::system_error& e) {
NVDS_ERR(e.what());
} catch (TransportException& e) {
NVDS_ERR(e.msg().c_str());
NVDS_LOG("initialize infiniband devices failed");
}
return 0;
}
<file_sep>/src/infiniband_old.cc
#include "infiniband.h"
#include <algorithm>
#include <malloc.h>
namespace nvds {
Infiniband::Infiniband(const char* device_name)
: dev_(device_name), pd_(dev_) {
std::fill(addr_handlers_.begin(), addr_handlers_.end(), nullptr);
}
Infiniband::~Infiniband() {
for (auto iter = addr_handlers_.rbegin(); iter != addr_handlers_.rend(); ++iter) {
auto ah = *iter;
if (ah != nullptr)
ibv_destroy_ah(ah);
}
}
void Infiniband::QueuePairInfo::Print() const {
std::cout << "lid: " << lid << std::endl;
std::cout << "qpn: " << qpn << std::endl;
std::cout << "psn: " << psn << std::endl;
std::cout << "rkey: " << rkey << std::endl;
std::cout << "vaddr: " << vaddr << std::endl;
}
Infiniband::DeviceList::DeviceList()
: dev_list_(ibv_get_device_list(nullptr)) {
if (dev_list_ == nullptr) {
throw TransportException(HERE, "opening infiniband device failed", errno);
}
}
ibv_device* Infiniband::DeviceList::Lookup(const char* name) {
if (name == nullptr)
return dev_list_[0];
for (int i = 0; dev_list_[i] != nullptr; ++i) {
if (strcmp(dev_list_[i]->name, name) == 0)
return dev_list_[i];
}
return nullptr;
}
Infiniband::Device::Device(const char* name) {
DeviceList dev_list;
auto dev = dev_list.Lookup(name);
if (dev == nullptr) {
throw TransportException(HERE,
Format("failed to find infiniband devide: %s", name ? name: "any"));
}
ctx_ = ibv_open_device(dev);
if (ctx_ == nullptr) {
throw TransportException(HERE,
Format("failed to find infiniband devide: %s", name ? name: "any"),
errno);
}
}
Infiniband::QueuePair::QueuePair(Infiniband& ib, ibv_qp_type type,
int ib_port, ibv_srq* srq, ibv_cq* scq, ibv_cq* rcq,
uint32_t max_send, uint32_t max_recv)
: ib(ib), type(type), ctx(ib.dev().ctx()),
ib_port(ib_port), pd(ib.pd().pd()), srq(srq),
qp(nullptr), scq(scq), rcq(rcq), psn(0), peer_lid(0), sin() {
assert(type == IBV_QPT_UD || type == IBV_QPT_RC);
ibv_qp_init_attr qpia;
memset(&qpia, 0, sizeof(qpia));
qpia.send_cq = scq;
qpia.recv_cq = rcq;
qpia.srq = srq;
qpia.cap.max_send_wr = max_send;
qpia.cap.max_recv_wr = max_recv;
// FIXME(wgtdkp): what is the `max_send_sge` for replication `qp`?
qpia.cap.max_send_sge = type == IBV_QPT_UD ? 1 : 8;
qpia.cap.max_recv_sge = type == IBV_QPT_UD ? 1 : 8;
qpia.cap.max_inline_data = kMaxInlineData;
qpia.qp_type = type;
qpia.sq_sig_all = 0; // Only generate CQEs on requested WQEs
qp = ibv_create_qp(pd, &qpia);
if (qp == nullptr) {
throw TransportException(HERE, "failed to create queue pair");
}
// Set queue pair to status:INIT
ibv_qp_attr qpa;
memset(&qpa, 0, sizeof(qpa));
qpa.qp_state = IBV_QPS_INIT;
qpa.pkey_index = 0;
qpa.port_num = static_cast<uint8_t>(ib_port);
qpa.qp_access_flags = IBV_ACCESS_REMOTE_READ | IBV_ACCESS_REMOTE_WRITE | IBV_ACCESS_LOCAL_WRITE;
qpa.qkey = kQkey;
int mask = IBV_QP_STATE | IBV_QP_PORT;
switch (type) {
case IBV_QPT_RC:
mask |= IBV_QP_ACCESS_FLAGS;
mask |= IBV_QP_PKEY_INDEX;
break;
case IBV_QPT_UD:
mask |= IBV_QP_QKEY;
mask |= IBV_QP_PKEY_INDEX;
break;
default:
assert(false);
}
int ret = ibv_modify_qp(qp, &qpa, mask);
if (ret != 0) {
ibv_destroy_qp(qp);
throw TransportException(HERE, ret);
}
}
uint32_t Infiniband::QueuePair::GetPeerQPNum() const {
ibv_qp_attr qpa;
ibv_qp_init_attr qpia;
auto err = ibv_query_qp(qp, &qpa, IBV_QP_DEST_QPN, &qpia);
if (err != 0) {
throw TransportException(HERE, err);
}
return qpa.dest_qp_num;
}
int Infiniband::QueuePair::GetState() const {
ibv_qp_attr qpa;
ibv_qp_init_attr qpia;
int err = ibv_query_qp(qp, &qpa, IBV_QP_STATE, &qpia);
if (err != 0) {
throw TransportException(HERE, err);
}
return qpa.qp_state;
}
// Called only on RC queue pair
void Infiniband::QueuePair::Plumb(enum ibv_qp_state state,
const QueuePairInfo& qpi) {
assert(type == IBV_QPT_RC);
assert(GetState() == IBV_QPS_INIT);
assert(state == IBV_QPS_RTR || state == IBV_QPS_RTS);
ibv_qp_attr qpa;
memset(&qpa, 0, sizeof(qpa));
// We must set qp into RTR state before RTS
qpa.qp_state = IBV_QPS_RTR;
qpa.path_mtu = IBV_MTU_4096;
qpa.dest_qp_num = qpi.qpn;
qpa.rq_psn = qpi.psn;
qpa.max_dest_rd_atomic = 1;
qpa.min_rnr_timer = 12;
qpa.ah_attr.is_global = 0;
qpa.ah_attr.dlid = qpi.lid;
qpa.ah_attr.sl = 0;
qpa.ah_attr.src_path_bits = 0;
qpa.ah_attr.port_num = static_cast<uint8_t>(ib_port);
auto err = ibv_modify_qp(qp, &qpa,
IBV_QP_STATE | IBV_QP_AV | IBV_QP_PATH_MTU | IBV_QP_DEST_QPN |
IBV_QP_RQ_PSN | IBV_QP_MIN_RNR_TIMER | IBV_QP_MAX_DEST_RD_ATOMIC);
if (err != 0) {
throw TransportException(HERE, err);
}
if (state == IBV_QPS_RTR) {
return;
}
memset(&qpa, 0, sizeof(qpa));
qpa.qp_state = IBV_QPS_RTS;
// How long to wait before retrying if packet lost or server dead.
// Supposedly the timeout is 4.096us*2^timeout. However, the actual
// timeout appears to be 4.096us*2^(timeout+1), so the setting
// below creates a 135ms timeout.
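// Worked out: 4.096us * 2^(14+1) = 4.096us * 32768 = 134217.728us, i.e. about 135ms.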
qpa.timeout = 14;
// How many times to retry after timeouts before giving up.
qpa.retry_cnt = 7;
// How many times to retry after RNR (receiver not ready) condition
// before giving up. Occurs when the remote side has not yet posted
// a receive request.
qpa.rnr_retry = 7; // 7 is infinite retry.
qpa.sq_psn = psn;
qpa.max_rd_atomic = 1;
err = ibv_modify_qp(qp, &qpa,
IBV_QP_STATE | IBV_QP_TIMEOUT | IBV_QP_RETRY_CNT |
IBV_QP_RNR_RETRY | IBV_QP_SQ_PSN | IBV_QP_MAX_QP_RD_ATOMIC);
if (err != 0) {
throw TransportException(HERE, err);
}
peer_lid = qpi.lid;
}
// Bring UD queue pair into RTS status
void Infiniband::QueuePair::Activate() {
assert(type == IBV_QPT_UD);
assert(GetState() == IBV_QPS_INIT);
ibv_qp_attr qpa;
memset(&qpa, 0, sizeof(qpa));
qpa.qp_state = IBV_QPS_RTR;
auto err = ibv_modify_qp(qp, &qpa, IBV_QP_STATE);
if (err != 0) {
throw TransportException(HERE, err);
}
qpa.qp_state = IBV_QPS_RTS;
qpa.sq_psn = psn;
err = ibv_modify_qp(qp, &qpa, IBV_QP_STATE | IBV_QP_SQ_PSN);
if (err != 0) {
throw TransportException(HERE, err);
}
}
std::string Infiniband::Address::ToString() const {
return Format("%u:%u", lid, qpn);
}
Infiniband::RegisteredBuffers::RegisteredBuffers(ProtectionDomain& pd,
uint32_t buf_size, uint32_t buf_num, bool is_recv)
: buf_size_(buf_size), buf_num_(buf_num),
ptr_(nullptr), bufs_(nullptr), root_(nullptr) {
const size_t bytes = buf_size * buf_num;
ptr_ = memalign(kPageSize, bytes);
assert(ptr_ != nullptr);
auto mr = ibv_reg_mr(pd.pd(), ptr_, bytes,
IBV_ACCESS_REMOTE_WRITE | IBV_ACCESS_LOCAL_WRITE);
if (mr == nullptr) {
throw TransportException(HERE, "failed to register buffer", errno);
}
bufs_ = new Buffer[buf_num_];
char* p = static_cast<char*>(ptr_);
for (uint32_t i = 0; i < buf_num; ++i) {
new (&bufs_[i]) Buffer(p, buf_size_, mr, is_recv);
bufs_[i].next = i + 1 < buf_num ? &bufs_[i+1] : nullptr;
p += buf_size_;
}
root_ = bufs_;
}
Infiniband::RegisteredBuffers::~RegisteredBuffers() {
// Deregsiter memory region
ibv_dereg_mr(bufs_[0].mr);
free(ptr_);
delete[] bufs_;
}
/*
Infiniband::QueuePair* Infiniband::CreateQP(ibv_qp_type type, int ib_port,
ibv_srq* srq, ibv_cq* scq, ibv_cq* rcq, uint32_t max_send,
uint32_t max_recv, uint32_t qkey) {
return new QueuePair(*this, type, ib_port, srq, scq, rcq,
max_send, max_recv, qkey);
}
*/
uint16_t Infiniband::GetLid(int32_t port) {
ibv_port_attr port_attr;
auto ret = ibv_query_port(dev_.ctx(), static_cast<uint8_t>(port), &port_attr);
if (ret != 0) {
throw TransportException(HERE, ret);
}
return port_attr.lid;
}
// May blocking
Infiniband::Buffer* Infiniband::Receive(QueuePair* qp) {
Buffer* b;
while ((b = TryReceive(qp)) == nullptr) {}
return b;
}
Infiniband::Buffer* Infiniband::PollCQ(ibv_cq* cq, int num_entries) {
ibv_wc wc;
int r = ibv_poll_cq(cq, num_entries, &wc);
if (r == 0) {
return NULL;
} else if (r < 0) {
throw TransportException(HERE, r);
} else if (wc.status != IBV_WC_SUCCESS) {
throw TransportException(HERE, wc.status);
}
// wr_id is used as buffer address
auto b = reinterpret_cast<Buffer*>(wc.wr_id);
b->msg_len = wc.byte_len;
// FIXME(wgtdkp):
b->peer_addr = {Infiniband::kPort/*qp->ib_port*/, wc.slid, wc.src_qp};
return b;
}
void Infiniband::PostReceive(QueuePair* qp, Buffer* b) {
ibv_sge sge {
reinterpret_cast<uint64_t>(b->buf),
b->size,
b->mr->lkey
};
ibv_recv_wr rwr;
memset(&rwr, 0, sizeof(rwr));
rwr.wr_id = reinterpret_cast<uint64_t>(b);
rwr.next = nullptr;
rwr.sg_list = &sge;
rwr.num_sge = 1;
ibv_recv_wr* bad_rwr;
auto err = ibv_post_recv(qp->qp, &rwr, &bad_rwr);
if (err != 0) {
throw TransportException(HERE, "ibv_post_recv failed", err);
}
}
void Infiniband::PostSRQReceive(ibv_srq* srq, Buffer* b) {
ibv_sge sge {
reinterpret_cast<uint64_t>(b->buf),
b->size,
b->mr->lkey
};
ibv_recv_wr rwr;
memset(&rwr, 0, sizeof(rwr));
rwr.wr_id = reinterpret_cast<uint64_t>(b);
rwr.next = nullptr;
rwr.sg_list = &sge;
rwr.num_sge = 1;
ibv_recv_wr* bad_rwr;
auto err = ibv_post_srq_recv(srq, &rwr, &bad_rwr);
if (err != 0) {
throw TransportException(HERE, "ibv_post_srq_recv failed", err);
}
}
void Infiniband::PostSend(QueuePair* qp, Buffer* b,
uint32_t len, const Address* peer_addr, uint32_t peer_qkey) {
if (qp->type == IBV_QPT_UD) {
assert(peer_addr != nullptr);
} else {
assert(peer_addr == nullptr);
assert(peer_qkey == 0);
}
ibv_sge sge {
reinterpret_cast<uint64_t>(b->buf),
len,
b->mr->lkey
};
ibv_send_wr swr;
memset(&swr, 0, sizeof(swr));
swr.wr_id = reinterpret_cast<uint64_t>(b);
if (qp->type == IBV_QPT_UD) {
swr.wr.ud.ah = GetAddrHandler(*peer_addr);
swr.wr.ud.remote_qpn = peer_addr->qpn;
swr.wr.ud.remote_qkey = peer_qkey;
assert(swr.wr.ud.ah != nullptr);
}
swr.next = nullptr;
swr.sg_list = &sge;
swr.num_sge = 1;
swr.opcode = IBV_WR_SEND;
swr.send_flags = IBV_SEND_SIGNALED;
if (len <= kMaxInlineData) {
swr.send_flags |= IBV_SEND_INLINE;
}
ibv_send_wr* bad_swr;
auto err = ibv_post_send(qp->qp, &swr, &bad_swr);
if (err != 0) {
throw TransportException(HERE, "ibv_post_send failed", err);
}
}
void Infiniband::PostSendAndWait(QueuePair* qp, Buffer* b,
uint32_t len, const Address* peer_addr, uint32_t peer_qkey) {
assert(qp->type == IBV_QPT_UD);
PostSend(qp, b, len, peer_addr, peer_qkey);
ibv_wc wc;
while (ibv_poll_cq(qp->scq, 1, &wc) < 1) {}
// FIXME(wgtdkp):
// How can we make sure that the completed work we polled is the one
// we have just posted?
if (wc.status != IBV_WC_SUCCESS) {
throw TransportException(HERE, "PostSendAndWait failed", wc.status);
}
}
ibv_ah* Infiniband::GetAddrHandler(const Address& addr) {
auto& ah = addr_handlers_[addr.lid];
if (ah != nullptr) {
return ah;
}
ibv_ah_attr attr;
memset(&attr, 0, sizeof(attr));
attr.is_global = 0;
attr.dlid = addr.lid;
attr.sl = 1;
attr.src_path_bits = 0;
attr.port_num = kPort;
return ah = ibv_create_ah(pd_.pd(), &attr);
}
} // namespace nvds
<file_sep>/src/modification.h
#ifndef _NVDS_MODIFICATION_H_
#define _NVDS_MODIFICATION_H_
#include "common.h"
namespace nvds {
struct Modification {
uint32_t des;
uint32_t len;
uint64_t src;
Modification(uint32_t des, uint64_t src, uint32_t len)
: des(des), len(len), src(src) {}
// For STL
Modification() : des(0), len(0), src(0) {}
bool operator<(const Modification& other) const {
return des < other.des;
}
// DEBUG
void Print() const {
std::cout << "des: " << des << "; ";
std::cout << "len: " << len << "; ";
std::cout << "src: " << src << std::endl;
}
};
using ModificationList = std::vector<Modification>;
struct Position {
uint32_t offset;
uint32_t len;
Position() = delete;
};
struct ModificationLog {
uint32_t cnt;
Position positions[0];
ModificationLog() = delete;
};
} // namespace nvds
#endif // _NVDS_MODIFICATION_H_
<file_sep>/src/test_memcached.cc
#include <libmemcached/memcached.h>
#include <cassert>
#include <vector>
#include <string>
#include <iostream>
#include <chrono>
#include <random>
using VecStr = std::vector<std::string>;
static VecStr GenRandomStrings(size_t len, size_t n) {
VecStr ans;
std::default_random_engine g;
std::uniform_int_distribution<int> dist('a', 'z');
for (size_t i = 0; i < n; ++i) {
ans.emplace_back(len, 0);
for (size_t j = 0; j < len; ++j) {
ans[i][j] = dist(g);
}
}
return ans;
}
void test(size_t n) {
const char* config_string = "--SERVER=127.0.0.1";
memcached_st* memc = memcached(config_string, strlen(config_string));
auto keys = GenRandomStrings(16, n);
auto vals = VecStr(n, std::string(16, 'a'));
auto begin = std::chrono::high_resolution_clock::now();
for (size_t i = 0; i < n; ++i) {
auto& key = keys[i];
auto& val = vals[i];
memcached_return_t rc = memcached_set(memc, key.c_str(), key.size(), val.c_str(), val.size(), 0, 0);
assert(rc == MEMCACHED_SUCCESS);
}
auto end = std::chrono::high_resolution_clock::now();
double t = std::chrono::duration_cast<std::chrono::duration<double>>(end - begin).count();
double qps = keys.size() / t;
std::cout << "time: " << t << std::endl;
std::cout << "qps: " << qps << std::endl;
memcached_free(memc);
}
int main() {
test(10000);
return 0;
}
<file_sep>/src/allocator.cc
#include "allocator.h"
#include <cstring>
namespace nvds {
void Allocator::Format() {
ModificationList modifications;
set_modifications(&modifications);
memset(flm_, 0, kSize);
uint32_t blk = sizeof(FreeListManager);
Write(GetLastFreeList(), blk);
// 'The block before first block' is not free
// The last 4 bytes is kept for the free tag
uint32_t blk_size = (kSize - blk - sizeof(uint32_t)) &
~static_cast<uint32_t>(0x0f);
Write(blk, blk_size & ~BlockHeader::kFreeMask);
Write(blk + blk_size - sizeof(uint32_t), blk_size);
SetTheFreeTag(blk, blk_size);
// TODO(wgtdkp): set `next` and `prev` to nullptr (unnecessary if `memset` was called).
}
uint32_t Allocator::AllocBlock(uint32_t blk_size) {
auto free_list = GetFreeListByBlockSize(blk_size);
uint32_t head;
while (free_list < kNumFreeLists * sizeof(uint32_t) &&
(head = Read<uint32_t>(free_list)) == 0) {
free_list += sizeof(uint32_t);
}
if (head == 0) {
return 0;
}
auto head_size = ReadTheSizeTag(head);
assert(ReadTheFreeTag(head, head_size) != 0);
assert(head_size >= blk_size);
if (head_size == blk_size) {
auto next_blk = Read<uint32_t>(head + offsetof(BlockHeader, next));
ResetTheFreeTag(head, head_size);
Write(free_list, next_blk);
Write(next_blk + offsetof(BlockHeader, prev),
static_cast<uint32_t>(0));
return head;
}
return SplitBlock(head, head_size, blk_size);
}
void Allocator::FreeBlock(uint32_t blk) {
auto blk_size = ReadTheSizeTag(blk);
auto blk_size_old = blk_size;
auto prev_blk_size = Read<uint32_t>(blk - sizeof(uint32_t));
auto prev_blk = blk - prev_blk_size;
auto next_blk = blk + blk_size;
auto next_blk_size = ReadTheSizeTag(next_blk);
if (ReadThePrevFreeTag(blk) &&
blk_size <= kMaxBlockSize &&
prev_blk_size <= kMaxBlockSize) {
RemoveBlock(prev_blk, prev_blk_size);
blk = prev_blk;
blk_size += prev_blk_size;
}
if (ReadTheFreeTag(next_blk, next_blk_size) &&
blk_size <= kMaxBlockSize &&
next_blk_size <= kMaxBlockSize) {
RemoveBlock(next_blk, next_blk_size);
blk_size += next_blk_size;
}
// If the blk_size changed, write it.
if (blk_size != blk_size_old) {
Write(blk, ReadThePrevFreeTag(blk) | blk_size);
Write(blk + blk_size - sizeof(uint32_t), blk_size);
}
SetTheFreeTag(blk, blk_size);
auto free_list = GetFreeListByBlockSize(blk_size);
auto head = Read<uint32_t>(free_list);
Write(free_list, blk);
Write(blk + offsetof(BlockHeader, prev), static_cast<uint32_t>(0));
Write(blk + offsetof(BlockHeader, next), head);
if (head != 0) {
Write(head + offsetof(BlockHeader, prev), blk);
}
}
void Allocator::RemoveBlock(uint32_t blk, uint32_t blk_size) {
assert(ReadTheFreeTag(blk, blk_size) != 0);
auto prev_blk = Read<uint32_t>(blk + offsetof(BlockHeader, prev));
auto next_blk = Read<uint32_t>(blk + offsetof(BlockHeader, next));
if (prev_blk != 0) {
Write(prev_blk + offsetof(BlockHeader, next),
Read<uint32_t>(blk + offsetof(BlockHeader, next)));
} else {
auto free_list = GetFreeListByBlockSize(blk_size);
Write(free_list, Read<uint32_t>(blk + offsetof(BlockHeader, next)));
}
if (next_blk != 0) {
Write(next_blk + offsetof(BlockHeader, prev),
Read<uint32_t>(blk + offsetof(BlockHeader, prev)));
}
}
uint32_t Allocator::SplitBlock(uint32_t blk, uint32_t blk_size,
uint32_t needed_size) {
auto next_blk = Read<uint32_t>(blk + offsetof(BlockHeader, next));
auto free_list = GetFreeListByBlockSize(blk_size);
assert(blk_size > needed_size);
auto new_size = blk_size - needed_size;
Write(free_list, next_blk);
if (next_blk != 0) {
Write(next_blk + offsetof(BlockHeader, prev), static_cast<uint32_t>(0));
}
// Update size
Write(blk, ReadThePrevFreeTag(blk) | new_size);
Write(blk + new_size - sizeof(uint32_t), new_size);
free_list = GetFreeListByBlockSize(new_size);
auto head = Read<uint32_t>(free_list);
Write(blk + offsetof(BlockHeader, prev), static_cast<uint32_t>(0));
Write(blk + offsetof(BlockHeader, next), head);
Write(free_list, blk);
if (head != 0) {
Write(head + offsetof(BlockHeader, prev), blk);
}
// The rest smaller block is still free
Write(blk + new_size, BlockHeader::kFreeMask | needed_size);
Write(blk + blk_size - sizeof(uint32_t), needed_size);
// Piece splitted is not free now.
ResetTheFreeTag(blk + new_size, needed_size);
return blk + new_size;
}
} // namespace nvds
<file_sep>/src/session.h
#ifndef _NVDS_SESSION_H_
#define _NVDS_SESSION_H_
#include "message.h"
#include <boost/asio.hpp>
#include <boost/function.hpp>
namespace nvds {
class Session;
using MessageHandler = boost::function<void(std::shared_ptr<Session>,
std::shared_ptr<Message>)>;
using boost::asio::ip::tcp;
class Session : public std::enable_shared_from_this<Session> {
public:
explicit Session(tcp::socket conn_sock,
MessageHandler recv_msg_handler=0,
MessageHandler send_msg_handler=0)
: conn_sock_(std::move(conn_sock)),
recv_msg_handler_(recv_msg_handler),
send_msg_handler_(send_msg_handler) {}
~Session() { /* Close socket automatically */ }
// Start async message send/recv
void Start();
void AsyncRecvMessage(std::shared_ptr<Message> msg);
void AsyncSendMessage(std::shared_ptr<Message> msg);
// Throw exception `boost::system::system_error` on failure
Message RecvMessage();
// Throw exception `boost::system::system_error` on failure
void SendMessage(const Message& msg);
std::string GetPeerAddr() const {
return conn_sock_.remote_endpoint().address().to_string();
}
private:
tcp::socket conn_sock_;
MessageHandler recv_msg_handler_;
MessageHandler send_msg_handler_;
};
} // namespace nvds
#endif // _NVDS_SESSION_H_
<file_sep>/test/test_json.cc
#include "../json/src/json.hpp"
#include <fstream>
#include <string>
using json = nlohmann::json;
int main(int argc, const char* argv[]) {
if (argc < 2) {
std::cout << "need parameter" << std::endl;
return -1;
}
json cfg_json;
try {
cfg_json = json::parse(std::ifstream(argv[1]));
} catch (std::invalid_argument& inv_arg) {
std::cout << "invalid argument" << std::endl;
return -2;
}
std::cout << static_cast<int>(cfg_json["coordinator"]["tcp_port"]) << std::endl;
return 0;
}
<file_sep>/src/infiniband_old.h
#ifndef _NVDS_INFINIBAND_H_
#define _NVDS_INFINIBAND_H_
#include "common.h"
#include "spinlock.h"
#include "transport.h"
#include <cstring>
#include <mutex>
#include <infiniband/verbs.h>
#include <netinet/ip.h>
namespace nvds {
struct Request;
struct Response;
// Derived from RAMCloud Infiniband.h
class Infiniband {
public:
static const uint16_t kPort = 1;
static const uint32_t kMaxInlineData = 400;
// If the device name is not specified, choose the first one in device list
explicit Infiniband(const char* device_name=nullptr);
~Infiniband();
// This class includes informations for constructing a connection
struct QueuePairInfo {
uint16_t lid; // Local id
uint32_t qpn; // Queue pair number
uint32_t psn; // Packet sequence number
uint32_t rkey; // Memory region key
uint64_t vaddr; // Virtual address of memory regions
// DEBUG
void Print() const;
};
class DeviceList {
public:
DeviceList();
~DeviceList() { ibv_free_device_list(dev_list_); }
DISALLOW_COPY_AND_ASSIGN(DeviceList);
ibv_device* Lookup(const char* name);
private:
ibv_device** const dev_list_;
};
class Device {
public:
explicit Device(const char* name=nullptr);
~Device() { ibv_close_device(ctx_); }
DISALLOW_COPY_AND_ASSIGN(Device);
ibv_context* ctx() { return ctx_; }
const ibv_context* ctx() const { return ctx_; }
private:
ibv_context* ctx_;
};
class ProtectionDomain {
public:
explicit ProtectionDomain(Device& device)
: pd_(ibv_alloc_pd(device.ctx())) {
if (pd_ == nullptr) {
throw TransportException(HERE,
"failed to allocate infiniband protection domain", errno);
}
}
~ProtectionDomain() { ibv_dealloc_pd(pd_); }
DISALLOW_COPY_AND_ASSIGN(ProtectionDomain);
operator ibv_pd() { return *pd_; }
ibv_pd* pd() { return pd_; }
const ibv_pd* pd() const { return pd_; }
private:
ibv_pd* const pd_;
};
struct QueuePair {
Infiniband& ib; // this QP belongs to
ibv_qp_type type; // QP type
ibv_context* ctx; // Device context
int ib_port; // Physical port
ibv_pd* pd;
ibv_srq* srq;
ibv_qp* qp;
ibv_cq* scq;
ibv_cq* rcq;
uint32_t psn;
uint16_t peer_lid;
sockaddr_in sin;
QueuePair(Infiniband& ib, ibv_qp_type type,
int ib_port, ibv_srq* srq, ibv_cq* scq,
ibv_cq* rcq, uint32_t max_send,
uint32_t max_recv);
~QueuePair() { ibv_destroy_qp(qp); }
DISALLOW_COPY_AND_ASSIGN(QueuePair);
uint32_t GetLocalQPNum() const { return qp->qp_num; }
uint32_t GetPeerQPNum() const;
int GetState() const;
// Bring a queue pair into RTS state
void Plumb(enum ibv_qp_state state, const QueuePairInfo& qpi);
void Activate();
//private:
// ibv_qp* Create(uint32_t max_send, uint32_t max_recv, );
};
struct Address {
int32_t ib_port;
uint16_t lid;
uint32_t qpn;
// `ah` is moved to class `Infiniband`
// ibv_ah* ah; // Could be nullptr
std::string ToString() const;
bool operator==(const Address& other) const {
return ib_port == other.ib_port &&
lid == other.lid &&
qpn == other.qpn;
}
bool operator!=(const Address& other) const {
return !(*this == other);
}
};
struct Buffer {
Buffer* next;
char* buf;
uint32_t size; // Buffer size
uint32_t msg_len; // message length
ibv_mr* mr; // IBV memory region
Address peer_addr; // Peer address of this request
bool is_recv; // Is this a recv buffer
Buffer(char* b, uint32_t size, ibv_mr* mr, bool is_recv=false)
: next(nullptr), buf(b), size(size), mr(mr), is_recv(is_recv) {}
Buffer() : next(nullptr), buf(nullptr), size(0), msg_len(0),
mr(nullptr), is_recv(false) {}
DISALLOW_COPY_AND_ASSIGN(Buffer);
Request* MakeRequest() {
return reinterpret_cast<Request*>(buf + kIBUDPadding);
}
Response* MakeResponse() {
// TODO(wgtdkp): are you sure ?
return reinterpret_cast<Response*>(buf + kIBUDPadding);
}
};
// A registered buffer pool
class RegisteredBuffers {
public:
RegisteredBuffers(ProtectionDomain& pd,
uint32_t buf_size, uint32_t buf_num, bool is_recv=false);
~RegisteredBuffers();
DISALLOW_COPY_AND_ASSIGN(RegisteredBuffers);
// Used as buffer pool
Buffer* Alloc() {
std::lock_guard<Spinlock> _(spinlock_);
auto ret = root_;
if (root_ != nullptr) {
root_ = root_->next;
}
return ret;
}
void Free(Buffer* b) {
std::lock_guard<Spinlock> _(spinlock_);
b->next = root_;
root_ = b;
}
Buffer& GetBuffer(const void* pos) {
auto idx = (static_cast<const char*>(pos) - static_cast<const char*>(ptr_)) / buf_size_;
return bufs_[idx];
}
Buffer& operator[](size_t idx) { return bufs_[idx]; }
const Buffer& operator[](size_t idx) const {
return const_cast<RegisteredBuffers*>(this)->operator[](idx);
}
Buffer* begin() { return bufs_; }
Buffer* end() { return bufs_ + buf_num_; }
const Buffer* root() const { return root_; }
private:
uint32_t buf_size_;
uint32_t buf_num_;
void* ptr_;
Buffer* bufs_;
Buffer* root_;
Spinlock spinlock_;
};
uint16_t GetLid(int32_t port);
//QueuePair* CreateQP(ibv_qp_type type, int ib_port,
// ibv_srq* srq, ibv_cq* scq,
// ibv_cq* rcq, uint32_t max_send,
// uint32_t max_recv, uint32_t qkey=0);
Buffer* TryReceive(QueuePair* qp) {
return PollCQ(qp->rcq, 1);
}
Buffer* TrySend(QueuePair* qp) {
return PollCQ(qp->scq, 1);
}
Buffer* Receive(QueuePair* qp);
void PostReceive(QueuePair* qp, Buffer* b);
void PostSRQReceive(ibv_srq* srq, Buffer* b);
void PostSend(QueuePair* qp, Buffer* b, uint32_t len,
const Address* peer_addr=nullptr,
uint32_t peer_qkey=0);
void PostSendAndWait(QueuePair* qp, Buffer* b, uint32_t len,
const Address* peer_addr=nullptr,
uint32_t peer_qkey=0);
ibv_cq* CreateCQ(int min_entries) {
return ibv_create_cq(dev_.ctx(), min_entries, nullptr, nullptr, 0);
}
void DestroyCQ(ibv_cq* cq) {
ibv_destroy_cq(cq);
}
ibv_srq* CreateSRQ(uint32_t max_wr, uint32_t max_sge) {
ibv_srq_init_attr sia;
memset(&sia, 0, sizeof(sia));
sia.srq_context = dev_.ctx();
sia.attr.max_wr = max_wr;
sia.attr.max_sge = max_sge;
return ibv_create_srq(pd_.pd(), &sia);
}
void DestroySRQ(ibv_srq* srq) {
ibv_destroy_srq(srq);
}
Buffer* PollCQ(ibv_cq* cq, int num_entries);
ibv_ah* GetAddrHandler(const Address& addr);
Device& dev() { return dev_; }
const Device& dev() const { return dev_; }
ProtectionDomain& pd() { return pd_; }
const ProtectionDomain& pd() const { return pd_; }
private:
Device dev_;
ProtectionDomain pd_;
std::array<ibv_ah*, UINT16_MAX + 1> addr_handlers_;
};
} // namespace nvds
#endif // _NVDS_INFINIBAND_H_
<file_sep>/script/stop.sh
#!/bin/bash
killall -9 -q coordinator
while read line; do
server=($line)
ssh root@${server[0]} 'killall -9 -q server'
done < servers.txt
<file_sep>/src/test_client.cc
/*
* Compile: g++ -std=c++11 -Wall -g test_client.cc -lnvds -lboost_system -lboost_thread -pthread -libverbs
*/
#include "nvds/client.h"
#include <iostream>
#include <string>
int main() {
try {
nvds::Client c {"192.168.99.14"};
c.Put("hello", "world");
assert(!c.Add("hello", "NVDS"));
std::cout << "hello: " << c.Get("hello") << std::endl;
c.Del("hello");
assert(c.Add("hello", "world"));
c.Del("hello");
assert(c.Get("hello").size() == 0);
} catch (boost::system::system_error& e) {
NVDS_ERR(e.what());
} catch (nvds::TransportException& e) {
NVDS_ERR(e.msg().c_str());
NVDS_LOG("initialize infiniband devices failed");
}
return 0;
}
<file_sep>/src/coordinator.cc
#include "coordinator.h"
#include "json.hpp"
#include "message.h"
#include "session.h"
namespace nvds {
using nlohmann::json;
Coordinator::Coordinator()
: BasicServer(kCoordPort) {
}
void Coordinator::Run() {
Accept(std::bind(&Coordinator::HandleRecvMessage, this,
std::placeholders::_1, std::placeholders::_2),
std::bind(&Coordinator::HandleSendMessage, this,
std::placeholders::_1, std::placeholders::_2));
RunService();
}
void Coordinator::HandleRecvMessage(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg) {
switch (msg->sender_type()) {
case Message::SenderType::SERVER:
HandleMessageFromServer(session, msg);
break;
case Message::SenderType::CLIENT:
HandleMessageFromClient(session, msg);
break;
case Message::SenderType::COORDINATOR:
assert(false);
}
}
void Coordinator::HandleSendMessage(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg) {
// Receive message
session->AsyncRecvMessage(std::make_shared<Message>());
}
void Coordinator::HandleMessageFromServer(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg) {
switch (msg->type()) {
case Message::Type::REQ_JOIN:
//NVDS_LOG("join message: %s", msg->body().c_str());
HandleServerRequestJoin(session, msg);
break;
case Message::Type::REQ_LEAVE:
break;
case Message::Type::ACK_REJECT:
break;
case Message::Type::ACK_ERROR:
break;
case Message::Type::ACK_OK:
break;
default:
assert(false);
}
}
void Coordinator::HandleServerRequestJoin(std::shared_ptr<Session> session,
std::shared_ptr<Message> req) {
auto body = json::parse(req->body());
uint64_t nvm_size = body["size"];
NVDS_LOG("join request from server: [%s]", session->GetPeerAddr().c_str());
if (num_servers_ >= kNumServers) {
NVDS_LOG("too much servers, ignored");
return;
}
index_manager_.AddServer(session->GetPeerAddr(), body);
sessions_.emplace_back(session);
++num_servers_;
total_storage_ += nvm_size;
assert(num_servers_ <= kNumServers);
if (num_servers_ == kNumServers) {
NVDS_LOG("all servers' join request received. [total servers = %d]",
kNumServers);
ResponseAllJoins();
} else {
NVDS_LOG("[%d/%d] join requests received", num_servers_, kNumServers);
}
}
void Coordinator::ResponseAllJoins() {
assert(sessions_.size() == kNumServers);
for (size_t i = 0; i < sessions_.size(); ++i) {
json msg_body {
{"id", i},
{"index_manager", index_manager_}
};
sessions_[i]->AsyncSendMessage(std::make_shared<Message>(
Message::Header {Message::SenderType::COORDINATOR,
Message::Type::RES_JOIN},
msg_body.dump()));
}
sessions_.clear();
}
void Coordinator::HandleMessageFromClient(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg) {
switch (msg->type()) {
case Message::Type::REQ_JOIN:
HandleClientRequestJoin(session, msg);
break;
case Message::Type::REQ_LEAVE:
break;
case Message::Type::ACK_REJECT:
break;
case Message::Type::ACK_ERROR:
break;
case Message::Type::ACK_OK:
break;
default:
assert(false);
}
}
void Coordinator::HandleClientRequestJoin(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg) {
// TODO(wgtdkp): handle this error!
assert(num_servers_ == kNumServers);
assert(msg->sender_type() == Message::SenderType::CLIENT);
assert(msg->type() == Message::Type::REQ_JOIN);
NVDS_LOG("join request from client: [%s]", session->GetPeerAddr().c_str());
json j_body {
{"index_manager", index_manager_}
};
session->AsyncSendMessage(std::make_shared<Message>(
Message::Header {Message::SenderType::COORDINATOR,
Message::Type::RES_JOIN},
j_body.dump()));
}
} // namespace nvds
<file_sep>/src/server.h
#ifndef _NVDS_SERVER_H_
#define _NVDS_SERVER_H_
#include "basic_server.h"
#include "common.h"
#include "index.h"
#include "infiniband.h"
#include "measurement.h"
#include "message.h"
#include "spinlock.h"
#include "tablet.h"
#include <boost/asio.hpp>
#include <boost/function.hpp>
#include <condition_variable>
#include <mutex>
#include <thread>
#define ENABLE_MEASUREMENT
namespace nvds {
/*
* The abstraction of NVM device. No instantiation is allowed.
* For each server, there is only one NVMDevice object which is
* mapped to the address passed when the server is started.
*/
struct NVMDevice {
// The server id of the machine.
ServerId server_id;
// The size in byte of the NVM device
uint64_t size;
// The key hash range
KeyHash key_begin;
KeyHash key_end;
// The number of tablets
uint32_t tablet_num;
// The begin of tablets
NVMTablet tablets[0];
NVMDevice() = delete;
};
static const uint64_t kNVMDeviceSize = sizeof(NVMDevice) +
kNVMTabletSize * kNumTabletAndBackupsPerServer;
class Server : public BasicServer {
// For convenience
public:
Measurement alloc_measurement;
Measurement sync_measurement;
Measurement thread_measurement;
Measurement send_measurement;
Measurement recv_measurement;
public:
// Workers & tablets
friend class Worker;
using Work = Infiniband::Buffer;
Server(uint16_t port, NVMPtr<NVMDevice> nvm, uint64_t nvm_size);
~Server();
DISALLOW_COPY_AND_ASSIGN(Server);
ServerId id() const { return id_; }
bool active() const { return active_; }
uint64_t nvm_size() const { return nvm_size_; }
NVMPtr<NVMDevice> nvm() const { return nvm_; }
size_t num_recv() const { return num_recv_; }
void Run() override;
bool Join(const std::string& coord_addr);
void Leave();
void Listening();
void Poll();
// Dispatch the `work` to a specific `worker`, taking load balance into account.
void Dispatch(Work* work);
private:
void HandleRecvMessage(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg);
void HandleSendMessage(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg);
// Work items are enqueued by the dispatcher and dequeued by the
// workers, so the WorkQueue must be made thread safe.
class WorkQueue {
public:
WorkQueue() {}
DISALLOW_COPY_AND_ASSIGN(WorkQueue);
// Made Thread safe
void Enqueue(Work* work) {
std::lock_guard<Spinlock> _(spinlock_);
if (head_ == nullptr) {
//std::lock_guard<Spinlock> _(spinlock_);
tail_ = work;
head_ = work;
} else {
tail_->next = work;
tail_ = work;
}
}
// Made Thread safe
Work* Dequeue() {
std::lock_guard<Spinlock> _(spinlock_);
if (head_ == nullptr) {
return nullptr;
} else if (head_ == tail_) {
auto ans = head_;
tail_ = nullptr;
head_ = nullptr;
return const_cast<Work*>(ans);
} else {
auto ans = head_;
head_ = head_->next;
return const_cast<Work*>(ans);
}
}
Work* TryPollWork() {
Measurement m;
m.begin();
auto ans = Dequeue();
while (ans == nullptr && m.cur_period() < 50) {
ans = Dequeue();
}
return const_cast<Work*>(ans);
}
private:
volatile Work* head_ = nullptr;
Work* tail_ = nullptr;
Spinlock spinlock_;
};
class Worker {
public:
Worker(Server* server, Tablet* tablet)
: server_(server), tablet_(tablet),
slave_(std::bind(&Worker::Serve, this)) {}
void Enqueue(Work* work) {
std::unique_lock<std::mutex> lock(mtx_);
wq_.Enqueue(work);
#ifdef ENABLE_MEASUREMENT
server_->thread_measurement.begin();
#endif
cond_var_.notify_one();
}
private:
void Serve();
WorkQueue wq_;
Server* server_;
Tablet* tablet_;
std::mutex mtx_;
std::condition_variable cond_var_;
std::thread slave_;
};
static const uint32_t kMaxIBQueueDepth = 128;
ServerId id_;
bool active_;
uint64_t nvm_size_;
NVMPtr<NVMDevice> nvm_;
IndexManager index_manager_;
// Infiniband
Infiniband ib_;
Infiniband::Address ib_addr_;
Infiniband::RegisteredBuffers send_bufs_;
Infiniband::RegisteredBuffers recv_bufs_;
Infiniband::QueuePair* qp_;
// Worker
std::array<Worker*, kNumTabletsPerServer> workers_;
std::array<Tablet*, kNumTabletAndBackupsPerServer> tablets_;
// Statistic
size_t num_recv_ {0};
};
} // namespace nvds
#endif // _NVDS_SERVER_H_
<file_sep>/test/Makefile
TARGET = test
CC = gcc -std=c11
SRCS = ../src/consistent_hash.c test_consistent_hash.c \
../src/nvds.c test_nvds.c \
../src/rbtree.c test_rbtree.c \
test.c
CFLAGS = -g -Wall -D_XOPEN_SOURCE -D_GNU_SOURCE -I./ -I../src/
CFLAGS_MD5 = -lssl -lcrypto
OBJS_DIR = build/
OBJS = $(OBJS_DIR)consistent_hash.o $(OBJS_DIR)test_consistent_hash.o \
$(OBJS_DIR)nvds.o $(OBJS_DIR)test_nvds.o \
$(OBJS_DIR)rbtree.o $(OBJS_DIR)test_rbtree.o \
$(OBJS_DIR)test.o
default: all
all:
@mkdir -p $(OBJS_DIR)
@make $(TARGET)
$(TARGET): $(OBJS)
$(CC) -o $(OBJS_DIR)$@ $^ $(CFLAGS_MD5)
$(OBJS_DIR)consistent_hash.o: ../src/consistent_hash.c ../src/consistent_hash.h
$(CC) $(CFLAGS) -o $@ -c $<
$(OBJS_DIR)nvds.o: ../src/nvds.c ../src/nvds.h
$(CC) $(CFLAGS) -o $@ -c $<
$(OBJS_DIR)rbtree.o: ../src/rbtree.c ../src/rbtree.h
$(CC) $(CFLAGS) -o $@ -c $<
$(OBJS_DIR)test_consistent_hash.o: test_consistent_hash.c
$(CC) $(CFLAGS) -o $@ -c $<
$(OBJS_DIR)test_nvds.o: test_nvds.c
	$(CC) $(CFLAGS) -o $@ -c $<
$(OBJS_DIR)test_rbtree.o: test_rbtree.c
	$(CC) $(CFLAGS) -o $@ -c $<
$(OBJS_DIR)test.o: test.c
	$(CC) $(CFLAGS) -o $@ -c $<
test_consistent_hash: $(OBJS_DIR)test_consistent_hash.o $(OBJS_DIR)consistent_hash.o $(OBJS_DIR)rbtree.o $(OBJS_DIR)test.o
$(CC) -o $(OBJS_DIR)$@ $^ $(CFLAGS_MD5)
.PHONY: clean
clean:
@rm -rf $(OBJS_DIR)
<file_sep>/src/nvm.h
#ifndef _NVDS_NVM_H_
#define _NVDS_NVM_H_
#include <cstdlib>
#include <cstddef>
namespace nvds {
// using nvmoffset_t = uint32_t;
/*
* A pointer to an object allocated on NVM. Since such objects are
* persistent on NVM, they should be allocated and freed by the
* Allocator. This class is an abstraction for accessing NVM.
*/
template<typename T>
class NVMPtr {
public:
explicit NVMPtr(T* ptr) : ptr_(ptr) {}
~NVMPtr() {}
T* ptr() { return ptr_; }
const T* ptr() const { return ptr_; }
T* operator->() { return ptr_; }
const T* operator->() const { return ptr_; }
T& operator*() { return *ptr_; }
const T& operator*() const { return *ptr_; }
bool operator==(const NVMPtr& other) const {
return ptr_ == other.ptr_;
}
bool operator!=(const NVMPtr& other) const {
return !(*this == other);
}
bool operator==(const std::nullptr_t& null) const {
return ptr_ == null;
}
bool operator!=(const std::nullptr_t& null) const {
return !(*this == nullptr);
}
NVMPtr<T> operator+(std::ptrdiff_t d) const {
return NVMPtr<T>(ptr_ + d);
}
NVMPtr<T> operator-(std::ptrdiff_t d) const {
return NVMPtr<T>(ptr_ - d);
}
std::ptrdiff_t operator-(NVMPtr<T> other) const {
return ptr_ - other.ptr_;
}
template<typename RT>
const NVMPtr<T>& operator=(NVMPtr<RT> rhs) {
ptr_ = rhs.ptr_;
return *this;
}
private:
T* ptr_;
};
/*
* Acquire NVM of the specified size.
*/
template <typename T>
NVMPtr<T> AcquireNVM(size_t size) {
return NVMPtr<T>(static_cast<T*>(malloc(size)));
}
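// Illustrative usage sketch (the element count below is arbitrary):
//   auto p = AcquireNVM<uint64_t>(sizeof(uint64_t) * 4);
//   *p = 42;            // dereference like a raw pointer
//   auto q = p + 1;     // pointer arithmetic yields another NVMPtr
//   assert(q - p == 1);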
} // namespace nvds
#endif // _NVDS_NVM_H_
<file_sep>/src/experiments/write_rc.c
#include "write_rc.h"
#include <assert.h>
#include <stdint.h>
#include <time.h>
#define RDMA_WRITE_ID (3)
#define RDMA_READ_ID (4)
#define RDMA_MAX_LEN (128 * 1024)
//#define RDMA_WRITE_LEN (64)
//#define RDMA_READ_LEN RDMA_WRITE_LEN
static int page_size;
static void nvds_server_exch_info(nvds_data_t* data);
static void nvds_client_exch_info(nvds_data_t* data);
static void nvds_run_server(nvds_context_t* ctx, nvds_data_t* data);
static void nvds_run_client(nvds_context_t* ctx, nvds_data_t* data);
static void nvds_init_local_ib_connection(nvds_context_t* ctx,
nvds_data_t* data);
static void nvds_init_ctx(nvds_context_t* ctx, nvds_data_t* data);
static void nvds_set_qp_state_init(struct ibv_qp* qp, nvds_data_t* data);
static void nvds_set_qp_state_rtr(struct ibv_qp* qp, nvds_data_t* data);
static void nvds_set_qp_state_rts(struct ibv_qp* qp, nvds_data_t* data);
static void nvds_rdma_write(nvds_context_t* ctx, nvds_data_t* data, uint32_t len);
//static void nvds_rdma_read(nvds_context_t* ctx, nvds_data_t* data);
static void nvds_poll_send(nvds_context_t* ctx);
//static void nvds_poll_recv(nvds_context_t* ctx);
//static void nvds_post_send(nvds_context_t* ctx, nvds_data_t* data);
//static void nvds_post_recv(nvds_context_t* ctx, nvds_data_t* data);
static void nvds_print_ib_connection(nvds_ib_connection_t* c) {
printf("lid: %d\n", c->lid);
printf("qpn: %d\n", c->qpn);
printf("psn: %d\n", c->psn);
printf("rkey: %u\n", c->rkey);
printf("vaddr: %llu\n", c->vaddr);
}
static void nvds_set_qp_state_init(struct ibv_qp* qp, nvds_data_t* data) {
struct ibv_qp_attr attr = {
.qp_state = IBV_QPS_INIT,
.pkey_index = 0,
.port_num = data->ib_port,
.qp_access_flags = IBV_ACCESS_LOCAL_WRITE |
IBV_ACCESS_REMOTE_READ |
IBV_ACCESS_REMOTE_WRITE
};
int modify_flags = IBV_QP_STATE | IBV_QP_PKEY_INDEX |
IBV_QP_PORT | IBV_QP_ACCESS_FLAGS;
nvds_expect(ibv_modify_qp(qp, &attr, modify_flags) == 0,
"ibv_modify_qp() failed");
}
static void nvds_set_qp_state_rtr(struct ibv_qp* qp, nvds_data_t* data) {
struct ibv_qp_attr attr = {
.qp_state = IBV_QPS_RTR,
.path_mtu = IBV_MTU_2048,
.dest_qp_num = data->remote_conn.qpn,
.rq_psn = data->remote_conn.psn,
.max_dest_rd_atomic = 1,
.min_rnr_timer = 12,
.ah_attr = {
.is_global = 0,
.dlid = data->remote_conn.lid,
.sl = 1,
.src_path_bits = 0,
.port_num = data->ib_port
}
};
int modify_flags = IBV_QP_STATE |
IBV_QP_AV |
IBV_QP_PATH_MTU |
IBV_QP_DEST_QPN |
IBV_QP_RQ_PSN |
IBV_QP_MAX_DEST_RD_ATOMIC |
IBV_QP_MIN_RNR_TIMER;
nvds_expect(ibv_modify_qp(qp, &attr, modify_flags) == 0,
"ibv_modify_qp() failed");
}
static void nvds_set_qp_state_rts(struct ibv_qp* qp, nvds_data_t* data) {
// First the QP state has to be changed to RTR
nvds_set_qp_state_rtr(qp, data);
struct ibv_qp_attr attr = {
.qp_state = IBV_QPS_RTS,
.timeout = 14,
.retry_cnt = 7,
.rnr_retry = 7,
.sq_psn = data->local_conn.psn,
.max_rd_atomic = 1
};
int modify_flags = IBV_QP_STATE |
IBV_QP_TIMEOUT |
IBV_QP_RETRY_CNT |
IBV_QP_RNR_RETRY |
IBV_QP_SQ_PSN |
IBV_QP_MAX_QP_RD_ATOMIC;
nvds_expect(ibv_modify_qp(qp, &attr, modify_flags) == 0,
"ibv_modify_qp() failed");
}
static void nvds_server_exch_info(nvds_data_t* data) {
int listen_fd;
struct sockaddr_in server_addr;
listen_fd = socket(AF_INET, SOCK_STREAM, 0);
nvds_expect(listen_fd >= 0, "socket() failed");
int on = 1;
nvds_expect(setsockopt(listen_fd, SOL_SOCKET, SO_REUSEADDR,
(const char*)&on, sizeof(on)) != -1,
"sotsockopt() failed");
memset(&server_addr, 0, sizeof(server_addr));
server_addr.sin_family = AF_INET;
server_addr.sin_addr.s_addr = INADDR_ANY;
server_addr.sin_port = htons(data->port);
nvds_expect(bind(listen_fd, (struct sockaddr*)&server_addr,
sizeof(server_addr)) >= 0,
"bind() failed");
printf("server listening at port %d\n", data->port);
listen(listen_fd, 1024);
int fd = accept(listen_fd, NULL, NULL);
nvds_expect(fd >= 0, "accept() failed");
int len = read(fd, &data->remote_conn, sizeof(data->remote_conn));
nvds_expect(len == sizeof(data->remote_conn), "read() failed");
len = write(fd, &data->local_conn, sizeof(data->local_conn));
nvds_expect(len == sizeof(data->local_conn), "write() failed");
close(fd);
close(listen_fd);
printf("information exchanged\n");
}
static void nvds_client_exch_info(nvds_data_t* data) {
int fd = socket(AF_INET, SOCK_STREAM, 0);
nvds_expect(fd != -1, "socket() failed");
struct sockaddr_in server_addr;
memset(&server_addr, 0, sizeof(server_addr));
server_addr.sin_family = AF_INET;
server_addr.sin_port = htons(data->port);
nvds_expect(inet_pton(AF_INET, data->server_name,
&server_addr.sin_addr) == 1,
"inet_pton() failed");
nvds_expect(connect(fd, (struct sockaddr*)&server_addr,
sizeof(server_addr)) == 0,
"connect() failed");
int len = write(fd, &data->local_conn, sizeof(data->local_conn));
nvds_expect(len == sizeof(data->local_conn), "write() failed");
len = read(fd, &data->remote_conn, sizeof(data->remote_conn));
nvds_expect(len == sizeof(data->remote_conn), "read() failed");
close(fd);
printf("information exchanged\n");
}
static void nvds_run_server(nvds_context_t* ctx, nvds_data_t* data) {
nvds_server_exch_info(data);
// Set queue pair state to RTR(Ready to receive)
nvds_set_qp_state_rtr(ctx->qp, data);
printf("server side: \n");
printf("local connection: \n");
nvds_print_ib_connection(&data->local_conn);
printf("remote connection: \n");
nvds_print_ib_connection(&data->remote_conn);
// TODO(wgtdkp): poll request from client or do nothing
while (1) {}
}
/*
static void nvds_multi_rdma_write(nvds_context_t* ctx, nvds_data_t* data) {
}
static void nvds_test_multi_sge(nvds_context_t* ctx, nvds_data_t* data) {
static const int n = 1000 * 1000;
}
*/
static inline int64_t time_diff(struct timespec* lhs, struct timespec* rhs) {
return (lhs->tv_sec - rhs->tv_sec) * 1000000000 +
(lhs->tv_nsec - rhs->tv_nsec);
}
static void nvds_run_client(nvds_context_t* ctx, nvds_data_t* data) {
nvds_client_exch_info(data);
nvds_set_qp_state_rts(ctx->qp, data);
//printf("client side: \n");
//printf("local connection: \n");
//nvds_print_ib_connection(&data->local_conn);
//printf("remote connection: \n");
//nvds_print_ib_connection(&data->remote_conn);
static const int n = 1000 * 1000;
for (int32_t len = 1; len <= 1024; len <<= 1) {
struct timespec begin, end;
assert(clock_gettime(CLOCK_REALTIME, &begin) == 0);
for (int i = 0; i < n; ++i) {
nvds_rdma_write(ctx, data, len);
nvds_poll_send(ctx);
}
assert(clock_gettime(CLOCK_REALTIME, &end) == 0);
int64_t t = time_diff(&end, &begin);
printf("%d, %f\n", len, t / 1000000.0 / 1000);
}
exit(0);
}
static void nvds_rdma_write(nvds_context_t* ctx, nvds_data_t* data, uint32_t len) {
ctx->sge.addr = (uintptr_t)ctx->buf;
ctx->sge.length = len;
ctx->sge.lkey = ctx->mr->lkey;
// The address write to
ctx->wr.wr.rdma.remote_addr = data->remote_conn.vaddr;
ctx->wr.wr.rdma.rkey = data->remote_conn.rkey;
ctx->wr.wr_id = RDMA_WRITE_ID;
ctx->wr.sg_list = &ctx->sge;
ctx->wr.num_sge = 1;
ctx->wr.opcode = IBV_WR_RDMA_WRITE;
ctx->wr.send_flags = IBV_SEND_SIGNALED;
ctx->wr.next = NULL;
struct ibv_send_wr* bad_wr;
nvds_expect(ibv_post_send(ctx->qp, &ctx->wr, &bad_wr) == 0,
"ibv_post_send() failed");
}
/*
static void nvds_rdma_read(nvds_context_t* ctx, nvds_data_t* data) {
ctx->sge.addr = (uintptr_t)ctx->buf + RDMA_WRITE_LEN;
ctx->sge.length = RDMA_READ_LEN;
ctx->sge.lkey = ctx->mr->lkey;
// The address read from
ctx->wr.wr.rdma.remote_addr = data->remote_conn.vaddr;
ctx->wr.wr.rdma.rkey = data->remote_conn.rkey;
ctx->wr.wr_id = RDMA_READ_ID;
ctx->wr.sg_list = &ctx->sge;
ctx->wr.num_sge = 1;
ctx->wr.opcode = IBV_WR_RDMA_READ;
ctx->wr.send_flags = IBV_SEND_SIGNALED;
ctx->wr.next = NULL;
struct ibv_send_wr* bad_wr;
nvds_expect(ibv_post_send(ctx->qp, &ctx->wr, &bad_wr) == 0,
"ibv_post_send() failed");
}
*/
static void nvds_poll_send(nvds_context_t* ctx) {
struct ibv_wc wc;
while (ibv_poll_cq(ctx->scq, 1, &wc) != 1) {}
nvds_expect(wc.status == IBV_WC_SUCCESS, "rdma write failed");
nvds_expect(wc.wr_id == ctx->wr.wr_id, "wr id not matched");
}
/*
static void nvds_poll_recv(nvds_context_t* ctx) {
struct ibv_wc wc;
int cnt = ibv_poll_cq(ctx->rcq, 1, &wc);
nvds_expect(cnt == 1, "ibv_poll_cq() failed");
nvds_expect(wc.status == IBV_WC_SUCCESS, "rdma read failed");
nvds_expect(wc.wr_id == ctx->wr.wr_id, "wr id not matched");
}
*/
/*
static void nvds_post_send(nvds_context_t* ctx, nvds_data_t* data) {
}
static void nvds_post_recv(nvds_context_t* ctx, nvds_data_t* data) {
}
*/
static void nvds_init_local_ib_connection(nvds_context_t* ctx,
nvds_data_t* data) {
struct ibv_port_attr attr;
nvds_expect(ibv_query_port(ctx->context, data->ib_port, &attr) == 0,
"ibv_query_port() failed");
data->local_conn.lid = attr.lid;
data->local_conn.qpn = ctx->qp->qp_num;
data->local_conn.psn = 33; //lrand48() & 0xffffff;
data->local_conn.rkey = ctx->mr->rkey;
data->local_conn.vaddr = (uintptr_t)ctx->buf;
}
static void nvds_init_ctx(nvds_context_t* ctx, nvds_data_t* data) {
memset(ctx, 0, sizeof(nvds_context_t));
ctx->size = data->size;
ctx->tx_depth = data->tx_depth;
// Get IB device
struct ibv_device** dev_list = ibv_get_device_list(NULL);
nvds_expect(dev_list, "ibv_get_device_list() failed");
data->ib_dev = dev_list[0];
// Open IB device
ctx->context = ibv_open_device(data->ib_dev);
nvds_expect(ctx->context, "ibv_open_device() failed");
// Alloc protection domain
ctx->pd = ibv_alloc_pd(ctx->context);
nvds_expect(ctx->pd, "ibv_alloc_pd() failed");
// Alloc buffer for write
ctx->buf = memalign(page_size, ctx->size);
memset(ctx->buf, 0, ctx->size);
// Register memory region
int access = IBV_ACCESS_LOCAL_WRITE |
IBV_ACCESS_REMOTE_READ |
IBV_ACCESS_REMOTE_WRITE;
ctx->mr = ibv_reg_mr(ctx->pd, ctx->buf, ctx->size, access);
nvds_expect(ctx->mr, "ibv_reg_mr() failed");
// Create completion channel
//ctx->ch = ibv_create_comp_channel(ctx->context);
//nvds_expect(ctx->ch, "ibv_create_comp_channel() failed");
// Create complete queue
ctx->rcq = ibv_create_cq(ctx->context, 1, NULL, NULL, 0);
nvds_expect(ctx->rcq, "ibv_create_cq() failed");
//ctx->scq = ibv_create_cq(ctx->context, ctx->tx_depth, ctx, ctx->ch, 0);
ctx->scq = ibv_create_cq(ctx->context, ctx->tx_depth, NULL, NULL, 0);
nvds_expect(ctx->rcq, "ibv_create_cq() failed");
// Create & init queue pair
struct ibv_qp_init_attr qp_init_attr = {
.send_cq = ctx->scq,
.recv_cq = ctx->rcq,
.qp_type = QP_TYPE,
.sq_sig_all = 0,
.cap = {
.max_send_wr = ctx->tx_depth,
.max_recv_wr = 1,
.max_send_sge = 1,
.max_recv_sge = 1,
.max_inline_data = 0
}
};
ctx->qp = ibv_create_qp(ctx->pd, &qp_init_attr);
nvds_expect(ctx->qp, "ibv_create_qp() failed");
nvds_set_qp_state_init(ctx->qp, data);
}
int main(int argc, const char* argv[]) {
nvds_context_t ctx;
nvds_data_t data = {
.port = 5500,
.ib_port = 1,
.size = RDMA_MAX_LEN,
.tx_depth = 100,
.server_name = NULL,
.ib_dev = NULL
};
nvds_expect(argc <= 2, "too many arguments");
int is_client = 0;
if (argc == 2) {
is_client = 1;
data.server_name = argv[1];
}
pid_t pid = getpid();
srand48(pid * time(NULL));
page_size = sysconf(_SC_PAGESIZE);
nvds_init_ctx(&ctx, &data);
nvds_init_local_ib_connection(&ctx, &data);
if (is_client) {
nvds_run_client(&ctx, &data);
} else {
nvds_run_server(&ctx, &data);
}
return 0;
}
<file_sep>/README.md
[Google Test]: https://github.com/google/googletest
[json]: https://github.com/nlohmann/json
# NVDS
Fast replication over an RDMA fabric for an NVM-based storage system.
## DEPENDENCY
+ [Google Test]
+ [json]
## BUILD
+ Get the source code:
```shell
$ git clone https://github.com/wgtdkp/nvds --recursive
```
+ Check out the `master` branch of the submodule `json`:
```shell
$ cd nvds/json && git checkout master
```
+ Build nvds from source:
```shell
$ cd ../src && make all
```
## INSTALL
```shell
$ make install # root required
```
## RUN
Navigate to directory `script`:
```shell
script$ ./startup.sh
```
## CONFIGURE
### cluster
The parameters below are defined in the header file `common.h`; recompilation and reinstallation are needed for changes to take effect.
| parameter | default | range | description |
| :--------: | :---: | :---: | :---------: |
| kNumReplicas | 1 | [1, ] | the number of replicas in primary-backup replication |
| kNumServers | 2 | [1, ] | the number of servers in this cluster |
| kNumTabletsPerServer | 1 | [1, 16] | the number of tablets per server |
The total capacity of the cluster equals kNumServers * kNumTabletsPerServer * 64 MB.
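For instance, with the defaults above (2 servers, 1 tablet per server), that is 2 * 1 * 64 MB = 128 MB.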
### servers
Specify server addresses in the file `script/servers.txt`. The total number of servers
must equal the parameter `kNumServers`. The file format is one line per server (IP address and port number separated by a space). An example with two servers:
```text
192.168.99.14 5050
192.168.99.14 5051
```
## PROGRAMMING
To connect to an nvds cluster, a client only needs to include the header file `nvds/client.h` and link against the nvds static library.
The tutorial snippet below shows how to put a key/value pair into the cluster and fetch it later:
```c++
#include "nvds/client.h"
#include <iostream>
#include <string>
int main() {
try {
nvds::Client c {"192.168.99.14"};
c.Put("hello", "world");
std::cout << "hello: " << c.Get("hello") << std::endl;
} catch (boost::system::system_error& e) {
NVDS_ERR(e.what());
} catch (nvds::TransportException& e) {
NVDS_ERR(e.msg().c_str());
NVDS_LOG("initialize infiniband devices failed");
}
return 0;
}
```
Compile:
```bash
g++ -std=c++11 -Wall test_client.cc -lnvds -lboost_system -lboost_thread -pthread -libverbs
```
The whole code can be obtained in source file `src/test_client.cc`.
## TROUBLESHOOTING
### enable UD
If `ibv_create_ah` fails with `"Cannot allocate memory"`, first check the InfiniBand port status with the command `ibstatus`; it will likely show that the port state is `INIT`. This is caused by `opensm` being disabled. Start `opensm` with the command `/etc/init.d/opensmd start` and make sure the port state transitions to `ACTIVE`; the problem should then be fixed. It usually occurs after a reboot, so just enable _opensm_ once the machine has started.
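A minimal command sequence for this fix, assuming the `opensmd` init script lives at `/etc/init.d/opensmd` as above:
```shell
$ ibstatus                    # port state shows INIT
$ /etc/init.d/opensmd start   # start the subnet manager
$ ibstatus                    # port state should now be ACTIVE
```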
### setting up IPoIB
Set up IPoIB for testing memcached latency.
Do the steps below on both the client and the server machine.
1. `lsmod | grep ib_ipoib` check if ipoib module is listed. If not, `modprobe ib_ipoib`;
2. `ifconfig ib0 10.0.0.7/24` setting route and address (the address is for `ddst7`, you may specify an address as you like) of `ib0`;
3. make sure `opensm` service is enabled, you can restart it by `service opensmd restart`;
4. `ping 10.0.0.4` ping peer machine (this address must be 10.0.0.*) to make sure IPoIB works for you.
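For example, the steps above reduce to the following sketch (the addresses are the examples used above; substitute your own):
```shell
$ modprobe ib_ipoib            # load the IPoIB module if it is not listed
$ ifconfig ib0 10.0.0.7/24     # assign an address to ib0
$ service opensmd restart      # make sure the subnet manager is running
$ ping 10.0.0.4                # verify connectivity to the peer machine
```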
Have fun then!
<file_sep>/src/measurement.h
#ifndef _NVDS_MEASUREMENT_H_
#define _NVDS_MEASUREMENT_H_
#include <chrono>
#include <iostream>
#include <ratio>
namespace nvds {
class Measurement {
public:
void begin() {
using namespace std::chrono;
begin_clock_ = high_resolution_clock::now();
}
void end() {
using namespace std::chrono;
end_clock_ = high_resolution_clock::now();
++num_repetitions_;
total_time_ += duration_cast<duration<double, std::micro>>(end_clock_ - begin_clock_).count();
}
double total_time() const { return total_time_; }
size_t num_repetitions() const { return num_repetitions_; }
double average_time() const { return total_time_ / num_repetitions_; }
double cur_period() const {
using namespace std::chrono;
auto now = high_resolution_clock::now();
return duration_cast<duration<double, std::micro>>(now - begin_clock_).count();
}
// DEBUG
void Print() const {
std::cout << "total time: " << total_time() << std::endl;
std::cout << "repetitions: " << num_repetitions() << std::endl;
std::cout << "average time: " << average_time() << std::endl;
}
private:
std::chrono::time_point<std::chrono::high_resolution_clock> begin_clock_;
std::chrono::time_point<std::chrono::high_resolution_clock> end_clock_;
double total_time_ {0};
size_t num_repetitions_ {0};
};
} // namespace nvds
#endif // _NVDS_MEASUREMENT_H_
<file_sep>/src/coordinator_main.cc
/*
* Startup module of the coordinator. For a storage cluster,
* it's the coordinator that starts up first and loads configs
* from the config file.
*/
#include "coordinator.h"
using namespace nvds;
/*
static void Usage(int argc, const char* argv[]) {
std::cout << "Usage:" << std::endl
<< " " << argv[0] << " <config_file>" << std::endl;
}
*/
static std::string GetLocalAddr() {
using boost::asio::ip::tcp;
boost::asio::io_service io_service;
tcp::resolver resolver(io_service);
tcp::resolver::query query(boost::asio::ip::host_name(), "");
auto iter = resolver.resolve(query);
decltype(iter) end;
while (iter != end) {
tcp::endpoint ep = *iter++;
return ep.address().to_string();
}
assert(false);
return "";
}
int main(int argc, const char* argv[]) {
// -1. Argument parsing
//if (argc < 2) {
// Usage(argc, argv);
// return -1;
//}
// 1. Self initialization
Coordinator coordinator;
// 2. Listening for joining request from nodes and request from clients
NVDS_LOG("Coordinator at: %s", GetLocalAddr().c_str());
NVDS_LOG("Listening at: %u", kCoordPort);
NVDS_LOG("......");
coordinator.Run();
return 0;
}
<file_sep>/src/test.h
#ifndef _NVDS_TEST_H_
#define _NVDS_TEST_H_
namespace nvds {
} // namespace nvds
#endif // _NVDS_TEST_H_
<file_sep>/src/infiniband.cc
#include "infiniband.h"
#include <algorithm>
#include <malloc.h>
namespace nvds {
Infiniband::Infiniband(uint32_t registered_mem_size)
: ctx_(nullptr), pd_(nullptr), mr_(nullptr),
raw_mem_(nullptr), raw_mem_size_(registered_mem_size) {
struct ibv_device** dev_list = ibv_get_device_list(NULL);
assert(dev_list != nullptr);
ctx_ = ibv_open_device(dev_list[0]);
assert(ctx_ != nullptr);
ibv_free_device_list(dev_list);
pd_ = ibv_alloc_pd(ctx_);
assert(pd_ != nullptr);
raw_mem_ = reinterpret_cast<char*>(memalign(4096, raw_mem_size_));
assert(raw_mem_ != nullptr);
int access = IBV_ACCESS_LOCAL_WRITE |
IBV_ACCESS_REMOTE_READ |
IBV_ACCESS_REMOTE_WRITE;
mr_ = ibv_reg_mr(pd_, raw_mem_, raw_mem_size_, access);
assert(mr_ != nullptr);
std::fill(addr_handlers_.begin(), addr_handlers_.end(), nullptr);
}
Infiniband::~Infiniband() {
// Release in reverse order
int err;
for (auto ah : addr_handlers_) {
if (ah != nullptr) {
err = ibv_destroy_ah(ah);
assert(err == 0);
}
}
err = ibv_dereg_mr(mr_);
assert(err == 0);
free(raw_mem_);
err = ibv_dealloc_pd(pd_);
assert(err == 0);
err = ibv_close_device(ctx_);
assert(err == 0);
}
void Infiniband::QueuePairInfo::Print() const {
std::cout << "lid: " << lid << std::endl;
std::cout << "qpn: " << qpn << std::endl;
std::cout << "psn: " << psn << std::endl;
std::cout << "rkey: " << rkey << std::endl;
std::cout << "vaddr: " << vaddr << std::endl;
}
Infiniband::QueuePair::QueuePair(Infiniband& ib, ibv_qp_type type,
uint32_t max_send, uint32_t max_recv)
: ib(ib), type(type), qp(nullptr),
scq(nullptr), rcq(nullptr), psn(kDefaultPsn) {
assert(type == IBV_QPT_RC || type == IBV_QPT_UD);
scq = ibv_create_cq(ib.ctx(), max_send, nullptr, nullptr, 0);
rcq = ibv_create_cq(ib.ctx(), max_recv, nullptr, nullptr, 0);
assert(scq != nullptr && rcq != nullptr);
ibv_qp_init_attr init_attr;
memset(&init_attr, 0, sizeof(init_attr));
init_attr.srq = nullptr;
init_attr.send_cq = scq;
init_attr.recv_cq = rcq;
init_attr.qp_type = type;
init_attr.cap.max_send_wr = max_send;
init_attr.cap.max_recv_wr = max_recv;
init_attr.cap.max_send_sge = Infiniband::kMaxSendSge;
init_attr.cap.max_recv_sge = Infiniband::kMaxRecvSge;
init_attr.cap.max_inline_data = Infiniband::kMaxInlineData;
init_attr.sq_sig_all = 0; // Related to unsignaled completion
qp = ibv_create_qp(ib.pd(), &init_attr);
if (qp == nullptr) {
throw TransportException(HERE, "create queue pair failed", errno);
}
ibv_qp_attr attr;
attr.qp_state = IBV_QPS_INIT;
attr.pkey_index = 0;
attr.port_num = Infiniband::kPort;
attr.qkey = 0;
attr.qp_access_flags = IBV_ACCESS_LOCAL_WRITE |
IBV_ACCESS_REMOTE_READ |
IBV_ACCESS_REMOTE_WRITE;
int modify = IBV_QP_STATE | IBV_QP_PORT | IBV_QP_PKEY_INDEX;
modify |= type == IBV_QPT_RC ? IBV_QP_ACCESS_FLAGS : IBV_QP_QKEY;
int ret = ibv_modify_qp(qp, &attr, modify);
if (ret != 0) {
ibv_destroy_qp(qp);
throw TransportException(HERE, ret);
}
}
Infiniband::QueuePair::~QueuePair() {
int err = ibv_destroy_qp(qp);
assert(err == 0);
err = ibv_destroy_cq(rcq);
assert(err == 0);
err = ibv_destroy_cq(scq);
assert(err == 0);
}
uint32_t Infiniband::QueuePair::GetPeerQPNum() const {
ibv_qp_attr attr;
ibv_qp_init_attr init_attr;
auto err = ibv_query_qp(qp, &attr, IBV_QP_DEST_QPN, &init_attr);
if (err != 0) {
throw TransportException(HERE, err);
}
return attr.dest_qp_num;
}
int Infiniband::QueuePair::GetState() const {
ibv_qp_attr attr;
ibv_qp_init_attr init_attr;
int err = ibv_query_qp(qp, &attr, IBV_QP_STATE, &init_attr);
if (err != 0) {
throw TransportException(HERE, err);
}
return attr.qp_state;
}
// Bring UD queue pair into RTS status
void Infiniband::QueuePair::Activate() {
assert(type == IBV_QPT_UD);
assert(GetState() == IBV_QPS_INIT);
ibv_qp_attr attr;
memset(&attr, 0, sizeof(attr));
attr.qp_state = IBV_QPS_RTR;
auto err = ibv_modify_qp(qp, &attr, IBV_QP_STATE);
if (err != 0) {
throw TransportException(HERE, err);
}
attr.qp_state = IBV_QPS_RTS;
attr.sq_psn = psn;
err = ibv_modify_qp(qp, &attr, IBV_QP_STATE | IBV_QP_SQ_PSN);
if (err != 0) {
throw TransportException(HERE, err);
}
}
void Infiniband::QueuePair::SetStateRTR(const QueuePairInfo& peer_info) {
assert(type == IBV_QPT_RC);
assert(GetState() == IBV_QPS_INIT);
struct ibv_qp_attr attr;
// We must set qp into RTR state before RTS
attr.qp_state = IBV_QPS_RTR;
attr.path_mtu = IBV_MTU_4096;
attr.dest_qp_num = peer_info.qpn;
attr.rq_psn = peer_info.psn;
attr.max_dest_rd_atomic = 1;
attr.min_rnr_timer = 12;
attr.ah_attr.is_global = 0;
attr.ah_attr.dlid = peer_info.lid;
attr.ah_attr.sl = 0;
attr.ah_attr.src_path_bits = 0;
attr.ah_attr.port_num = Infiniband::kPort;
int modify = IBV_QP_STATE |
IBV_QP_AV |
IBV_QP_PATH_MTU |
IBV_QP_DEST_QPN |
IBV_QP_RQ_PSN |
IBV_QP_MAX_DEST_RD_ATOMIC |
IBV_QP_MIN_RNR_TIMER;
int err = ibv_modify_qp(qp, &attr, modify);
if (err != 0) {
throw TransportException(HERE, err);
}
}
void Infiniband::QueuePair::SetStateRTS(const QueuePairInfo& peer_info) {
SetStateRTR(peer_info);
struct ibv_qp_attr attr;
attr.qp_state = IBV_QPS_RTS;
attr.timeout = 14;
attr.retry_cnt = 7;
attr.rnr_retry = 7;
attr.sq_psn = psn;
attr.max_rd_atomic = 1;
int modify = IBV_QP_STATE |
IBV_QP_TIMEOUT |
IBV_QP_RETRY_CNT |
IBV_QP_RNR_RETRY |
IBV_QP_SQ_PSN |
IBV_QP_MAX_QP_RD_ATOMIC;
int err = ibv_modify_qp(qp, &attr, modify);
if (err != 0) {
throw TransportException(HERE, err);
}
}
Infiniband::RegisteredBuffers::RegisteredBuffers(ibv_pd* pd,
uint32_t buf_size, uint32_t buf_num, bool is_recv)
: buf_size_(buf_size), buf_num_(buf_num),
ptr_(nullptr), bufs_(nullptr), root_(nullptr) {
const size_t bytes = buf_size * buf_num;
ptr_ = memalign(sysconf(_SC_PAGESIZE), bytes);
assert(ptr_ != nullptr);
int access = IBV_ACCESS_REMOTE_WRITE | IBV_ACCESS_LOCAL_WRITE;
auto mr = ibv_reg_mr(pd, ptr_, bytes, access);
if (mr == nullptr) {
throw TransportException(HERE, "failed to register buffers", errno);
}
bufs_ = new Buffer[buf_num_];
char* p = static_cast<char*>(ptr_);
for (uint32_t i = 0; i < buf_num; ++i) {
new (&bufs_[i]) Buffer(p, buf_size_, mr, is_recv);
bufs_[i].next = i + 1 < buf_num ? &bufs_[i+1] : nullptr;
p += buf_size_;
}
root_ = bufs_;
}
Infiniband::RegisteredBuffers::~RegisteredBuffers() {
// Deregister memory region
ibv_dereg_mr(bufs_[0].mr);
free(ptr_);
delete[] bufs_;
}
uint16_t Infiniband::GetLid(uint16_t port) {
ibv_port_attr port_attr;
auto ret = ibv_query_port(ctx_, port, &port_attr);
if (ret != 0) {
throw TransportException(HERE, ret);
}
return port_attr.lid;
}
Infiniband::Buffer* Infiniband::PollCQ(ibv_cq* cq, int num_entries) {
ibv_wc wc;
int r = ibv_poll_cq(cq, num_entries, &wc);
if (r == 0) {
return NULL;
} else if (r < 0) {
throw TransportException(HERE, r);
} else if (wc.status != IBV_WC_SUCCESS) {
throw TransportException(HERE, wc.status);
}
// wr_id is used as buffer address
auto b = reinterpret_cast<Buffer*>(wc.wr_id);
b->msg_len = wc.byte_len;
// FIXME(wgtdkp):
b->peer_addr = {Infiniband::kPort/*qp->ib_port*/, wc.slid, wc.src_qp};
return b;
}
// May block
Infiniband::Buffer* Infiniband::Receive(QueuePair* qp) {
Buffer* b;
while ((b = TryReceive(qp)) == nullptr) {}
return b;
}
void Infiniband::PostReceive(QueuePair* qp, Buffer* b) {
ibv_sge sge {
reinterpret_cast<uint64_t>(b->buf),
b->size,
b->mr->lkey
};
ibv_recv_wr rwr;
memset(&rwr, 0, sizeof(rwr));
rwr.wr_id = reinterpret_cast<uint64_t>(b);
rwr.next = nullptr;
rwr.sg_list = &sge;
rwr.num_sge = 1;
ibv_recv_wr* bad_rwr;
auto err = ibv_post_recv(qp->qp, &rwr, &bad_rwr);
if (err != 0) {
throw TransportException(HERE, "ibv_post_recv failed", err);
}
}
void Infiniband::PostSend(QueuePair* qp, Buffer* b,
uint32_t len, const Address* peer_addr) {
assert(qp->type == IBV_QPT_UD);
ibv_sge sge {
reinterpret_cast<uint64_t>(b->buf),
len,
b->mr->lkey
};
ibv_send_wr swr;
memset(&swr, 0, sizeof(swr));
swr.wr_id = reinterpret_cast<uint64_t>(b);
if (qp->type == IBV_QPT_UD) {
swr.wr.ud.ah = GetAddrHandler(*peer_addr);
swr.wr.ud.remote_qpn = peer_addr->qpn;
swr.wr.ud.remote_qkey = 0;
assert(swr.wr.ud.ah != nullptr);
}
swr.next = nullptr;
swr.sg_list = &sge;
swr.num_sge = 1;
swr.opcode = IBV_WR_SEND;
swr.send_flags = IBV_SEND_SIGNALED;
if (len <= kMaxInlineData) {
swr.send_flags |= IBV_SEND_INLINE;
}
ibv_send_wr* bad_swr;
auto err = ibv_post_send(qp->qp, &swr, &bad_swr);
if (err != 0) {
throw TransportException(HERE, "ibv_post_send failed", err);
}
}
void Infiniband::PostSendAndWait(QueuePair* qp, Buffer* b,
uint32_t len, const Address* peer_addr) {
assert(qp->type == IBV_QPT_UD);
PostSend(qp, b, len, peer_addr);
ibv_wc wc;
while (ibv_poll_cq(qp->scq, 1, &wc) < 1) {}
// FIXME(wgtdkp):
// How can we make sure that the completed work we polled is the one
// we have just post?
if (wc.status != IBV_WC_SUCCESS) {
throw TransportException(HERE, "PostSendAndWait failed", wc.status);
}
}
ibv_ah* Infiniband::GetAddrHandler(const Address& addr) {
auto& ah = addr_handlers_[addr.lid];
if (ah != nullptr) {
return ah;
}
ibv_ah_attr attr;
attr.is_global = 0;
attr.dlid = addr.lid;
attr.sl = 1;
attr.src_path_bits = 0;
attr.port_num = kPort;
ah = ibv_create_ah(pd_, &attr);
assert(ah != nullptr);
return ah;
}
void Infiniband::PostWrite(QueuePair& qp, QueuePairInfo& peer_info) {
struct ibv_sge sge;
sge.addr = reinterpret_cast<uint64_t>(raw_mem_);
sge.length = 64;
sge.lkey = mr_->lkey;
struct ibv_send_wr swr;
swr.wr.rdma.remote_addr = peer_info.vaddr;
swr.wr.rdma.rkey = peer_info.rkey;
swr.wr_id = 3;
swr.sg_list = &sge;
swr.num_sge = 1;
swr.opcode = IBV_WR_RDMA_WRITE;
swr.send_flags = IBV_SEND_SIGNALED;
swr.next = nullptr;
struct ibv_send_wr* bad_wr;
int err = ibv_post_send(qp.qp, &swr, &bad_wr);
if (err != 0) {
throw TransportException(HERE, "post rdma write failed", err);
}
}
void Infiniband::PostWriteAndWait(QueuePair& qp, QueuePairInfo& peer_info) {
PostWrite(qp, peer_info);
struct ibv_wc wc;
int r;
while ((r = ibv_poll_cq(qp.scq, 1, &wc)) != 1) {}
if (wc.status != IBV_WC_SUCCESS) {
throw TransportException(HERE, "poll completion queue failed", wc.status);
}
}
} // namespace nvds
<file_sep>/src/experiments/write_rc_multi.h
#ifndef _NVDS_COMMON_H_
#define _NVDS_COMMON_H_
#include <ctype.h>
#include <errno.h>
#include <malloc.h>
#include <memory.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
#include <arpa/inet.h>
#include <infiniband/verbs.h>
#include <sys/socket.h>
#include <sys/time.h>
// Interprocess communication access
#include <sys/ipc.h>
// Shared memory facility
#include <sys/shm.h>
#define nvds_error(format, ...) { \
fprintf(stderr, "error: %s: %d: ", __FILE__, __LINE__); \
fprintf(stderr, format, ## __VA_ARGS__); \
fprintf(stderr, "\n"); \
exit(-1); \
}
#define nvds_expect(cond, err, ...) { \
if (!(cond)) { \
nvds_error(err, ## __VA_ARGS__); \
} \
}
#ifndef NUM_QP
#define NUM_QP (1024)
#endif
#define QP_TYPE IBV_QPT_RC
typedef struct nvds_context {
struct ibv_context* context;
struct ibv_pd* pd;
struct ibv_mr* mr;
struct ibv_cq* rcq;
struct ibv_cq* scq;
struct ibv_qp* qp;
struct ibv_comp_channel* ch;
char* buf;
unsigned buf_size;
int tx_depth;
struct ibv_sge sge;
struct ibv_send_wr wr;
} nvds_context_t;
typedef struct nvds_ib_connection {
int lid;
int qpn;
int psn;
unsigned rkey;
unsigned long long vaddr;
} nvds_ib_connection_t;
typedef struct nvds_data {
int port;
int ib_port;
unsigned size;
int tx_depth;
int sockfd;
const char* server_name;
nvds_ib_connection_t local_conn;
nvds_ib_connection_t remote_conn;
struct ibv_device* ib_dev;
} nvds_data_t;
#endif
<file_sep>/src/server.cc
#include "server.h"
#include "json.hpp"
#include "request.h"
#include <chrono>
#include <thread>
namespace nvds {
using json = nlohmann::json;
Server::Server(uint16_t port, NVMPtr<NVMDevice> nvm, uint64_t nvm_size)
: BasicServer(port), id_(0),
active_(false), nvm_size_(nvm_size), nvm_(nvm),
send_bufs_(ib_.pd(), kSendBufSize, kMaxIBQueueDepth, false),
recv_bufs_(ib_.pd(), kRecvBufSize, kMaxIBQueueDepth, true) {
// Infiniband
qp_ = new Infiniband::QueuePair(ib_, IBV_QPT_UD,
kMaxIBQueueDepth, kMaxIBQueueDepth);
qp_->Activate();
ib_addr_ = {
Infiniband::kPort,
ib_.GetLid(Infiniband::kPort),
qp_->GetLocalQPNum()
};
// Tablets
for (uint32_t i = 0; i < kNumTabletAndBackupsPerServer; ++i) {
auto ptr = reinterpret_cast<char*>(&nvm_->tablets) + i * kNVMTabletSize;
bool is_backup = i >= kNumTabletsPerServer;
tablets_[i] = new Tablet(index_manager_,
NVMPtr<NVMTablet>(reinterpret_cast<NVMTablet*>(ptr)), is_backup);
if (i < kNumTabletsPerServer) {
workers_[i] = new Worker(this, tablets_[i]);
}
}
}
Server::~Server() {
// Destruct elements in reverse order
for (int64_t i = kNumTabletAndBackupsPerServer - 1; i >= 0; --i) {
delete tablets_[i];
if (i < kNumTabletsPerServer) {
delete workers_[i];
}
}
delete qp_;
}
void Server::Run() {
std::thread poller(&Server::Poll, this);
Accept(std::bind(&Server::HandleRecvMessage, this,
std::placeholders::_1, std::placeholders::_2),
std::bind(&Server::HandleSendMessage, this,
std::placeholders::_1, std::placeholders::_2));
RunService();
poller.join();
}
bool Server::Join(const std::string& coord_addr) {
using boost::asio::ip::tcp;
tcp::resolver resolver(tcp_service_);
tcp::resolver::query query(coord_addr, std::to_string(kCoordPort));
auto ep = resolver.resolve(query);
try {
boost::asio::connect(conn_sock_, ep);
Session session_join(std::move(conn_sock_));
Message::Header header {Message::SenderType::SERVER,
Message::Type::REQ_JOIN, 0};
std::vector<Infiniband::QueuePairInfo> qpis;
for (uint32_t i = 0; i < kNumTabletAndBackupsPerServer; ++i) {
for (uint32_t j = 0; j < kNumReplicas; ++j) {
qpis.emplace_back(tablets_[i]->info().qpis[j]);
}
}
json body {
{"size", nvm_size_},
{"ib_addr", ib_addr_},
{"tablet_qpis", qpis},
};
Message msg(header, body.dump());
try {
session_join.SendMessage(msg);
} catch (boost::system::system_error& err) {
NVDS_ERR("send join request to coordinator failed: %s", err.what());
return false;
}
try {
msg = session_join.RecvMessage();
assert(msg.sender_type() == Message::SenderType::COORDINATOR);
assert(msg.type() == Message::Type::RES_JOIN);
auto j_body = json::parse(msg.body());
id_ = j_body["id"];
index_manager_ = j_body["index_manager"];
active_ = true;
auto& server_info = index_manager_.GetServer(id_);
for (uint32_t i = 0; i < kNumTabletAndBackupsPerServer; ++i) {
auto tablet_id = server_info.tablets[i];
// Both master and backup tablets
tablets_[i]->SettingupQPConnect(tablet_id, index_manager_);
}
} catch (boost::system::system_error& err) {
NVDS_ERR("receive join response from coordinator failed: %s",
err.what());
return false;
}
} catch (boost::system::system_error& err) {
NVDS_ERR("connect to coordinator: %s: %" PRIu16 " failed",
coord_addr.c_str(), kCoordPort);
return false;
}
return true;
}
void Server::Leave() {
assert(false);
}
void Server::Listening() {
assert(false);
}
void Server::HandleRecvMessage(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg) {
assert(false);
}
void Server::HandleSendMessage(std::shared_ptr<Session> session,
std::shared_ptr<Message> msg) {
assert(false);
}
void Server::Poll() {
// This has a great
// Fill the receive queue
for (size_t i = 0; i < kMaxIBQueueDepth; ++i) {
auto b = recv_bufs_.Alloc();
assert(b);
ib_.PostReceive(qp_, b);
}
using namespace std::chrono;
//uint64_t cnt = 0;
high_resolution_clock::time_point begin;
while (true) {
auto b = ib_.TryReceive(qp_);
if (b != nullptr) {
#ifdef ENABLE_MEASUREMENT
static bool enable = false;
if (enable) {
recv_measurement.end();
}
enable = true;
#endif
// Dispatch to the worker's queue, then the worker gets work from its queue.
// The buffer `b` will be freed by the worker. Thus, the buffer pool
// must be made thread safe.
Dispatch(b);
}
if ((b = ib_.TrySend(qp_)) != nullptr) {
#ifdef ENABLE_MEASUREMENT
send_measurement.end();
recv_measurement.begin();
#endif
send_bufs_.Free(b);
}
// The receive queue is not full if there are free buffers.
if ((b = recv_bufs_.Alloc()) != nullptr) {
assert(b);
// Try posting receive if we could
ib_.PostReceive(qp_, b);
}
}
}
void Server::Dispatch(Work* work) {
auto r = work->MakeRequest();
auto id = index_manager_.GetTabletId(r->key_hash);
workers_[id % kNumTabletAndBackupsPerServer]->Enqueue(work);
++num_recv_;
}
void Server::Worker::Serve() {
ModificationList modifications;
while (true) {
// Get request
auto work = wq_.TryPollWork();
Infiniband::Buffer* sb = nullptr;
if (work == nullptr) {
std::unique_lock<std::mutex> lock(mtx_);
cond_var_.wait(lock, [&work, &sb, this]() -> bool {
return (work || (work = wq_.Dequeue()));
});
}
#ifdef ENABLE_MEASUREMENT
server_->thread_measurement.end();
#endif
sb = server_->send_bufs_.Alloc();
assert(work != nullptr && sb != nullptr);
// Do the work
auto r = work->MakeRequest();
auto resp = Response::New(sb, r->type, Status::OK);
modifications.clear();
#ifdef ENABLE_MEASUREMENT
server_->alloc_measurement.begin();
#endif
switch (r->type) {
case Request::Type::PUT:
resp->status = tablet_->Put(r, modifications);
break;
case Request::Type::ADD:
resp->status = tablet_->Add(r, modifications);
break;
case Request::Type::DEL:
resp->status = tablet_->Del(r, modifications);
break;
case Request::Type::GET:
resp->status = tablet_->Get(resp, r, modifications);
break;
}
#ifdef ENABLE_MEASUREMENT
server_->alloc_measurement.end();
#endif
try {
#ifdef ENABLE_MEASUREMENT
server_->sync_measurement.begin();
#endif
tablet_->Sync(modifications);
#ifdef ENABLE_MEASUREMENT
server_->sync_measurement.end();
#endif
} catch (TransportException& e) {
r->Print();
tablet_->info().Print();
server_->index_manager_.PrintTablets();
NVDS_ERR(e.ToString().c_str());
}
#ifdef ENABLE_MEASUREMENT
server_->send_measurement.begin();
#endif
server_->ib_.PostSend(server_->qp_, sb, resp->Len(), &work->peer_addr);
server_->recv_bufs_.Free(work);
}
}
} // namespace nvds
<file_sep>/src/nvds.h
#ifndef _NVDS_NVDS_H_
#define _NVDS_NVDS_H_
#include <ctype.h>
#include <errno.h>
#include <malloc.h>
#include <memory.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
#include <arpa/inet.h>
#include <infiniband/verbs.h>
#include <sys/socket.h>
#include <sys/time.h>
// Interprocess communication access
#include <sys/ipc.h>
// Shared memory facility
#include <sys/shm.h>
namespace nvds {
#define nvds_error(format, ...) { \
fprintf(stderr, "error: %s: %d: ", __FILE__, __LINE__); \
fprintf(stderr, format, ## __VA_ARGS__); \
fprintf(stderr, "\n"); \
exit(-1); \
}
#define nvds_expect(cond, err, ...) { \
if (!(cond)) { \
nvds_error(err, ## __VA_ARGS__); \
} \
}
} // namespace nvds
#endif // _NVDS_NVDS_H_
<file_sep>/src/test_infiniband.cc
#include "infiniband.h"
#include <gtest/gtest.h>
#include <cstdlib>
#include <functional>
#include <iostream>
#include <thread>
#include <vector>
#include <ctype.h>
#include <errno.h>
#include <malloc.h>
#include <memory.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
#include <arpa/inet.h>
#include <infiniband/verbs.h>
#include <sys/socket.h>
#include <sys/time.h>
// Interprocess communication access
#include <sys/ipc.h>
// Shared memory facility
#include <sys/shm.h>
using namespace std;
using namespace nvds;
#define nvds_error(format, ...) { \
fprintf(stderr, "error: %s: %d: ", __FILE__, __LINE__); \
fprintf(stderr, format, ## __VA_ARGS__); \
fprintf(stderr, "\n"); \
exit(-1); \
}
#define nvds_expect(cond, err, ...) { \
if (!(cond)) { \
nvds_error(err, ## __VA_ARGS__); \
} \
}
#define BUF_SIZE (1024)
#define QP_TYPE IBV_QPT_RC
#define IB_PORT (1)
#define DEFAULT_PSN (33)
typedef struct nvds_context {
struct ibv_context* context;
struct ibv_pd* pd;
struct ibv_mr* mr;
struct ibv_cq* rcq;
struct ibv_cq* scq;
struct ibv_qp* qp;
struct ibv_comp_channel* ch;
char* buf;
} nvds_context_t;
typedef struct nvds_ib_connection {
int lid;
int qpn;
int psn;
unsigned rkey;
unsigned long long vaddr;
} nvds_ib_connection_t;
#define RDMA_WRITE_ID (3)
#define RDMA_READ_ID (4)
#define RDMA_WRITE_LEN (64)
#define RDMA_READ_LEN RDMA_WRITE_LEN
static int page_size;
nvds_ib_connection_t conn[2];
static void nvds_run_server(nvds_context_t* ctx, nvds_ib_connection_t* remote_conn);
static void nvds_run_client(nvds_context_t* ctx, nvds_ib_connection_t* remote_conn);
static void nvds_init_local_ib_connection(nvds_context_t* ctx,
nvds_ib_connection_t* local_conn);
static void nvds_init_ctx(nvds_context_t* ctx);
static void nvds_set_qp_state_init(struct ibv_qp* qp);
static void nvds_set_qp_state_rtr(struct ibv_qp* qp, nvds_ib_connection_t* remote_conn);
static void nvds_set_qp_state_rts(struct ibv_qp* qp, nvds_ib_connection_t* remote_conn);
static void nvds_rdma_write(nvds_context_t* ctx, nvds_ib_connection_t* remote_conn);
static void nvds_poll_send(nvds_context_t* ctx);
static void copy_info_to_conn(nvds_ib_connection_t* conn, Infiniband::QueuePairInfo* info) {
conn->lid = info->lid;
conn->qpn = info->qpn;
conn->psn = info->psn;
conn->rkey = info->rkey;
conn->vaddr = info->vaddr;
}
static void copy_conn_to_info(Infiniband::QueuePairInfo* info, nvds_ib_connection_t* conn) {
info->lid = conn->lid;
info->qpn = conn->qpn;
info->psn = conn->psn;
info->rkey = conn->rkey;
info->vaddr = conn->vaddr;
}
static void nvds_print_ib_connection(nvds_ib_connection_t* c) {
printf("lid: %d\n", c->lid);
printf("qpn: %d\n", c->qpn);
printf("psn: %d\n", c->psn);
printf("rkey: %u\n", c->rkey);
printf("vaddr: %llu\n", c->vaddr);
}
static void nvds_set_qp_state_init(struct ibv_qp* qp) {
struct ibv_qp_attr attr;
attr.qp_state = IBV_QPS_INIT;
attr.pkey_index = 0;
attr.port_num = Infiniband::kPort;
attr.qp_access_flags = IBV_ACCESS_LOCAL_WRITE |
IBV_ACCESS_REMOTE_READ |
IBV_ACCESS_REMOTE_WRITE;
int modify_flags = IBV_QP_STATE | IBV_QP_PKEY_INDEX |
IBV_QP_PORT | IBV_QP_ACCESS_FLAGS;
nvds_expect(ibv_modify_qp(qp, &attr, modify_flags) == 0,
"ibv_modify_qp() failed");
}
static void nvds_set_qp_state_rtr(struct ibv_qp* qp, nvds_ib_connection_t* remote_conn) {
struct ibv_qp_attr attr;
attr.qp_state = IBV_QPS_RTR;
attr.path_mtu = IBV_MTU_4096;
attr.dest_qp_num = remote_conn->qpn;
attr.rq_psn = remote_conn->psn;
attr.max_dest_rd_atomic = 1;
attr.min_rnr_timer = 12;
attr.ah_attr.is_global = 0;
attr.ah_attr.dlid = remote_conn->lid;
attr.ah_attr.sl = 0;//1;
attr.ah_attr.src_path_bits = 0;
attr.ah_attr.port_num = 1;
int modify_flags = IBV_QP_STATE |
IBV_QP_AV |
IBV_QP_PATH_MTU |
IBV_QP_DEST_QPN |
IBV_QP_RQ_PSN |
IBV_QP_MAX_DEST_RD_ATOMIC |
IBV_QP_MIN_RNR_TIMER;
nvds_expect(ibv_modify_qp(qp, &attr, modify_flags) == 0,
"ibv_modify_qp() failed");
}
static void nvds_set_qp_state_rts(struct ibv_qp* qp, nvds_ib_connection_t* remote_conn) {
// first the qp state has to be changed to rtr
nvds_set_qp_state_rtr(qp, remote_conn);
struct ibv_qp_attr attr;
attr.qp_state = IBV_QPS_RTS;
attr.timeout = 14;
attr.retry_cnt = 7;
attr.rnr_retry = 7;
attr.sq_psn = DEFAULT_PSN;
attr.max_rd_atomic = 1;
int modify_flags = IBV_QP_STATE |
IBV_QP_TIMEOUT |
IBV_QP_RETRY_CNT |
IBV_QP_RNR_RETRY |
IBV_QP_SQ_PSN |
IBV_QP_MAX_QP_RD_ATOMIC;
nvds_expect(ibv_modify_qp(qp, &attr, modify_flags) == 0,
"ibv_modify_qp() failed");
}
static void nvds_rdma_write(nvds_context_t* ctx, nvds_ib_connection_t* remote_conn) {
struct ibv_sge sge;
sge.addr = (uintptr_t)ctx->buf;
sge.length = RDMA_WRITE_LEN;
sge.lkey = ctx->mr->lkey;
struct ibv_send_wr wr;
// The address write to
wr.wr.rdma.remote_addr = remote_conn->vaddr;
wr.wr.rdma.rkey = remote_conn->rkey;
wr.wr_id = RDMA_WRITE_ID;
wr.sg_list = &sge;
wr.num_sge = 1;
wr.opcode = IBV_WR_RDMA_WRITE;
wr.send_flags = IBV_SEND_SIGNALED;
wr.next = NULL;
struct ibv_send_wr* bad_wr;
nvds_expect(ibv_post_send(ctx->qp, &wr, &bad_wr) == 0,
"ibv_post_send() failed");
}
static void nvds_poll_send(nvds_context_t* ctx) {
struct ibv_wc wc;
int r;
while ((r = ibv_poll_cq(ctx->scq, 1, &wc)) != 1) {}
//nvds_expect(wc.status == IBV_WC_SUCCESS, "rdma write failed");
if (wc.status != IBV_WC_SUCCESS) {
printf("rdma write failed, status = %d\n", wc.status);
perror("ibv_poll_cq");
abort();
}
}
static void nvds_init_local_ib_connection(nvds_context_t* ctx,
nvds_ib_connection_t* local_conn) {
struct ibv_port_attr attr;
nvds_expect(ibv_query_port(ctx->context, IB_PORT, &attr) == 0,
"ibv_query_port() failed");
local_conn->lid = attr.lid;
local_conn->qpn = ctx->qp->qp_num;
local_conn->psn = Infiniband::QueuePair::kDefaultPsn; //lrand48() & 0xffffff;
local_conn->rkey = ctx->mr->rkey;
local_conn->vaddr = (uintptr_t)ctx->buf;
}
static void nvds_init_ctx(nvds_context_t* ctx) {
memset(ctx, 0, sizeof(nvds_context_t));
// Get IB device
struct ibv_device** dev_list = ibv_get_device_list(NULL);
nvds_expect(dev_list, "ibv_get_device_list() failed");
// Open IB device
ctx->context = ibv_open_device(dev_list[0]);
nvds_expect(ctx->context, "ibv_open_device() failed");
// Alloc protection domain
ctx->pd = ibv_alloc_pd(ctx->context);
nvds_expect(ctx->pd, "ibv_alloc_pd() failed");
// Alloc buffer for write
ctx->buf = (char*)memalign(4096, BUF_SIZE);
memset(ctx->buf, 0, BUF_SIZE);
// Register memory region
int access = IBV_ACCESS_LOCAL_WRITE |
IBV_ACCESS_REMOTE_READ |
IBV_ACCESS_REMOTE_WRITE;
ctx->mr = ibv_reg_mr(ctx->pd, ctx->buf, BUF_SIZE, access);
nvds_expect(ctx->mr, "ibv_reg_mr() failed");
// Create completion channel
//ctx->ch = ibv_create_comp_channel(ctx->context);
//nvds_expect(ctx->ch, "ibv_create_comp_channel() failed");
// Create completion queues
ctx->rcq = ibv_create_cq(ctx->context, 128, NULL, NULL, 0);
nvds_expect(ctx->rcq, "ibv_create_cq() failed");
//ctx->scq = ibv_create_cq(ctx->context, ctx->tx_depth, ctx, ctx->ch, 0);
ctx->scq = ibv_create_cq(ctx->context, 128, NULL, NULL, 0);
nvds_expect(ctx->rcq, "ibv_create_cq() failed");
// Create & init queue pair
struct ibv_qp_init_attr qp_init_attr;
qp_init_attr.send_cq = ctx->scq;
qp_init_attr.recv_cq = ctx->rcq;
qp_init_attr.qp_type = QP_TYPE;
qp_init_attr.cap.max_send_wr = 128;
qp_init_attr.cap.max_recv_wr = 128;
qp_init_attr.cap.max_send_sge = Infiniband::kMaxSendSge;
qp_init_attr.cap.max_recv_sge = Infiniband::kMaxRecvSge;
qp_init_attr.cap.max_inline_data = Infiniband::kMaxInlineData;
ctx->qp = ibv_create_qp(ctx->pd, &qp_init_attr);
nvds_expect(ctx->qp, "ibv_create_qp() failed");
nvds_set_qp_state_init(ctx->qp);
}
static void nvds_run_server(nvds_context_t* ctx, nvds_ib_connection_t* remote_conn) {
//nvds_server_exch_info(data);
sleep(1); // Waiting for peer
// Set queue pair state to RTR(Ready to receive)
nvds_set_qp_state_rtr(ctx->qp, remote_conn);
printf("server side: \n");
//printf("\n");
//nvds_print_qp_attr(ctx->qp);
//printf("\n\n");
/*
printf("local connection: \n");
nvds_print_ib_connection(&data->local_conn);
printf("remote connection: \n");
nvds_print_ib_connection(&data->remote_conn);
*/
// TODO(wgtdkp): poll request from client or do nothing
while (1) {}
}
static void nvds_run_client(nvds_context_t* ctx, nvds_ib_connection_t* remote_conn) {
sleep(2); // Waiting for peer
nvds_set_qp_state_rts(ctx->qp, remote_conn);
//nvds_set_qp_state_rtr(ctx->qp, data);
printf("client side: \n");
//printf("\n");
//nvds_print_qp_attr(ctx->qp);
//printf("\n\n");
/*
printf("local connection: \n");
nvds_print_ib_connection(&data->local_conn);
printf("remote connection: \n");
nvds_print_ib_connection(&data->remote_conn);
*/
snprintf(ctx->buf, RDMA_WRITE_LEN, "hello rdma\n");
static const int n = 1000 * 1000;
clock_t begin;
double t;
// Test write latency
begin = clock();
for (int i = 0; i < n; ++i) {
// Step 1: RDMA write to server
nvds_rdma_write(ctx, remote_conn);
// Step 2: polling if RDMA write done
nvds_poll_send(ctx);
}
t = (clock() - begin) * 1.0 / CLOCKS_PER_SEC;
printf("time: %fs\n", t);
printf("write latency: %fus\n", t / n * 1000 * 1000);
abort();
// Dump statistic info
}
Infiniband::QueuePairInfo qpis[2];
int nvds_main(int argc, const char* argv[]) {
nvds_context_t ctx;
nvds_expect(argc <= 2, "too many arguments");
int is_client = 0;
if (argc == 2) {
is_client = 1;
}
page_size = sysconf(_SC_PAGESIZE);
nvds_init_ctx(&ctx);
if (is_client) {
nvds_init_local_ib_connection(&ctx, &conn[0]);
copy_conn_to_info(&qpis[0], &conn[0]);
nvds_run_client(&ctx, &conn[1]);
} else {
nvds_init_local_ib_connection(&ctx, &conn[1]);
copy_conn_to_info(&qpis[1], &conn[1]);
nvds_run_server(&ctx, &conn[0]);
}
return 0;
}
static void RunClient() {
try {
Infiniband ib {1024};
Infiniband::QueuePair qp(ib, IBV_QPT_RC, 128, 128);
qpis[0] = {
ib.GetLid(Infiniband::kPort),
qp.GetLocalQPNum(),
qp.psn,
ib.mr()->rkey,
reinterpret_cast<uint64_t>(ib.raw_mem())
};
copy_info_to_conn(&conn[0], &qpis[0]);
sleep(2); // Waiting for peer
printf("client side\n");
qp.SetStateRTS(qpis[1]);
assert(qp.GetState() == IBV_QPS_RTS);
snprintf(ib.raw_mem(), RDMA_WRITE_LEN, "hello rdma\n");
for (size_t i = 0; i < 10000; ++i) {
ib.PostWriteAndWait(qp, qpis[1]);
}
std::cout << "finally, you bitch!" << std::endl;
} catch (TransportException& e) {
NVDS_ERR(e.ToString().c_str());
}
}
static void RunServer() {
try {
Infiniband ib {1024};
Infiniband::QueuePair qp(ib, IBV_QPT_RC, 128, 128);
qpis[1] = {
ib.GetLid(Infiniband::kPort),
qp.GetLocalQPNum(),
qp.psn,
ib.mr()->rkey,
reinterpret_cast<uint64_t>(ib.raw_mem())
};
copy_info_to_conn(&conn[1], &qpis[1]);
sleep(1); // Waiting for peer
printf("server side\n");
qp.SetStateRTS(qpis[0]);
assert(qp.GetState() == IBV_QPS_RTS);
while (1) {}
} catch (TransportException& e) {
NVDS_ERR(e.ToString().c_str());
}
}
///*
TEST (InfinibandTest, RDMAWrite) {
const char* argv[] {"a.out", "192.168.99.14"};
//thread server(bind(nvds_main, 1, argv));
thread server(RunServer);
//thread client(bind(nvds_main, 2, argv));
thread client(RunClient);
client.join();
server.join();
}
//*/
/*
TEST (InfinibandTest, RDMAWrite) {
thread server(RunServer);
thread client(RunClient);
client.join();
server.join();
}
*/
/*
TEST (InfinibandTest, RDMAWrite) {
try {
Infiniband ib;
char bufs[2][1024] {{0}};
ibv_mr* mr = ibv_reg_mr(ib.pd().pd(), bufs, sizeof(bufs),
IBV_ACCESS_LOCAL_WRITE | IBV_ACCESS_REMOTE_WRITE);
assert(mr != nullptr);
auto scq0 = ib.CreateCQ(128);
auto rcq0 = ib.CreateCQ(128);
Infiniband::QueuePair qp0(ib, IBV_QPT_RC, 1,
nullptr, scq0, rcq0, 128, 128);
auto scq1 = ib.CreateCQ(128);
auto rcq1 = ib.CreateCQ(128);
Infiniband::QueuePair qp1(ib, IBV_QPT_RC, 1,
nullptr, scq1, rcq1, 128, 128);
Infiniband::QueuePairInfo qpi0 {
ib.GetLid(1),
qp0.GetLocalQPNum(),
33,
mr->rkey,
reinterpret_cast<uint64_t>(bufs[0])
};
Infiniband::QueuePairInfo qpi1 {
ib.GetLid(1),
qp1.GetLocalQPNum(),
33,
mr->rkey,
reinterpret_cast<uint64_t>(bufs[1])
};
qp0.Plumb(IBV_QPS_RTS, qpi1);
qp1.Plumb(IBV_QPS_RTR, qpi0);
struct ibv_sge sge {qpi0.vaddr, 64, mr->lkey};
struct ibv_send_wr swr;
swr.wr.rdma.remote_addr = qpi1.vaddr;
swr.wr.rdma.rkey = qpi1.rkey;
swr.wr_id = 1;
swr.sg_list = &sge;
swr.num_sge = 1;
swr.opcode = IBV_WR_RDMA_WRITE;
swr.send_flags = IBV_SEND_SIGNALED;
swr.next = nullptr;
struct ibv_send_wr* bad_wr;
int err = ibv_post_send(qp0.qp, &swr, &bad_wr);
assert(err == 0);
ibv_wc wc;
int r;
while ((r = ibv_poll_cq(qp0.scq, 1, &wc)) != 1) {}
if (wc.status != IBV_WC_SUCCESS) {
throw TransportException(HERE, wc.status);
} else {
std::clog << "success" << std::endl << std::flush;
}
} catch (TransportException& e) {
NVDS_ERR(e.ToString().c_str());
}
}
*/
int main(int argc, char* argv[]) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
<file_sep>/src/basic_server.h
#ifndef _NVDS_BASIC_SERVER_H_
#define _NVDS_BASIC_SERVER_H_
#include "session.h"
#include <boost/asio.hpp>
namespace nvds {
class BasicServer {
public:
BasicServer(uint16_t port);
virtual ~BasicServer() {}
virtual void Run() = 0;
void Accept(MessageHandler recv_msg_handler,
MessageHandler send_msg_handler);
protected:
void RunService() { tcp_service_.run(); }
boost::asio::io_service tcp_service_;
boost::asio::ip::tcp::acceptor tcp_acceptor_;
boost::asio::ip::tcp::socket conn_sock_;
};
} // namespace nvds
#endif // _NVDS_BASIC_SERVER_H_
<file_sep>/src/test_allocator.cc
#include "allocator.h"
#include "measurement.h"
#include <gtest/gtest.h>
#include <cstdlib>
#include <iostream>
#include <vector>
using namespace std;
using namespace nvds;
TEST (AllocatorTest, Alloc) {
auto base = malloc(Allocator::kSize);
assert(base != nullptr);
Allocator a(base);
Allocator::ModificationList modifications;
modifications.reserve(20);
a.set_modifications(&modifications);
std::vector<uint32_t> collector;
Measurement alloc_measurement;
alloc_measurement.begin();
for (size_t i = 0; i < 1000 * 1000; ++i) {
modifications.clear();
auto ptr = a.Alloc(40);
collector.push_back(ptr);
}
alloc_measurement.end();
alloc_measurement.Print();
free(base);
}
/*
TEST (AllocatorTest, Init) {
auto mem = malloc(Allocator::kSize);
uintptr_t base = reinterpret_cast<uintptr_t>(mem);
Allocator a(base);
free(mem);
}
TEST (AllocatorTest, Format) {
auto mem = malloc(Allocator::kSize);
uintptr_t base = reinterpret_cast<uintptr_t>(mem);
Allocator a(base);
free(mem);
}
TEST (AllocatorTest, Alloc) {
auto mem = malloc(Allocator::kSize);
uintptr_t base = reinterpret_cast<uintptr_t>(mem);
Allocator a(base);
uintptr_t blk;
vector<uintptr_t> blks;
while ((blk = a.Alloc(12)) != 0) {
blks.push_back(blk);
}
ASSERT_EQ(4194285, blks.size());
cout << "average cnt_writes: " << a.cnt_writes() * 1.0 / blks.size() << endl;
free(mem);
}
TEST (AllocatorTest, AllocFree) {
auto mem = malloc(Allocator::kSize);
uintptr_t base = reinterpret_cast<uintptr_t>(mem);
Allocator a(base);
vector<uintptr_t> blks;
uintptr_t blk;
while ((blk = a.Alloc(12)) != 0) {
blks.push_back(blk);
}
ASSERT_EQ(4194285, blks.size());
while (blks.size() > 0) {
auto blk = blks.back();
blks.pop_back();
a.Free(blk);
}
while ((blk = a.Alloc(12)) != 0) {
blks.push_back(blk);
}
ASSERT_EQ(4194285, blks.size());
free(mem);
}
TEST (AllocatorTest, Random) {
auto mem = malloc(Allocator::kSize);
uintptr_t base = reinterpret_cast<uintptr_t>(mem);
Allocator a(base);
vector<uintptr_t> blks;
uintptr_t blk;
auto g = default_random_engine();
size_t cnt = 0;
clock_t begin = clock();
for (size_t i = 0, j = 0, k = 0; i < 64 * 1024 * 1024; ++i) {
int op = g() % 2;
if (op == 0) {
blk = a.Alloc(g() % (Allocator::kMaxBlockSize - sizeof(uint32_t)));
if (blk == 0)
break;
blks.push_back(blk);
++k;
++cnt;
} else if (j < blks.size()) {
a.Free(blks[j++]);
++cnt;
}
}
auto total_time = (clock() - begin) * 1.0 / CLOCKS_PER_SEC;
cout << "cnt: " << cnt << endl;
cout << "total time: " << total_time << endl;
cout << "average time: " << total_time / cnt << endl;
cout << "average cnt_writes: " << a.cnt_writes() * 1.0 / cnt << endl;
free(mem);
}
TEST (AllocatorTest, UsageRate) {
auto mem = malloc(Allocator::kSize);
uintptr_t base = reinterpret_cast<uintptr_t>(mem);
Allocator a(base);
vector<uintptr_t> blks;
vector<uint32_t> blk_sizes;
uintptr_t blk;
auto g = default_random_engine();
uint32_t total_size = 0;
size_t j = 0;
while (true) {
int op = g() % 3;
if (op != 0) {
uint32_t size = g() % (Allocator::kMaxBlockSize - sizeof(uint32_t));
blk = a.Alloc(size);
if (blk == 0)
break;
blks.push_back(blk);
blk_sizes.push_back(size);
total_size += size;
} else if (j < blks.size()) {
total_size -= blk_sizes[j];
a.Free(blks[j++]);
}
}
cout << "usage rate: " << total_size * 1.0 / Allocator::kSize << endl;
free(mem);
}
*/
int main(int argc, char* argv[]) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
<file_sep>/src/client.h
#ifndef _NVDS_CLIENT_H_
#define _NVDS_CLIENT_H_
#include "common.h"
#include "index.h"
#include "infiniband.h"
#include "request.h"
#include "response.h"
#include "session.h"
namespace nvds {
class Client {
using Buffer = Infiniband::Buffer;
public:
Client(const std::string& coord_addr);
~Client();
DISALLOW_COPY_AND_ASSIGN(Client);
// Get value by the key, return empty string if error occurs.
// Throw: TransportException
std::string Get(const std::string& key) {
return Get(key.c_str(), key.size());
}
std::string Get(const char* key, size_t key_len) {
auto rb = RequestAndWait(key, key_len, nullptr, 0, Request::Type::GET);
auto resp = rb->MakeResponse();
std::string ans {resp->val, resp->val_len};
recv_bufs_.Free(rb);
return ans;
}
// Insert key/value pair to the cluster, return if operation succeed.
// Throw: TransportException
bool Put(const std::string& key, const std::string& val) {
return Put(key.c_str(), key.size(), val.c_str(), val.size());
}
bool Put(const char* key, size_t key_len, const char* val, size_t val_len) {
auto rb = RequestAndWait(key, key_len, val, val_len, Request::Type::PUT);
bool ans = rb->MakeResponse()->status == Status::OK;
recv_bufs_.Free(rb);
return ans;
}
// Add key/value pair to the cluster,
// return false if there is already the same key;
// Throw: TransportException
bool Add(const std::string& key, const std::string& val) {
return Add(key.c_str(), key.size(), val.c_str(), val.size());
}
bool Add(const char* key, size_t key_len, const char* val, size_t val_len) {
auto rb = RequestAndWait(key, key_len, val, val_len, Request::Type::ADD);
// FIXME(wgtdkp): what about Status::NO_MEM?
bool ans = rb->MakeResponse()->status == Status::OK;
recv_bufs_.Free(rb);
return ans;
}
// Delete item indexed by the key, return if operation succeed.
// Throw: TransportException
bool Del(const std::string& key) {
return Del(key.c_str(), key.size());
}
bool Del(const char* key, size_t key_len) {
auto rb = RequestAndWait(key, key_len, nullptr, 0, Request::Type::DEL);
bool ans = rb->MakeResponse()->status == Status::OK;
recv_bufs_.Free(rb);
return ans;
}
// Statistic
size_t num_send() const { return num_send_; }
private:
static const uint32_t kMaxIBQueueDepth = 128;
static const uint32_t kSendBufSize = 1024 * 2 + 128;
static const uint32_t kRecvBufSize = 1024 + 128;
// May throw exception `boost::system::system_error`
tcp::socket Connect(const std::string& coord_addr);
void Close() {}
void Join();
Buffer* RequestAndWait(const char* key, size_t key_len,
const char* val, size_t val_len, Request::Type type);
boost::asio::io_service tcp_service_;
Session session_;
IndexManager index_manager_;
// Infiniband
Infiniband ib_;
Infiniband::RegisteredBuffers send_bufs_;
Infiniband::RegisteredBuffers recv_bufs_;
Infiniband::QueuePair* qp_;
// Statistic
size_t num_send_ {0};
};
} // namespace nvds
#endif // _NVDS_CLIENT_H_
<file_sep>/src/experiments/Makefile
CC = gcc -std=c11
CFLAGS = -Wall -g -D_GNU_SOURCE
LDFLAGS = -libverbs -lrt
all:
@make write-rc write-rc-multi write-rc-unsignaled send-rc send-ud
write-rc: write_rc.o
${CC} -o $@ $^ ${LDFLAGS}
write_rc.o: write_rc.c write_rc.h
${CC} ${CFLAGS} -o $@ -c $<
write-rc-multi: write_rc_multi.o
${CC} -o $@ $^ ${LDFLAGS}
write_rc_multi.o: write_rc_multi.c write_rc_multi.h
${CC} ${CFLAGS} -o $@ -c $<
write-rc-unsignaled: write_rc_unsignaled.o
${CC} -o $@ $^ ${LDFLAGS}
write_rc_unsignaled.o: write_rc_unsignaled.c write_rc_unsignaled.h
${CC} ${CFLAGS} -o $@ -c $<
send-rc: send_rc.o
${CC} -o $@ $^ ${LDFLAGS}
send_rc.o: send_rc.c send_rc.h
${CC} ${CFLAGS} -o $@ -c $<
send-ud: send_ud.o
${CC} -o $@ $^ ${LDFLAGS}
send_ud.o: send_ud.c send_ud.h
${CC} ${CFLAGS} -o $@ -c $<
clean:
rm -f *.o write-rc write-rc-multi write-rc-unsignaled send-rc send-ud
| efee11fd5cf49804224f14999d14ee659028268b | [
"Markdown",
"Makefile",
"C",
"C++",
"Shell"
] | 66 | C++ | wgtdkp/nvds | 34d58576b34326dafa7f690b0c953cb6a9ce7031 | 0f190377d254d8ae2b293873570bc78ec1dbf877 | |
refs/heads/master | <file_sep>import { Placement } from './Placement';
import { User } from './User';
/**
* To create a Request for ads, use {@link Request.Builder} to build the ad request.
* <p>
* Every Request must contain one or more Placements. Each placement represents a "slot" in which an ad may be served.
* By specifying multiple placements, you can request multiple ads in a single call.
* <p>
* @see com.adzerk.android.sdk.AdzerkSdk
*/
export interface AdRequest {
// list of placements where an ad can be served (required)
placements: Placement[];
// target to the user key used to identify a unique user
user?: User;
// zero or more keywords to use when selecting the ad
keywords?: string[];
// URL to use as the referrer when selecting an ad
referrer?: string;
// URL to use as the current page URL when selecting an ad
url?: string;
// UNIX epoch timestamp to use when selecting an ad
time?: number;
// IP address to use when selecting the ad; if specified, overrides the IP the request is made from
ip?: string;
// zero or more numeric creative ids to disregard when selecting an ad
blockedCreatives?: number[];
// hash of flight ids to arrays of UNIX epoch timestamps representing times the user viewed an ad
// in the specified flight (used for frequency capping)
flightViewTimes?: any;
}
<file_sep>import { User } from './User';
import { Decision } from './Decision';
/**
* The DecisionResponse to an ad {@link Request}.
* <p>
* A DecisionResponse will contain zero or more {@link Decision}s, one per {@link Placement} that was sent in on the requestPlacement.
* If no ad was selected for a given Placement, the corresponding Decision entry will be undefined (null).
*/
export interface DecisionResponse {
// identifies the unique user that placed the request
user: User;
// each Decision represents the ad that was selected to be served for a given Placement
decisions: Map<string, Decision>;
}
<file_sep>import { ContentData } from './ContentData';
const TYPE_HTML = 'html';
const TYPE_CSS = 'css';
const TYPE_JS = 'js';
const TYPE_JS_EXTERNAL = 'js-external';
const TYPE_RAW = 'raw';
const TEMPLATE_IMAGE = 'image';
const TEMPLATE_IMAGE_NO_WIDTH = 'image-nowidth';
const TEMPLATE_FLASH = 'flash';
const TEMPLATE_FLASH_NO_WIDTH = 'flash-nowidth';
/**
* Each {@link Decision} contains one or more Contents. Combined, the Contents represent the creative that should
* be displayed.
* <p>
* For example, a creative may contain a CSS stylesheet and a block of HTML. This would be represented as
* two Contents, one with the type css and one with the type html.
* <p>
* Custom metadata set at the creative level will be passed in the Contents as the key customData.
* <p>
* If a content uses a predefined template, the template property will be set to the name of the template to use.
* For example, an image content will have the {@link Content#TYPE_HTML} and the {@link Content#TEMPLATE_IMAGE}.
* <p>
*
* @see Decision
*/
export interface Content {
// the type of the content
type: string;
// name of the template used to render the content (unless TYPE_RAW)
template: string;
// the body of the custom template for TYPE_RAW content
customTemplate: string;
// rendered body of the content
body: string;
// contains a map of the creative data (title, height, width, etc.) and the creative metadata
contentData: ContentData;
}
<file_sep>import { Content } from './content';
import { ContentData } from './ContentData';
import { AdzerkEvent } from './AdzerkEvent';
/**
* A Decision represents the ad that was selected to be served for a given {@link Placement}.
* <p>
* A {@link DecisionResponse} will contain zero or more Decisions, one per Placement that was sent in on the requestPlacement.
* If no ad was selected for a given Placement, the corresponding Decision entry will be undefined (null).
*/
export interface Decision {
// id for the ad that was selected
adId: number;
// id for the creative in the selected ad
creativeId: number;
// id for the flight in the selected ad
flightId: number;
// id for the campaign in the selected ad
campaignId: number;
// url endpoint that, using a GET, triggers the recording of the click and redirects to the target
clickUrl: string;
// list of ad contents
contents: Content[];
// list of Events - the IDs and tracking URLs of custom events
events: AdzerkEvent[];
// url endpoint that, using a GET, triggers the recording of the impression
impressionUrl: string;
}
<file_sep>// data key for the json metadata
const KEY_CUSTOM_DATA = 'customData';
// data key for image url
const KEY_IMAGE_URL = 'imageUrl';
// data key for title
const KEY_TITLE = 'title';
/**
* Contains the data and metadata parsed from a Content element. Not intended for API consumers.
* @see Content
*/
export interface ContentData {
// map of creative data (title, height, width, etc) used to build the content body
creativeData: Map<string, object>;
// map of creative metadata; the deserialized JSON metadata from the creative
creativeMetadata: Map<string, object>;
// raw creative metadata JSON object; useful for clients needing custom deserialization
creativeMetadataJson: any;
}
<file_sep>const ADVERTISERS = 'advertisers';
const CAMPAIGNS = 'campaigns';
const CREATIVES = 'creatives';
const FLIGHTS = 'flights';
/**
* Contains the key to identify the User in a {@link DecisionResponse}
* <p>
* The response contains the key used to identify the unique user that places the request. All user keys
* are generated by the API. You can target a user when you place a {@link Request}.
*
* Not supported:
* - pendingConversions, partnerUserIds, retargettingSegments, cookieMonster, dirtyCookies
*/
export interface User {
key: string;
isNew?: boolean;
interests?: string[];
customProperties?: any;
optOut?: boolean;
blockedItems?: Map<string, number[]>;
flightViewTimes?: Map<number, number[]>;
adViewTimes?: Map<number, number[]>;
siteViewTimes?: Map<number, number[]>;
}
<file_sep>
/**
* To make an ad {@link Request}, you will specify one or more Placements. Each Placement has a unique name to
* identify a place where an ad can be served in your app. To request multiple ads in a single {@link Request} you
* specify multiple Placements.
* <p>
* <pre>
* {@code
* // Build a Request with multiple Placements:
* Request request = new Request.Builder()
* .addPlacement(new Placement(name1, networkId, siteId, adTypes))
* .addPlacement(new Placement(name2, networkId, siteId, adTypes))
* .build();
* }
* </pre>
*
*
*/
export interface Placement {
// unique name for the placement (required)
divName: string;
// network id to use when selecting an ad (required)
networkId: number;
// site id to use when selecting an ad (required)
siteId: number;
// one or more integer ad types to use when selecting an ad (required)
adTypes: number[];
// zero or more zone ids to use when selecting an ad
zoneIds?: number[];
// campaign id; if specified, only consider ads in that campaign
campaignId?: number;
// flight id; if specified, only consider ads in that flight
flightId?: number;
// ad (flight-creative map) id; if specified, only serve that ad if possible
adId?: number;
// URL that should be used as the click-through target for the ad
clickUrl?: string;
// hash of key/value pairs used for custom targeting
properties?: any;
// array of numeric event types. Requests tracking URLs for custom events
eventIds?: number[];
}
<file_sep>export { AdzerkService, AdzerkConfig, AdzerkConfigToken } from './adzerk.service';
<file_sep>const ID_VIEW_CONVERSION = 1;
const ID_CLICK_CONVERSION = 2;
const ID_SERVER_CONVERSION = 3;
const ID_UPVOTE = 10;
const ID_DOWNVOTE = 11;
const ID_DOWNVOTE_UNINTERESTING = 12;
const ID_DOWNVOTE_MISLEADING = 13;
const ID_DOWNVOTE_OFFENSIVE = 14;
const ID_DOWNVOTE_REPETITIVE = 15;
const ID_LIKE = 20;
const ID_SHARE = 21;
const ID_COMMENT = 22;
const ID_VISIBLE = 30;
const ID_HOVER = 31;
const ID_EXPAND = 32;
const ID_SHARE_FACEBOOK = 50;
const ID_SHARE_TWITTER = 51;
const ID_SHARE_PINTEREST = 52;
const ID_SHARE_REDDIT = 53;
const ID_SHARE_EMAIL = 54;
const ID_START = 70;
const ID_FIRST_QUARTILE = 71;
const ID_MIDPOINT = 72;
const ID_THIRD_QUARTILE = 73;
const ID_COMPLETE = 74;
const ID_MUTE = 75;
const ID_UNMUTE = 76;
const ID_PAUSE = 77;
const ID_REWIND = 78;
const ID_RESUME = 79;
const ID_FULLSCREEN = 80;
const ID_EXIT_FULLSCREEN = 81;
const ID_EXPAND_VIDEO = 82;
const ID_COLLAPSE = 83;
const ID_ACCEPT_INVITATION_LINEAR = 84;
const ID_CLOSE_LINEAR = 85;
const ID_SKIP = 86;
const ID_PROGRESS = 87;
const ID_COMMENT_REPLY = 101;
const ID_COMMENT_UPVOTE = 102;
const ID_COMMENT_DOWNVOTE = 103;
const ID_CUSTOM_01 = 104;
const ID_CUSTOM_02 = 105;
const ID_CUSTOM_03 = 106;
const ID_CUSTOM_04 = 107;
const ID_CUSTOM_05 = 108;
const ID_CUSTOM_06 = 109;
const ID_CUSTOM_07 = 110;
/**
* Event information included in the DecisionResponse to an ad request. Once you call an Event URL, Adzerk will track that
* event for the creative/campaign/advertiser etc. in Adzerk reporting.
* <p/>
* Events in Adzerk are data about a way a user interacts with an ad. Standard events included with all Adzerk
* accounts are clicks, impressions, and conversions. Custom events are defined by the user of the account.
*/
export interface AdzerkEvent {
/**
*
* @param id Event identifier
* @param url Url to call to track an event
*/
id: number;
url: string;
}
<file_sep># adzerk-angular-sdk<file_sep>import { Injectable, Inject } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { InjectionToken } from '@angular/core';
import { AdRequest } from './rest/AdRequest';
import { DecisionResponse } from './rest/DecisionResponse';
import { User } from './rest/User';
export interface AdzerkConfig {
apiKey: string;
networkId: number;
siteId: number;
baseUrl?: string;
}
export const AdzerkConfigToken = new InjectionToken<AdzerkConfig>('adzerk-config');
/**
* Service interface for Native Ads API
*/
@Injectable()
export class AdzerkService {
private baseUrl: string;
constructor(
private http: HttpClient,
@Inject(AdzerkConfigToken) private config: AdzerkConfig) {
this.baseUrl = config.baseUrl || 'http://engine.adzerk.net';
}
/**
* Request an Ad.
* <p/>
* NOTE: The placement in the Request corresponds to an ad placement on a page or app.
*/
public async requestAds(request: AdRequest): Promise<DecisionResponse> {
const options = {
headers: {
'Content-Type': 'application/json'
},
withCredentials: true
};
const res = await this.http.post(`${this.baseUrl}/api/v2`, request, options).toPromise();
return <DecisionResponse>res;
}
/**
* Set the custom properties of a User by specifying properties in a JSON string
* <p/>
* Example: POST http://engine.adzerk.net/udb/{networkId}/custom?userKey=<user-key>
*/
public async setUserProperties(networkId: number, userKey: string, properties: any): Promise<any> {
const options: any = {
headers: {
'Content-Type': 'application/json'
},
responseType: 'text',
withCredentials: true
};
const res = await this.http.post(`${this.baseUrl}/udb/${networkId}/custom?userKey=${userKey}`,
properties, options).toPromise();
return res;
}
/**
* Reads a User from UserDB.
* <p/>
* The User object contains the users key, custom properties, interests and additional details.
* <p/>
* Example: GET http://engine.adzerk.net/udb/{networkid}/read?userKey=<user-key>
*/
public async getUser(networkId: number, userKey: string): Promise<User> {
const options = {
headers: {
'X-Adzerk-ApiKey': this.config.apiKey
}
};
const res = await this.http.get(`${this.baseUrl}/udb/${networkId}/read?userKey=${userKey}`, options).toPromise();
return <User>res;
}
/**
* Sets a User interest. The User object has a list of behavioral interest keywords (ie, cars, sports, ponies).
* <p/>
* Example: GET http://engine.adzerk.net/udb/{networkId}/interest/i.gif?userKey=<user-key>&interest=<interest>
*/
public async setUserInterest(networkId: number, userKey: string, interest: string): Promise<any> {
const res = await this.http.get(`${this.baseUrl}/udb/${networkId}/interest/i.gif?userKey=${userKey}&interest=${interest}`
, { responseType: 'arraybuffer' }).toPromise();
console.log(`User interest '${interest}' set.`);
return res;
}
/**
* Sets a flag to allow User to opt-out of UserDB tracking. This call clears the entire user record and then
* sets a value of "optedOut" to true.
* <p/>
* Example: GET http://engine.adzerk.net/udb/{networkid}/optout/i.gif?userKey=<user-key>
*/
public async setUserOptout(networkId: number, userKey: string): Promise<any> {
const options = {
headers: {
'X-Adzerk-ApiKey': this.config.apiKey
}
};
return await this.http.get(`${this.baseUrl}/udb/${networkId}/optout/i.gif?userKey=${userKey}`, options).toPromise();
}
/**
* Retargeting creates a new number set called Segments-{brandId} that contains each of the segments.
* <p/>
* Example: GET http://engine.adzerk.net/udb/{networkId}/rt/{brandId}/{segment}/i.gif?userKey=<user-key>
*/
public async setUserRetargeting(networkId: number, brandId: number, segment: string, userKey: string): Promise<any> {
const options = {
headers: {
'X-Adzerk-ApiKey': this.config.apiKey
}
};
return await this.http.get(`${this.baseUrl}/udb/${networkId}/rt/${brandId}/${segment}/i.gif?userKey=${userKey}`,
options).toPromise();
}
}
| 3a317e3fa88b07e2b77683500be8e4528b3fec76 | [
"Markdown",
"TypeScript"
] | 11 | TypeScript | chieflogic/adzerk-angular-sdk | 9cbfed9ddf88406e0b792823e7d98cfb19943c14 | ab1c87a1d898e3cd33c6694e5df5d26baf7147ea | |
refs/heads/main | <file_sep># ToDo app example
This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
## How to start
To start the project you should run:
```
yarn
yarn start
```
Runs the app in the development mode.\
Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
The page will reload if you make edits.\
You will also see any lint errors in the console.
You can build the application by running
```
yarn build
```
It creates a `build` folder in the root of the project, which can be served
by a web server (or previewed via GitHub Pages; instructions on how to do this [can be found here](https://create-react-app.dev/docs/deployment/#github-pages))
## Things which need to be added
### Required
* extend `ToDoForm` component with 2 fields:
* `title` (instead of `name`)
* `description`
* render `title` and `description` in the list of added `ToDoItems` and style them properly
* show the number of unchecked todos (you can place it somewhere on the page)
* add a button to remove all checked todos
* add form validation:
* length of the field shouldn’t be less than 3 characters
* title should start with a capital letter
* styling:
* change the color of the remove button to red and replace its text with a trash icon
* use different colors for checked and unchecked todos
### Optional
* strike through the text when a todo is checked
* add the time of creation in the format `dd.MM.yyyy – hh:mm` (ex: `22.01.2021 – 11:10`) and render the date in the list
* publish the app on gh-pages
<file_sep>import React from 'react';
import { Form, Input, Button } from 'antd';
export const ToDoForm = (props) => {
const { onSubmit } = props;
const [form] = Form.useForm();
const onFinish = (values) => {
if (onSubmit) {
onSubmit(values.name);
}
form.resetFields();
}
return (
<Form className="todo-form" form={form} layout={'inline'} onFinish={onFinish}>
<Form.Item name="name" className="todo-form-input">
<Input placeholder={'New todo'} />
</Form.Item>
<Form.Item className="todo-form-actions">
<Button htmlType="submit" type="primary">Add</Button>
</Form.Item>
</Form>
)
}
| f7fc186c0e50ff2301b936d4e647a6dc6c67b631 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | venskovich1/Extra-main-task | e240d899454b07cc534a89e7b6423371c6db920d | 19fe32acc8c565daf3f5fe43f912b0cf7d59a2fe | |
refs/heads/master | <file_sep>from django.contrib import admin
# Register your models here.
from .models import Questions, Choice
class QuestionAdmin(admin.ModelAdmin):
list_display = ('question_text','pub_date','was_published_recently')
#fields = ['question_text','pub_date']
list_filter = ['pub_date']
search_fields = ['question_text']
class ChoiceAdmin(admin.ModelAdmin):
list_display = ('choice_text','votes')
admin.site.register(Questions,QuestionAdmin)
admin.site.register(Choice,ChoiceAdmin)
<file_sep># Python27
This repository contains projects related to Python 2.7.
<file_sep><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<body>
<header>
{% block header %}
This is the main header block; when this text appears, it means the base template is being reused.
{% end %}
</header>
<footer>
{% block footer %}This is the main footer block; when this text appears, it means the base template is being reused. {% end %}
</footer>
</body>
</html><file_sep># -*- coding:utf-8 -*-
import tornado.httpserver
import tornado.ioloop
import tornado.web
import tornado.options
import os
import torndb
from tornado.options import options,define
define("port",default=8001,help="This is help",type=int)
class IndexGetHandler(tornado.web.RequestHandler):
def get(self):
self.render('index.html')
class IndexPostHandler(tornado.web.RequestHandler):
conn = torndb.Connection("127.0.0.1", "users1", user="root", password="<PASSWORD>")
@tornado.web.asynchronous
def post(self):
user_name = self.get_arguments('user_name')
pass_word = self.get_argument('pass_word')
        str1 = "SELECT password FROM users WHERE username= %s LIMIT 1"
print self.conn.get(str1,user_name)
get_password = self.conn.get(str1,user_name)
if get_password is not None:
for name,value in get_password.items():
print name,value,pass_word
if value.strip()!='' and (str(pass_word) == str(value)):
for user in user_name:
self.redirect("/userinfolist")
else:
self.redirect("/")
else:
self.redirect("/")
self.conn.close()
class UserInfoGetHandler(tornado.web.RequestHandler):
conn = torndb.Connection("127.0.0.1", "users1", user="root", password="<PASSWORD>")
def get(self):
        str1 = "SELECT username,password FROM users"
get_user_passwd = self.conn.query(str1)
print get_user_passwd
if get_user_passwd is not None:
self.render('result.html',get_user_passwd=get_user_passwd)
self.conn.close()
def write_error(self, status_code, **kwargs):
self.write("G<NAME>it, user! You caused a %d error." % status_code)
if __name__ == '__main__':
tornado.options.parse_command_line()
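    # The handlers below map "/", "/result" and "/userinfolist" to the login form,
    # the login POST check and the user listing page respectively.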
app = tornado.web.Application(
handlers=[(r"/",IndexGetHandler),(r"/result",IndexPostHandler),(r"/userinfolist",UserInfoGetHandler)],
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
debug=True
)
http_server = tornado.httpserver.HTTPServer(app)
http_server.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()
<file_sep><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>
{% block ti %}
{{ year }}
{% endblock %}
</title>
</head>
<body>
{% block question_text %}
{% for y in year_list %}
{{ y.question_text }}
{% endfor %}
<p>{{ year }}</p>
{% endblock %}
{% block pub_date %}
{% for y in year_list %}
{{ y.pub_date | date:"F j,Y" }}
{% endfor %}
<p>{{ year }}</p>
{% endblock %}
</body>
</html>
<file_sep>"""mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
#from polls.views import index as poll_views_index
from polls.views import detail as poll_views_detail
from polls.views import results as poll_views_result
from polls.views import vote as poll_views_vote
from polls.views import year_archive as poll_views_year_archive
from polls.views import os_command as poll_views_os_command
from polls.views import DetailView,IndexView
urlpatterns = [
url(r'^', admin.site.urls),
url(r'^(?P<parameter1>[0-9]+)/vote/$',poll_views_vote,name='poll_views_vote'),
url(r'^year_archive/(?P<year>[0-9]+)/$', poll_views_year_archive, name='poll_views_year_archive'),
url(r'^detail/(?P<pk>[0-9]+)/$', DetailView.as_view(), name='detail'),
url(r'^index/(?P<pk>[0-9]+)/$', IndexView.as_view(), name='index'),
# url(r'^detail/(?P<question_id>[0-9]+)/$',poll_views_detail,name='poll_views_detail'),
url(r'^results/(?P<question_id>[0-9]+)/$', poll_views_result, name='poll_views_result'),
url(r'^vote/(?P<question_id>[0-9]+)/$', poll_views_vote, name='poll_views_vote'),
#url(r'^index/$', poll_views_index, name='poll_views_index'),
url(r'os_command/$',poll_views_os_command,name='os_command'),
]
<file_sep># -*- coding: utf-8 -*-
from django.shortcuts import render,get_list_or_404,get_object_or_404
from django.http import Http404
from django.http import HttpResponse
from django.views import generic
# Create your views here.
from .models import Questions, Choice
def vote(request,parameter1):
try:
question = get_list_or_404(Questions, pk=parameter1)
print question
except Questions.DoesNotExist:
raise Http404("Question does not exist")
context = {'question':question}
return render(request,'polls/vote.html',context)
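# NOTE: this `vote` view is shadowed by the second `vote` definition below;
# Python keeps only the last one, so only that definition is actually used by the URLconf.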
def year_archive(request,year):
try:
year_list = Questions.objects.filter(pub_date__year= year)
print year_list
except Questions.DoesNotExist:
raise Http404("Question does not exist")
context = {'year':year,'year_list':year_list}
return render(request,'polls/year_archive.html',context)
def detail(request,question_id):
return HttpResponse("You're looking at question %s." %question_id)
def results(request,question_id):
response= "You're looking at the results of question %s."
return HttpResponse(response %question_id)
def vote(request, question_id):
return HttpResponse("You're voting on question %s." % question_id)
# def index(request):
# last_question_list = Questions.objects.order_by('pub_date')[:10]
# # output = '|'.join([q.question_text for q in last_question_list])
# #context = {'output':output}
# return render(request,'polls/index.html',{'last_question_list':last_question_list})
class IndexView(generic.ListView):
#model = Questions
context_object_name = 'latest_question_list'
    template_name = 'polls/index.html'  # by default the template variable would be questions_list; context_object_name overrides the variable name used in the template
def get_queryset(self):
return Questions.objects.order_by('-pub_date')[:10]
class DetailView(generic.DetailView):
model = Questions
template_name = 'polls/detail.html'
class ResultView(generic.DetailView):
model = Questions
template_name = 'polls/result.html'
# Calling a system command from Python
def os_command(request):
# import os
# context = os.system('dir d:')
# print context
# return HttpResponse( context)
a = 10
b = 2
c = a / b
print ("a / b = {0}{1}".format(c,"ok"))
return HttpResponse(c)
| 5a5da772538f58aaaf02b1714df88c87b621f5d2 | [
"Markdown",
"Python",
"HTML"
] | 7 | Python | luxiaochuang/Python27 | 44e9b04b47aa348ec59e0aeceb35a12217d401cb | 99f266b22a9127fd3d5b6de22e2db689ef38bc66 | |
refs/heads/master | <repo_name>iamawwsome/phantomjs-html-pdf<file_sep>/html-to-pdf.js
const webpage = require('webpage');
const system = require('system');
const args = system.args;
const input = args[1];
const output = args[2];
if (!output) {
throw new Error('Missing output args.');
}
const page = webpage.create();
page.open(input, function() {
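  // US Letter page (8.5in x 11in) with 1cm header and footer bands; PhantomJS passes the
  // current page number and the total page count to each phantom.callback below.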
page.paperSize = {
width: '8.5in',
height: '11in',
header: {
height: '1cm',
contents: phantom.callback(function(pageNum, numPages) {
return ('<h1 style="font-size: 12px">' + page.title + '<span style="float:right">' + pageNum + ' / ' + numPages + '</span></h1>');
})
},
footer: {
height: '1cm',
contents: phantom.callback(function(pageNum, numPages) {
return ('<h1 style="font-size: 12px">' + page.title + '<span style="float:right">' + pageNum + ' / ' + numPages + '</span></h1>');
}),
},
};
page.render(output);
phantom.exit();
});
<file_sep>/README.md
## PhantomJS: HTML to PDF
This is an example on how to create an API service, using PhantomJS, to generate PDF documents from webpages.
### Prerequisites
- NodeJS 6+
- Yarn 1.3+
- NPM 5+
Download the PhantomJS binary at http://phantomjs.org/download.html and place the executable in this project's bin directory.
```
yarn install && node index.js
```
The example php website should be accessible at http://localhost:3000
| 3c840e5172f4908696d9245cd8eb44cb72df85c4 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | iamawwsome/phantomjs-html-pdf | 35a214e0a4793804693713225216cbc532b9f3e2 | 034ee88112421094a09c540998889d304947f231 | |
refs/heads/master | <file_sep><?php
include_once '../DB/DBConnect.php';
include_once '../DB/StudentDB.php';
include_once '../Student.php';
$students = new StudentDB();
$allStudent = $students->getAll();
?>
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Document</title>
</head>
<body>
<table border = 4px; cellspacing="0" cellpadding="5">
<tr>
<td>
            No.
        </td>
        <td>
            Name
        </td>
        <td>
            Password
        </td>
        <td>
            Email address
</td>
</tr>
<?php foreach ($allStudent as $key=>$students): ?>
<tr>
<td>
<?php echo ++$key?>
</td>
<td>
<?php echo $students->getName()?>
</td>
<td>
<?php echo $students->getPassword()?>
</td>
<td>
<?php echo $students->getEmail()?>
</td>
<td>
<a href="delete.php?id=<?php echo $students->getId(); ?>">Delete</a>
</td>
<td>
<a href="update.php?id=<?php echo $students->getId(); ?>">Update</a>
</td>
</tr>
<?php endforeach; ?>
</table>
</body>
</html>
<file_sep><?php
namespace App\Http\Repository\Eloquent;
use App\Customer;
use App\Http\Repository\contracts\RepositoryInterface;
class CustomerRepositoryEloquent implements RepositoryInterface
{
public function getAll()
{
return Customer::paginate(5);
}
public function store($customer)
{
$customer->save();
}
public function findByID($id)
{
return Customer::FindOrFail($id);
}
public function update($customer)
{
$customer->save();
}
public function delete($customer)
{
$customer->delete();
}
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use phpDocumentor\Reflection\Types\This;
class City extends Model
{
    /**
     * @return \Illuminate\Database\Eloquent\Relations\HasMany
     */
public function customers()
{
return $this->hasMany('App\Customer');
}
}
<file_sep><?php
include_once '../DBconnect.php';
include_once '../Student.php';
include_once '../DBstudent.php';
if ($_SERVER["REQUEST_METHOD"] == "POST") {
if (!empty($_POST['name']) && !empty($_POST['email'])) {
$name = $_POST['name'];
$email = $_POST['email'];
$student = new Student($name, $email);
$studentDB = new DBstudent();
$studentDB->create($student);
header('location: list.php', true);
}
}
?>
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Add new student</title>
</head>
<body>
<h2>Add new student</h2>
<div class="table">
<form method="post" action="">
<table>
<tr>
<td>Name</td>
<td><input type="text" name="name" size="20"></td>
</tr>
<tr>
<td>Email</td>
<td><input type="text" name="email" size="20"></td>
</tr>
<tr>
<tr>
<td></td>
<td>
<button type="submit">Add</button>
</td>
</tr>
</table>
</form>
</div>
</body>
</html>
<file_sep><?php
include 'QuadraticEquation.php';
$mmm= new QuadraticEquation(2,6,4);
echo $mmm->getDiscriminant();
echo 'a';
namespace App\Http\Service;
use App\Http\Repository\NoteRepository;
class NoteService
{
protected $noteRepository;
public function __construct(NoteRepository $noteRepository)
{
$this->noteRepository = $noteRepository;
}
public function getAll()
{
return $this->noteRepository->getAll();
}
}
<file_sep><?php
use Illuminate\Database\Seeder;
class student_data extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run()
{
for ($i = 1; $i <= 10; $i++) {
DB::table('table_student')->insert([
'name' => str_random(10),
'age' => mt_rand(10,80)
]);
}
}
}
<file_sep><?php
include_once '../DB/DBConnect.php';
include_once '../Student.php';
include_once '../DB/StudentDB.php';
$id = $_GET['id'];
$studentDB = new StudentDB();
$studentDB->delete($id);
header('location: ../index.php', true);
<file_sep><!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Document</title>
</head>
<body>
<form action="<?php echo htmlspecialchars($_SERVER["PHP_SELF"]);?>" method="post">
<input type="text" name="money" placeholder="Money First"/><br>
<input type="text" name="month" placeholder="Month"/><br>
<input type="text" name="interestRate" placeholder="Interest rate"/>%<br>
<input type = "submit" id = "submit" value = "Calculator"/>
</form>
<?php
if ($_SERVER["REQUEST_METHOD"] == "POST") {
$money = $_POST["money"];
$month = $_POST["month"];
$rate = $_POST["interestRate"];
$calculator = $money + ($money * $month * $rate/100);
echo $calculator;
}
?>
</body>
</html><file_sep><?php
include_once 'Circle.php';
class Cylinder extends Circle
{
public $height;
public function __construct($radius, $color, $height)
{
parent::__construct($radius, $color);
$this->height = $height;
}
public function getHeight()
{
return $this->height;
}
public function setHeight($height)
{
$this->height = $height;
}
public function calculatorVolume()
{
return parent::calculatorArea() * $this->height;
}
public function toString()
{
return 'Color Cylinder: '.$this->getColor().'<br>'.'Height Cylinder: '.$this->getHeight().'<br>'.'Volume Cylinder: '.$this->calculatorVolume();
}
}
<file_sep><?php
include_once 'DB/DBConnect.php';
include_once 'DB/StudentDB.php';
include_once 'Student.php';
$students = new StudentDB();
$allStudent = $students->getAll();;
?>
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Document</title>
</head>
<body>
<a href="CRUD/create.php"><button>Create</button></a>
<table border = 4px; cellspacing="0" cellpadding="5">
<tr>
<td>
            No.
        </td>
        <td>
            Name
</td>
</tr>
<?php foreach ($allStudent as $key=>$students): ?>
<tr>
<td>
<?php echo ++$key?>
</td>
<td>
<?php echo $students->getName()?>
</td>
<td>
<a href="CRUD/delete.php?id=<?php echo $students->getId(); ?>">Delete</a>
</td>
<td>
<a href="CRUD/update.php?id=<?php echo $students->getId(); ?>">Update</a>
</td>
</tr>
<?php endforeach; ?>
</table>
</body>
</html>
<file_sep><?php
namespace App\Http\Controllers;
use App\Employee;
use App\Http\Service\EmployeeService;
use Illuminate\Http\Request;
class EmployeeController extends Controller
{
protected $employeeService;
public function __construct(EmployeeService $employeeService)
{
$this->employeeService = $employeeService;
}
public function index()
{
$employees = $this->employeeService->getAll();
return view('employees.list', compact('employees'));
}
public function create()
{
return view('employees.create');
}
public function store(Request $request)
{
$employee = new Employee();
// $employee->manhanvien = $request->input('manhanvien');
$employee->nhomnhanvien = $request->input('nhomnhanvien');
$employee->hoten = $request->input('hoten');
$employee->gioitinh = $request->input('gioitinh');
$employee->sodienthoai = $request->input('sodienthoai');
$employee->socmnd = $request->input('socmnd');
$employee->ngaysinh = $request->input('ngaysinh');
$employee->email = $request->input('email');
$employee->diachi = $request->input('diachi');
$employee->save();
return redirect()->route('employees.index');
}
public function edit($id)
{
$employee = Employee::findOrFail($id);
return view('employees.edit', compact('employee'));
}
public function update(Request $request, $id)
{
$employee = Employee::findOrFail($id);
$employee->nhomnhanvien = $request->input('nhomnhanvien');
$employee->hoten = $request->input('hoten');
$employee->gioitinh = $request->input('gioitinh');
$employee->sodienthoai = $request->input('sodienthoai');
$employee->socmnd = $request->input('socmnd');
$employee->ngaysinh = $request->input('ngaysinh');
$employee->email = $request->input('email');
$employee->diachi = $request->input('diachi');
$employee->save();
return redirect()->route('employees.index');
}
public function destroy($id)
{
$employee = Employee::findOrFail($id);
$employee->delete();
return redirect()->route('employees.index');
}
public function search(Request $request)
{
$keyword = $request->input('keyword');
if (!$keyword) {
return redirect()->route('employees.index');
}
$employees = Employee::where('name', 'LIKE', '%' . $keyword . '%')->paginate(5);
return view('employees.list', compact('employees'));
}
}
<file_sep><!DOCTYPE html>
<html>
<head>
<title>Free HTML5 CSS3 Metro Weather Widget | Designmaz</title>
<!-- For-Mobile-Apps -->
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<script type="application/x-javascript"> addEventListener("load", function() { setTimeout(hideURLbar, 0); }, false); function hideURLbar(){ window.scrollTo(0,1); } </script>
<!-- //For-Mobile-Apps -->
<!-- Style -->
<link rel="stylesheet" href="css/style.css" type="text/css" media="all" />
<!-- Style -->
<!-- JavaScript -->
<script type="text/javascript" src="js/jquery.min.js"></script>
<!-- Javascript -->
<script>
function startTime() {
var today = new Date();
var h = today.getHours();
var m = today.getMinutes();
var s = today.getSeconds();
m = checkTime(m);
s = checkTime(s);
document.getElementById('txt').innerHTML =
h + ":" + m + ":" + s;
var t = setTimeout(startTime, 500);
}
function checkTime(i) {
if (i < 10) {i = "0" + i}; // add zero in front of numbers < 10
return i;
}
</script>
<!--Google Analytics Designmaz-->
<script>(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');ga('create', 'UA-35751449-15', 'auto');ga('send', 'pageview');</script>
</head>
</head>
<body onload="startTime()">
<h1>HTML5 CSS3 Metro Weather Widget</h1>
<div class="content">
<div class="place">
<div id="txt"></div>
<div class="dmy">
<script type="text/javascript">
var mydate=new Date()
var year=mydate.getYear()
if(year<1000)
year+=1900
var day=mydate.getDay()
var month=mydate.getMonth()
var daym=mydate.getDate()
if(daym<10)
daym="0"+daym
var dayarray=new Array("Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday")
var montharray=new Array("January","February","March","April","May","June","July","August","September","October","November","December")
document.write(""+dayarray[day]+", "+montharray[month]+" "+daym+", "+year+"")
</script>
</div>
<div class="city">
<p>Budapest, Hungary</p>
</div>
</div>
<div class="temp">
<div id="dd1" class="wrapper-dropdown-3" tabindex="1">
<span>
<img src="images/nav.png" alt=""/>
</span>
<ul class="dropdown">
<li><a href="#">TODAY</a></li>
<li><a href="#">FORECAST</a></li>
<li><a href="#">CITIES</a></li>
<li><a href="#">NATIONAL</a></li>
<li><a href="#">WORLD</a></li>
<li><a href="#">SETTINGS</a></li>
</ul>
<script type="text/javascript">
function DropDown(el) {
this.dd = el;
this.initEvents();
}
DropDown.prototype = {
initEvents : function() {
var obj = this;
obj.dd.on('click', function(event){
$(this).toggleClass('active');
event.stopPropagation();
});
}
}
$(function() {
var dd = new DropDown( $('#dd1') );
$(document).click(function() {
// all dropdowns
$('.wrapper-dropdown-3').removeClass('active');
});
});
</script>
</div>
<p>20° C</p>
<div class="day">
<div class="sat">
<h3>Sat</h3>
<p>19°C↓ 22°C↑</p>
</div>
<div class="sund">
<h3>Sun</h3>
<p>17°C↓ 20°C↑</p>
</div>
<div class="mon">
<h3>Mon</h3>
<p>15°C↓ 18°C↑</p>
</div>
<div class="clear"></div>
</div>
</div>
<div class="clear"></div>
</div>
<div class="footer">
<p>Copyright © 2015 Metro Weather. All Rights Reserved | HTML Template by <a href="https://www.designmaz.net">Designmaz</a></p>
</div>
</body>
</html>
<file_sep><?php
include_once ('Rectangle.php');
include_once 'Resizeable.php';
class Square extends Rectangle implements Resizeable
{
public function __construct($name, $width)
{
parent::__construct($name, $width, $width, $width);
}
public function resize($doublePercent)
{
$this->width = $this->width * $doublePercent;
}
}<file_sep><!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Document</title>
</head>
<form name = 'QuadraticEquation' method="post">
<h3>ax^2 + bx + c = 0</h3>
<input type="number" name = 'inputA' placeholder="Input a">
<input type="number" name = 'inputB' placeholder="Input b">
<input type="number" name = 'inputC' placeholder="Input c">
<input type="submit" id="submit" value="Result"/>
</form>
<body>
<?php
include 'QuadraticEquation.php';
if ($_SERVER['REQUEST_METHOD'] == 'POST'){
$a = $_POST['inputA'];
$b = $_POST['inputB'];
$c = $_POST['inputC'];
$x1x2 = -$b/(2*$a);
$quadraticEquation = new QuadraticEquation($a,$b,$c);
$delta = $quadraticEquation->getDiscriminant();
if ($delta > 0) {
echo 'x1 = '.$quadraticEquation->getRoot1().' and '.'x2 = '.$quadraticEquation->getRoot2();
    } elseif ($delta == 0) {
echo 'x1 = x2 = '.$x1x2;
} else{
echo 'The equation has no roots';
}
}
?>
</body>
</html><file_sep><?php
class Student{
public $id;
public $name;
public function __construct($id, $name)
{
$this->id=$id;
$this->name=$name;
}
/**
* @return mixed
*/
public function getId()
{
return $this->id;
}
/**
* @return mixed
*/
public function getName()
{
return $this->name;
}
}<file_sep><?php
namespace App\Http\Service;
use App\Http\Repository\Note_typeRepository;
class Note_typeService
{
protected $note_typeRepository;
public function __construct(Note_typeRepository $note_typeRepository)
{
$this->note_typeRepository = $note_typeRepository;
}
public function getAll()
{
return $this->note_typeRepository->getAll();
}
}
<file_sep><?php
namespace App\Http\Repository;
interface Repository
{
public function getAll();
public function findByID($id);
public function create($data);
public function update($data, $object);
public function destroy($object);
}
?><file_sep><?php
class Student{
public $id;
public $username;
public $password;
public $email;
public function __construct($id, $username, $password, $email)
{
$this->id=$id;
$this->username=$username;
        $this->password=$password;
$this->email=$email;
}
/**
* @return mixed
*/
public function getId()
{
return $this->id;
}
/**
* @return mixed
*/
public function getName()
{
return $this->username;
}
public function getPassword(){
        return $this->password;
}
public function getEmail(){
return $this->email;
}
}<file_sep><?php
include_once 'DBConnect.php';
include_once 'DBStudent.php';
$student = new DBStudent();
print_r($student->getAll());<file_sep><?php
namespace App\Providers;
use App\Http\Repository\contracts\RepositoryInterface;
use App\Http\Repository\Eloquent\CustomerRepositoryEloquent;
use App\Http\Service\CustomerServiceInterface;
use App\Http\Service\Impl\CustomerService;
use Illuminate\Support\ServiceProvider;
class AppServiceProvider extends ServiceProvider
{
/**
* Register any application services.
*
* @return void
*/
public function register()
{
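        // Bind the service and repository interfaces to their concrete implementations so
        // that constructor type-hints (e.g. CustomerServiceInterface in controllers) are
        // resolved to these classes by the container.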
$this->app->singleton(CustomerServiceInterface::class,
CustomerService::class
);
$this->app->singleton(RepositoryInterface::class,
CustomerRepositoryEloquent::class
);
}
/**
* Bootstrap any application services.
*
* @return void
*/
public function boot()
{
//
}
}
<file_sep><?php
include_once '../DBconnect.php';
include_once '../Student.php';
include_once '../DBstudent.php';
if ($_SERVER["REQUEST_METHOD"] == "POST") {
if (!empty($_POST['name']) && !empty($_POST['email'])) {
if (!empty($_GET['id'])) {
$id = $_GET['id'];
$name = $_POST['name'];
$email = $_POST['email'];
$studentDB = new DBstudent();
$studentDB->update($id, $name, $email);
}
}
header('location: list.php', true);
} else {
if (!empty($_GET['id'])) {
$id = $_GET['id'];
$studentDB = new DBstudent();
$student = $studentDB->finById($id);
if (is_string($student)) {
echo $student . '<br>';
            echo '<a href="list.php">Back</a>';
die();
}
}
}
?>
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Update info</title>
</head>
<body>
<h2>Update information</h2>
<div class="table">
<form method="post" action="">
<table>
<tr>
<td>ID</td>
<td>
<?php if (isset($_GET['id'])) {
$id = $_GET['id'];
echo $id;
} ?>
</td>
</tr>
<tr>
<td>Name</td>
<td><input type="text" name="name" size="20" value="<?php echo $student->getName(); ?>"></td>
</tr>
<tr>
<td>Email</td>
<td><input type="text" name="email" size="20" value="<?php echo $student->getEmail(); ?>"></td>
</tr>
<tr>
<td></td>
<td>
<button type="submit">Update</button>
</td>
</tr>
</table>
</form>
</div>
</body>
</html>
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use phpDocumentor\Reflection\Types\This;
class Customer extends Model
{
protected $table = 'customer';
return $this->belongsTo('App\City');
}
}
<file_sep><?php
namespace App\Http\Service;
interface CustomerServiceInterface
{
public function getAll();
public function store($request);
public function update($request, $id);
public function delete($id);
}
<file_sep><?php
namespace App\Http\Controllers;
use App\Customer;
use App\Http\Service\Impl\CustomerService;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Session;
use http\Env\Response;
class CustomerController extends Controller
{
Protected $CustomerService;
public function __construct(CustomerService $CustomerService)
{
$this->CustomerService = $CustomerService;
}
public function index()
{
$customers = $this->CustomerService->getAll();
return view('customers.list', compact('customers'));
}
public function create()
{
return view('customers.create');
}
public function store(Request $request)
{
$this->CustomerService->store($request);
        Session::flash('success', 'Customer created successfully');
return redirect()->route('customers.index');
}
public function edit($id)
{
$customer = $this->CustomerService->findByID($id);
return view('customers.edit', compact('customer'));
}
public function update(Request $request, $id)
{
$this->CustomerService->update($request, $id);
        Session::flash('success', 'Customer updated successfully');
return redirect()->route('customers.index');
}
public function destroy($id)
{
$this->CustomerService->delete($id);
        Session::flash('success', 'Customer deleted successfully');
return redirect()->route('customers.index');
}
}
<file_sep><?php
class DBConnect
{
public $username ;
public $password;
public $dsn;
public function __construct()
{
$this->username = 'root';
$this->password = '<PASSWORD>';
$this->dsn = "mysql:host=localhost;dbname=students_database";
}
public function connect (){
$conn = null;
try{
$conn = new PDO($this->dsn, $this->username, $this->password);
}catch (PDOException $exception){
return $exception ->getMessage();
}
return $conn;
}
}<file_sep><?php
namespace App\Http\Controllers;
use App\student_model;
//use Illuminate\Http\Request;
class student_controller extends Controller
{
public function all(){
$student = student_model::all();
return view('list', compact(['student']));
}
}
<file_sep><?php
include 'MoveablePoint.php';
include 'Point.php';
$point = new Point(5,10);
$moveablepoint = new MoveablePoint(10,10,5,5);
//var_dump($point->getXY());
//echo $moveablepoint->toString();
//echo $point->toString();<file_sep><?php
include_once '../DBconnect.php';
include_once '../Student.php';
include_once '../DBstudent.php';
if ($_SERVER["REQUEST_METHOD"] == "GET") {
if (isset($_GET['id'])) {
$id = $_GET['id'];
$studentDB = new DBstudent();
$studentDB->del($id);
}
header('location: list.php', true);
}
?><file_sep><?php
namespace App\Http\Controllers;
use GuzzleHttp\Client;
use Illuminate\Http\Request;
class weatherController extends Controller
{
public function index ()
{
$cityID = 1581130;
$cityName = '<NAME>i';
$cityJson = file_get_contents('cityvn.json');
$cities = (object)json_decode($cityJson);
$client = new Client();
$response = $client->request('GET', "api.openweathermap.org/data/2.5/weather?id=". $cityID ."&appid=".env("WEATHER_KEY"));
$dataJson = $response->getBody();
$data = json_decode($dataJson);
$main = $data->main;
// var_dump($main);
return view('index', compact('main', 'cities', 'cityName'));
}
public function search(Request $request){
$cityJson = file_get_contents('cityvn.json');
$cities = (object)json_decode($cityJson);
$cityID = $request->cityID;
$cityName = '';
foreach ($cities as $key => $city)
{
if($cityID == $city->id){
$cityName = $city->name;
break;
}
}
$client = new Client();
$response = $client->request('GET', "api.openweathermap.org/data/2.5/weather?id=". $cityID ."&appid=".env("WEATHER_KEY"));
$dataJson = $response->getBody();
$data = json_decode($dataJson);
$main = $data->main;
return view('index', compact('main', 'cities', 'cityName'));
}
}
<file_sep><?php
use Illuminate\Support\Facades\Route;
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
Route::get('/', function () {
return view('welcome');
});
Route::group(['prefix'=>'notes'], function(){
Route::get('/', 'NoteController@index')->name('notes.index');
Route::get('/create', 'NoteController@create')->name('notes.create');
Route::post('/create', 'NoteController@store')->name('notes.store');
Route::get('/edit/{id}', 'NoteController@edit')->name('notes.edit');
Route::post('/edit/{id}', 'NoteController@update')->name('notes.update');
Route::get('/delete/{id}', 'NoteController@destroy')->name('notes.delete');
});
Route::group(['prefix'=>'notes_type'], function() {
Route::get('/', 'Note_typeController@index')->name('notes_type.index');
Route::get('/create', 'Note_typeController@create')->name('notes_type.create');
Route::post('/create', 'Note_typeController@store')->name('notes_type.store');
Route::get('/edit/{id}', 'Note_typeController@edit')->name('notes_type.edit');
Route::post('/edit/{id}', 'Note_typeController@update')->name('notes_type.update');
Route::get('/delete/{id}', 'Note_typeController@destroy')->name('notes_type.delete');
});
<file_sep><?php
include_once ('Circle.php');
include_once ('Cylinder.php');
include_once ('Rectangle.php');
include_once ('Square.php');
$rate = rand(1, 100);
echo $rate.'<br>';
$circle = new Circle('Circle 01', 3);
echo 'Circle area: ' . $circle->calculateArea() . '<br />';
echo 'Circle perimeter: ' . $circle->calculatePerimeter() . '<br />';
$circle->resize($rate);
echo 'Circle area 2: ' . $circle->calculateArea() . '<br />';
echo 'Circle perimeter 2: ' . $circle->calculatePerimeter() . '<br />';
$cylinder = new Cylinder('Cylinder 01', 3 , 4);
echo 'Cylinder area: ' . $cylinder->calculateArea() . '<br />';
echo 'Cylinder perimeter: ' . $cylinder->calculatePerimeter() . '<br />';
$cylinder->resize($rate);
echo 'Cylinder area 2: ' . $cylinder->calculateArea() . '<br />';
echo 'Cylinder perimeter 2: ' . $cylinder->calculatePerimeter() . '<br />';
$rectangle = new Rectangle('Rectangle 01', 3 , 4);
echo 'Rectangle area: ' . $rectangle->calculateArea() . '<br />';
echo 'Rectangle perimeter: ' . $rectangle->calculatePerimeter() . '<br />';
$rectangle->resize($rate);
echo 'Rectangle area 2: ' . $rectangle->calculateArea() . '<br />';
echo 'Rectangle perimeter 2: ' . $rectangle->calculatePerimeter() . '<br />';
$square = new Square('Square 01', 4 , 4, 4);
echo 'Square area: ' . $square->calculateArea() . '<br />';
echo 'Square perimeter: ' . $square->calculatePerimeter() . '<br />';
$square->resize($rate);<file_sep><?php
interface Resizeable {
public function resize($doublePercent);
}
<file_sep><?php
namespace App\Http\Controllers;
use App\Http\Service\NoteService;
use App\Note;
use App\Note_type;
use Illuminate\Http\Request;
class NoteController extends Controller
{
protected $noteService;
public function __construct(NoteService $noteService)
{
$this->noteService = $noteService;
}
public function index()
{
$notes = $this->noteService->getAll();
return view('notes.list', compact('notes'));
}
public function create()
{
$notes_type = Note_type::all();
return view('notes.create', compact('notes_type'));
}
public function store(Request $request)
{
$note = new Note();
$note->title = $request->input('title');
$note->content = $request->input('content');
$note->type = $request->input('note_type');
$note->save();
return redirect()->route('notes.index');
}
public function edit($id)
{
$note = Note::findOrFail($id);
$notes_type = Note_type::all();
return view('notes.edit', compact('note', 'notes_type'));
}
public function update(Request $request, $id)
{
$note = Note::findOrFail($id);
$note->title = $request->input('title');
$note->content = $request->input('content');
$note->type = $request->input('note_type');
$note->save();
return redirect()->route('notes.index');
}
public function destroy($id)
{
$note = Note::findOrFail($id);
$note->delete();
return redirect()->route('notes.index');
}
}
<file_sep><?php
include_once '../DBconnect.php';
include_once '../Student.php';
include_once '../DBstudent.php';
$studentDB = new DBstudent();
$students = $studentDB->getAll();
?>
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>List Student</title>
</head>
<body>
<table border="1" cellspacing="0">
<tr>
<th>Stt</th>
<th>Name</th>
<th>Email</th>
<th></th>
</tr>
<?php foreach ($students as $key=>$student): ?>
<tr>
<td><?php echo ++$key; ?></td>
<td><?php echo $student->getName(); ?></td>
<td><?php echo $student->getEmail(); ?></td>
<td>
<span><a href="update.php?id=<?php echo $student->getId(); ?>">Update</a></span>
<span><a href="del.php?id=<?php echo $student->getId(); ?>">Delete</a></span>
</td>
</tr>
<?php endforeach; ?>
</table>
<a href="create.php" type="button">Add new student</a>
</body>
</html>
<file_sep><?php
namespace App\Http\Controllers;
use App\Http\Service\Note_typeService;
use App\Note_type;
use Illuminate\Http\Request;
class Note_typeController extends Controller
{
protected $note_typeService;
public function __construct(Note_typeService $note_typeService)
{
$this->note_typeService = $note_typeService;
}
public function index()
{
$notes_type = $this->note_typeService->getAll();
// dd($notes_type);
return view('notes_type.list', compact('notes_type'));
}
public function create()
{
return view('notes_type.create');
}
public function store(Request $request)
{
$note_type = new Note_type();
$note_type->name = $request->input('name');
$note_type->description = $request->input('description');
$note_type->save();
return redirect()->route('notes_type.index');
}
public function edit($id)
{
$note_type = Note_type::findOrFail($id);
return view('notes_type.edit', compact('note_type'));
}
public function update(Request $request, $id)
{
$note_type = Note_type::findOrFail($id);
$note_type->name = $request->input('name');
$note_type->description = $request->input('description');
$note_type->save();
return redirect()->route('notes_type.index');
}
public function destroy($id)
{
$note_type = Note_type::findOrFail($id);
$note_type->delete();
        return redirect()->route('notes_type.index');
}
}
<file_sep><?php
include 'Point2D.php';
include 'Point3D.php';
$point2D = new Point2D(4,6);
$point3D = new Point3D(3,5,7);
echo $point2D->toString()."<br>";
echo $point3D->toString();
//var_dump($point3D->toString());
<file_sep><?php
namespace App\Http\Service\Impl;
use App\Customer;
use App\Http\Repository\Eloquent\CustomerRepositoryEloquent;
use App\Http\Service\CustomerServiceInterface;
class CustomerService implements CustomerServiceInterface
{
protected $customerRepository;
public function __construct(CustomerRepositoryEloquent $customerRepository)
{
$this->customerRepository = $customerRepository;
}
public function getAll()
{
return $this->customerRepository->getAll();
}
public function store($request)
{
$customer = new Customer();
$customer->name = $request->input('name');
$customer->email = $request->input('email');
$customer->dob = $request->input('dob');
$this->customerRepository->store($customer);
}
public function findByID($id){
return $this->customerRepository->findByID($id);
}
public function update($request, $id)
{
$customer = $this->customerRepository->findByID($id);
$customer->name = $request->input('name');
$customer->email = $request->input('email');
        $customer->dob = $request->input('dob');
        $this->customerRepository->update($customer);
    }
public function delete($id)
{
$customer = $this->customerRepository->findByID($id);
$this->customerRepository->delete($customer);
}
}
<file_sep><?php
function find($keyword, $file){
$characters = json_decode(file_get_contents($file), true);
for ($i = 0; $i < count($characters); $i++) {
if ($keyword === $characters[$i]['username']) {
return $i;
}
}
}
function delete($index, $arr, $file){
    // remove the matched entry and persist the remaining records back to the file
    unset($arr[$index]);
    file_put_contents($file, json_encode(array_values($arr)));
}
function readJson($file){
$outputData = json_decode(file_get_contents($file), true);
return $outputData;
}
function saveFile($user, $file){
$data = readJson($file);
array_push($data,$user);
$data = json_encode($data,1);
file_put_contents($file, $data);
}<file_sep><!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Document</title>
</head>
<body>
<form action="<?php echo htmlspecialchars($_SERVER["PHP_SELF"]); ?>" method="post">
<input type="text" name="firstNumber"/>x<br>
<select name="action">
<option value="+">Cong</option>
<option value="-">Tru</option>
<option value="*">Nhan</option>
<option value="/">Chia</option>
</select><br>
<input type="text" name="secondNumber"/>y<br>
<input type="submit" id="submit" value="Calculator"/>
</form>
<?php
if ($_SERVER["REQUEST_METHOD"] == "POST") {
$x = $_POST["firstNumber"];
$y = $_POST["secondNumber"];
$action = $_POST["action"];
if ($y == 0) {
echo "Xảy ra ngoại lệ";
} else if ($x==0 && $y == 0) {
echo "Xảy ra ngoại lệ";
} else {
switch ($action) {
case "+":
$result = $x + $y;
break;
case "-":
$result = $x - $y;
break;
case "*":
$result = $x * $y;
break;
case "/":
$result = $x / $y;
break;
}
echo $x . $action . $y . "=" . $result;
}
}
?>
</body>
</html>
<file_sep><?php
include_once 'DB/DBConnect.php';
include_once 'DB/StudentDB.php';
//include_once 'Student.php';
$studentDB = new StudentDB();
//$allStudent = $students->getAll();
if ($_SERVER["REQUEST_METHOD"] == "POST") {
$username = $_POST["username"];
$password = $_POST["password"];
// if ($students->checkUsername($username) == 1 && $students->checkPassword($password) > 0) {
// $obj = $students->selectUser($username);
// foreach ($obj as $item) {
// if ($username == $item->getName() && $password == $item->getPassword()) {
// if ($username == 'admin') {
// header("location: CRUD/admin.php");
// } else {
// header("location: CRUD/user.php?username=$username");
// }
// } else {
// echo "username hoặc password không đúng";
// }
// }
// }
if ($studentDB->checkUsernamePassword($username, $password)) {
if ($username == 'admin') {
header("location: CRUD/admin.php");
} else {
header("location: CRUD/user.php?username=$username");
}
} else {
echo "username hoặc password không đúng";
}
}
?>
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Document</title>
</head>
<body>
<form method="post">
<input placeholder="username" name="username"><br>
<input placeholder="<PASSWORD>" name="password"><br>
<button>Login</button>
<br>
</form>
<a href="CRUD/create.php">Register</a>
</body>
</html><file_sep><!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Document</title>
</head>
<body>
<h3>Convert number to text</h3>
<form action="<?php echo htmlspecialchars($_SERVER["PHP_SELF"]); ?>" method="post">
<input type="text" name="search" placeholder="Write"/>
    <input type="submit" id="submit" value="Convert"/>
</form>
<?php
if ($_SERVER["REQUEST_METHOD"] == "POST") {
$input = $_POST["search"];
if ($input < 10) {
switch ($input) {
case 1:
echo 'one';
break;
case 2:
echo 'two';
break;
case 3:
echo 'three';
break;
case 4:
echo 'four';
break;
case 5:
echo 'five';
break;
case 6:
echo 'six';
break;
case 7:
echo 'seven';
break;
case 8:
echo 'eight';
break;
case 9:
                echo 'nine';
break;
}
}
}
?>
</body>
</html><file_sep><?php
function selectionSort()
{
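    // Note: despite the name, this function does not sort anything; it only builds an
    // already-ordered array of the integers 0..99999 (e.g. as test data for a sort).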
$arr = array();
for ($i = 0; $i < 100000; $i++){
array_push($arr, $i);
}
return $arr;
}
<file_sep><?php
namespace App\Http\Service;
use App\Http\Repository\EmployeeRepository;
class EmployeeService
{
protected $employeeRepository;
public function __construct(EmployeeRepository $employeeRepository)
{
$this->employeeRepository = $employeeRepository;
}
public function getAll()
{
return $this->employeeRepository->getAll();
}
}
<file_sep><!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Document</title>
</head>
<body>
<form action="<?php echo htmlspecialchars($_SERVER["PHP_SELF"]);?>" method="post">
<input type="text" name="productName"/>Product Name<br>
<input type="text" name="listPrice"/>List Price<br>
<input type="text" name="discount"/>Discount percent (%)<br>
<input type = "submit" id = "submit" value = "Calculator Discount"/>
</form>
<?php
if ($_SERVER["REQUEST_METHOD"] == "POST") {
$productName = $_POST["productName"];
$listPrice = $_POST["listPrice"];
$discount = $_POST["discount"];
$calculatorDiscount = $listPrice * $discount / 100;
$newPrice = $listPrice - $calculatorDiscount;
echo "Product Name: ".$productName."<br>"."Price: ".$listPrice."<br>"."Discount Percent: ".$discount."<br>"."Discount Amount: "."$calculatorDiscount"."<br>"."Discount Price: ".$newPrice;
}
?>
</body>
</html><file_sep><?php
include_once 'Circle.php';
include_once 'Cylinder.php';
$circle = new Circle(5, 'blue');
echo $circle->toString().'<br>'.'<br>';
$cylinder = new Cylinder(5, 'green', 10);
echo $cylinder->toString();
<file_sep><?php
function minimum($arr)
{
    $min = $arr[0];
    $index = 0;
    for ($i = 1; $i < count($arr); $i++) {
        if ($min > $arr[$i]) {
            $min = $arr[$i];
            $index = $i;
        }
    }
return $index;
}
$arr = [3, 4, 5, 3, 6, 1, 6];
// minimum($arr);
echo minimum($arr);
<file_sep><?php
namespace App\Http\Repository;
use App\Note_type;
class Note_typeRepository
{
public function getAll()
{
return Note_type::all();
}
}
<file_sep><?php
//include_once '../AbstractClass/Animal.php';
//
//class Dog extends Animal{
// public function makeSound()
// {
// return "Dog: woof woof";
// }
//
//}
//include_once './AbstractClass/Animal.php';
//
//class Chicken extends Animal{
// public function makeSound()
// {
// return "Chicken: cluck-cluck";
// }
// public function howToEat()
// {
// return "could be fried";
// }
//
//}<file_sep><?php
class DBStudent
{
public $conn;
public function __construct()
{
$db = new DBconnect();
$this->conn = $db->connect();
}
public function getAll()
{
$sql = "SELECT * FROM `Student`";
$mysql = $this -> conn -> query($sql);
$data = $mysql -> fetchAll();
return $data;
}
}
<file_sep><?php
function maximum($arr)
{
    $max = $arr[0][0];
for ($i = 0; $i < count($arr); $i++) {
for ($j = 0; $j < count($arr[$i]); $j++) {
if ($max < $arr[$i][$j]) {
$max = $arr[$i][$j];
}
}
}
return $max;
}
$arr = [[3, 4, 5], [6, 7, 8]];
echo maximum($arr);
<file_sep><?php
include_once 'function.php';
if ($_SERVER['REQUEST_METHOD'] == 'POST') {
$username = $_POST['username'];
$pass = $_POST['<PASSWORD>'];
$user = [
'username' => $username,
'pass' => $pass
];
$file = 'data.json';
if (!empty($username) && !empty($pass)) {
saveFile($user, $file);
}
}
if ($_SERVER['REQUEST_METHOD'] == 'GET') {
$delete = $_GET['deleteUsername'];
$file = 'data.json';
$arr = json_decode(file_get_contents($file), true);
if (!empty($delete)){
delete(find($delete, $file),$arr, $file);
}
}<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class student_model extends Model
{
protected $table ='table_student';
}
<file_sep><?php
include_once 'DBConnect.php';
include_once 'index.php';
include_once '../Student.php';
class StudentDB
{
public $conn;
public function __construct()
{
$db = new DBConnect();
$this->conn = $db->connect();
}
public function getAll()
{
$sql = "SELECT * FROM Student";
$mysql = $this->conn->query($sql);
$mysql->setFetchMode(PDO::FETCH_ASSOC);
$data = $mysql->fetchAll();
$arr = [];
foreach ($data as $item){
$student = new Student($item['id'], $item['name']);
array_push($arr, $student);
}
return $arr;
}
public function create($name)
{
$sql = "INSERT INTO Student(`name`) VALUES ('$name')";
$mysql = $this->conn->prepare($sql);
$mysql->execute();
}
public function update($name, $id)
{
$sql = "UPDATE `Student` SET `name`='$name' WHERE id = '$id'";
$mysql = $this->conn->query($sql);
}
public function delete($id)
{
$sql = "DELETE FROM `Student` WHERE id = '$id'";
$mysql = $this->conn->query($sql);
}
}<file_sep><?php
namespace App\Http\Controllers;
use App\City;
use App\Customer;
use App\Http\Service\Impl\CustomerService;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Session;
use http\Env\Response;
class CustomerController extends Controller
{
Protected $CustomerService;
public function __construct(CustomerService $CustomerService)
{
$this->CustomerService = $CustomerService;
}
public function index()
{
$customers = $this->CustomerService->getAll();
$cities = City::all();
return view('customers.list', compact('customers', 'cities'));
}
public function create()
{
$cities = City::all();
return view('customers.create', compact('cities'));
}
public function store(Request $request)
{
$customer = new Customer();
$customer->name = $request->input('name');
$customer->email = $request->input('email');
$customer->dob = $request->input('dob');
$customer->city_id = $request->input('city_id');
$customer->save();
return redirect()->route('customers.index');
}
public function edit($id)
{
$customer = Customer::findOrFail($id);
$cities = City::all();
return view('customers.edit', compact('customer', 'cities'));
}
public function update(Request $request, $id)
{
$customer = Customer::findOrFail($id);
$customer->name = $request->input('name');
$customer->email = $request->input('email');
$customer->dob = $request->input('dob');
$customer->city_id = $request->input('city_id');
$customer->save();
        Session::flash('success', 'Customer updated successfully');
return redirect()->route('customers.index');
}
public function destroy($id)
{
$this->CustomerService->delete($id);
        Session::flash('success', 'Customer deleted successfully');
return redirect()->route('customers.index');
}
public function filterByCity(Request $request)
{
$idCity = $request->input('city_id');
$cityFilter = City::findOrFail($idCity);
$customers = Customer::where('city_id', $cityFilter->id)->get();
$totalCustomerFilter = count($customers);
$cities = City::all();
return view('customers.list', compact('customers', 'cities', 'totalCustomerFilter', 'cityFilter'));
}
public function search(Request $request)
{
// $keyword = $request->input('keyword');
// if (!$keyword) {
// return redirect()->route('customers.index');
// }
//
// $customers = Customer::where('name', 'LIKE', '%' . $keyword . '%')->paginate(5);
// dd($customers);
//
// $cities = City::all();
//// dd($keyword);
// return view('customers.list', compact('customers', 'cities'));
$keyword = $request->input('keyword');
if (!$keyword) {
return redirect()->route('customers.index');
}
$customers = Customer::where('name', 'LIKE', '%' . $keyword . '%')->paginate(5);
$cities = City::all();
return view('customers.list', compact('customers', 'cities'));
}
}
<file_sep><?php
class DBconnect
{
public $dsn;
public $username;
public $password;
public function __construct()
{
$this->dsn = 'mysql:host=localhost;dbname=student-manager';
$this->username = 'root';
$this->password = '<PASSWORD>';
}
public function connect()
{
$conn = null;
try{
$conn = new PDO($this->dsn, $this->username, $this->password,array(PDO::MYSQL_ATTR_INIT_COMMAND => "SET NAMES utf8"));
} catch (PDOException $e){
echo $e -> getMessage();
}
return $conn;
}
}
<file_sep><?php
namespace App\Http\Repository;
use App\Employee;
class EmployeeRepository
{
public function getAll()
{
return Employee::all();
}
}
<file_sep><?php
class QuadraticEquation
{
private $a;
private $b;
private $c;
public function __construct($a, $b, $c)
{
$this->a = $a;
$this->b = $b;
$this->c = $c;
}
public function getA()
{
return $this->a;
}
public function getB()
{
return $this->b;
}
public function getC()
{
return $this->c;
}
public function getDiscriminant(){
return pow($this->b,2) - 4*$this->a*$this->c;
}
public function getRoot1(){
        return (-$this->b + pow((pow($this->b,2) - 4*$this->a*$this->c), 0.5))/(2*$this->a);
}
public function getRoot2(){
return (-$this->b - pow((pow($this->b,2) - 4*$this->a*$this->c), 0.5))/(2*$this->a);
}
}<file_sep><?php
include_once "DBconnect.php";
include_once "Student.php";
class DBstudent
{
public $conn;
public function __construct()
{
$db = new DBconnect();
$this->conn = $db->connect();
}
public function getAll()
{
$sql = 'SELECT * FROM students';
$stmt = $this->conn->prepare($sql);
$stmt->execute();
$stmt->setFetchMode(PDO::FETCH_ASSOC);
$data = $stmt->fetchAll();
$arr = [];
foreach ($data as $item) {
$student = new Student($item['name'], $item['email']);
$student->id = $item['id'];
array_push($arr, $student);
}
return $arr;
}
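    // The helpers below interpolate their arguments directly into the SQL strings; with
    // untrusted input, bound parameters (e.g. $stmt->execute([$name, $email])) would be
    // the safer pattern.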
public function create($obj)
{
$name = $obj->getName();
$email = $obj->getEmail();
$sql = "INSERT INTO students(`name`,`email`) VALUE ('$name','$email')";
$stmt = $this->conn->prepare($sql);
$stmt->execute();
}
public function del($id)
{
$sql = "DELETE FROM `students` WHERE id='$id'";
$stmt = $this->conn->prepare($sql);
$stmt->execute();
}
public function update($id, $name, $email)
{
$sql = "UPDATE `students` SET `name`='$name',`email`='$email' WHERE id='$id'";
$stmt = $this->conn->prepare($sql);
$stmt->execute();
}
public function finById($id)
{
$sql = 'SELECT * FROM students where id=' . $id;
$stmt = $this->conn->prepare($sql);
$stmt->execute();
$stmt->setFetchMode(PDO::FETCH_ASSOC);
$data = $stmt->fetch();
if ($data) {
$student = new Student($data['name'], $data['email']);
$student->id = $data['id'];
return $student;
} else {
            return 'User does not exist.';
}
}
}
<file_sep><?php
include_once 'DBConnect.php';
include_once '../Student.php';
class StudentDB
{
public $conn;
public function __construct()
{
$db = new DBConnect();
$this->conn = $db->connect();
}
public function getAll()
{
$sql = "SELECT * FROM Student";
$mysql = $this->conn->query($sql);
$mysql->setFetchMode(PDO::FETCH_ASSOC);
$data = $mysql->fetchAll();
$arr = [];
foreach ($data as $item) {
$student = new Student($item['id'], $item['username'], $item['password'], $item['email']);
array_push($arr, $student);
}
return $arr;
}
public function create($username, $password, $email)
{
$sql = "INSERT INTO Student(`username`,`password`,`email`) VALUES ('$username','$password','$email')";
$mysql = $this->conn->prepare($sql);
$mysql->execute();
}
public function update($username, $password, $email, $id)
{
$sql = "UPDATE `Student` SET `username`='$username', `password`='$<PASSWORD>', `email`='$email' WHERE id = '$id'";
$mysql = $this->conn->query($sql);
}
public function delete($id)
{
$sql = "DELETE FROM `Student` WHERE id = '$id'";
$mysql = $this->conn->query($sql);
}
public function checkUsername($username)
{
$sql = "SELECT * FROM Student WHERE `username`='$username'";
$mysql = $this->conn->prepare($sql);
$mysql->execute();
$result = $mysql->rowCount();
return $result;
}
public function checkPassword($password)
{
$sql = "SELECT * FROM Student WHERE `password`='$password'";
$mysql = $this->conn->prepare($sql);
$mysql->execute();
$result = $mysql->rowCount();
return $result;
}
public function checkUsernamePassword($username, $password)
{
$sql = "SELECT * FROM Student WHERE `username`='$username'AND `password`='$<PASSWORD>'";
$mysql = $this->conn->prepare($sql);
$mysql->execute();
$result = $mysql->rowCount();
return $result;
}
public function checkEmail($email)
{
$sql = "SELECT * FROM Student WHERE `email`='$email'";
$mysql = $this->conn->prepare($sql);
$mysql->execute();
$result = $mysql->rowCount();
return $result;
}
public function selectUser($username)
{
$sql = "SELECT * FROM Student WHERE `username`='$username'";
$mysql = $this->conn->prepare($sql);
$mysql->execute();
$data = $mysql->fetchAll();
$arr = [];
foreach ($data as $item) {
$student = new Student($item['id'], $item['username'], $item['password'], $item['email']);
array_push($arr, $student);
}
return $arr;
}
} | e5b554d0dbe26ecd8a05973e283eb832c967333d | [
"JavaScript",
"PHP"
] | 60 | PHP | sonhai1088/Module2 | 2ac3db25258ab7d19a3aea71711acf32edcc6505 | 9cd4b8ee4ac92e2226d46e3631de8336e04e5f5f | |
refs/heads/master | <repo_name>qingfeng1202/hadoopDemo<file_sep>/src/main/java/com/hadoop/mapreduce/tq/MyTQ.java
package com.hadoop.mapreduce.tq;
import com.hadoop.mapreduce.MyWordCount;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.StringUtils;
import java.io.IOException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
public class MyTQ {
// 1949-10-01 14:21:02 34c
public static class Tmapper extends Mapper<LongWritable, Text, Tq, IntWritable>{
private final static DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
private final static Tq tkey = new Tq();
private final static IntWritable tvalue = new IntWritable();
@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // split the tab-separated line into the date/time and temperature fields
            String[] words = StringUtils.split(value.toString(), '\t');
            // parse the date part
            LocalDate parse = LocalDate.parse(words[0], dateTimeFormatter);
            tkey.setYear(parse.getYear());
            tkey.setMonth(parse.getMonthValue());
            tkey.setDay(parse.getDayOfMonth());
            // parse the temperature (strip the trailing "c")
int wd = Integer.parseInt(words[1].replace("c", ""));
tkey.setWd(wd);
tvalue.set(wd);
context.write(tkey, tvalue);
}
}
public static class Treducer extends Reducer<Tq, IntWritable, Text, IntWritable>{
private final static Text tkey = new Text();
// IntWritable tval = new IntWritable();
@Override
protected void reduce(Tq key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
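// Within one group (as grouped by TGroupComparator) the values arrive in the order imposed by TsortComparator;
// emit the first record, then the next record that falls on a different day, then stop.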
int flag = 0;
int day = 0;
for (IntWritable val : values){
if(flag == 0){
tkey.set(key.toString());
// tval.set(val.get());
context.write(tkey, val);
flag ++;
day = key.getDay();
}
if(flag != 0 && day != key.getDay()){
tkey.set(key.toString());
context.write(tkey, val);
break;
}
}
}
}
public static void main(String[] args) throws Exception {
// 1. configuration
// create the Configuration
Configuration conf = new Configuration();
conf.set("mapreduce.app-submission.corss-paltform", "true");
conf.set("mapreduce.framework.name", "local");
// create the Job
Job job = Job.getInstance(conf);
// set the job name
job.setJobName("tq");
// set the job's main class
job.setJarByClass(MyWordCount.class);
// 2. set the input and output paths
Path path = new Path("/tq/input");
FileInputFormat.addInputPath(job, path);
Path outPath = new Path("/tq/output");
if(outPath.getFileSystem(conf).exists(outPath)){
outPath.getFileSystem(conf).delete(outPath, true);
}
FileOutputFormat.setOutputPath(job, outPath);
// 3. set the Mapper
job.setMapperClass(Tmapper.class);
job.setMapOutputKeyClass(Tq.class);
job.setMapOutputValueClass(IntWritable.class);
// 4. custom sort comparator
job.setSortComparatorClass(TsortComparator.class);
// 5. custom partitioner
job.setPartitionerClass(TPartioner.class);
// 6. custom grouping comparator
job.setGroupingComparatorClass(TGroupComparator.class);
// 7. set the number of reduce tasks
job.setNumReduceTasks(2);
// 8. set the Reducer
job.setReducerClass(Treducer.class);
// 9. submit the job
job.waitForCompletion(true);
}
}
<file_sep>/src/main/java/com/hadoop/mapreduce/fd/MyFD.java
package com.hadoop.mapreduce.fd;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class MyFD {
public static void main(String[] args) throws Exception {
// create the Configuration
Configuration configuration = new Configuration();
configuration.set("mapreduce.app-submission.corss-paltform", "true");
configuration.set("mapreduce.framework.name", "local");
// create the Job
Job job = Job.getInstance(configuration);
// set the job's main class
job.setJarByClass(MyFD.class);
job.setJobName("friend");
// set the input and output paths
Path inPath = new Path("/fd/input");
FileInputFormat.addInputPath(job, inPath);
Path outPath = new Path("/fd/output");
if(outPath.getFileSystem(configuration).exists(outPath)){
outPath.getFileSystem(configuration).delete(outPath, true);
}
FileOutputFormat.setOutputPath(job, outPath);
// set the map-related parameters
job.setMapperClass(FMapper.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
// set the reduce-related parameters
job.setReducerClass(FReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
job.waitForCompletion(true);
}
}
| 25482d5b5a7b6f4e4a2a4549d1bd9ee3a35193ec | [
"Java"
] | 2 | Java | qingfeng1202/hadoopDemo | 4c29815972585401425287d91ad141242731c7ad | 801f1b6c1f39aed9581282a2b744af7dcd437b50 | |
refs/heads/master | <file_sep>package com.lesson_three.eiko.book_listing;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;
/**
* Created by eiko on 11/26/2016.
*/
public class List_Adapter extends ArrayAdapter<List_item>{
public List_Adapter(Context contex, List<List_item> listitem){
super(contex, 0, listitem);
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View listItemView = convertView;
if (listItemView == null) {
listItemView = LayoutInflater.from(getContext()).inflate(
R.layout.list_items, parent, false);
List_item currentItem = getItem(position);
TextView txtTitle = (TextView)listItemView.findViewById(
R.id.txt_title);
txtTitle.setText(currentItem.getmTitle());
TextView txtAuthor = (TextView) listItemView.findViewById(
R.id.txt_author);
ArrayList<String> bookAuthorList = currentItem.getmAuthor();
StringBuilder stringbuilderAuthor = new StringBuilder();
for (int i = 0; i < bookAuthorList.size(); i++) {
stringbuilderAuthor.append(bookAuthorList.get(i));
if (bookAuthorList.size()>0 && i< bookAuthorList.size()-1){
stringbuilderAuthor.append(", ");
}
}
txtAuthor.setText(stringbuilderAuthor.toString());
TextView txtInfo = (TextView)listItemView.findViewById(
R.id.txt_bookinfo);
txtInfo.setText(currentItem.getmInfomation());
}
return listItemView;
}
}
| aab8707817ddf1b7029dc68531ddf18ec600b192 | [
"Java"
] | 1 | Java | econMinnie/Book_listing | a5b154472bb9fba59b300a527d93acfd4c9c56a0 | de5f6f5ed574aeccf1d5501523780a827f973ced | |
refs/heads/master | <repo_name>helio-frota/unifiedpush-registration-client<file_sep>/lib/unifiedpush-registration-client.js
'use strict';
/**
* @module unified-push-registration-client
*/
const registry = require('./registry');
/**
Creates a new UnifiedPush Registration Client
@param {string} baseUrl - The baseurl for the AeroGear UnifiedPush server - ex: http://localhost:8080/ag-push,
@returns {Promise} A promise that will resolve with the client object.
@instance
@example
const registrationClient = require('unifiedpush-registration-client')
const baseUrl: 'http://127.0.0.1:8080/ag-push',
registrationClient(baseUrl)
.then((client) => {
client....
...
...
})
*/
function unifiedPushRegistrationClient (baseUrl) {
const client = {
registry: {}
};
client.baseUrl = baseUrl;
for (let func in registry) {
client.registry[func] = registry[func](client);
}
return Promise.resolve(client);
}
module.exports = unifiedPushRegistrationClient;
<file_sep>/scripts/start-server.sh
#!/bin/bash
. scripts/version.sh
function waitForServer {
# Give the server some time to start up. Look for a well-known
# bit of text in the log file. Try at most 50 times before giving up.
C=50
while [ $C -gt 0 ]
do
grep "WildFly Full ${WILDFLYVERSION} (WildFly Core 2.0.10.Final) started" wildfly.log
if [ $? -eq 0 ]; then
echo "Server started."
C=0
else
echo -n "."
C=$(( $C - 1 ))
fi
sleep 1
done
}
function waitForUPS {
# Give the server some time to start up. Look for a well-known
# bit of text in the log file. Try at most 50 times before giving up.
C=50
while [ $C -gt 0 ]
do
grep "Deployed \"ag-push.war\" (runtime-name : \"ag-push.war\")" wildfly.log
if [ $? -eq 0 ]; then
echo "Server started."
C=0
else
echo -n "."
C=$(( $C - 1 ))
fi
sleep 1
done
}
WF="${WILDFLY}.tar.gz"
WILDFLY_URL="http://downloads.jboss.org/wildfly/${WILDFLYVERSION}/${WF}"
# Download wildfly server if we don't already have it
if [ ! -e $WILDFLY ]
then
wget $WILDFLY_URL
tar xzf $WF
rm -f $WF
fi
# Download the keycloak wildfly adapter
WF_KC_ADAPTER="${KCWILDFLYADPATER}.tar.gz"
WF_KC_ADAPTER_URL="http://downloads.jboss.org/keycloak/${KCVERSION}/adapters/keycloak-oidc/${WF_KC_ADAPTER}"
wget $WF_KC_ADAPTER_URL
tar xzf $WF_KC_ADAPTER -C ./$WILDFLY
rm -f $WF_KC_ADAPTER
#Download the ups-db file
UPS_DB_FILE="unifiedpush-h2-ds.xml"
UPS_DB_FILE_URL="https://github.com/lholmquist/aerogear-unified-push-server/releases/download/0.0.1/${UPS_DB_FILE}"
wget $UPS_DB_FILE_URL
mv $UPS_DB_FILE ./$WILDFLY/standalone/deployments
#Download the ups-jms-setup
UPS_JMS_CLI="jms-setup-wildfly.cli"
UPS_JMS_CLI_URL="https://github.com/lholmquist/aerogear-unified-push-server/releases/download/0.0.1/${UPS_JMS_CLI}"
wget $UPS_JMS_CLI_URL
mv $UPS_JMS_CLI ./$WILDFLY/bin
#Download the ag-push.war
UPS_WAR="ag-push.war"
UPS_WAR_URL="https://github.com/lholmquist/aerogear-unified-push-server/releases/download/0.0.1/${UPS_WAR}"
wget $UPS_WAR_URL
## Start the wildfly server on port 8082
$WILDFLY/bin/standalone.sh -c standalone-full.xml -b 0.0.0.0 -Djboss.socket.binding.port-offset=2 > wildfly.log 2>&1 &
waitForServer
## Once started run the jms cli script
$WILDFLY/bin/jboss-cli.sh --controller=localhost:9992 --file=$WILDFLY/bin/$UPS_JMS_CLI
## then run the KC related cli commands
$WILDFLY/bin/jboss-cli.sh -c --controller=localhost:9992 --command="/system-property=ups.auth.server.url:add(value=http://localhost:8080/auth)"
$WILDFLY/bin/jboss-cli.sh -c --controller=localhost:9992 --command="/system-property=ups.realm.name:add(value=master)"
## Then add the UPS war
mv $UPS_WAR $WILDFLY/standalone/deployments
waitForUPS
<file_sep>/lib/registry.js
'use strict';
const request = require('request');
const fs = require('fs');
/**
* @module registry
*/
module.exports = {
registerDevice: registerDevice,
unregisterDevice: unregisterDevice,
importer: importer
};
/**
A function to register a device to the UnifiedPush Server
@param {object} settings - An settings object
@param {string} settings.variantId - The id of the variant - used for authentication and variant lookup
@param {string} settings.secret - The variant secret - used for authentication
**Device Options**
These are metadata for each Device. The Device Token is the only required thing
@param {object} deviceOptions - a deviceOptions object
@param {string} deviceOptions.deviceToken - unique string to identify an Installation with its PushNetwork
@param {string} [deviceOptions.alias] - string to map the Installation to an actual user.
@param {boolean} [deviceOptions.enabled] - Flag if the actual client installation is enabled (default) or not.
@param {string} [deviceOptions.platform] - the name of the platform. FOR ADMIN UI ONLY - Helps with setting up Routes
@param {string} [deviceOptions.deviceType] - the type of the registered device
@param {string} [deviceOptions.operatingSystem] - the name of the Operating System.
@param {string} [deviceOptions.osVersion] - the version string of the mobile OS.
@param {Array} [deviceOptions.categories] - set of all categories the client is in
@returns {Promise} A promise that will resolve with the device options registered
@example
registrationClient(baseUrl)
.then((client) => {
client.registry.registerDevice(settings, deviceOptions)
.then((device) => {
console.log(device) // {...}
})
})
*/
function registerDevice (client) {
return function registerDevice (settings, deviceOptions) {
return new Promise((resolve, reject) => {
settings = settings || {};
const req = {
url: `${client.baseUrl}/rest/registry/device/`,
method: 'POST',
body: deviceOptions,
json: true,
auth: {
user: settings.variantId,
pass: settings.secret
}
};
request(req, (err, response, body) => {
if (err) {
return reject(err);
}
if (response.statusCode !== 200) {
return reject(response.statusMessage);
}
return resolve(body);
});
});
};
}
/**
A function to unregister a device to the UnifiedPush Server
@param {object} settings - An settings object
@param {string} settings.variantId - The id of the variant - used for authentication and variant lookup
@param {string} settings.secret - The variant secret - used for authentication
@param {string} settings.deviceToken - unique string to identify an Installation with its PushNetwork
@returns {Promise} A promise that will resolve
@example
registrationClient(baseUrl)
.then((client) => {
client.registry.unregisterDevice(settings)
.then(() => {
console.log('success')
})
})
*/
function unregisterDevice (client) {
return function unregisterDevice (settings) {
return new Promise((resolve, reject) => {
settings = settings || {};
const req = {
url: `${client.baseUrl}/rest/registry/device/${settings.deviceToken}`,
method: 'DELETE',
json: true,
auth: {
user: settings.variantId,
pass: settings.secret
}
};
request(req, (err, response, body) => {
if (err) {
return reject(err);
}
if (response.statusCode !== 204) {
return reject(body);
}
return resolve(body);
});
});
};
}
/**
A function for uploading a JSON file to allow massive device registration (aka import)
@param {object} settings - An settings object
@param {string} settings.variantId - The id of the variant - used for authentication and variant lookup
@param {string} settings.secret - The variant secret - used for authentication
@param {Array|Stream|String} devices - This is the list of Devices to upload.
@returns {Promise} A promise that will resolve
@example
registrationClient(baseUrl)
.then((client) => {
client.registry.importer(settings, devices)
.then(() => {
console.log('success')
})
})
*/
function importer (client) {
return function importer (settings, devices) {
return new Promise((resolve, reject) => {
settings = settings || {};
const req = {
url: `${client.baseUrl}/rest/registry/device/importer`,
method: 'POST',
json: true,
auth: {
user: settings.variantId,
pass: settings.secret
}
};
const formData = {};
if (typeof devices === 'string') {
formData.file = fs.createReadStream(devices); // a string is treated as the path to a JSON file of devices
} else if (devices instanceof require('stream').Readable) {
formData.file = devices;
} else {
formData.file = JSON.stringify(devices);
}
req.formData = formData;
request(req, (err, response, body) => {
if (err) {
return reject(err);
}
if (response.statusCode !== 200) {
return reject(body);
}
return resolve(body);
});
});
};
}
<file_sep>/scripts/version.sh
#!/bin/bash
export VERSION="0.1.0"
export WILDFLYVERSION="10.0.0.Final"
export WILDFLY="wildfly-${WILDFLYVERSION}"
export KCVERSION="1.9.4.Final"
export KEYCLOAK="keycloak-${KCVERSION}"
export KCWILDFLYADPATER="keycloak-wildfly-adapter-dist-${KCVERSION}"
<file_sep>/scripts/stop-server.sh
#!/bin/bash
. scripts/version.sh
${WILDFLY}/bin/jboss-cli.sh --connect --controller=localhost:9992 command=:shutdown
<file_sep>/index.js
module.exports = require('./lib/unifiedpush-registration-client');
<file_sep>/example.js
'use strict';
const registrationClient = require('./');
const baseUrl = 'http://localhost:8082/ag-push';
registrationClient(baseUrl).then((client) => {
const settings = {
variantId: 'c82e79c0-1c26-45dc-b482-8f75ce35510d',
secret: '9a2906ee-95da-45d6-8fdb-643b2d0f9a43'
};
const deviceOptions = {
deviceToken: '<KEY>:<KEY>',
alias: 'android node'
};
return client.registry.registerDevice(settings, deviceOptions).then((deviceRegistration) => {
console.log(deviceRegistration);
});
}).catch((err) => {
console.log('error', err);
});
<file_sep>/test/unit/ups-registration-client-test.js
'use strict';
const test = require('tape');
const adminClient = require('../../');
test('test client should return a promise with the client object', (t) => {
let baseUrl = 'http://127.0.0.1:8080/ag-push';
const upsClient = adminClient(baseUrl);
t.equal(upsClient instanceof Promise, true, 'should return a Promise');
upsClient.then((client) => {
t.equal(typeof client.baseUrl, 'string', 'client should contain a baseUrl String');
t.equal(client.baseUrl, 'http://127.0.0.1:8080/ag-push', 'client should have a base url property');
t.end();
});
});
<file_sep>/README.md
# Unified Push Registration Client
[](https://coveralls.io/github/bucharest-gold/unifiedpush-registration-client?branch=master)
[](https://travis-ci.org/bucharest-gold/unifiedpush-registration-client)
[](https://snyk.io/test/npm/unifiedpush-registration-client)
[](https://david-dm.org/bucharest-gold/unifiedpush-registration-client)
[](https://npmjs.org/package/unifiedpush-registration-client)
A client for registering Devices to the AeroGear UnifiedPush - https://aerogear.org/docs/specs/aerogear-unifiedpush-rest/#home
| | Project Info |
| --------------- | ------------- |
| License: | Apache-2.0 |
| Build: | make |
| Documentation: | http://bucharest-gold.github.io/unifiedpush-registration-client/ |
| Issue tracker: | https://github.com/bucharest-gold/unifiedpush-registration-client/issues |
| Engines: | Node.js 4.x, 5.x, 6.x
## Installation
`npm install unifiedpush-registration-client -S`
## Usage
'use strict';
const registrationClient = require('unifiedpush-registration-client');
const baseUrl = 'http://127.0.0.1:8080/ag-push';
const settings = {
variantId: 'SOME_VARIANT_ID',
secret: 'VARIANT_SECRET'
};
const deviceOptions = {
deviceToken: '<PASSWORD>',
alias: 'thing'
}
registrationClient(baseUrl)
.then((client) => {
return client.registry.registerDevice(deviceOptions)
.then((deviceRegistration) => {
console.log('deviceRegistration', deviceRegistration);
});
})
.catch((err) => {
console.log('Error', err);
});
### Importer
There is also an `importer` function that allows mass device registration.
Along with the settings object, you also need to pass it a list of devices.
This list of devices must be an array and can be passed in as a .json file or a JSON object
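A minimal sketch, assuming `baseUrl` and `settings` are set up as in the Usage section above (the device entries below are placeholder values):

    registrationClient(baseUrl)
      .then((client) => {
        const devices = [
          { deviceToken: 'example-token-1', alias: 'user1' },
          { deviceToken: 'example-token-2', alias: 'user2' }
        ];
        return client.registry.importer(settings, devices);
      })
      .then(() => {
        console.log('import finished');
      })
      .catch((err) => {
        console.log('Error', err);
      });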
## Contributing
Please read the [contributing guide](./CONTRIBUTING.md)
| 6238655937583a3c50e24f8f9f1ef214e8a41a36 | [
"JavaScript",
"Markdown",
"Shell"
] | 9 | JavaScript | helio-frota/unifiedpush-registration-client | abd9208ea80dd49f750d8fdb8f169d7f2d3e43db | cf9e336f3b9ee8693e231470690c152322f83643 | |
refs/heads/master | <repo_name>jpadams/puppet-module-rootless<file_sep>/lib/facter/user_group.rb
require 'etc'
Facter.add("puppet_user") do
confine :kernel => "Linux"
setcode do
Etc.getpwuid(Process.uid).name
end
end
Facter.add("puppet_group") do
confine :kernel => "Linux"
setcode do
Etc.getgrgid(Process.gid).name
end
end
<file_sep>/lib/facter/tmpdir.rb
Facter.add("var_tmp_rw") do
setcode do
File.stat("/var/tmp").writable?
end
end
<file_sep>/README.md
#Rootless
A puppet module providing types and providers to enable puppet to be
functional without running as root.
One goal is to produce near-normal puppet types so that regular puppet
manifests can be written in the regular style.
Another goal is to integrate well with sudo functionality.
##Types
###rootless::file
A file type that overcomes the limitation of creating the file. This
assumes that the file is writable by the user puppet is running as. It
overcomes the problem where the containing directory of the file is not
writable by the user running puppet.
rootless::file {'/etc/httpd/conf.d/app.conf':
content => template('application.apache.erb'),
notify => Service['httpd'],
}
###rootless::tardir
A type that will untar a tarfile that contains a directory. The tarfile must be local
or nfs mounted and the tarball must create a single directory (for purging to work).
Additionally the folder that is being created must be known ahead of time. This is a
base type that can be leveraged to make a more complex behavior.
rootless::tardir { '/opt/app/place/folder':
ensure => absent,
tarfile => '/big/nfs/mount/tar.tar.gz'
}
rootless::tardir { '/opt/app/other_place/folder':
ensure => present,
tarfile => '/big/nfs/mount/tar.tar.gz'
}
###rootless::zipdir
A type that will unzip a zipfile that contains a directory. The zipfile must be local
or nfs mounted and the zip must create a single directory (for purging to work).
Additionally the folder that is being created must be known ahead of time. This is a
base type that can be leveraged to make a more complex behavior.
rootless::zipdir { '/opt/app/place/folder':
ensure => absent,
zipfile => '/big/nfs/mount/zipfile.zip'
}
rootless::zipdir { '/opt/app/other_place/folder':
ensure => present,
zipfile => '/big/nfs/mount/zipfile.zip'
}
###rootless::jdk
A type that will install a jdk based on major and minor version number from
a tarball found somewhere else on the filesystem(often an nfs mount).
rootless::jdk { '/opt/app/place/':
ensure => present,
jdk_major_version => '7',
jdk_update_version => '25',
tarball_directory => '/big/nfs/mount',
}
rootless::jdk { '/opt/app/other_place/':
ensure => present,
tarball_directory => '/big/nfs/mount',
jdk_file => 'my-special-jdk-tarball.tar.gz',
jdk_install_dir => 'jdk1.7.0_09', # the directory created by the untar operation
}
rootless::jdk { '/opt/app/yet_another_place/':
ensure => absent,
jdk_install_dir => 'jdk-1.7.0_12',
}
rootless::jdk { 'my-jdk-install':
ensure => present,
jdk_major_version => '7',
jdk_update_version => '25',
tarball_directory => '/big/nfs/mount',
install_root => '/opt/app/best_place',
}
###rootless::jruby
A type that will install a jruby based on version number from
a tarball found somewhere else on the filesystem(often an nfs mount).
rootless::jruby { '/opt/app/place/':
ensure => present,
jruby_version => '1.7.1',
tarball_directory => '/big/nfs/mount',
}
rootless::jruby { '/opt/app/other_place/':
ensure => present,
tarball_directory => '/big/nfs/mount',
jruby_file => 'my-special-jruby-tarball.tar.gz',
jruby_install_dir => 'jruby-1.7.1', # the directory created by the untar operation
}
rootless::jruby { '/opt/app/yet_another_place/':
ensure => absent,
jruby_install_dir => 'jruby-1.6.6',
}
rootless::jruby { 'my-jruby-install':
ensure => present,
jruby_version => '1.6.6',
tarball_directory => '/big/nfs/mount',
install_root => '/opt/app/best_place',
}
##Facts
###var_tmp_rw
A fact checking if /var/tmp is writable by the puppet user. /var/tmp is a good place to
store persistent temporary files, for the rootless::file type for instance. This will help
us pick a tempdir to create things. /var/tmp is preferable to /tmp since /var/tmp usually
persists after reboot.
###puppet_user
A fact returning the name of the current user
###puppet_group
A fact returning the name of the current group
###puppet_user_home
A fact returning the home_directory of the current user
#Requirements
* puppetlabs-stdlib
<file_sep>/lib/facter/rootless_home.rb
require 'etc'
Facter.add("puppet_user_home") do
confine :kernel => "Linux"
setcode do
Etc.getpwuid(Process.uid).dir
end
end
| 012d67ff32e789fca2fd640e872f3c08a88629f6 | [
"Markdown",
"Ruby"
] | 4 | Ruby | jpadams/puppet-module-rootless | 23129bcc1b5edcfa84afc690d843e1c640667ad6 | 6022bc0547edb755803d72b831dc507a3a2dfbb7 | |
refs/heads/master | <repo_name>ryanemccall/marvel-api<file_sep>/marvel.js
const baseURL = 'https://gateway.marvel.com:443/v1/public/characters';
const apikey = '<KEY>'; //public key
const privateKey = '55d6632e1252b51fefff9dede1372ffde46b85ca'
let url;
//Search Form
const submitBtn = document.querySelector('.submit');
const searchForm = document.querySelector('.search-form');
const searchName = document.querySelector('.search');
//button
searchForm.addEventListener('click', fetchResults);
function fetchResults(e) {
e.preventDefault();
let startsWith = searchName.value
url = baseURL + '?name=' + '&apikey=' + apikey
fetch(url)
.then(function(result) {
return result.json()
}).then(json => {
console.log(json)
})
} | 9cdfdd8f47e17de322e9ebcd74072461d75ffc98 | [
"JavaScript"
] | 1 | JavaScript | ryanemccall/marvel-api | eeb6c9ae23e4a6b9db2e6faff155c10076b0eabf | 14b8c29695ce0b4d5802ecfa2293d9e212ee90be | |
refs/heads/main | <file_sep>
import React from 'react';
import { Jumbotron, Container } from 'reactstrap';
const Header = () => {
return (
<>
<div>
<Jumbotron fluid>
<Container fluid>
<h2 className="display-3">Professional Fleet Washing Services in Bay Area</h2>
<p className="lead">Our Bay Area company has helped many fleet managers keep their drivers happy and achieve top-notch fleet washing results. Just <br></br> imagine what our proven services can do for your fleet.</p>
<h3>Serving Bay Area 7 Days a Week</h3>
<div className="vertical-center">
<button>Centered Button</button>
</div>
</Container>
</Jumbotron>
</div>
<style jsx>{`
p.lead {
text-align: center!important;
}
.display-3 {
text-align: center!important;
margin-top: 50px;
}
button.display-button {
margin: 0;
top: 50%;
}
`}</style>
</>
)
}
export default Header | f9bb7d249b92b6657f1ee896a293ab3531af291d | [
"JavaScript"
] | 1 | JavaScript | Jsmithseo/Black-diamond | 8f7f3f265e70eb8a0553e3bb293ebb8f59fde9d0 | 74b01b46fe493f9057c65d4bd9a22868141e333a | |
refs/heads/master | <file_sep>using Microsoft.AspNet.Identity;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using UniPlatform.Models;
namespace UniPlatform.Areas.Admin.Controllers
{
[Authorize(Roles = "admin")]
public class DuyuruController : Controller
{
// GET: Admin/Duyuru
ApplicationDbContext db = new ApplicationDbContext();
public ActionResult Index()
{
return View(db.Duyurular.ToList());
}
public ActionResult DuyuruDetay(int id)
{
Duyurular istenenDuyuru = db.Duyurular.Find(id);
if (istenenDuyuru == null)
{
return HttpNotFound();
}
List<DuyuruYorumlar> yorumlar = db.DuyuruYorumlar.Where(x => x.DuyuruID == id).ToList();
ViewBag.Yorumlar = yorumlar;
string loogedUserID = User.Identity.GetUserId();
if (loogedUserID != null)
{
ViewBag.KullaniciAdi = db.Users.Find(loogedUserID).UserName;
}
return View(istenenDuyuru);
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Web;
namespace UniPlatform.Models
{
public class DuyuruYorumlar
{
[Key]
public int YorumID { get; set; }
[ForeignKey("Duyurular")]
public int DuyuruID { get; set; }
public virtual Duyurular Duyurular { get; set; }
public string YorumIcerik { get; set; }
}
}<file_sep>using System.Data.Entity;
using System.Data.Entity.ModelConfiguration.Conventions;
using System.Security.Claims;
using System.Threading.Tasks;
using Microsoft.AspNet.Identity;
using Microsoft.AspNet.Identity.EntityFramework;
namespace UniPlatform.Models
{
// You can add profile data for the user by adding more properties to your ApplicationUser class. For more information, please visit https://go.microsoft.com/fwlink/?LinkID=317594.
public class ApplicationUser : IdentityUser
{
public async Task<ClaimsIdentity> GenerateUserIdentityAsync(UserManager<ApplicationUser> manager)
{
// Note that the authenticationType must match the one defined in CookieAuthenticationOptions.AuthenticationType
var userIdentity = await manager.CreateIdentityAsync(this, DefaultAuthenticationTypes.ApplicationCookie);
// Add custom user claims here
return userIdentity;
}
}
public class ApplicationDbContext : IdentityDbContext<ApplicationUser>
{
public ApplicationDbContext()
: base("server=.; database=UniPlatformDb; uid=sa; pwd=123", throwIfV1Schema: false)
{
}
public DbSet<Ilanlar> Ilanlar { get; set; }
public DbSet<Duyurular> Duyurular { get; set; }
public DbSet<DuyuruYorumlar> DuyuruYorumlar { get; set; }
public DbSet<IlanYorumlar> IlanYorumlar { get; set; }
public static ApplicationDbContext Create()
{
return new ApplicationDbContext();
}
protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
modelBuilder.Conventions.Remove<PluralizingTableNameConvention>();
base.OnModelCreating(modelBuilder);
}
}
}<file_sep>using Microsoft.AspNet.Identity;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using UniPlatform.Models;
using PagedList;
namespace UniPlatform.Controllers
{
public class HomeController : Controller
{
ApplicationDbContext db = new ApplicationDbContext();
public ActionResult Index()
{
List<Duyurular> duyurular = db.Duyurular.Where(x => x.OnayliMi == true).Take(10).OrderByDescending(x => x.ID).ToList();
List<Ilanlar> ilanlar = db.Ilanlar.Where(x => x.OnayliMi == true).Take(10).OrderByDescending(x => x.ID).ToList();
ViewBag.duyurular = duyurular;
ViewBag.ilanlar = ilanlar;
return View();
}
[Authorize(Roles = "user")]
public ActionResult IlanEkle()
{
return View();
}
public ActionResult Contact()
{
ViewBag.Message = "Your contact page.";
return View();
}
[Authorize(Roles = "user")]
public ActionResult DuyuruEkle()
{
return View();
}
public ActionResult DuyuruDetay(int id)
{
Duyurular istenenDuyuru = db.Duyurular.Find(id);
if (istenenDuyuru == null)
{
return HttpNotFound();
}
List<DuyuruYorumlar> yorumlar = db.DuyuruYorumlar.Where(x => x.DuyuruID == id).ToList();
ViewBag.Yorumlar = yorumlar;
string loogedUserID = User.Identity.GetUserId();
if (loogedUserID != null)
{
ViewBag.KullaniciAdi = db.Users.Find(loogedUserID).UserName;
}
return View(istenenDuyuru);
}
public ActionResult ButunDuyurular(int? page)
{
List<Duyurular> duyurular = new List<Duyurular>();
//duyurular = db.Duyurular.ToList().ToPagedList(page ?? 1, 2);
duyurular = db.Duyurular.OrderByDescending(x => x.ID).ToList();
return View(duyurular);
}
public ActionResult IlanDetay(int id)
{
Ilanlar istenenIlan = db.Ilanlar.Find(id);
if (istenenIlan == null)
{
return HttpNotFound();
}
List<IlanYorumlar> yorumlar = db.IlanYorumlar.Where(x => x.IlanID == id).ToList();
ViewBag.Yorumlar = yorumlar;
string loogedUserID = User.Identity.GetUserId();
if (loogedUserID != null)
{
ViewBag.KullaniciAdi = db.Users.Find(loogedUserID).UserName;
}
return View(istenenIlan);
}
}
}<file_sep>namespace UniPlatform.Migrations
{
using Microsoft.AspNet.Identity;
using Microsoft.AspNet.Identity.EntityFramework;
using System;
using System.Data.Entity;
using System.Data.Entity.Migrations;
using System.Linq;
using UniPlatform.Models;
internal sealed class Configuration : DbMigrationsConfiguration<UniPlatform.Models.ApplicationDbContext>
{
public Configuration()
{
AutomaticMigrationsEnabled = true;
}
protected override void Seed(ApplicationDbContext db)
{
// This method will be called after migrating to the latest version.
// You can use the DbSet<T>.AddOrUpdate() helper extension method
// to avoid creating duplicate seed data.
var roleStore = new RoleStore<IdentityRole>(db);
var roleManager = new RoleManager<IdentityRole>(roleStore);
if (!roleManager.RoleExists("admin"))
{
roleManager.Create(new IdentityRole() { Name = "admin" });
}
if (!roleManager.RoleExists("user"))
{
roleManager.Create(new IdentityRole() { Name = "user" });
}
var userStore = new UserStore<ApplicationUser>(db);
var userManager = new UserManager<ApplicationUser>(userStore);
var adminUser = userManager.FindByName("<EMAIL>");
if (adminUser == null)
{
adminUser = new ApplicationUser()
{
UserName = "<EMAIL>",
Email = "<EMAIL>"
};
userManager.Create(adminUser, "Ankara1.");
}
if (!userManager.IsInRole(adminUser.Id, "admin"))
{
userManager.AddToRole(adminUser.Id, "admin");
}
}
}
}
<file_sep>using Microsoft.AspNet.Identity;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using System.Web.Http.Results;
using UniPlatform.Models;
namespace UniPlatform.Controllers
{
[Authorize(Roles = "user")]
public class DuyuruController : ApiController
{
ApplicationDbContext db = new ApplicationDbContext();
// GET: Duyuru
public HttpResponseMessage DuyuruEkle()
{
return Request.CreateResponse(HttpStatusCode.OK);
}
[HttpPost]
public HttpResponseMessage Save(Duyurular duyuru)
{
string loogedUserID = User.Identity.GetUserId();
string mesaj;
Duyurular d = new Duyurular();
d.DuyuranId = loogedUserID;
d.DuyuruBaslik = duyuru.DuyuruBaslik;
d.Icerik = duyuru.Icerik;
d.DuyuruTarihi = DateTime.Now;
d.OnayliMi = true;
db.Duyurular.Add(d);
db.SaveChanges();
int duyuru_id = d.ID;
mesaj = "Duyuru eklendi!";
return Request.CreateResponse(mesaj);
}
[HttpPost]
public HttpResponseMessage YorumEkle(DuyuruYorumlar gelen_yorum)
{
string loogedUserID = User.Identity.GetUserId();
string kullanici_adi = db.Users.Find(loogedUserID).UserName;
string mesaj;
DuyuruYorumlar yorum = new DuyuruYorumlar();
yorum.YorumIcerik = gelen_yorum.YorumIcerik + "<br> <strong>" + kullanici_adi + "</strong> -" + DateTime.Now ;
yorum.DuyuruID = gelen_yorum.DuyuruID;
db.DuyuruYorumlar.Add(yorum);
db.SaveChanges();
mesaj = "Duyuru eklendi!";
return Request.CreateResponse(mesaj);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Web;
namespace UniPlatform.Models
{
public class IlanYorumlar
{
[Key]
public int YorumID { get; set; }
[ForeignKey("Ilanlar")]
public int IlanID { get; set; }
public virtual Ilanlar Ilanlar { get; set; }
public string YorumIcerik { get; set; }
}
}<file_sep># UniPlatform
Live at https://uniplatform.azurewebsites.net/
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Web;
namespace UniPlatform.Models
{
public class Duyurular
{
[Key]
public int ID { get; set; }
[DataType("datetime2")]
public DateTime DuyuruTarihi { get; set; }
public string DuyuruBaslik { get; set; }
public string Icerik { get; set; }
public bool OnayliMi { get; set; }
[ForeignKey("ApplicationUser")]
public string DuyuranId { get; set; }
public virtual ApplicationUser ApplicationUser { get; set; }
public virtual ICollection<DuyuruYorumlar> DuyuruYorumlar { get; set; }
}
}<file_sep>using Microsoft.AspNet.Identity;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using UniPlatform.Models;
namespace UniPlatform.Controllers
{
[Authorize(Roles = "user")]
public class IlanController : ApiController
{
ApplicationDbContext db = new ApplicationDbContext();
public HttpResponseMessage IlanEkle()
{
return Request.CreateResponse(HttpStatusCode.OK);
}
[HttpPost]
public HttpResponseMessage Save(Ilanlar ilan)
{
string loogedUserID = User.Identity.GetUserId();
string mesaj;
Ilanlar i = new Ilanlar();
i.IlanSahibiId = loogedUserID;
i.IlanBaslik = ilan.IlanBaslik;
i.Icerik = ilan.Icerik;
i.IlanTarihi = DateTime.Now;
i.OnayliMi = true;
db.Ilanlar.Add(i);
db.SaveChanges();
int duyuru_id = i.ID;
mesaj = "Duyuru eklendi!";
return Request.CreateResponse(mesaj);
}
[HttpPost]
public HttpResponseMessage YorumEkle(IlanYorumlar gelen_yorum)
{
string loogedUserID = User.Identity.GetUserId();
string kullanici_adi = db.Users.Find(loogedUserID).UserName;
string mesaj;
IlanYorumlar yorum = new IlanYorumlar();
yorum.YorumIcerik = gelen_yorum.YorumIcerik + "<br> <strong>" + kullanici_adi + "</strong> -" + DateTime.Now;
yorum.IlanID = gelen_yorum.IlanID;
db.IlanYorumlar.Add(yorum);
db.SaveChanges();
mesaj = "Duyuru eklendi!";
return Request.CreateResponse(mesaj);
}
}
}
| 6e8efcfbe01cca77417928ad62ff741db4d14eeb | [
"Markdown",
"C#"
] | 10 | C# | fpenbegul/UniPlatform | ad840182ac499058ae913fb222c4d85c7aeef571 | 4b0f40f9d3b05c0209f05463e6bcfb99dcef9ec9 | |
refs/heads/master | <file_sep>#!/usr/bin/env Rscript
args = commandArgs(trailingOnly=TRUE)
# Rscript --vanilla sillyScript.R iris.txt out.txt
# test if there is at least one argument: if not, return an error
# if (length(args)==0) {
# stop("At least one argument must be supplied (input file).n", call.=FALSE)
# } else if (length(args)==1) {
# # default output file
# args[2] = "out.txt"
# }
print(paste("args[1] : ",args[1]))
# df = read.table(args[1], header=TRUE)
b = substr(args[1], 1, nchar(args[1])-4)
# your input (example):
# T G C A
# 0.2927 0.1933 0.2472 0.2668
# 0.0554 0.9208 0.0233 0.0005
# 0.2316 0.7674 0.0005 0.0005
# 0.0005 0.0005 0.9985 0.0005
# 0.9861 0.0036 0.0098 0.0005
# 0.0005 0.9985 0.0005 0.0005
# 0.9208 0.0741 0.0046 0.0005
# 0.8037 0.0461 0.0451 0.1052
# 0.5498 0.0813 0.2585 0.1104
# inverse the letter if you want to get the reverse complement
# system('python retrieve_listOfFrequencies_from_matrixFile.py')
library(Cairo)
library(motifStack)
# a <- read.table("ARF2_matrix1_MEME_1500FirstSeq.txt",sep="\t",header=TRUE)
# svg(filename="ARF2_MEME_Matrix1_600FirstSeq.svg",
# width=600,
# height=400,
# pointsize=12)
# # Cairo(600, 400,type="svg", bg="white",file="ARF2_MEME_Matrix1_600FirstSeq.svg")
# b <- apply(a, 2, rev) #add this line if you want to get the reverse complement
# pfm <- t(as.matrix(b))
# print(pfm)
# plotMotifLogo(pfm)
# dev.off()
# system('ls')
a <- read.table(args[1], sep="\t", header=TRUE)
a<-data.frame(a[,4],a[,3],a[,2],a[,1])
names(a)<-c("T","G","C","A")
Cairo(600, 400, type="png", bg="white", file = paste(b, ".png",sep=""))
# CairoSVG("complete_ER7_from_sequences_frequences.svg",12,6, bg="white")
#a <- apply(a, 2, rev)
pfm <- t(as.matrix(a))
plotMotifLogo(pfm)
dev.off()<file_sep># -*- coding: utf-8 -*-
import re
import os
F = open("Last_PWMs.txt","r")
matrix = F.read().replace("\r","\n") + "\n"
F.close()
TF = re.compile(r"(.*)\t\t")
TFName = TF.findall(matrix)
TFName = [s.replace('\t', '') for s in TFName]
#TFName = str(TFName)
print("TFName : ",TFName)
num = re.compile(r"([+-]?\d+[.,]\d+)")
Mdata = num.findall(matrix)
a = 0
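# Each matrix is assumed to occupy 40 values in the file: 4 nucleotide rows (A, C, G, T) of 10 positions,
# stored row by row; the loop below re-interleaves them so that each output line holds one position.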
for j in range(0, len(Mdata), 40) :
matOccurence_verticale = []
print("j : ",j)
print("Mdata[j:j+40] : ",Mdata[j:j+40])
subMdata = Mdata[j:j+40]
file = open(TFName[a] + ".txt","w")
file.write(str("A\tC\tG\tT\t\n"))
#print(11111111)
for i in range(0,len(subMdata)/4):
matOccurence_verticale.append(subMdata[i])
matOccurence_verticale.append(subMdata[i+len(subMdata)/4])
matOccurence_verticale.append(subMdata[i+(len(subMdata)/4)*2])
matOccurence_verticale.append(subMdata[i+(len(subMdata)/4)*3])
print(len(matOccurence_verticale)/4)
print("matOccurence_verticale : ",matOccurence_verticale)
for i in range(0 , len(matOccurence_verticale), 4) :
file.write(str(matOccurence_verticale[i]) + str("\t"))
file.write(str(matOccurence_verticale[i+1]) + str("\t"))
file.write(str(matOccurence_verticale[i+2]) + str("\t"))
file.write(str(matOccurence_verticale[i+3]) + str("\t"))
#if (i+1 == len(matOccurence_verticale)/4 or i+1 == 2*len(matOccurence_verticale)/4 or i+1 == 3*len(matOccurence_verticale)/4 ) :
file.write(str("\n"))
file.close()
os.system('Rscript --vanilla getMotifs.r '+ TFName[a] + '.txt')
#os.system('ls')
os.system('mkdir ' + TFName[a] + '_rep')
os.system('mv ' + TFName[a] + '.txt ' + TFName[a] + '.png' + ' /home/ab250579/Documents/' + TFName[a] + '_rep')
a = a + 1
#file.write(str("\n") )
| 565da877395286685764a4d914082a1fd41b5ebe | [
"Python",
"R"
] | 2 | R | adrbessy/generateMotifsFromMatrix | 1e2b78665601216d1178c8f081f610795fe68e2a | 9a93156dd8bf540769f884ff94e7b4173c628d11 | |
refs/heads/master | <repo_name>nmuench/many_domains_scrapper<file_sep>/scrape_many_domains.py
import pandas
import os
import time
from urllib.request import urlopen, Request
from tld import get_tld, get_fld
if not os.path.exists("domain_html"):
os.mkdir("domain_html")
df = pandas.read_csv("domains.csv")
for link in df.domain_name:
print(f"Downloading: {link}")
fileName = "domain_html/" + link
if not os.path.exists(fileName):
f = open(fileName, "wb")
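        # Try the "www." form of the domain first; if that request fails, fall back to the bare domain.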
try:
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:72.0) Gecko/20100101 Firefox/72.0'}
requestLink = get_fld("https://www." + link)
req = Request(url = "https://www." + requestLink, headers=headers)
response = urlopen(req)
f.write(response.read())
except:
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:72.0) Gecko/20100101 Firefox/72.0'}
requestLink = get_fld("https://" + link)
req = Request(url = "https://" + requestLink, headers=headers)
response = urlopen(req)
f.write(response.read())
f.close()
time.sleep(10)
| 45e280d57370fd3b535586d052f91f5b7da8cea2 | [
"Python"
] | 1 | Python | nmuench/many_domains_scrapper | c9c42b13bc37db5a3e0ac0e94d1e361e7130a20b | 6213095fffd77401b565ffb8d5d616ad99486825 | |
refs/heads/master | <file_sep>#! /usr/bin/env node
const program = require('commander');
const download = require('download-git-repo');
const chalk = require('chalk');
const ora = require('ora');
const fs = require('fs');
program
.version('4.0.3')
.option('-ir, initReact [name]', '初始化react项目')
.option('-iv, initVue [name]', '初始化vue项目')
.parse(process.argv);
if (program.initReact) {
const spinner = ora('正在从github下载模板文件').start();
download('direct:https://github.com/chengqilong/template.git', program.initReact,{ clone: true }, function (err) {
if(!err){
// here you can print some success information for the project
console.info(chalk.blueBright('下载成功'));
fs.readFile(`${process.cwd()}/${program.initReact}/package.json`, (err, data) => {
if (err) throw err;
let _data = JSON.parse(data.toString())
_data.name = program.initReact
_data.version = '1.0.0'
let str = JSON.stringify(_data, null, 4);
fs.writeFile(`${process.cwd()}/${program.initReact}/package.json`, str, function (err) {
if (err) throw err;
process.exit(0);// exit the process after success
})
});
}else{
// here you can print some failure information for the project
console.info(chalk.blueBright('下载失败'));
}
})
}
if (program.initVue) {
const spinner = ora('正在从github下载模板文件').start();
download('direct:https://github.com/chengqilong/templateVue.git', program.initVue,{ clone: true }, function (err) {
if(!err){
// here you can print some success information for the project
console.info(chalk.blueBright('下载成功'));
fs.readFile(`${process.cwd()}/${program.initVue}/package.json`, (err, data) => {
if (err) throw err;
let _data = JSON.parse(data.toString())
_data.name = program.initVue
_data.version = '1.0.0'
let str = JSON.stringify(_data, null, 4);
fs.writeFile(`${process.cwd()}/${program.initVue}/package.json`, str, function (err) {
if (err) throw err;
process.exit(0);// exit the process after success
})
});
}else{
// here you can print some failure information for the project
console.info(chalk.blueBright('下载失败'));
}
})
}
<file_sep>Run the following under dev/mpg:
# generate a Vue project template
mll initVue [name]
# generate a React project template
mll initReact [name]
| bd58a48651ee9ea83f84a0622ff1ce8900995ae6 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | chengqilong/mll-cli | adb04154ad09927f970989e623d5d8fd568125b9 | ac45d21f7b2e184459fd1c54afd1fad73a20520a | |
refs/heads/master | <repo_name>andremartov/1.ea-kodutoo<file_sep>/clock1.js
let time = new Date();
let clock = document.getElementById("clock");
let dateTime = document.getElementById("date_time");
let colorButton = document.getElementById("date_time");
console.log(time.getHours());
console.log(time.getMinutes());
console.log(time.getSeconds());
console.log(time.getFullYear());
console.log(time.getDay());
console.log(time.getMonth());
function hexClock(){
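// Refresh the clock text and the date line; this function is scheduled to run once per second below.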
let time = new Date();
let hours = time.getHours().toString();
let minutes = time.getMinutes().toString();
let seconds = time.getSeconds().toString();
let year = time.getFullYear();
let month = time.getMonth();
let day = time.getDay();
const months = new Array('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December');
const days = new Array('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday');
if(hours.length < 2){
hours = "0" + hours;
}
if(minutes.length < 2){
minutes = "0" + minutes;
}
if(seconds.length < 2){
seconds = "0" + seconds;
}
let clockStr = hours + " : " + minutes + " : " + seconds;
clock.textContent = clockStr;
let dateTimeStr = '' + days[day] + ' ' + months[month] + ' ' + year;
dateTime.textContent = dateTimeStr;
}
hexClock();
setInterval(hexClock, 1000);
document.body.style.backgroundImage = "url(https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSBts4IIj-YsBaGiQgIOX56tneVU4nkRPVj6-0-rjtpy7IKsOUt3A)";
function fontSizeBigger(){
element = document.getElementById("clock");
style = window.getComputedStyle(element, null).getPropertyValue("font-size");
fontSize = parseFloat(style);
element.style.fontSize = (fontSize + 5) + "px";
}
function fontSizeSmaller(){
element = document.getElementById("clock");
style = window.getComputedStyle(element, null).getPropertyValue("font-size");
fontSize = parseFloat(style);
element.style.fontSize = (fontSize - 5) + "px";
}
function changeColor(){
element = document.getElementById("date_time");
color = "#"+((1<<24)*Math.random()|0).toString(16);
element.style.color = color;
document.getElementById("colorChange").style.color = color;
} | 1f906d51a8c5cc0cf589572d052e92f0ccbd7a64 | [
"JavaScript"
] | 1 | JavaScript | andremartov/1.ea-kodutoo | d84fdb9203e8fb88b653f17bb99c1eb6f5199e5e | a8ecb2024cfe1906f66236e173f94d89acdabcb4 | |
refs/heads/master | <file_sep>using senai.Filmes.WebApi.Domains;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace senai.Filmes.WebApi.Interfaces
{
/// <summary>
/// Interface responsible for the Genero repository
/// </summary>
interface IGeneroRepository
{
/// <summary>
/// Lists all genres
/// </summary>
/// <returns>Returns a list of genres</returns>
List<GeneroDomain> Listar();
/// <summary>
/// Registers a new genre
/// </summary>
/// <param name="genero">Genre object to be registered</param>
void Cadastrar(GeneroDomain genero);
/// <summary>
/// Deletes a genre
/// </summary>
/// <param name="id"></param>
void Deletar(int id);
/// <summary>
/// Updates an existing genre
/// </summary>
/// <param name="genero">Genre object to be updated</param>
void AtualizarIdCorpo(GeneroDomain genero);
/// <summary>
/// Updates an existing genre
/// </summary>
/// <param name="id">ID of the genre to be updated</param>
/// <param name="genero"></param>
void AtualizarIdUrl(int id, GeneroDomain genero);
}
}
<file_sep># 2_Sprint
Second Sprint
| acbb6aa40c90ed41d236f25137f90d7aa2ae37ab | [
"Markdown",
"C#"
] | 2 | C# | Leonardorichylo/2_Sprint | 1549171798bf984aade22b0d1725524351c64c5d | 6eadddade639b049b9af790c92403ada7b502250 | |
refs/heads/master | <repo_name>pxfnc/user-thread<file_sep>/README.md
# user-thread
スレッド作ってみたり、スレッドの中でスレッド作ってみたりとかいろいろやってみた
## run
```
cargo run --release
```<file_sep>/src/main.rs
use std::mem::replace;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;
use std::thread::JoinHandle;
use std::time::Duration;
fn main() {
println!("start");
let t3: Arc<Mutex<Option<JoinHandle<()>>>> = Arc::new(Mutex::new(None));
let t3_for_t1 = t3.clone();
let t1 = thread::spawn(move || {
println!("t1 start");
thread::sleep(Duration::from_secs(2));
{
t3_for_t1
.lock()
.map(|mut v| {
let t3 = thread::spawn(|| {
println!("t3 start");
thread::sleep(Duration::from_secs(4));
println!("t3 done");
});
*v = Some(t3);
})
.unwrap_or_default();
};
thread::sleep(Duration::from_secs(6));
println!("t1 done");
});
let t2 = thread::spawn(|| {
println!("t2 start");
thread::sleep(Duration::from_secs(4));
// panic!("t2 panic");
println!("t2 done");
});
t1.join().unwrap_or_default();
t2.join().unwrap_or_default();
t3.lock()
.ok()
.and_then(|mut t3_| replace(&mut *t3_, None).and_then(|t3| t3.join().ok()))
.unwrap_or_default();
}
| f3595ce1bd1318ae1589db0985035a37fe5428c6 | [
"Markdown",
"Rust"
] | 2 | Markdown | pxfnc/user-thread | 88bf7fd8f41b4dc8659fb0a179d6224a33676865 | dd1e516be24b8f7008d692d9b13c3e7b3fa95b84 | |
refs/heads/master | <repo_name>Shona2501/OrangeHRM<file_sep>/basicPrograms/FirstProgram.java
package basicPrograms;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
public class FirstProgram {
// Note: ctrl+shift+o: To import many classes at same time
public static void main(String[] args) throws Exception
{
// Step1: Select Browser
// System.setProperty("Key","value");
// system: class and setProperty("key","value"): Method
// Key: BrowserName
// Value: BrowserPath
System.setProperty("webdriver.chrome.driver", "C:\\Users\\Sonali\\Desktop\\Automation\\BrowserExtenssion\\chromedriver.exe"); // "name of driver--> key", "value --> path ---> "
// Step2: Create an object of WebDriver class with ChromeDriver Constructor
WebDriver driver= new ChromeDriver();
//wait
Thread.sleep(5000);
// maximize screen
driver.manage().window().maximize();
Thread.sleep(2000);
// open or access url
driver.get("https://www.google.com");
// close browser
driver.close();
}
}
<file_sep>/basicPrograms/DropDown_FA.java
package basicPrograms;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.support.ui.Select;
public class DropDown_FA {
public static void main(String[] args) throws Exception {
// TODO Auto-generated method stub
System.setProperty("webdriver.chrome.driver", "C:\\Users\\Sonali\\Desktop\\Automation\\BrowserExtenssion\\chromedriver.exe");
WebDriver driver = new ChromeDriver();
Thread.sleep(5000);
driver.manage().window().maximize();
//URL
driver.get("http://frontaccounting.squadinfotech.in/");
System.out.println("\n\n LoginPage Title:"+driver.getTitle()); // display login page title name
Thread.sleep(2000);
//user-name
driver.findElement(By.xpath("//*[@id=\"_page_body\"]/form/center[1]/table/tbody/tr[3]/td[2]/input")).sendKeys("frontuser1");
//Password
driver.findElement(By.xpath("//*[@id=\"_page_body\"]/form/center[1]/table/tbody/tr[4]/td[2]/input")).sendKeys("frontuser1");
Thread.sleep(2000);
//Dropdown: TagName should be <select>
// Step1: Select DropDwon --> Create object of select class
Select s= new Select(driver.findElement(By.xpath("//*[@id=\"_page_body\"]/form/center[1]/table/tbody/tr[5]/td[2]/select"))); // dropdown value
//Step2: Select Content of the dropdown
//s.selectByIndex(2);
//s.selectByvalue("2");
s.selectByVisibleText("Squad_MT_OL_09");
Thread.sleep(2000);
// Login button
driver.findElement(By.xpath("//*[@id=\"_page_body\"]/form/center[2]/input")).click(); // which dropdown you have to select that dropdown value
System.out.println("\n\n HomePage Title:"+driver.getTitle()); // display home page title name
//Logout button
driver.findElement(By.xpath("/html/body/table[1]/tbody/tr/td/table[1]/tbody/tr/td/table/tbody/tr/td/table[2]/tbody/tr[1]/td[3]/a[5]")).click(); //Example of absolute xPath
Thread.sleep(2000);
System.out.println("\n\n LogoutPage Title:"+driver.getTitle()); // display logout page title name
//close browser
driver.close();
}
}
<file_sep>/keyword_Driven/ReadExcelClass.java
package keyword_Driven;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.openqa.selenium.WebDriver;
public class ReadExcelClass {
public void readExcel(WebDriver driver) throws Exception
{
FileInputStream file= new FileInputStream("C:\\Users\\Sonali\\Desktop\\Automation\\POI.xlsx");
XSSFWorkbook w= new XSSFWorkbook(file);
XSSFSheet s= w.getSheet("KeywordDriven");
int rowSize=s.getLastRowNum();
System.out.println("No of Keywords: "+rowSize);
OperationalClass o= new OperationalClass();
for (int i=1; i<=rowSize; i++)
{
// Store keywords
String key = s.getRow(i).getCell(0).getStringCellValue();
System.out.println(key);
if(key.equals("MaximizeWindow"))
{
o.maximize(driver);
}
else if(key.equals("OpenApplication"))
{
o.url(driver);
}
else if(key.equals("EnterUsername"))
{
o.enterUsername(driver, "Admin");
}
else if(key.equals("EnterPassword"))
{
o.enterPassword(driver, "<PASSWORD>");
}
else if(key.equals("ClickOnLoginButton"))
{
o.clickOnLogin(driver);
}
else if(key.equals("ClickOnWelcomeAdmin"))
{
o.clickOnWelcomeAdmin(driver);
Thread.sleep(2000);
}
else if(key.equals("ClickOnLogoutButton"))
{
o.clickOnLogout(driver);
}
else if(key.equals("CloseBrowser"))
{
o.closeBrowser(driver);
}
}
}
}
<file_sep>/testNG/A.java
package testNG;
import org.testng.annotations.Test;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.AfterTest;
public class A {
@BeforeTest
public void beforeTest()
{
System.out.println("Client-A");
}
@Test
public void parallel_A()
{
System.out.println("Developer-A");
}
@AfterTest
public void afterTest()
{
System.out.println("Tester-A");
}
}
<file_sep>/hybridDemo/ReadExcelClass.java
package hybridDemo;
public class ReadExcelClass {
}
<file_sep>/testNG/DataDriven_OrangeHRM.java
package testNG;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import pom_PageObjectModel.FA_POM;
import pom_PageObjectModel.OrangeHRM_POM;
public class DataDriven_OrangeHRM {
WebDriver driver;
@BeforeTest
public void beforeTest() throws Exception
{
System.setProperty("webdriver.chrome.driver", "C:\\Users\\Sonali\\Desktop\\Automation\\BrowserExtenssion\\chromedriver.exe");
driver=new ChromeDriver();
Thread.sleep(5000);
driver.manage().window().maximize();
Thread.sleep(5000);
}
@Test(dataProvider = "dp", priority= 2)
public void fA(String username, String password) throws Exception
{
FA_POM f= new FA_POM();
f.url(driver);
Thread.sleep(2000);
f.enterUsername(driver, username);
Thread.sleep(2000);
f.enterPassword(driver, password);
Thread.sleep(2000);
f.selectCompany(driver, 0);
f.clickOnLogin(driver);
Thread.sleep(2000);
f.clickOnLogout(driver);
}
@Test(dataProvider = "dp", priority= 1)
public void orangeHRM(String username, String password) throws Exception
{
OrangeHRM_POM o= new OrangeHRM_POM();
o.url(driver);
Thread.sleep(2000);
o.enterUsername(driver, username);
Thread.sleep(2000);
o.enterPassword(driver, password);
Thread.sleep(2000);
o.clickOnLogin(driver);
Thread.sleep(2000);
o.clickOnWelcomeAdmin(driver);
Thread.sleep(2000);
o.clickOnLogout(driver);
}
//Create your own DataProvider Annotation
// @DataProvider
// public Object[][] dp1()
// {
// return new Object[][] {
//
// };
// }
@DataProvider
public Object[][] dp() {
return new Object[][] {
new Object[] { "Admin", "admin123" },
new Object[] { "frontuser1", "frontuser1" },
new Object[] { "Admin", "admin@123" },
new Object[] { "sys", "system" },
new Object[] { "Admin", "admin123" },
new Object[] { "frontuser1", "frontuser1" },
};
}
@AfterTest
public void afterTest() {
driver.close();
}
}
<file_sep>/basicPrograms/LoginUsing_Xpath_DialogBox.java
package basicPrograms;
import javax.swing.JOptionPane;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
public class LoginUsing_Xpath_DialogBox {
//Note: X-path:XML path: Extensible Markup Language
//Types of X-path:
//1) Absolute XPath: It starts with ROOT NODE and its prefix is '/'
//2) Relative XPath: It shows with CURRENT NODE and its prefix is '//'
//Syntax: //TagName[@PropertyName= 'PropertyValue']
//For Example: //input[@type='Submit']
public static void main(String[] args) throws Exception {
System.setProperty("webdriver.chrome.driver", "C:\\Users\\Sonali\\Desktop\\Automation\\BrowserExtenssion\\chromedriver.exe");
WebDriver driver = new ChromeDriver();
Thread.sleep(2000);
driver.manage().window().maximize();
driver.get("https://opensource-demo.orangehrmlive.com/index.php/auth/login");
// DataType variable = class.method("message");
String unm=JOptionPane.showInputDialog("Enter Username");
driver.findElement(By.xpath("//*[@id=\"txtUsername\"]")).sendKeys(unm);
String pwd=JOptionPane.showInputDialog("Enter Password");
driver.findElement(By.xpath("//*[@id=\"txtPassword\"]")).sendKeys(pwd);
driver.findElement(By.xpath("//*[@id=\"btnLogin\"]")).click();
driver.findElement(By.xpath("//*[@id=\"welcome\"]")).click();
Thread.sleep(2000);
driver.findElement(By.xpath("//*[@id=\"welcome-menu\"]/ul/li[2]/a")).click();
driver.close();
}
}
<file_sep>/poi/WriteData.java
package poi;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import org.apache.poi.xssf.usermodel.XSSFRow;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
public class WriteData {
public static void main(String[] args) throws Exception {
// TODO Auto-generated method stub
FileInputStream file= new FileInputStream("C:\\Users\\Sonali\\Desktop\\Automation\\POI.xlsx");
XSSFWorkbook w= new XSSFWorkbook(file);
//XSSFSheet s= w.createSheet("WriteData"); // use createSheet the first time, when the sheet does not exist yet
XSSFSheet s= w.getSheet("WriteData"); // once the sheet exists, use getSheet to open and edit it
XSSFRow r1= s.createRow(0);
r1.createCell(0).setCellValue("Sonali");
r1.createCell(1).setCellValue("Ingle");
r1.createCell(2).setCellValue("Chembur");
XSSFRow r2= s.createRow(2);
r2.createCell(0).setCellValue("Name");
r2.createCell(1).setCellValue("Location");
r2.createCell(2).setCellValue("Stream");
r2.createCell(3).setCellValue("Year");
r2.createCell(4).setCellValue("marks");
XSSFRow r3= s.createRow(3);
r3.createCell(0).setCellValue("Anna");
r3.createCell(1).setCellValue("USA");
r3.createCell(2).setCellValue("Comps");
r3.createCell(3).setCellValue("2018");
r3.createCell(4).setCellValue("89");
FileOutputStream out=new FileOutputStream("C:\\Users\\Sonali\\Desktop\\Automation\\POI.xlsx");
w.write(out);
}
}
<file_sep>/dataBaseTesting/DBTesting.java
package dataBaseTesting;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
public class DBTesting {
// Connection -> Create connection between DB and Selenium
// Driver Manager ----> Provide Connection
//Statement ----> Execute SQL Commands
// ResultSet -> Final Output -----> SELECT * FROM TABLENAME
public static void main(String[] args) throws SQLException {
// Step1: Create object of connection class
Connection c = DriverManager.getConnection("jdbc:mysql://db4free.net:3306/testing_squad", "sonali_admin", "sonali123");
//Step 2: Create object of Statement class
Statement s= c.createStatement();
// INSERT DATA ---> INSERT(DML)
//s.execute("INSERT INTO Testing VALUES (101, 'sonali')");
// s.execute("INSERT INTO Testing VALUES (102, 'Alice')");
// s.execute("INSERT INTO Testing VALUES (103, 'john')");
// s.execute("INSERT INTO Testing VALUES (104, 'liaz')");
// s.execute("INSERT INTO Testing VALUES (105, 'lee')");
//ADD COLUMN ---> ALTER (DDL)
// s.execute("ALTER TABLE Testing ADD Location CHARACTER (100)");
//s.execute("ALTER TABLE Testing ADD Marks VARCHAR (10)");
//UPDATE LOCATION -----> UPDATE (DML)
// s.execute("UPDATE Testing SET Location = 'India' WHERE ID=101");
// s.execute("UPDATE Testing SET Location = 'USA' WHERE ID=102");
// s.execute("UPDATE Testing SET Location = 'UK' WHERE ID=103");
// s.execute("UPDATE Testing SET Location = 'Paris' WHERE ID=104");
// s.execute("UPDATE Testing SET Location = 'America' WHERE ID=105");
// CHANGE COLUMN NAME ---> ALTER(DDL)
// s.execute("ALTER TABLE Testing CHANGE Location ADDRESS CHARACTER(50)");
//DELETE COLUMN -------> ALTER (DDL)
// s.execute("ALTER TABLE Testing DROP COLUMN Marks");
// DELETE ROW ----> DELETE (DML)
//s.execute("DELETE FROM Testing WHERE ID=102");
//RENAME TABLE NAME------> RENAME (DDL)
//s.execute("RENAME TABLE TESTING TO TESTING_SONALI");
// TRUNCATE (DDL) ----> deletes all rows but keeps the table structure
// s.execute("TRUNCATE TABLE TESTING_SONALI");
// DELETE STRUCTURE OF TABLE ----> DROP(DDL)
//s.execute("DROP TABLE TESTING");
// Step 3: Create object of ResultSet
ResultSet rs=s.executeQuery("SELECT * FROM TESTING_SONALI ORDER BY ID");
// Step 4: Loop
while(rs.next())
{
int id=rs.getInt(1);
String name = rs.getString(2);
//String loc= rs.getString(3);
System.out.println(id + "\t\t"+ name );
}
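// Good-practice sketch (an addition, not in the original): release the JDBC resources once the
// result has been read.
// rs.close();
// s.close();
// c.close();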
}
}
<file_sep>/basicPrograms/MouseHover_OrangeHRM.java
package basicPrograms;
import java.util.List;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.interactions.Actions;
public class MouseHover_OrangeHRM {
public static void main(String[] args) throws Exception {
// Launch browser
System.setProperty("webdriver.chrome.driver", "C:\\Users\\Sonali\\Desktop\\Automation\\BrowserExtenssion\\chromedriver.exe");
WebDriver driver= new ChromeDriver();
Thread.sleep(5000);
driver.manage().window().maximize();
// URL
driver.get("https://opensource-demo.orangehrmlive.com/index.php/auth/login");
// USername
driver.findElement(By.xpath("//*[@id=\"txtUsername\"]")).sendKeys("Admin");
//Password
driver.findElement(By.xpath("//*[@id=\"txtPassword\"]")).sendKeys("<PASSWORD>");
//Login
driver.findElement(By.xpath("//*[@id=\"btnLogin\"]")).click();
//********* Mouse Hover***********
//Step 1: Create object of Actions class
Actions a = new Actions(driver);
//Step 2: Create list with WebElement
List<WebElement> ls=driver.findElements(By.xpath("//*[@id=\"mainMenuFirstLevelUnorderedList\"]/li"));
//Step 3: Store the number of modules in a variable
int size= ls.size();
System.out.println("Number of Modules: "+size);
// Step 4: Loop through each module
for(int i=1; i<=size; i++)
{
// wait
Thread.sleep(2000);
// Display Module name
System.out.println(driver.findElement(By.xpath("//*[@id=\"mainMenuFirstLevelUnorderedList\"]/li["+i+"]")).getText());
// Hover over the module and click it
a.moveToElement(driver.findElement(By.xpath("//*[@id=\"mainMenuFirstLevelUnorderedList\"]/li["+i+"]"))).click().perform();
}
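// Note (illustrative addition): for a pure hover without clicking, the same Actions object can be
// chained without click(), e.g. for a hypothetical WebElement 'element':
// a.moveToElement(element).perform();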
}
}
<file_sep>/testNG/FA_ParallelTesting.java
package testNG;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import pom_PageObjectModel.FA_POM;
public class FA_ParallelTesting {
WebDriver driver;
@BeforeTest
public void beforeTest()
{
System.setProperty("webdriver.chrome.driver", "C:\\Users\\Sonali\\Desktop\\Automation\\BrowserExtenssion\\chromedriver.exe");
driver= new ChromeDriver();
driver.manage().window().maximize();
}
@Test
public void fA() throws Exception
{
FA_POM f= new FA_POM();
f.url(driver);
Thread.sleep(2000);
f.enterUsername(driver, "Admin");
f.enterPassword(driver, "<PASSWORD>");
f.selectCompany(driver, 0);
f.clickOnLogin(driver);
}
@AfterTest
public void afterTest()
{
driver.close();
}
}
<file_sep>/testNG/OrangeHRM_ParallelTesting.java
package testNG;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import pom_PageObjectModel.OrangeHRM_POM;
public class OrangeHRM_ParallelTesting {
WebDriver driver;
@BeforeTest
public void beforeTest() throws Exception
{
System.setProperty("webdriver.chrome.driver", "C:\\Users\\Sonali\\Desktop\\Automation\\BrowserExtenssion\\chromedriver.exe");
driver= new ChromeDriver();
driver.manage().window().maximize();
Thread.sleep(2000);
}
@Test
public void oRangeHRM() throws Exception
{
OrangeHRM_POM o= new OrangeHRM_POM();
o.url(driver);
Thread.sleep(2000);
o.enterUsername(driver, "Admin");
Thread.sleep(2000);
o.enterPassword(driver, "<PASSWORD>");
Thread.sleep(2000);
o.clickOnLogin(driver);
Thread.sleep(2000);
o.clickOnWelcomeAdmin(driver);
Thread.sleep(2000);
o.clickOnLogout(driver);
}
@AfterTest
public void afterTest()
{
driver.close();
}
}
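// Sketch of a TestNG suite file that could run the two *_ParallelTesting classes in parallel.
// The suite name, parallel mode and thread-count below are assumptions, not taken from this repository:
// <suite name="ParallelSuite" parallel="tests" thread-count="2">
// <test name="OrangeHRM"> <classes> <class name="testNG.OrangeHRM_ParallelTesting"/> </classes> </test>
// <test name="FA"> <classes> <class name="testNG.FA_ParallelTesting"/> </classes> </test>
// </suite>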
<file_sep>/basicPrograms/MultipleDropDown_BlazeDemo.java
package basicPrograms;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.support.ui.Select;
public class MultipleDropDown_BlazeDemo {
public static void main(String[] args) throws Exception {
// TODO Auto-generated method stub
System.setProperty("webdriver.chrome.driver", "C:\\Users\\Sonali\\Desktop\\Automation\\BrowserExtenssion\\chromedriver.exe");
WebDriver driver = new ChromeDriver();
Select s; // Declared once so it can be reused for both dropdowns
Thread.sleep(5000);
driver.manage().window().maximize();
//URL
driver.get("https://blazedemo.com/");
Thread.sleep(2000);
//DropDown 1:
s = new Select(driver.findElement(By.xpath("/html/body/div[3]/form/select[1]")));
s.selectByIndex(5);
Thread.sleep(2000);
//DropDown 2:
s= new Select(driver.findElement(By.xpath("/html/body/div[3]/form/select[2]")));
s.selectByValue("New York");
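// Note (illustrative): Select also exposes selectByVisibleText, e.g.
// s.selectByVisibleText("New York");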
Thread.sleep(2000);
//Find Flight Button
driver.findElement(By.xpath("/html/body/div[3]/form/div/input")).click();
Thread.sleep(2000);
// CLose Browser
driver.close();
}
}
| 2529b63a0d3a6d5f599d9a664ca46d0e818f8b4a | [
"Java"
] | 13 | Java | Shona2501/OrangeHRM | 3c8a0021a99a826f6852778e6a7ba844b97c25f4 | a8972db804c6bf920580efedf5385694c1de1531 |